diff --git a/src/libraries/vk_video/vulkan_video_codec_av1std.h b/src/libraries/vk_video/vulkan_video_codec_av1std.h new file mode 100644 index 000000000..347e0d2ae --- /dev/null +++ b/src/libraries/vk_video/vulkan_video_codec_av1std.h @@ -0,0 +1,394 @@ +#ifndef VULKAN_VIDEO_CODEC_AV1STD_H_ +#define VULKAN_VIDEO_CODEC_AV1STD_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_av1std is a preprocessor guard. Do not pass it to API calls. +#define vulkan_video_codec_av1std 1 +#include "vulkan_video_codecs_common.h" +#define STD_VIDEO_AV1_NUM_REF_FRAMES 8 +#define STD_VIDEO_AV1_REFS_PER_FRAME 7 +#define STD_VIDEO_AV1_TOTAL_REFS_PER_FRAME 8 +#define STD_VIDEO_AV1_MAX_TILE_COLS 64 +#define STD_VIDEO_AV1_MAX_TILE_ROWS 64 +#define STD_VIDEO_AV1_MAX_SEGMENTS 8 +#define STD_VIDEO_AV1_SEG_LVL_MAX 8 +#define STD_VIDEO_AV1_PRIMARY_REF_NONE 7 +#define STD_VIDEO_AV1_SELECT_INTEGER_MV 2 +#define STD_VIDEO_AV1_SELECT_SCREEN_CONTENT_TOOLS 2 +#define STD_VIDEO_AV1_SKIP_MODE_FRAMES 2 +#define STD_VIDEO_AV1_MAX_LOOP_FILTER_STRENGTHS 4 +#define STD_VIDEO_AV1_LOOP_FILTER_ADJUSTMENTS 2 +#define STD_VIDEO_AV1_MAX_CDEF_FILTER_STRENGTHS 8 +#define STD_VIDEO_AV1_MAX_NUM_PLANES 3 +#define STD_VIDEO_AV1_GLOBAL_MOTION_PARAMS 6 +#define STD_VIDEO_AV1_MAX_NUM_Y_POINTS 14 +#define STD_VIDEO_AV1_MAX_NUM_CB_POINTS 10 +#define STD_VIDEO_AV1_MAX_NUM_CR_POINTS 10 +#define STD_VIDEO_AV1_MAX_NUM_POS_LUMA 24 +#define STD_VIDEO_AV1_MAX_NUM_POS_CHROMA 25 + +typedef enum StdVideoAV1Profile { + STD_VIDEO_AV1_PROFILE_MAIN = 0, + STD_VIDEO_AV1_PROFILE_HIGH = 1, + STD_VIDEO_AV1_PROFILE_PROFESSIONAL = 2, + STD_VIDEO_AV1_PROFILE_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_PROFILE_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1Profile; + +typedef enum StdVideoAV1Level { + STD_VIDEO_AV1_LEVEL_2_0 = 0, + STD_VIDEO_AV1_LEVEL_2_1 = 1, + STD_VIDEO_AV1_LEVEL_2_2 = 2, + STD_VIDEO_AV1_LEVEL_2_3 = 3, + STD_VIDEO_AV1_LEVEL_3_0 = 4, + STD_VIDEO_AV1_LEVEL_3_1 = 5, + STD_VIDEO_AV1_LEVEL_3_2 = 6, + STD_VIDEO_AV1_LEVEL_3_3 = 7, + STD_VIDEO_AV1_LEVEL_4_0 = 8, + STD_VIDEO_AV1_LEVEL_4_1 = 9, + STD_VIDEO_AV1_LEVEL_4_2 = 10, + STD_VIDEO_AV1_LEVEL_4_3 = 11, + STD_VIDEO_AV1_LEVEL_5_0 = 12, + STD_VIDEO_AV1_LEVEL_5_1 = 13, + STD_VIDEO_AV1_LEVEL_5_2 = 14, + STD_VIDEO_AV1_LEVEL_5_3 = 15, + STD_VIDEO_AV1_LEVEL_6_0 = 16, + STD_VIDEO_AV1_LEVEL_6_1 = 17, + STD_VIDEO_AV1_LEVEL_6_2 = 18, + STD_VIDEO_AV1_LEVEL_6_3 = 19, + STD_VIDEO_AV1_LEVEL_7_0 = 20, + STD_VIDEO_AV1_LEVEL_7_1 = 21, + STD_VIDEO_AV1_LEVEL_7_2 = 22, + STD_VIDEO_AV1_LEVEL_7_3 = 23, + STD_VIDEO_AV1_LEVEL_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_LEVEL_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1Level; + +typedef enum StdVideoAV1FrameType { + STD_VIDEO_AV1_FRAME_TYPE_KEY = 0, + STD_VIDEO_AV1_FRAME_TYPE_INTER = 1, + STD_VIDEO_AV1_FRAME_TYPE_INTRA_ONLY = 2, + STD_VIDEO_AV1_FRAME_TYPE_SWITCH = 3, + STD_VIDEO_AV1_FRAME_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_FRAME_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1FrameType; + +typedef enum StdVideoAV1ReferenceName { + STD_VIDEO_AV1_REFERENCE_NAME_INTRA_FRAME = 0, + STD_VIDEO_AV1_REFERENCE_NAME_LAST_FRAME = 1, + STD_VIDEO_AV1_REFERENCE_NAME_LAST2_FRAME = 2, + STD_VIDEO_AV1_REFERENCE_NAME_LAST3_FRAME = 3, + STD_VIDEO_AV1_REFERENCE_NAME_GOLDEN_FRAME = 4, + STD_VIDEO_AV1_REFERENCE_NAME_BWDREF_FRAME = 5, + STD_VIDEO_AV1_REFERENCE_NAME_ALTREF2_FRAME = 6, + 
STD_VIDEO_AV1_REFERENCE_NAME_ALTREF_FRAME = 7, + STD_VIDEO_AV1_REFERENCE_NAME_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_REFERENCE_NAME_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1ReferenceName; + +typedef enum StdVideoAV1InterpolationFilter { + STD_VIDEO_AV1_INTERPOLATION_FILTER_EIGHTTAP = 0, + STD_VIDEO_AV1_INTERPOLATION_FILTER_EIGHTTAP_SMOOTH = 1, + STD_VIDEO_AV1_INTERPOLATION_FILTER_EIGHTTAP_SHARP = 2, + STD_VIDEO_AV1_INTERPOLATION_FILTER_BILINEAR = 3, + STD_VIDEO_AV1_INTERPOLATION_FILTER_SWITCHABLE = 4, + STD_VIDEO_AV1_INTERPOLATION_FILTER_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_INTERPOLATION_FILTER_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1InterpolationFilter; + +typedef enum StdVideoAV1TxMode { + STD_VIDEO_AV1_TX_MODE_ONLY_4X4 = 0, + STD_VIDEO_AV1_TX_MODE_LARGEST = 1, + STD_VIDEO_AV1_TX_MODE_SELECT = 2, + STD_VIDEO_AV1_TX_MODE_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_TX_MODE_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1TxMode; + +typedef enum StdVideoAV1FrameRestorationType { + STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_NONE = 0, + STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_WIENER = 1, + STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_SGRPROJ = 2, + STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_SWITCHABLE = 3, + STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1FrameRestorationType; + +typedef enum StdVideoAV1ColorPrimaries { + STD_VIDEO_AV1_COLOR_PRIMARIES_BT_709 = 1, + STD_VIDEO_AV1_COLOR_PRIMARIES_UNSPECIFIED = 2, + STD_VIDEO_AV1_COLOR_PRIMARIES_BT_470_M = 4, + STD_VIDEO_AV1_COLOR_PRIMARIES_BT_470_B_G = 5, + STD_VIDEO_AV1_COLOR_PRIMARIES_BT_601 = 6, + STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_240 = 7, + STD_VIDEO_AV1_COLOR_PRIMARIES_GENERIC_FILM = 8, + STD_VIDEO_AV1_COLOR_PRIMARIES_BT_2020 = 9, + STD_VIDEO_AV1_COLOR_PRIMARIES_XYZ = 10, + STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_431 = 11, + STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_432 = 12, + STD_VIDEO_AV1_COLOR_PRIMARIES_EBU_3213 = 22, + STD_VIDEO_AV1_COLOR_PRIMARIES_INVALID = 0x7FFFFFFF, + // STD_VIDEO_AV1_COLOR_PRIMARIES_BT_UNSPECIFIED is a deprecated alias + STD_VIDEO_AV1_COLOR_PRIMARIES_BT_UNSPECIFIED = STD_VIDEO_AV1_COLOR_PRIMARIES_UNSPECIFIED, + STD_VIDEO_AV1_COLOR_PRIMARIES_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1ColorPrimaries; + +typedef enum StdVideoAV1TransferCharacteristics { + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_RESERVED_0 = 0, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_709 = 1, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_UNSPECIFIED = 2, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_RESERVED_3 = 3, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_470_M = 4, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_470_B_G = 5, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_601 = 6, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SMPTE_240 = 7, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_LINEAR = 8, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_LOG_100 = 9, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_LOG_100_SQRT10 = 10, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_IEC_61966 = 11, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_1361 = 12, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SRGB = 13, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_2020_10_BIT = 14, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_2020_12_BIT = 15, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SMPTE_2084 = 16, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SMPTE_428 = 17, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_HLG = 18, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1TransferCharacteristics; + +typedef enum 
StdVideoAV1MatrixCoefficients { + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_IDENTITY = 0, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_709 = 1, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_UNSPECIFIED = 2, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_RESERVED_3 = 3, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_FCC = 4, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_470_B_G = 5, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_601 = 6, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_SMPTE_240 = 7, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_SMPTE_YCGCO = 8, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_2020_NCL = 9, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_2020_CL = 10, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_SMPTE_2085 = 11, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_CHROMAT_NCL = 12, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_CHROMAT_CL = 13, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_ICTCP = 14, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1MatrixCoefficients; + +typedef enum StdVideoAV1ChromaSamplePosition { + STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_UNKNOWN = 0, + STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_VERTICAL = 1, + STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_COLOCATED = 2, + STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_RESERVED = 3, + STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1ChromaSamplePosition; +typedef struct StdVideoAV1ColorConfigFlags { + uint32_t mono_chrome : 1; + uint32_t color_range : 1; + uint32_t separate_uv_delta_q : 1; + uint32_t color_description_present_flag : 1; + uint32_t reserved : 28; +} StdVideoAV1ColorConfigFlags; + +typedef struct StdVideoAV1ColorConfig { + StdVideoAV1ColorConfigFlags flags; + uint8_t BitDepth; + uint8_t subsampling_x; + uint8_t subsampling_y; + uint8_t reserved1; + StdVideoAV1ColorPrimaries color_primaries; + StdVideoAV1TransferCharacteristics transfer_characteristics; + StdVideoAV1MatrixCoefficients matrix_coefficients; + StdVideoAV1ChromaSamplePosition chroma_sample_position; +} StdVideoAV1ColorConfig; + +typedef struct StdVideoAV1TimingInfoFlags { + uint32_t equal_picture_interval : 1; + uint32_t reserved : 31; +} StdVideoAV1TimingInfoFlags; + +typedef struct StdVideoAV1TimingInfo { + StdVideoAV1TimingInfoFlags flags; + uint32_t num_units_in_display_tick; + uint32_t time_scale; + uint32_t num_ticks_per_picture_minus_1; +} StdVideoAV1TimingInfo; + +typedef struct StdVideoAV1LoopFilterFlags { + uint32_t loop_filter_delta_enabled : 1; + uint32_t loop_filter_delta_update : 1; + uint32_t reserved : 30; +} StdVideoAV1LoopFilterFlags; + +typedef struct StdVideoAV1LoopFilter { + StdVideoAV1LoopFilterFlags flags; + uint8_t loop_filter_level[STD_VIDEO_AV1_MAX_LOOP_FILTER_STRENGTHS]; + uint8_t loop_filter_sharpness; + uint8_t update_ref_delta; + int8_t loop_filter_ref_deltas[STD_VIDEO_AV1_TOTAL_REFS_PER_FRAME]; + uint8_t update_mode_delta; + int8_t loop_filter_mode_deltas[STD_VIDEO_AV1_LOOP_FILTER_ADJUSTMENTS]; +} StdVideoAV1LoopFilter; + +typedef struct StdVideoAV1QuantizationFlags { + uint32_t using_qmatrix : 1; + uint32_t diff_uv_delta : 1; + uint32_t reserved : 30; +} StdVideoAV1QuantizationFlags; + +typedef struct StdVideoAV1Quantization { + StdVideoAV1QuantizationFlags flags; + uint8_t base_q_idx; + int8_t DeltaQYDc; + int8_t DeltaQUDc; + int8_t DeltaQUAc; + int8_t DeltaQVDc; + int8_t DeltaQVAc; + uint8_t qm_y; + uint8_t qm_u; + uint8_t qm_v; +} StdVideoAV1Quantization; + +typedef struct StdVideoAV1Segmentation { + uint8_t FeatureEnabled[STD_VIDEO_AV1_MAX_SEGMENTS]; + int16_t 
FeatureData[STD_VIDEO_AV1_MAX_SEGMENTS][STD_VIDEO_AV1_SEG_LVL_MAX]; +} StdVideoAV1Segmentation; + +typedef struct StdVideoAV1TileInfoFlags { + uint32_t uniform_tile_spacing_flag : 1; + uint32_t reserved : 31; +} StdVideoAV1TileInfoFlags; + +typedef struct StdVideoAV1TileInfo { + StdVideoAV1TileInfoFlags flags; + uint8_t TileCols; + uint8_t TileRows; + uint16_t context_update_tile_id; + uint8_t tile_size_bytes_minus_1; + uint8_t reserved1[7]; + const uint16_t* pMiColStarts; + const uint16_t* pMiRowStarts; + const uint16_t* pWidthInSbsMinus1; + const uint16_t* pHeightInSbsMinus1; +} StdVideoAV1TileInfo; + +typedef struct StdVideoAV1CDEF { + uint8_t cdef_damping_minus_3; + uint8_t cdef_bits; + uint8_t cdef_y_pri_strength[STD_VIDEO_AV1_MAX_CDEF_FILTER_STRENGTHS]; + uint8_t cdef_y_sec_strength[STD_VIDEO_AV1_MAX_CDEF_FILTER_STRENGTHS]; + uint8_t cdef_uv_pri_strength[STD_VIDEO_AV1_MAX_CDEF_FILTER_STRENGTHS]; + uint8_t cdef_uv_sec_strength[STD_VIDEO_AV1_MAX_CDEF_FILTER_STRENGTHS]; +} StdVideoAV1CDEF; + +typedef struct StdVideoAV1LoopRestoration { + StdVideoAV1FrameRestorationType FrameRestorationType[STD_VIDEO_AV1_MAX_NUM_PLANES]; + uint16_t LoopRestorationSize[STD_VIDEO_AV1_MAX_NUM_PLANES]; +} StdVideoAV1LoopRestoration; + +typedef struct StdVideoAV1GlobalMotion { + uint8_t GmType[STD_VIDEO_AV1_NUM_REF_FRAMES]; + int32_t gm_params[STD_VIDEO_AV1_NUM_REF_FRAMES][STD_VIDEO_AV1_GLOBAL_MOTION_PARAMS]; +} StdVideoAV1GlobalMotion; + +typedef struct StdVideoAV1FilmGrainFlags { + uint32_t chroma_scaling_from_luma : 1; + uint32_t overlap_flag : 1; + uint32_t clip_to_restricted_range : 1; + uint32_t update_grain : 1; + uint32_t reserved : 28; +} StdVideoAV1FilmGrainFlags; + +typedef struct StdVideoAV1FilmGrain { + StdVideoAV1FilmGrainFlags flags; + uint8_t grain_scaling_minus_8; + uint8_t ar_coeff_lag; + uint8_t ar_coeff_shift_minus_6; + uint8_t grain_scale_shift; + uint16_t grain_seed; + uint8_t film_grain_params_ref_idx; + uint8_t num_y_points; + uint8_t point_y_value[STD_VIDEO_AV1_MAX_NUM_Y_POINTS]; + uint8_t point_y_scaling[STD_VIDEO_AV1_MAX_NUM_Y_POINTS]; + uint8_t num_cb_points; + uint8_t point_cb_value[STD_VIDEO_AV1_MAX_NUM_CB_POINTS]; + uint8_t point_cb_scaling[STD_VIDEO_AV1_MAX_NUM_CB_POINTS]; + uint8_t num_cr_points; + uint8_t point_cr_value[STD_VIDEO_AV1_MAX_NUM_CR_POINTS]; + uint8_t point_cr_scaling[STD_VIDEO_AV1_MAX_NUM_CR_POINTS]; + int8_t ar_coeffs_y_plus_128[STD_VIDEO_AV1_MAX_NUM_POS_LUMA]; + int8_t ar_coeffs_cb_plus_128[STD_VIDEO_AV1_MAX_NUM_POS_CHROMA]; + int8_t ar_coeffs_cr_plus_128[STD_VIDEO_AV1_MAX_NUM_POS_CHROMA]; + uint8_t cb_mult; + uint8_t cb_luma_mult; + uint16_t cb_offset; + uint8_t cr_mult; + uint8_t cr_luma_mult; + uint16_t cr_offset; +} StdVideoAV1FilmGrain; + +typedef struct StdVideoAV1SequenceHeaderFlags { + uint32_t still_picture : 1; + uint32_t reduced_still_picture_header : 1; + uint32_t use_128x128_superblock : 1; + uint32_t enable_filter_intra : 1; + uint32_t enable_intra_edge_filter : 1; + uint32_t enable_interintra_compound : 1; + uint32_t enable_masked_compound : 1; + uint32_t enable_warped_motion : 1; + uint32_t enable_dual_filter : 1; + uint32_t enable_order_hint : 1; + uint32_t enable_jnt_comp : 1; + uint32_t enable_ref_frame_mvs : 1; + uint32_t frame_id_numbers_present_flag : 1; + uint32_t enable_superres : 1; + uint32_t enable_cdef : 1; + uint32_t enable_restoration : 1; + uint32_t film_grain_params_present : 1; + uint32_t timing_info_present_flag : 1; + uint32_t initial_display_delay_present_flag : 1; + uint32_t reserved : 13; +} StdVideoAV1SequenceHeaderFlags; 
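Reviewer note: the Std* types above are plain C value structs that an application fills in from a parsed AV1 sequence_header_obu and then hands on through the Vulkan video extension structs. A minimal sketch of populating the color config defined above, assuming 8-bit 4:2:0 BT.709 full-range content — the helper name and every field value below are illustrative placeholders, not part of these generated headers:

#include <string.h>
#include "vulkan_video_codec_av1std.h"

/* Hypothetical helper: describe 8-bit 4:2:0 BT.709 full-range content.
 * In a real decoder/encoder these values come from the bitstream parser
 * or the encode configuration, not from constants. */
static StdVideoAV1ColorConfig make_color_config_bt709_420_8bit(void)
{
    StdVideoAV1ColorConfig cc;
    memset(&cc, 0, sizeof(cc));
    cc.flags.mono_chrome                    = 0;
    cc.flags.color_range                    = 1; /* full range */
    cc.flags.color_description_present_flag = 1;
    cc.BitDepth      = 8;
    cc.subsampling_x = 1; /* 4:2:0 */
    cc.subsampling_y = 1;
    cc.color_primaries          = STD_VIDEO_AV1_COLOR_PRIMARIES_BT_709;
    cc.transfer_characteristics = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_709;
    cc.matrix_coefficients      = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_709;
    cc.chroma_sample_position   = STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_UNKNOWN;
    return cc;
}
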
+ +typedef struct StdVideoAV1SequenceHeader { + StdVideoAV1SequenceHeaderFlags flags; + StdVideoAV1Profile seq_profile; + uint8_t frame_width_bits_minus_1; + uint8_t frame_height_bits_minus_1; + uint16_t max_frame_width_minus_1; + uint16_t max_frame_height_minus_1; + uint8_t delta_frame_id_length_minus_2; + uint8_t additional_frame_id_length_minus_1; + uint8_t order_hint_bits_minus_1; + uint8_t seq_force_integer_mv; + uint8_t seq_force_screen_content_tools; + uint8_t reserved1[5]; + const StdVideoAV1ColorConfig* pColorConfig; + const StdVideoAV1TimingInfo* pTimingInfo; +} StdVideoAV1SequenceHeader; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/src/libraries/vk_video/vulkan_video_codec_av1std_decode.h b/src/libraries/vk_video/vulkan_video_codec_av1std_decode.h new file mode 100644 index 000000000..522628e86 --- /dev/null +++ b/src/libraries/vk_video/vulkan_video_codec_av1std_decode.h @@ -0,0 +1,109 @@ +#ifndef VULKAN_VIDEO_CODEC_AV1STD_DECODE_H_ +#define VULKAN_VIDEO_CODEC_AV1STD_DECODE_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_av1std_decode is a preprocessor guard. Do not pass it to API calls. +#define vulkan_video_codec_av1std_decode 1 +#include "vulkan_video_codec_av1std.h" + +#define VK_STD_VULKAN_VIDEO_CODEC_AV1_DECODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) + +#define VK_STD_VULKAN_VIDEO_CODEC_AV1_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_AV1_DECODE_API_VERSION_1_0_0 +#define VK_STD_VULKAN_VIDEO_CODEC_AV1_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_av1_decode" +typedef struct StdVideoDecodeAV1PictureInfoFlags { + uint32_t error_resilient_mode : 1; + uint32_t disable_cdf_update : 1; + uint32_t use_superres : 1; + uint32_t render_and_frame_size_different : 1; + uint32_t allow_screen_content_tools : 1; + uint32_t is_filter_switchable : 1; + uint32_t force_integer_mv : 1; + uint32_t frame_size_override_flag : 1; + uint32_t buffer_removal_time_present_flag : 1; + uint32_t allow_intrabc : 1; + uint32_t frame_refs_short_signaling : 1; + uint32_t allow_high_precision_mv : 1; + uint32_t is_motion_mode_switchable : 1; + uint32_t use_ref_frame_mvs : 1; + uint32_t disable_frame_end_update_cdf : 1; + uint32_t allow_warped_motion : 1; + uint32_t reduced_tx_set : 1; + uint32_t reference_select : 1; + uint32_t skip_mode_present : 1; + uint32_t delta_q_present : 1; + uint32_t delta_lf_present : 1; + uint32_t delta_lf_multi : 1; + uint32_t segmentation_enabled : 1; + uint32_t segmentation_update_map : 1; + uint32_t segmentation_temporal_update : 1; + uint32_t segmentation_update_data : 1; + uint32_t UsesLr : 1; + uint32_t usesChromaLr : 1; + uint32_t apply_grain : 1; + uint32_t reserved : 3; +} StdVideoDecodeAV1PictureInfoFlags; + +typedef struct StdVideoDecodeAV1PictureInfo { + StdVideoDecodeAV1PictureInfoFlags flags; + StdVideoAV1FrameType frame_type; + uint32_t current_frame_id; + uint8_t OrderHint; + uint8_t primary_ref_frame; + uint8_t refresh_frame_flags; + uint8_t reserved1; + StdVideoAV1InterpolationFilter interpolation_filter; + StdVideoAV1TxMode TxMode; + uint8_t delta_q_res; + uint8_t delta_lf_res; + uint8_t SkipModeFrame[STD_VIDEO_AV1_SKIP_MODE_FRAMES]; + uint8_t coded_denom; + uint8_t reserved2[3]; + uint8_t OrderHints[STD_VIDEO_AV1_NUM_REF_FRAMES]; + uint32_t expectedFrameId[STD_VIDEO_AV1_NUM_REF_FRAMES]; + const StdVideoAV1TileInfo* 
pTileInfo; + const StdVideoAV1Quantization* pQuantization; + const StdVideoAV1Segmentation* pSegmentation; + const StdVideoAV1LoopFilter* pLoopFilter; + const StdVideoAV1CDEF* pCDEF; + const StdVideoAV1LoopRestoration* pLoopRestoration; + const StdVideoAV1GlobalMotion* pGlobalMotion; + const StdVideoAV1FilmGrain* pFilmGrain; +} StdVideoDecodeAV1PictureInfo; + +typedef struct StdVideoDecodeAV1ReferenceInfoFlags { + uint32_t disable_frame_end_update_cdf : 1; + uint32_t segmentation_enabled : 1; + uint32_t reserved : 30; +} StdVideoDecodeAV1ReferenceInfoFlags; + +typedef struct StdVideoDecodeAV1ReferenceInfo { + StdVideoDecodeAV1ReferenceInfoFlags flags; + uint8_t frame_type; + uint8_t RefFrameSignBias; + uint8_t OrderHint; + uint8_t SavedOrderHints[STD_VIDEO_AV1_NUM_REF_FRAMES]; +} StdVideoDecodeAV1ReferenceInfo; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/src/libraries/vk_video/vulkan_video_codec_av1std_encode.h b/src/libraries/vk_video/vulkan_video_codec_av1std_encode.h new file mode 100644 index 000000000..ca5f6f474 --- /dev/null +++ b/src/libraries/vk_video/vulkan_video_codec_av1std_encode.h @@ -0,0 +1,143 @@ +#ifndef VULKAN_VIDEO_CODEC_AV1STD_ENCODE_H_ +#define VULKAN_VIDEO_CODEC_AV1STD_ENCODE_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_av1std_encode is a preprocessor guard. Do not pass it to API calls. +#define vulkan_video_codec_av1std_encode 1 +#include "vulkan_video_codec_av1std.h" + +#define VK_STD_VULKAN_VIDEO_CODEC_AV1_ENCODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) + +#define VK_STD_VULKAN_VIDEO_CODEC_AV1_ENCODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_AV1_ENCODE_API_VERSION_1_0_0 +#define VK_STD_VULKAN_VIDEO_CODEC_AV1_ENCODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_av1_encode" +typedef struct StdVideoEncodeAV1DecoderModelInfo { + uint8_t buffer_delay_length_minus_1; + uint8_t buffer_removal_time_length_minus_1; + uint8_t frame_presentation_time_length_minus_1; + uint8_t reserved1; + uint32_t num_units_in_decoding_tick; +} StdVideoEncodeAV1DecoderModelInfo; + +typedef struct StdVideoEncodeAV1ExtensionHeader { + uint8_t temporal_id; + uint8_t spatial_id; +} StdVideoEncodeAV1ExtensionHeader; + +typedef struct StdVideoEncodeAV1OperatingPointInfoFlags { + uint32_t decoder_model_present_for_this_op : 1; + uint32_t low_delay_mode_flag : 1; + uint32_t initial_display_delay_present_for_this_op : 1; + uint32_t reserved : 29; +} StdVideoEncodeAV1OperatingPointInfoFlags; + +typedef struct StdVideoEncodeAV1OperatingPointInfo { + StdVideoEncodeAV1OperatingPointInfoFlags flags; + uint16_t operating_point_idc; + uint8_t seq_level_idx; + uint8_t seq_tier; + uint32_t decoder_buffer_delay; + uint32_t encoder_buffer_delay; + uint8_t initial_display_delay_minus_1; +} StdVideoEncodeAV1OperatingPointInfo; + +typedef struct StdVideoEncodeAV1PictureInfoFlags { + uint32_t error_resilient_mode : 1; + uint32_t disable_cdf_update : 1; + uint32_t use_superres : 1; + uint32_t render_and_frame_size_different : 1; + uint32_t allow_screen_content_tools : 1; + uint32_t is_filter_switchable : 1; + uint32_t force_integer_mv : 1; + uint32_t frame_size_override_flag : 1; + uint32_t buffer_removal_time_present_flag : 1; + uint32_t allow_intrabc : 1; + uint32_t frame_refs_short_signaling : 1; + uint32_t allow_high_precision_mv : 1; + uint32_t is_motion_mode_switchable : 1; + 
uint32_t use_ref_frame_mvs : 1; + uint32_t disable_frame_end_update_cdf : 1; + uint32_t allow_warped_motion : 1; + uint32_t reduced_tx_set : 1; + uint32_t skip_mode_present : 1; + uint32_t delta_q_present : 1; + uint32_t delta_lf_present : 1; + uint32_t delta_lf_multi : 1; + uint32_t segmentation_enabled : 1; + uint32_t segmentation_update_map : 1; + uint32_t segmentation_temporal_update : 1; + uint32_t segmentation_update_data : 1; + uint32_t UsesLr : 1; + uint32_t usesChromaLr : 1; + uint32_t show_frame : 1; + uint32_t showable_frame : 1; + uint32_t reserved : 3; +} StdVideoEncodeAV1PictureInfoFlags; + +typedef struct StdVideoEncodeAV1PictureInfo { + StdVideoEncodeAV1PictureInfoFlags flags; + StdVideoAV1FrameType frame_type; + uint32_t frame_presentation_time; + uint32_t current_frame_id; + uint8_t order_hint; + uint8_t primary_ref_frame; + uint8_t refresh_frame_flags; + uint8_t coded_denom; + uint16_t render_width_minus_1; + uint16_t render_height_minus_1; + StdVideoAV1InterpolationFilter interpolation_filter; + StdVideoAV1TxMode TxMode; + uint8_t delta_q_res; + uint8_t delta_lf_res; + uint8_t ref_order_hint[STD_VIDEO_AV1_NUM_REF_FRAMES]; + int8_t ref_frame_idx[STD_VIDEO_AV1_REFS_PER_FRAME]; + uint8_t reserved1[3]; + uint32_t delta_frame_id_minus_1[STD_VIDEO_AV1_REFS_PER_FRAME]; + const StdVideoAV1TileInfo* pTileInfo; + const StdVideoAV1Quantization* pQuantization; + const StdVideoAV1Segmentation* pSegmentation; + const StdVideoAV1LoopFilter* pLoopFilter; + const StdVideoAV1CDEF* pCDEF; + const StdVideoAV1LoopRestoration* pLoopRestoration; + const StdVideoAV1GlobalMotion* pGlobalMotion; + const StdVideoEncodeAV1ExtensionHeader* pExtensionHeader; + const uint32_t* pBufferRemovalTimes; +} StdVideoEncodeAV1PictureInfo; + +typedef struct StdVideoEncodeAV1ReferenceInfoFlags { + uint32_t disable_frame_end_update_cdf : 1; + uint32_t segmentation_enabled : 1; + uint32_t reserved : 30; +} StdVideoEncodeAV1ReferenceInfoFlags; + +typedef struct StdVideoEncodeAV1ReferenceInfo { + StdVideoEncodeAV1ReferenceInfoFlags flags; + uint32_t RefFrameId; + StdVideoAV1FrameType frame_type; + uint8_t OrderHint; + uint8_t reserved1[3]; + const StdVideoEncodeAV1ExtensionHeader* pExtensionHeader; +} StdVideoEncodeAV1ReferenceInfo; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/src/libraries/vk_video/vulkan_video_codec_h264std.h b/src/libraries/vk_video/vulkan_video_codec_h264std.h new file mode 100644 index 000000000..6fd381066 --- /dev/null +++ b/src/libraries/vk_video/vulkan_video_codec_h264std.h @@ -0,0 +1,312 @@ +#ifndef VULKAN_VIDEO_CODEC_H264STD_H_ +#define VULKAN_VIDEO_CODEC_H264STD_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h264std is a preprocessor guard. Do not pass it to API calls. 
+#define vulkan_video_codec_h264std 1 +#include "vulkan_video_codecs_common.h" +#define STD_VIDEO_H264_CPB_CNT_LIST_SIZE 32 +#define STD_VIDEO_H264_SCALING_LIST_4X4_NUM_LISTS 6 +#define STD_VIDEO_H264_SCALING_LIST_4X4_NUM_ELEMENTS 16 +#define STD_VIDEO_H264_SCALING_LIST_8X8_NUM_LISTS 6 +#define STD_VIDEO_H264_SCALING_LIST_8X8_NUM_ELEMENTS 64 +#define STD_VIDEO_H264_MAX_NUM_LIST_REF 32 +#define STD_VIDEO_H264_MAX_CHROMA_PLANES 2 +#define STD_VIDEO_H264_NO_REFERENCE_PICTURE 0xFF + +typedef enum StdVideoH264ChromaFormatIdc { + STD_VIDEO_H264_CHROMA_FORMAT_IDC_MONOCHROME = 0, + STD_VIDEO_H264_CHROMA_FORMAT_IDC_420 = 1, + STD_VIDEO_H264_CHROMA_FORMAT_IDC_422 = 2, + STD_VIDEO_H264_CHROMA_FORMAT_IDC_444 = 3, + STD_VIDEO_H264_CHROMA_FORMAT_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_CHROMA_FORMAT_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264ChromaFormatIdc; + +typedef enum StdVideoH264ProfileIdc { + STD_VIDEO_H264_PROFILE_IDC_BASELINE = 66, + STD_VIDEO_H264_PROFILE_IDC_MAIN = 77, + STD_VIDEO_H264_PROFILE_IDC_HIGH = 100, + STD_VIDEO_H264_PROFILE_IDC_HIGH_444_PREDICTIVE = 244, + STD_VIDEO_H264_PROFILE_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_PROFILE_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264ProfileIdc; + +typedef enum StdVideoH264LevelIdc { + STD_VIDEO_H264_LEVEL_IDC_1_0 = 0, + STD_VIDEO_H264_LEVEL_IDC_1_1 = 1, + STD_VIDEO_H264_LEVEL_IDC_1_2 = 2, + STD_VIDEO_H264_LEVEL_IDC_1_3 = 3, + STD_VIDEO_H264_LEVEL_IDC_2_0 = 4, + STD_VIDEO_H264_LEVEL_IDC_2_1 = 5, + STD_VIDEO_H264_LEVEL_IDC_2_2 = 6, + STD_VIDEO_H264_LEVEL_IDC_3_0 = 7, + STD_VIDEO_H264_LEVEL_IDC_3_1 = 8, + STD_VIDEO_H264_LEVEL_IDC_3_2 = 9, + STD_VIDEO_H264_LEVEL_IDC_4_0 = 10, + STD_VIDEO_H264_LEVEL_IDC_4_1 = 11, + STD_VIDEO_H264_LEVEL_IDC_4_2 = 12, + STD_VIDEO_H264_LEVEL_IDC_5_0 = 13, + STD_VIDEO_H264_LEVEL_IDC_5_1 = 14, + STD_VIDEO_H264_LEVEL_IDC_5_2 = 15, + STD_VIDEO_H264_LEVEL_IDC_6_0 = 16, + STD_VIDEO_H264_LEVEL_IDC_6_1 = 17, + STD_VIDEO_H264_LEVEL_IDC_6_2 = 18, + STD_VIDEO_H264_LEVEL_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_LEVEL_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264LevelIdc; + +typedef enum StdVideoH264PocType { + STD_VIDEO_H264_POC_TYPE_0 = 0, + STD_VIDEO_H264_POC_TYPE_1 = 1, + STD_VIDEO_H264_POC_TYPE_2 = 2, + STD_VIDEO_H264_POC_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_POC_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264PocType; + +typedef enum StdVideoH264AspectRatioIdc { + STD_VIDEO_H264_ASPECT_RATIO_IDC_UNSPECIFIED = 0, + STD_VIDEO_H264_ASPECT_RATIO_IDC_SQUARE = 1, + STD_VIDEO_H264_ASPECT_RATIO_IDC_12_11 = 2, + STD_VIDEO_H264_ASPECT_RATIO_IDC_10_11 = 3, + STD_VIDEO_H264_ASPECT_RATIO_IDC_16_11 = 4, + STD_VIDEO_H264_ASPECT_RATIO_IDC_40_33 = 5, + STD_VIDEO_H264_ASPECT_RATIO_IDC_24_11 = 6, + STD_VIDEO_H264_ASPECT_RATIO_IDC_20_11 = 7, + STD_VIDEO_H264_ASPECT_RATIO_IDC_32_11 = 8, + STD_VIDEO_H264_ASPECT_RATIO_IDC_80_33 = 9, + STD_VIDEO_H264_ASPECT_RATIO_IDC_18_11 = 10, + STD_VIDEO_H264_ASPECT_RATIO_IDC_15_11 = 11, + STD_VIDEO_H264_ASPECT_RATIO_IDC_64_33 = 12, + STD_VIDEO_H264_ASPECT_RATIO_IDC_160_99 = 13, + STD_VIDEO_H264_ASPECT_RATIO_IDC_4_3 = 14, + STD_VIDEO_H264_ASPECT_RATIO_IDC_3_2 = 15, + STD_VIDEO_H264_ASPECT_RATIO_IDC_2_1 = 16, + STD_VIDEO_H264_ASPECT_RATIO_IDC_EXTENDED_SAR = 255, + STD_VIDEO_H264_ASPECT_RATIO_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_ASPECT_RATIO_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264AspectRatioIdc; + +typedef enum StdVideoH264WeightedBipredIdc { + STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_DEFAULT = 0, + STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_EXPLICIT = 1, + STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_IMPLICIT = 2, + 
STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264WeightedBipredIdc; + +typedef enum StdVideoH264ModificationOfPicNumsIdc { + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_SUBTRACT = 0, + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_ADD = 1, + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_LONG_TERM = 2, + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_END = 3, + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264ModificationOfPicNumsIdc; + +typedef enum StdVideoH264MemMgmtControlOp { + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_END = 0, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_SHORT_TERM = 1, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_LONG_TERM = 2, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_LONG_TERM = 3, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_SET_MAX_LONG_TERM_INDEX = 4, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_ALL = 5, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_CURRENT_AS_LONG_TERM = 6, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264MemMgmtControlOp; + +typedef enum StdVideoH264CabacInitIdc { + STD_VIDEO_H264_CABAC_INIT_IDC_0 = 0, + STD_VIDEO_H264_CABAC_INIT_IDC_1 = 1, + STD_VIDEO_H264_CABAC_INIT_IDC_2 = 2, + STD_VIDEO_H264_CABAC_INIT_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_CABAC_INIT_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264CabacInitIdc; + +typedef enum StdVideoH264DisableDeblockingFilterIdc { + STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_DISABLED = 0, + STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_ENABLED = 1, + STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_PARTIAL = 2, + STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264DisableDeblockingFilterIdc; + +typedef enum StdVideoH264SliceType { + STD_VIDEO_H264_SLICE_TYPE_P = 0, + STD_VIDEO_H264_SLICE_TYPE_B = 1, + STD_VIDEO_H264_SLICE_TYPE_I = 2, + STD_VIDEO_H264_SLICE_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_SLICE_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264SliceType; + +typedef enum StdVideoH264PictureType { + STD_VIDEO_H264_PICTURE_TYPE_P = 0, + STD_VIDEO_H264_PICTURE_TYPE_B = 1, + STD_VIDEO_H264_PICTURE_TYPE_I = 2, + STD_VIDEO_H264_PICTURE_TYPE_IDR = 5, + STD_VIDEO_H264_PICTURE_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_PICTURE_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264PictureType; + +typedef enum StdVideoH264NonVclNaluType { + STD_VIDEO_H264_NON_VCL_NALU_TYPE_SPS = 0, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_PPS = 1, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_AUD = 2, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_PREFIX = 3, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_SEQUENCE = 4, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_STREAM = 5, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_PRECODED = 6, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264NonVclNaluType; +typedef struct StdVideoH264SpsVuiFlags { + uint32_t aspect_ratio_info_present_flag : 1; + uint32_t overscan_info_present_flag : 1; + uint32_t overscan_appropriate_flag : 1; + uint32_t video_signal_type_present_flag : 1; + uint32_t video_full_range_flag : 1; + uint32_t color_description_present_flag : 1; + uint32_t chroma_loc_info_present_flag : 1; + uint32_t timing_info_present_flag : 1; + uint32_t fixed_frame_rate_flag : 1; + uint32_t bitstream_restriction_flag : 1; 
+ uint32_t nal_hrd_parameters_present_flag : 1; + uint32_t vcl_hrd_parameters_present_flag : 1; +} StdVideoH264SpsVuiFlags; + +typedef struct StdVideoH264HrdParameters { + uint8_t cpb_cnt_minus1; + uint8_t bit_rate_scale; + uint8_t cpb_size_scale; + uint8_t reserved1; + uint32_t bit_rate_value_minus1[STD_VIDEO_H264_CPB_CNT_LIST_SIZE]; + uint32_t cpb_size_value_minus1[STD_VIDEO_H264_CPB_CNT_LIST_SIZE]; + uint8_t cbr_flag[STD_VIDEO_H264_CPB_CNT_LIST_SIZE]; + uint32_t initial_cpb_removal_delay_length_minus1; + uint32_t cpb_removal_delay_length_minus1; + uint32_t dpb_output_delay_length_minus1; + uint32_t time_offset_length; +} StdVideoH264HrdParameters; + +typedef struct StdVideoH264SequenceParameterSetVui { + StdVideoH264SpsVuiFlags flags; + StdVideoH264AspectRatioIdc aspect_ratio_idc; + uint16_t sar_width; + uint16_t sar_height; + uint8_t video_format; + uint8_t colour_primaries; + uint8_t transfer_characteristics; + uint8_t matrix_coefficients; + uint32_t num_units_in_tick; + uint32_t time_scale; + uint8_t max_num_reorder_frames; + uint8_t max_dec_frame_buffering; + uint8_t chroma_sample_loc_type_top_field; + uint8_t chroma_sample_loc_type_bottom_field; + uint32_t reserved1; + const StdVideoH264HrdParameters* pHrdParameters; +} StdVideoH264SequenceParameterSetVui; + +typedef struct StdVideoH264SpsFlags { + uint32_t constraint_set0_flag : 1; + uint32_t constraint_set1_flag : 1; + uint32_t constraint_set2_flag : 1; + uint32_t constraint_set3_flag : 1; + uint32_t constraint_set4_flag : 1; + uint32_t constraint_set5_flag : 1; + uint32_t direct_8x8_inference_flag : 1; + uint32_t mb_adaptive_frame_field_flag : 1; + uint32_t frame_mbs_only_flag : 1; + uint32_t delta_pic_order_always_zero_flag : 1; + uint32_t separate_colour_plane_flag : 1; + uint32_t gaps_in_frame_num_value_allowed_flag : 1; + uint32_t qpprime_y_zero_transform_bypass_flag : 1; + uint32_t frame_cropping_flag : 1; + uint32_t seq_scaling_matrix_present_flag : 1; + uint32_t vui_parameters_present_flag : 1; +} StdVideoH264SpsFlags; + +typedef struct StdVideoH264ScalingLists { + uint16_t scaling_list_present_mask; + uint16_t use_default_scaling_matrix_mask; + uint8_t ScalingList4x4[STD_VIDEO_H264_SCALING_LIST_4X4_NUM_LISTS][STD_VIDEO_H264_SCALING_LIST_4X4_NUM_ELEMENTS]; + uint8_t ScalingList8x8[STD_VIDEO_H264_SCALING_LIST_8X8_NUM_LISTS][STD_VIDEO_H264_SCALING_LIST_8X8_NUM_ELEMENTS]; +} StdVideoH264ScalingLists; + +typedef struct StdVideoH264SequenceParameterSet { + StdVideoH264SpsFlags flags; + StdVideoH264ProfileIdc profile_idc; + StdVideoH264LevelIdc level_idc; + StdVideoH264ChromaFormatIdc chroma_format_idc; + uint8_t seq_parameter_set_id; + uint8_t bit_depth_luma_minus8; + uint8_t bit_depth_chroma_minus8; + uint8_t log2_max_frame_num_minus4; + StdVideoH264PocType pic_order_cnt_type; + int32_t offset_for_non_ref_pic; + int32_t offset_for_top_to_bottom_field; + uint8_t log2_max_pic_order_cnt_lsb_minus4; + uint8_t num_ref_frames_in_pic_order_cnt_cycle; + uint8_t max_num_ref_frames; + uint8_t reserved1; + uint32_t pic_width_in_mbs_minus1; + uint32_t pic_height_in_map_units_minus1; + uint32_t frame_crop_left_offset; + uint32_t frame_crop_right_offset; + uint32_t frame_crop_top_offset; + uint32_t frame_crop_bottom_offset; + uint32_t reserved2; + const int32_t* pOffsetForRefFrame; + const StdVideoH264ScalingLists* pScalingLists; + const StdVideoH264SequenceParameterSetVui* pSequenceParameterSetVui; +} StdVideoH264SequenceParameterSet; + +typedef struct StdVideoH264PpsFlags { + uint32_t transform_8x8_mode_flag : 1; + uint32_t 
redundant_pic_cnt_present_flag : 1; + uint32_t constrained_intra_pred_flag : 1; + uint32_t deblocking_filter_control_present_flag : 1; + uint32_t weighted_pred_flag : 1; + uint32_t bottom_field_pic_order_in_frame_present_flag : 1; + uint32_t entropy_coding_mode_flag : 1; + uint32_t pic_scaling_matrix_present_flag : 1; +} StdVideoH264PpsFlags; + +typedef struct StdVideoH264PictureParameterSet { + StdVideoH264PpsFlags flags; + uint8_t seq_parameter_set_id; + uint8_t pic_parameter_set_id; + uint8_t num_ref_idx_l0_default_active_minus1; + uint8_t num_ref_idx_l1_default_active_minus1; + StdVideoH264WeightedBipredIdc weighted_bipred_idc; + int8_t pic_init_qp_minus26; + int8_t pic_init_qs_minus26; + int8_t chroma_qp_index_offset; + int8_t second_chroma_qp_index_offset; + const StdVideoH264ScalingLists* pScalingLists; +} StdVideoH264PictureParameterSet; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/src/libraries/vk_video/vulkan_video_codec_h264std_decode.h b/src/libraries/vk_video/vulkan_video_codec_h264std_decode.h new file mode 100644 index 000000000..d6a90b498 --- /dev/null +++ b/src/libraries/vk_video/vulkan_video_codec_h264std_decode.h @@ -0,0 +1,77 @@ +#ifndef VULKAN_VIDEO_CODEC_H264STD_DECODE_H_ +#define VULKAN_VIDEO_CODEC_H264STD_DECODE_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h264std_decode is a preprocessor guard. Do not pass it to API calls. +#define vulkan_video_codec_h264std_decode 1 +#include "vulkan_video_codec_h264std.h" + +#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) + +#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_API_VERSION_1_0_0 +#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h264_decode" +#define STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE 2 + +typedef enum StdVideoDecodeH264FieldOrderCount { + STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_TOP = 0, + STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_BOTTOM = 1, + STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_INVALID = 0x7FFFFFFF, + STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_MAX_ENUM = 0x7FFFFFFF +} StdVideoDecodeH264FieldOrderCount; +typedef struct StdVideoDecodeH264PictureInfoFlags { + uint32_t field_pic_flag : 1; + uint32_t is_intra : 1; + uint32_t IdrPicFlag : 1; + uint32_t bottom_field_flag : 1; + uint32_t is_reference : 1; + uint32_t complementary_field_pair : 1; +} StdVideoDecodeH264PictureInfoFlags; + +typedef struct StdVideoDecodeH264PictureInfo { + StdVideoDecodeH264PictureInfoFlags flags; + uint8_t seq_parameter_set_id; + uint8_t pic_parameter_set_id; + uint8_t reserved1; + uint8_t reserved2; + uint16_t frame_num; + uint16_t idr_pic_id; + int32_t PicOrderCnt[STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE]; +} StdVideoDecodeH264PictureInfo; + +typedef struct StdVideoDecodeH264ReferenceInfoFlags { + uint32_t top_field_flag : 1; + uint32_t bottom_field_flag : 1; + uint32_t used_for_long_term_reference : 1; + uint32_t is_non_existing : 1; +} StdVideoDecodeH264ReferenceInfoFlags; + +typedef struct StdVideoDecodeH264ReferenceInfo { + StdVideoDecodeH264ReferenceInfoFlags flags; + uint16_t FrameNum; + uint16_t reserved; + int32_t PicOrderCnt[STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE]; +} StdVideoDecodeH264ReferenceInfo; + + +#ifdef __cplusplus +} +#endif + 
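Reviewer note: with the H.264 decode types now complete, here is a minimal sketch of how a decoder front end might fill the picture info for a progressive IDR frame before wiring it into the VK_KHR_video_decode_h264 plumbing (omitted here). All values are illustrative placeholders that would normally come from the parsed slice header:

#include <string.h>
#include "vulkan_video_codec_h264std_decode.h"

/* Hypothetical helper: picture info for a progressive IDR frame.
 * frame_num, idr_pic_id and the POC values are placeholders. */
static StdVideoDecodeH264PictureInfo make_idr_picture_info(void)
{
    StdVideoDecodeH264PictureInfo pic;
    memset(&pic, 0, sizeof(pic));
    pic.flags.IdrPicFlag   = 1;
    pic.flags.is_intra     = 1;
    pic.flags.is_reference = 1;
    pic.seq_parameter_set_id = 0;
    pic.pic_parameter_set_id = 0;
    pic.frame_num  = 0;
    pic.idr_pic_id = 0;
    /* Top/bottom field order counts; identical for a progressive frame. */
    pic.PicOrderCnt[STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_TOP]    = 0;
    pic.PicOrderCnt[STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_BOTTOM] = 0;
    return pic;
}
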
+#endif diff --git a/src/libraries/vk_video/vulkan_video_codec_h264std_encode.h b/src/libraries/vk_video/vulkan_video_codec_h264std_encode.h new file mode 100644 index 000000000..410b1b254 --- /dev/null +++ b/src/libraries/vk_video/vulkan_video_codec_h264std_encode.h @@ -0,0 +1,147 @@ +#ifndef VULKAN_VIDEO_CODEC_H264STD_ENCODE_H_ +#define VULKAN_VIDEO_CODEC_H264STD_ENCODE_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h264std_encode is a preprocessor guard. Do not pass it to API calls. +#define vulkan_video_codec_h264std_encode 1 +#include "vulkan_video_codec_h264std.h" + +#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) + +#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_API_VERSION_1_0_0 +#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h264_encode" +typedef struct StdVideoEncodeH264WeightTableFlags { + uint32_t luma_weight_l0_flag; + uint32_t chroma_weight_l0_flag; + uint32_t luma_weight_l1_flag; + uint32_t chroma_weight_l1_flag; +} StdVideoEncodeH264WeightTableFlags; + +typedef struct StdVideoEncodeH264WeightTable { + StdVideoEncodeH264WeightTableFlags flags; + uint8_t luma_log2_weight_denom; + uint8_t chroma_log2_weight_denom; + int8_t luma_weight_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + int8_t luma_offset_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + int8_t chroma_weight_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES]; + int8_t chroma_offset_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES]; + int8_t luma_weight_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + int8_t luma_offset_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + int8_t chroma_weight_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES]; + int8_t chroma_offset_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES]; +} StdVideoEncodeH264WeightTable; + +typedef struct StdVideoEncodeH264SliceHeaderFlags { + uint32_t direct_spatial_mv_pred_flag : 1; + uint32_t num_ref_idx_active_override_flag : 1; + uint32_t reserved : 30; +} StdVideoEncodeH264SliceHeaderFlags; + +typedef struct StdVideoEncodeH264PictureInfoFlags { + uint32_t IdrPicFlag : 1; + uint32_t is_reference : 1; + uint32_t no_output_of_prior_pics_flag : 1; + uint32_t long_term_reference_flag : 1; + uint32_t adaptive_ref_pic_marking_mode_flag : 1; + uint32_t reserved : 27; +} StdVideoEncodeH264PictureInfoFlags; + +typedef struct StdVideoEncodeH264ReferenceInfoFlags { + uint32_t used_for_long_term_reference : 1; + uint32_t reserved : 31; +} StdVideoEncodeH264ReferenceInfoFlags; + +typedef struct StdVideoEncodeH264ReferenceListsInfoFlags { + uint32_t ref_pic_list_modification_flag_l0 : 1; + uint32_t ref_pic_list_modification_flag_l1 : 1; + uint32_t reserved : 30; +} StdVideoEncodeH264ReferenceListsInfoFlags; + +typedef struct StdVideoEncodeH264RefListModEntry { + StdVideoH264ModificationOfPicNumsIdc modification_of_pic_nums_idc; + uint16_t abs_diff_pic_num_minus1; + uint16_t long_term_pic_num; +} StdVideoEncodeH264RefListModEntry; + +typedef struct StdVideoEncodeH264RefPicMarkingEntry { + StdVideoH264MemMgmtControlOp memory_management_control_operation; + uint16_t difference_of_pic_nums_minus1; + uint16_t long_term_pic_num; + uint16_t long_term_frame_idx; + uint16_t 
max_long_term_frame_idx_plus1; +} StdVideoEncodeH264RefPicMarkingEntry; + +typedef struct StdVideoEncodeH264ReferenceListsInfo { + StdVideoEncodeH264ReferenceListsInfoFlags flags; + uint8_t num_ref_idx_l0_active_minus1; + uint8_t num_ref_idx_l1_active_minus1; + uint8_t RefPicList0[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + uint8_t RefPicList1[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + uint8_t refList0ModOpCount; + uint8_t refList1ModOpCount; + uint8_t refPicMarkingOpCount; + uint8_t reserved1[7]; + const StdVideoEncodeH264RefListModEntry* pRefList0ModOperations; + const StdVideoEncodeH264RefListModEntry* pRefList1ModOperations; + const StdVideoEncodeH264RefPicMarkingEntry* pRefPicMarkingOperations; +} StdVideoEncodeH264ReferenceListsInfo; + +typedef struct StdVideoEncodeH264PictureInfo { + StdVideoEncodeH264PictureInfoFlags flags; + uint8_t seq_parameter_set_id; + uint8_t pic_parameter_set_id; + uint16_t idr_pic_id; + StdVideoH264PictureType primary_pic_type; + uint32_t frame_num; + int32_t PicOrderCnt; + uint8_t temporal_id; + uint8_t reserved1[3]; + const StdVideoEncodeH264ReferenceListsInfo* pRefLists; +} StdVideoEncodeH264PictureInfo; + +typedef struct StdVideoEncodeH264ReferenceInfo { + StdVideoEncodeH264ReferenceInfoFlags flags; + StdVideoH264PictureType primary_pic_type; + uint32_t FrameNum; + int32_t PicOrderCnt; + uint16_t long_term_pic_num; + uint16_t long_term_frame_idx; + uint8_t temporal_id; +} StdVideoEncodeH264ReferenceInfo; + +typedef struct StdVideoEncodeH264SliceHeader { + StdVideoEncodeH264SliceHeaderFlags flags; + uint32_t first_mb_in_slice; + StdVideoH264SliceType slice_type; + int8_t slice_alpha_c0_offset_div2; + int8_t slice_beta_offset_div2; + int8_t slice_qp_delta; + uint8_t reserved1; + StdVideoH264CabacInitIdc cabac_init_idc; + StdVideoH264DisableDeblockingFilterIdc disable_deblocking_filter_idc; + const StdVideoEncodeH264WeightTable* pWeightTable; +} StdVideoEncodeH264SliceHeader; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/src/libraries/vk_video/vulkan_video_codec_h265std.h b/src/libraries/vk_video/vulkan_video_codec_h265std.h new file mode 100644 index 000000000..3eecd6011 --- /dev/null +++ b/src/libraries/vk_video/vulkan_video_codec_h265std.h @@ -0,0 +1,446 @@ +#ifndef VULKAN_VIDEO_CODEC_H265STD_H_ +#define VULKAN_VIDEO_CODEC_H265STD_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h265std is a preprocessor guard. Do not pass it to API calls. 
+#define vulkan_video_codec_h265std 1 +#include "vulkan_video_codecs_common.h" +#define STD_VIDEO_H265_CPB_CNT_LIST_SIZE 32 +#define STD_VIDEO_H265_SUBLAYERS_LIST_SIZE 7 +#define STD_VIDEO_H265_SCALING_LIST_4X4_NUM_LISTS 6 +#define STD_VIDEO_H265_SCALING_LIST_4X4_NUM_ELEMENTS 16 +#define STD_VIDEO_H265_SCALING_LIST_8X8_NUM_LISTS 6 +#define STD_VIDEO_H265_SCALING_LIST_8X8_NUM_ELEMENTS 64 +#define STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS 6 +#define STD_VIDEO_H265_SCALING_LIST_16X16_NUM_ELEMENTS 64 +#define STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS 2 +#define STD_VIDEO_H265_SCALING_LIST_32X32_NUM_ELEMENTS 64 +#define STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE 6 +#define STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_COLS_LIST_SIZE 19 +#define STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_ROWS_LIST_SIZE 21 +#define STD_VIDEO_H265_PREDICTOR_PALETTE_COMPONENTS_LIST_SIZE 3 +#define STD_VIDEO_H265_PREDICTOR_PALETTE_COMP_ENTRIES_LIST_SIZE 128 +#define STD_VIDEO_H265_MAX_NUM_LIST_REF 15 +#define STD_VIDEO_H265_MAX_CHROMA_PLANES 2 +#define STD_VIDEO_H265_MAX_SHORT_TERM_REF_PIC_SETS 64 +#define STD_VIDEO_H265_MAX_DPB_SIZE 16 +#define STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS 32 +#define STD_VIDEO_H265_MAX_LONG_TERM_PICS 16 +#define STD_VIDEO_H265_MAX_DELTA_POC 48 +#define STD_VIDEO_H265_NO_REFERENCE_PICTURE 0xFF + +typedef enum StdVideoH265ChromaFormatIdc { + STD_VIDEO_H265_CHROMA_FORMAT_IDC_MONOCHROME = 0, + STD_VIDEO_H265_CHROMA_FORMAT_IDC_420 = 1, + STD_VIDEO_H265_CHROMA_FORMAT_IDC_422 = 2, + STD_VIDEO_H265_CHROMA_FORMAT_IDC_444 = 3, + STD_VIDEO_H265_CHROMA_FORMAT_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_CHROMA_FORMAT_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265ChromaFormatIdc; + +typedef enum StdVideoH265ProfileIdc { + STD_VIDEO_H265_PROFILE_IDC_MAIN = 1, + STD_VIDEO_H265_PROFILE_IDC_MAIN_10 = 2, + STD_VIDEO_H265_PROFILE_IDC_MAIN_STILL_PICTURE = 3, + STD_VIDEO_H265_PROFILE_IDC_FORMAT_RANGE_EXTENSIONS = 4, + STD_VIDEO_H265_PROFILE_IDC_SCC_EXTENSIONS = 9, + STD_VIDEO_H265_PROFILE_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_PROFILE_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265ProfileIdc; + +typedef enum StdVideoH265LevelIdc { + STD_VIDEO_H265_LEVEL_IDC_1_0 = 0, + STD_VIDEO_H265_LEVEL_IDC_2_0 = 1, + STD_VIDEO_H265_LEVEL_IDC_2_1 = 2, + STD_VIDEO_H265_LEVEL_IDC_3_0 = 3, + STD_VIDEO_H265_LEVEL_IDC_3_1 = 4, + STD_VIDEO_H265_LEVEL_IDC_4_0 = 5, + STD_VIDEO_H265_LEVEL_IDC_4_1 = 6, + STD_VIDEO_H265_LEVEL_IDC_5_0 = 7, + STD_VIDEO_H265_LEVEL_IDC_5_1 = 8, + STD_VIDEO_H265_LEVEL_IDC_5_2 = 9, + STD_VIDEO_H265_LEVEL_IDC_6_0 = 10, + STD_VIDEO_H265_LEVEL_IDC_6_1 = 11, + STD_VIDEO_H265_LEVEL_IDC_6_2 = 12, + STD_VIDEO_H265_LEVEL_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_LEVEL_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265LevelIdc; + +typedef enum StdVideoH265SliceType { + STD_VIDEO_H265_SLICE_TYPE_B = 0, + STD_VIDEO_H265_SLICE_TYPE_P = 1, + STD_VIDEO_H265_SLICE_TYPE_I = 2, + STD_VIDEO_H265_SLICE_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_SLICE_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265SliceType; + +typedef enum StdVideoH265PictureType { + STD_VIDEO_H265_PICTURE_TYPE_P = 0, + STD_VIDEO_H265_PICTURE_TYPE_B = 1, + STD_VIDEO_H265_PICTURE_TYPE_I = 2, + STD_VIDEO_H265_PICTURE_TYPE_IDR = 3, + STD_VIDEO_H265_PICTURE_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_PICTURE_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265PictureType; + +typedef enum StdVideoH265AspectRatioIdc { + STD_VIDEO_H265_ASPECT_RATIO_IDC_UNSPECIFIED = 0, + STD_VIDEO_H265_ASPECT_RATIO_IDC_SQUARE = 1, + STD_VIDEO_H265_ASPECT_RATIO_IDC_12_11 = 2, + STD_VIDEO_H265_ASPECT_RATIO_IDC_10_11 = 3, + 
STD_VIDEO_H265_ASPECT_RATIO_IDC_16_11 = 4, + STD_VIDEO_H265_ASPECT_RATIO_IDC_40_33 = 5, + STD_VIDEO_H265_ASPECT_RATIO_IDC_24_11 = 6, + STD_VIDEO_H265_ASPECT_RATIO_IDC_20_11 = 7, + STD_VIDEO_H265_ASPECT_RATIO_IDC_32_11 = 8, + STD_VIDEO_H265_ASPECT_RATIO_IDC_80_33 = 9, + STD_VIDEO_H265_ASPECT_RATIO_IDC_18_11 = 10, + STD_VIDEO_H265_ASPECT_RATIO_IDC_15_11 = 11, + STD_VIDEO_H265_ASPECT_RATIO_IDC_64_33 = 12, + STD_VIDEO_H265_ASPECT_RATIO_IDC_160_99 = 13, + STD_VIDEO_H265_ASPECT_RATIO_IDC_4_3 = 14, + STD_VIDEO_H265_ASPECT_RATIO_IDC_3_2 = 15, + STD_VIDEO_H265_ASPECT_RATIO_IDC_2_1 = 16, + STD_VIDEO_H265_ASPECT_RATIO_IDC_EXTENDED_SAR = 255, + STD_VIDEO_H265_ASPECT_RATIO_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_ASPECT_RATIO_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265AspectRatioIdc; +typedef struct StdVideoH265DecPicBufMgr { + uint32_t max_latency_increase_plus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE]; + uint8_t max_dec_pic_buffering_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE]; + uint8_t max_num_reorder_pics[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE]; +} StdVideoH265DecPicBufMgr; + +typedef struct StdVideoH265SubLayerHrdParameters { + uint32_t bit_rate_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE]; + uint32_t cpb_size_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE]; + uint32_t cpb_size_du_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE]; + uint32_t bit_rate_du_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE]; + uint32_t cbr_flag; +} StdVideoH265SubLayerHrdParameters; + +typedef struct StdVideoH265HrdFlags { + uint32_t nal_hrd_parameters_present_flag : 1; + uint32_t vcl_hrd_parameters_present_flag : 1; + uint32_t sub_pic_hrd_params_present_flag : 1; + uint32_t sub_pic_cpb_params_in_pic_timing_sei_flag : 1; + uint32_t fixed_pic_rate_general_flag : 8; + uint32_t fixed_pic_rate_within_cvs_flag : 8; + uint32_t low_delay_hrd_flag : 8; +} StdVideoH265HrdFlags; + +typedef struct StdVideoH265HrdParameters { + StdVideoH265HrdFlags flags; + uint8_t tick_divisor_minus2; + uint8_t du_cpb_removal_delay_increment_length_minus1; + uint8_t dpb_output_delay_du_length_minus1; + uint8_t bit_rate_scale; + uint8_t cpb_size_scale; + uint8_t cpb_size_du_scale; + uint8_t initial_cpb_removal_delay_length_minus1; + uint8_t au_cpb_removal_delay_length_minus1; + uint8_t dpb_output_delay_length_minus1; + uint8_t cpb_cnt_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE]; + uint16_t elemental_duration_in_tc_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE]; + uint16_t reserved[3]; + const StdVideoH265SubLayerHrdParameters* pSubLayerHrdParametersNal; + const StdVideoH265SubLayerHrdParameters* pSubLayerHrdParametersVcl; +} StdVideoH265HrdParameters; + +typedef struct StdVideoH265VpsFlags { + uint32_t vps_temporal_id_nesting_flag : 1; + uint32_t vps_sub_layer_ordering_info_present_flag : 1; + uint32_t vps_timing_info_present_flag : 1; + uint32_t vps_poc_proportional_to_timing_flag : 1; +} StdVideoH265VpsFlags; + +typedef struct StdVideoH265ProfileTierLevelFlags { + uint32_t general_tier_flag : 1; + uint32_t general_progressive_source_flag : 1; + uint32_t general_interlaced_source_flag : 1; + uint32_t general_non_packed_constraint_flag : 1; + uint32_t general_frame_only_constraint_flag : 1; +} StdVideoH265ProfileTierLevelFlags; + +typedef struct StdVideoH265ProfileTierLevel { + StdVideoH265ProfileTierLevelFlags flags; + StdVideoH265ProfileIdc general_profile_idc; + StdVideoH265LevelIdc general_level_idc; +} StdVideoH265ProfileTierLevel; + +typedef struct StdVideoH265VideoParameterSet { + StdVideoH265VpsFlags flags; + uint8_t vps_video_parameter_set_id; + uint8_t 
vps_max_sub_layers_minus1; + uint8_t reserved1; + uint8_t reserved2; + uint32_t vps_num_units_in_tick; + uint32_t vps_time_scale; + uint32_t vps_num_ticks_poc_diff_one_minus1; + uint32_t reserved3; + const StdVideoH265DecPicBufMgr* pDecPicBufMgr; + const StdVideoH265HrdParameters* pHrdParameters; + const StdVideoH265ProfileTierLevel* pProfileTierLevel; +} StdVideoH265VideoParameterSet; + +typedef struct StdVideoH265ScalingLists { + uint8_t ScalingList4x4[STD_VIDEO_H265_SCALING_LIST_4X4_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_4X4_NUM_ELEMENTS]; + uint8_t ScalingList8x8[STD_VIDEO_H265_SCALING_LIST_8X8_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_8X8_NUM_ELEMENTS]; + uint8_t ScalingList16x16[STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_16X16_NUM_ELEMENTS]; + uint8_t ScalingList32x32[STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_32X32_NUM_ELEMENTS]; + uint8_t ScalingListDCCoef16x16[STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS]; + uint8_t ScalingListDCCoef32x32[STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS]; +} StdVideoH265ScalingLists; + +typedef struct StdVideoH265SpsVuiFlags { + uint32_t aspect_ratio_info_present_flag : 1; + uint32_t overscan_info_present_flag : 1; + uint32_t overscan_appropriate_flag : 1; + uint32_t video_signal_type_present_flag : 1; + uint32_t video_full_range_flag : 1; + uint32_t colour_description_present_flag : 1; + uint32_t chroma_loc_info_present_flag : 1; + uint32_t neutral_chroma_indication_flag : 1; + uint32_t field_seq_flag : 1; + uint32_t frame_field_info_present_flag : 1; + uint32_t default_display_window_flag : 1; + uint32_t vui_timing_info_present_flag : 1; + uint32_t vui_poc_proportional_to_timing_flag : 1; + uint32_t vui_hrd_parameters_present_flag : 1; + uint32_t bitstream_restriction_flag : 1; + uint32_t tiles_fixed_structure_flag : 1; + uint32_t motion_vectors_over_pic_boundaries_flag : 1; + uint32_t restricted_ref_pic_lists_flag : 1; +} StdVideoH265SpsVuiFlags; + +typedef struct StdVideoH265SequenceParameterSetVui { + StdVideoH265SpsVuiFlags flags; + StdVideoH265AspectRatioIdc aspect_ratio_idc; + uint16_t sar_width; + uint16_t sar_height; + uint8_t video_format; + uint8_t colour_primaries; + uint8_t transfer_characteristics; + uint8_t matrix_coeffs; + uint8_t chroma_sample_loc_type_top_field; + uint8_t chroma_sample_loc_type_bottom_field; + uint8_t reserved1; + uint8_t reserved2; + uint16_t def_disp_win_left_offset; + uint16_t def_disp_win_right_offset; + uint16_t def_disp_win_top_offset; + uint16_t def_disp_win_bottom_offset; + uint32_t vui_num_units_in_tick; + uint32_t vui_time_scale; + uint32_t vui_num_ticks_poc_diff_one_minus1; + uint16_t min_spatial_segmentation_idc; + uint16_t reserved3; + uint8_t max_bytes_per_pic_denom; + uint8_t max_bits_per_min_cu_denom; + uint8_t log2_max_mv_length_horizontal; + uint8_t log2_max_mv_length_vertical; + const StdVideoH265HrdParameters* pHrdParameters; +} StdVideoH265SequenceParameterSetVui; + +typedef struct StdVideoH265PredictorPaletteEntries { + uint16_t PredictorPaletteEntries[STD_VIDEO_H265_PREDICTOR_PALETTE_COMPONENTS_LIST_SIZE][STD_VIDEO_H265_PREDICTOR_PALETTE_COMP_ENTRIES_LIST_SIZE]; +} StdVideoH265PredictorPaletteEntries; + +typedef struct StdVideoH265SpsFlags { + uint32_t sps_temporal_id_nesting_flag : 1; + uint32_t separate_colour_plane_flag : 1; + uint32_t conformance_window_flag : 1; + uint32_t sps_sub_layer_ordering_info_present_flag : 1; + uint32_t scaling_list_enabled_flag : 1; + uint32_t sps_scaling_list_data_present_flag : 1; + uint32_t 
amp_enabled_flag : 1; + uint32_t sample_adaptive_offset_enabled_flag : 1; + uint32_t pcm_enabled_flag : 1; + uint32_t pcm_loop_filter_disabled_flag : 1; + uint32_t long_term_ref_pics_present_flag : 1; + uint32_t sps_temporal_mvp_enabled_flag : 1; + uint32_t strong_intra_smoothing_enabled_flag : 1; + uint32_t vui_parameters_present_flag : 1; + uint32_t sps_extension_present_flag : 1; + uint32_t sps_range_extension_flag : 1; + uint32_t transform_skip_rotation_enabled_flag : 1; + uint32_t transform_skip_context_enabled_flag : 1; + uint32_t implicit_rdpcm_enabled_flag : 1; + uint32_t explicit_rdpcm_enabled_flag : 1; + uint32_t extended_precision_processing_flag : 1; + uint32_t intra_smoothing_disabled_flag : 1; + uint32_t high_precision_offsets_enabled_flag : 1; + uint32_t persistent_rice_adaptation_enabled_flag : 1; + uint32_t cabac_bypass_alignment_enabled_flag : 1; + uint32_t sps_scc_extension_flag : 1; + uint32_t sps_curr_pic_ref_enabled_flag : 1; + uint32_t palette_mode_enabled_flag : 1; + uint32_t sps_palette_predictor_initializers_present_flag : 1; + uint32_t intra_boundary_filtering_disabled_flag : 1; +} StdVideoH265SpsFlags; + +typedef struct StdVideoH265ShortTermRefPicSetFlags { + uint32_t inter_ref_pic_set_prediction_flag : 1; + uint32_t delta_rps_sign : 1; +} StdVideoH265ShortTermRefPicSetFlags; + +typedef struct StdVideoH265ShortTermRefPicSet { + StdVideoH265ShortTermRefPicSetFlags flags; + uint32_t delta_idx_minus1; + uint16_t use_delta_flag; + uint16_t abs_delta_rps_minus1; + uint16_t used_by_curr_pic_flag; + uint16_t used_by_curr_pic_s0_flag; + uint16_t used_by_curr_pic_s1_flag; + uint16_t reserved1; + uint8_t reserved2; + uint8_t reserved3; + uint8_t num_negative_pics; + uint8_t num_positive_pics; + uint16_t delta_poc_s0_minus1[STD_VIDEO_H265_MAX_DPB_SIZE]; + uint16_t delta_poc_s1_minus1[STD_VIDEO_H265_MAX_DPB_SIZE]; +} StdVideoH265ShortTermRefPicSet; + +typedef struct StdVideoH265LongTermRefPicsSps { + uint32_t used_by_curr_pic_lt_sps_flag; + uint32_t lt_ref_pic_poc_lsb_sps[STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS]; +} StdVideoH265LongTermRefPicsSps; + +typedef struct StdVideoH265SequenceParameterSet { + StdVideoH265SpsFlags flags; + StdVideoH265ChromaFormatIdc chroma_format_idc; + uint32_t pic_width_in_luma_samples; + uint32_t pic_height_in_luma_samples; + uint8_t sps_video_parameter_set_id; + uint8_t sps_max_sub_layers_minus1; + uint8_t sps_seq_parameter_set_id; + uint8_t bit_depth_luma_minus8; + uint8_t bit_depth_chroma_minus8; + uint8_t log2_max_pic_order_cnt_lsb_minus4; + uint8_t log2_min_luma_coding_block_size_minus3; + uint8_t log2_diff_max_min_luma_coding_block_size; + uint8_t log2_min_luma_transform_block_size_minus2; + uint8_t log2_diff_max_min_luma_transform_block_size; + uint8_t max_transform_hierarchy_depth_inter; + uint8_t max_transform_hierarchy_depth_intra; + uint8_t num_short_term_ref_pic_sets; + uint8_t num_long_term_ref_pics_sps; + uint8_t pcm_sample_bit_depth_luma_minus1; + uint8_t pcm_sample_bit_depth_chroma_minus1; + uint8_t log2_min_pcm_luma_coding_block_size_minus3; + uint8_t log2_diff_max_min_pcm_luma_coding_block_size; + uint8_t reserved1; + uint8_t reserved2; + uint8_t palette_max_size; + uint8_t delta_palette_max_predictor_size; + uint8_t motion_vector_resolution_control_idc; + uint8_t sps_num_palette_predictor_initializers_minus1; + uint32_t conf_win_left_offset; + uint32_t conf_win_right_offset; + uint32_t conf_win_top_offset; + uint32_t conf_win_bottom_offset; + const StdVideoH265ProfileTierLevel* pProfileTierLevel; + const 
StdVideoH265DecPicBufMgr* pDecPicBufMgr; + const StdVideoH265ScalingLists* pScalingLists; + const StdVideoH265ShortTermRefPicSet* pShortTermRefPicSet; + const StdVideoH265LongTermRefPicsSps* pLongTermRefPicsSps; + const StdVideoH265SequenceParameterSetVui* pSequenceParameterSetVui; + const StdVideoH265PredictorPaletteEntries* pPredictorPaletteEntries; +} StdVideoH265SequenceParameterSet; + +typedef struct StdVideoH265PpsFlags { + uint32_t dependent_slice_segments_enabled_flag : 1; + uint32_t output_flag_present_flag : 1; + uint32_t sign_data_hiding_enabled_flag : 1; + uint32_t cabac_init_present_flag : 1; + uint32_t constrained_intra_pred_flag : 1; + uint32_t transform_skip_enabled_flag : 1; + uint32_t cu_qp_delta_enabled_flag : 1; + uint32_t pps_slice_chroma_qp_offsets_present_flag : 1; + uint32_t weighted_pred_flag : 1; + uint32_t weighted_bipred_flag : 1; + uint32_t transquant_bypass_enabled_flag : 1; + uint32_t tiles_enabled_flag : 1; + uint32_t entropy_coding_sync_enabled_flag : 1; + uint32_t uniform_spacing_flag : 1; + uint32_t loop_filter_across_tiles_enabled_flag : 1; + uint32_t pps_loop_filter_across_slices_enabled_flag : 1; + uint32_t deblocking_filter_control_present_flag : 1; + uint32_t deblocking_filter_override_enabled_flag : 1; + uint32_t pps_deblocking_filter_disabled_flag : 1; + uint32_t pps_scaling_list_data_present_flag : 1; + uint32_t lists_modification_present_flag : 1; + uint32_t slice_segment_header_extension_present_flag : 1; + uint32_t pps_extension_present_flag : 1; + uint32_t cross_component_prediction_enabled_flag : 1; + uint32_t chroma_qp_offset_list_enabled_flag : 1; + uint32_t pps_curr_pic_ref_enabled_flag : 1; + uint32_t residual_adaptive_colour_transform_enabled_flag : 1; + uint32_t pps_slice_act_qp_offsets_present_flag : 1; + uint32_t pps_palette_predictor_initializers_present_flag : 1; + uint32_t monochrome_palette_flag : 1; + uint32_t pps_range_extension_flag : 1; +} StdVideoH265PpsFlags; + +typedef struct StdVideoH265PictureParameterSet { + StdVideoH265PpsFlags flags; + uint8_t pps_pic_parameter_set_id; + uint8_t pps_seq_parameter_set_id; + uint8_t sps_video_parameter_set_id; + uint8_t num_extra_slice_header_bits; + uint8_t num_ref_idx_l0_default_active_minus1; + uint8_t num_ref_idx_l1_default_active_minus1; + int8_t init_qp_minus26; + uint8_t diff_cu_qp_delta_depth; + int8_t pps_cb_qp_offset; + int8_t pps_cr_qp_offset; + int8_t pps_beta_offset_div2; + int8_t pps_tc_offset_div2; + uint8_t log2_parallel_merge_level_minus2; + uint8_t log2_max_transform_skip_block_size_minus2; + uint8_t diff_cu_chroma_qp_offset_depth; + uint8_t chroma_qp_offset_list_len_minus1; + int8_t cb_qp_offset_list[STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE]; + int8_t cr_qp_offset_list[STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE]; + uint8_t log2_sao_offset_scale_luma; + uint8_t log2_sao_offset_scale_chroma; + int8_t pps_act_y_qp_offset_plus5; + int8_t pps_act_cb_qp_offset_plus5; + int8_t pps_act_cr_qp_offset_plus3; + uint8_t pps_num_palette_predictor_initializers; + uint8_t luma_bit_depth_entry_minus8; + uint8_t chroma_bit_depth_entry_minus8; + uint8_t num_tile_columns_minus1; + uint8_t num_tile_rows_minus1; + uint8_t reserved1; + uint8_t reserved2; + uint16_t column_width_minus1[STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_COLS_LIST_SIZE]; + uint16_t row_height_minus1[STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_ROWS_LIST_SIZE]; + uint32_t reserved3; + const StdVideoH265ScalingLists* pScalingLists; + const StdVideoH265PredictorPaletteEntries* pPredictorPaletteEntries; +} 
StdVideoH265PictureParameterSet; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/src/libraries/vk_video/vulkan_video_codec_h265std_decode.h b/src/libraries/vk_video/vulkan_video_codec_h265std_decode.h new file mode 100644 index 000000000..a9e1a0964 --- /dev/null +++ b/src/libraries/vk_video/vulkan_video_codec_h265std_decode.h @@ -0,0 +1,67 @@ +#ifndef VULKAN_VIDEO_CODEC_H265STD_DECODE_H_ +#define VULKAN_VIDEO_CODEC_H265STD_DECODE_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h265std_decode is a preprocessor guard. Do not pass it to API calls. +#define vulkan_video_codec_h265std_decode 1 +#include "vulkan_video_codec_h265std.h" + +#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) + +#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_1_0_0 +#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h265_decode" +#define STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE 8 +typedef struct StdVideoDecodeH265PictureInfoFlags { + uint32_t IrapPicFlag : 1; + uint32_t IdrPicFlag : 1; + uint32_t IsReference : 1; + uint32_t short_term_ref_pic_set_sps_flag : 1; +} StdVideoDecodeH265PictureInfoFlags; + +typedef struct StdVideoDecodeH265PictureInfo { + StdVideoDecodeH265PictureInfoFlags flags; + uint8_t sps_video_parameter_set_id; + uint8_t pps_seq_parameter_set_id; + uint8_t pps_pic_parameter_set_id; + uint8_t NumDeltaPocsOfRefRpsIdx; + int32_t PicOrderCntVal; + uint16_t NumBitsForSTRefPicSetInSlice; + uint16_t reserved; + uint8_t RefPicSetStCurrBefore[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE]; + uint8_t RefPicSetStCurrAfter[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE]; + uint8_t RefPicSetLtCurr[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE]; +} StdVideoDecodeH265PictureInfo; + +typedef struct StdVideoDecodeH265ReferenceInfoFlags { + uint32_t used_for_long_term_reference : 1; + uint32_t unused_for_reference : 1; +} StdVideoDecodeH265ReferenceInfoFlags; + +typedef struct StdVideoDecodeH265ReferenceInfo { + StdVideoDecodeH265ReferenceInfoFlags flags; + int32_t PicOrderCntVal; +} StdVideoDecodeH265ReferenceInfo; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/src/libraries/vk_video/vulkan_video_codec_h265std_encode.h b/src/libraries/vk_video/vulkan_video_codec_h265std_encode.h new file mode 100644 index 000000000..fe2f28d57 --- /dev/null +++ b/src/libraries/vk_video/vulkan_video_codec_h265std_encode.h @@ -0,0 +1,157 @@ +#ifndef VULKAN_VIDEO_CODEC_H265STD_ENCODE_H_ +#define VULKAN_VIDEO_CODEC_H265STD_ENCODE_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h265std_encode is a preprocessor guard. Do not pass it to API calls. 
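
The decode header above only carries codec-standard parameters; the application is expected to fill these structures from its own bitstream parsing. A minimal sketch of populating StdVideoDecodeH265PictureInfo for an inter-coded reference picture is shown below; the concrete values and the 0xFF "unused slot" convention are illustrative assumptions, not something this patch defines.

#include <cstring>
#include "vulkan_video_codec_h265std_decode.h"

// Hypothetical parsed values; real ones come from the application's bitstream parser.
StdVideoDecodeH265PictureInfo makePictureInfo() {
    StdVideoDecodeH265PictureInfo info{};           // zero-initialize, including reserved fields
    info.flags.IrapPicFlag = 0;                     // not an intra random access point
    info.flags.IdrPicFlag  = 0;
    info.flags.IsReference = 1;                     // picture is used as a reference
    info.flags.short_term_ref_pic_set_sps_flag = 1;
    info.sps_video_parameter_set_id = 0;
    info.pps_seq_parameter_set_id   = 0;
    info.pps_pic_parameter_set_id   = 0;
    info.PicOrderCntVal             = 42;           // example POC
    // Mark unused slots; 0xFF is commonly used to mean "no entry" in these arrays.
    std::memset(info.RefPicSetStCurrBefore, 0xFF, sizeof(info.RefPicSetStCurrBefore));
    std::memset(info.RefPicSetStCurrAfter,  0xFF, sizeof(info.RefPicSetStCurrAfter));
    std::memset(info.RefPicSetLtCurr,       0xFF, sizeof(info.RefPicSetLtCurr));
    info.RefPicSetStCurrBefore[0] = 0;              // DPB slot index of the one active reference
    return info;
}
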
+#define vulkan_video_codec_h265std_encode 1 +#include "vulkan_video_codec_h265std.h" + +#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) + +#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_API_VERSION_1_0_0 +#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h265_encode" +typedef struct StdVideoEncodeH265WeightTableFlags { + uint16_t luma_weight_l0_flag; + uint16_t chroma_weight_l0_flag; + uint16_t luma_weight_l1_flag; + uint16_t chroma_weight_l1_flag; +} StdVideoEncodeH265WeightTableFlags; + +typedef struct StdVideoEncodeH265WeightTable { + StdVideoEncodeH265WeightTableFlags flags; + uint8_t luma_log2_weight_denom; + int8_t delta_chroma_log2_weight_denom; + int8_t delta_luma_weight_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + int8_t luma_offset_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + int8_t delta_chroma_weight_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES]; + int8_t delta_chroma_offset_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES]; + int8_t delta_luma_weight_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + int8_t luma_offset_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + int8_t delta_chroma_weight_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES]; + int8_t delta_chroma_offset_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES]; +} StdVideoEncodeH265WeightTable; + +typedef struct StdVideoEncodeH265SliceSegmentHeaderFlags { + uint32_t first_slice_segment_in_pic_flag : 1; + uint32_t dependent_slice_segment_flag : 1; + uint32_t slice_sao_luma_flag : 1; + uint32_t slice_sao_chroma_flag : 1; + uint32_t num_ref_idx_active_override_flag : 1; + uint32_t mvd_l1_zero_flag : 1; + uint32_t cabac_init_flag : 1; + uint32_t cu_chroma_qp_offset_enabled_flag : 1; + uint32_t deblocking_filter_override_flag : 1; + uint32_t slice_deblocking_filter_disabled_flag : 1; + uint32_t collocated_from_l0_flag : 1; + uint32_t slice_loop_filter_across_slices_enabled_flag : 1; + uint32_t reserved : 20; +} StdVideoEncodeH265SliceSegmentHeaderFlags; + +typedef struct StdVideoEncodeH265SliceSegmentHeader { + StdVideoEncodeH265SliceSegmentHeaderFlags flags; + StdVideoH265SliceType slice_type; + uint32_t slice_segment_address; + uint8_t collocated_ref_idx; + uint8_t MaxNumMergeCand; + int8_t slice_cb_qp_offset; + int8_t slice_cr_qp_offset; + int8_t slice_beta_offset_div2; + int8_t slice_tc_offset_div2; + int8_t slice_act_y_qp_offset; + int8_t slice_act_cb_qp_offset; + int8_t slice_act_cr_qp_offset; + int8_t slice_qp_delta; + uint16_t reserved1; + const StdVideoEncodeH265WeightTable* pWeightTable; +} StdVideoEncodeH265SliceSegmentHeader; + +typedef struct StdVideoEncodeH265ReferenceListsInfoFlags { + uint32_t ref_pic_list_modification_flag_l0 : 1; + uint32_t ref_pic_list_modification_flag_l1 : 1; + uint32_t reserved : 30; +} StdVideoEncodeH265ReferenceListsInfoFlags; + +typedef struct StdVideoEncodeH265ReferenceListsInfo { + StdVideoEncodeH265ReferenceListsInfoFlags flags; + uint8_t num_ref_idx_l0_active_minus1; + uint8_t num_ref_idx_l1_active_minus1; + uint8_t RefPicList0[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + uint8_t RefPicList1[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + uint8_t list_entry_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + uint8_t list_entry_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF]; +} StdVideoEncodeH265ReferenceListsInfo; + +typedef struct StdVideoEncodeH265PictureInfoFlags { + uint32_t is_reference : 1; + uint32_t IrapPicFlag : 1; + 
uint32_t used_for_long_term_reference : 1; + uint32_t discardable_flag : 1; + uint32_t cross_layer_bla_flag : 1; + uint32_t pic_output_flag : 1; + uint32_t no_output_of_prior_pics_flag : 1; + uint32_t short_term_ref_pic_set_sps_flag : 1; + uint32_t slice_temporal_mvp_enabled_flag : 1; + uint32_t reserved : 23; +} StdVideoEncodeH265PictureInfoFlags; + +typedef struct StdVideoEncodeH265LongTermRefPics { + uint8_t num_long_term_sps; + uint8_t num_long_term_pics; + uint8_t lt_idx_sps[STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS]; + uint8_t poc_lsb_lt[STD_VIDEO_H265_MAX_LONG_TERM_PICS]; + uint16_t used_by_curr_pic_lt_flag; + uint8_t delta_poc_msb_present_flag[STD_VIDEO_H265_MAX_DELTA_POC]; + uint8_t delta_poc_msb_cycle_lt[STD_VIDEO_H265_MAX_DELTA_POC]; +} StdVideoEncodeH265LongTermRefPics; + +typedef struct StdVideoEncodeH265PictureInfo { + StdVideoEncodeH265PictureInfoFlags flags; + StdVideoH265PictureType pic_type; + uint8_t sps_video_parameter_set_id; + uint8_t pps_seq_parameter_set_id; + uint8_t pps_pic_parameter_set_id; + uint8_t short_term_ref_pic_set_idx; + int32_t PicOrderCntVal; + uint8_t TemporalId; + uint8_t reserved1[7]; + const StdVideoEncodeH265ReferenceListsInfo* pRefLists; + const StdVideoH265ShortTermRefPicSet* pShortTermRefPicSet; + const StdVideoEncodeH265LongTermRefPics* pLongTermRefPics; +} StdVideoEncodeH265PictureInfo; + +typedef struct StdVideoEncodeH265ReferenceInfoFlags { + uint32_t used_for_long_term_reference : 1; + uint32_t unused_for_reference : 1; + uint32_t reserved : 30; +} StdVideoEncodeH265ReferenceInfoFlags; + +typedef struct StdVideoEncodeH265ReferenceInfo { + StdVideoEncodeH265ReferenceInfoFlags flags; + StdVideoH265PictureType pic_type; + int32_t PicOrderCntVal; + uint8_t TemporalId; +} StdVideoEncodeH265ReferenceInfo; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/src/libraries/vk_video/vulkan_video_codecs_common.h b/src/libraries/vk_video/vulkan_video_codecs_common.h new file mode 100644 index 000000000..a5f0f3d94 --- /dev/null +++ b/src/libraries/vk_video/vulkan_video_codecs_common.h @@ -0,0 +1,36 @@ +#ifndef VULKAN_VIDEO_CODECS_COMMON_H_ +#define VULKAN_VIDEO_CODECS_COMMON_H_ 1 + +/* +** Copyright 2015-2025 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codecs_common is a preprocessor guard. Do not pass it to API calls. +#define vulkan_video_codecs_common 1 +#if !defined(VK_NO_STDINT_H) + #include +#endif + +#define VK_MAKE_VIDEO_STD_VERSION(major, minor, patch) \ + ((((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch))) + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/src/libraries/vulkanheaders/vk_icd.h b/src/libraries/vulkanheaders/vk_icd.h index 41989ee35..59204a341 100644 --- a/src/libraries/vulkanheaders/vk_icd.h +++ b/src/libraries/vulkanheaders/vk_icd.h @@ -1,27 +1,11 @@ -// -// File: vk_icd.h -// /* - * Copyright (c) 2015-2016 The Khronos Group Inc. - * Copyright (c) 2015-2016 Valve Corporation - * Copyright (c) 2015-2016 LunarG, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
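
For orientation, VK_MAKE_VIDEO_STD_VERSION (defined in vulkan_video_codecs_common.h above) packs major into bits 22..31, minor into bits 12..21 and patch into bits 0..11 of a uint32_t; the codec SPEC_VERSION constants are such packed values. The sketch below restates the macro so it is self-contained and adds hypothetical unpacking helpers that are not part of these headers.

#include <cassert>
#include <cstdint>

#define VK_MAKE_VIDEO_STD_VERSION(major, minor, patch) \
    ((((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch)))

// Hypothetical inverse helpers, mirroring the bit layout of the macro above.
constexpr uint32_t videoStdVersionMajor(uint32_t v) { return v >> 22; }
constexpr uint32_t videoStdVersionMinor(uint32_t v) { return (v >> 12) & 0x3FFu; }
constexpr uint32_t videoStdVersionPatch(uint32_t v) { return v & 0xFFFu; }

int main() {
    // VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_1_0_0 expands to this value.
    constexpr uint32_t v = VK_MAKE_VIDEO_STD_VERSION(1, 0, 0);
    static_assert(v == (1u << 22), "major 1 lands in bit 22");
    assert(videoStdVersionMajor(v) == 1 && videoStdVersionMinor(v) == 0 && videoStdVersionPatch(v) == 0);
}
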
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Copyright 2015-2023 The Khronos Group Inc. + * Copyright 2015-2023 Valve Corporation + * Copyright 2015-2023 LunarG, Inc. * + * SPDX-License-Identifier: Apache-2.0 */ - -#ifndef VKICD_H -#define VKICD_H +#pragma once #include "vulkan.h" #include @@ -42,7 +26,17 @@ // call for any API version > 1.0. Otherwise, the loader will // manually determine if it can support the expected version. // Version 6 - Add support for vk_icdEnumerateAdapterPhysicalDevices. -#define CURRENT_LOADER_ICD_INTERFACE_VERSION 6 +// Version 7 - If an ICD supports any of the following functions, they must be +// queryable with vk_icdGetInstanceProcAddr: +// vk_icdNegotiateLoaderICDInterfaceVersion +// vk_icdGetPhysicalDeviceProcAddr +// vk_icdEnumerateAdapterPhysicalDevices (Windows only) +// In addition, these functions no longer need to be exported directly. +// This version allows drivers provided through the extension +// VK_LUNARG_direct_driver_loading be able to support the entire +// Driver-Loader interface. + +#define CURRENT_LOADER_ICD_INTERFACE_VERSION 7 #define MIN_SUPPORTED_LOADER_ICD_INTERFACE_VERSION 0 #define MIN_PHYS_DEV_EXTENSION_ICD_INTERFACE_VERSION 4 @@ -70,7 +64,7 @@ extern "C" { #endif VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pVersion); VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char* pName); - VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance isntance, const char* pName); + VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance instance, const char* pName); #if defined(VK_USE_PLATFORM_WIN32_KHR) VKAPI_ATTR VkResult VKAPI_CALL vk_icdEnumerateAdapterPhysicalDevices(VkInstance instance, LUID adapterLUID, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); @@ -123,6 +117,7 @@ typedef enum { VK_ICD_WSI_PLATFORM_VI, VK_ICD_WSI_PLATFORM_GGP, VK_ICD_WSI_PLATFORM_SCREEN, + VK_ICD_WSI_PLATFORM_FUCHSIA, } VkIcdWsiPlatform; typedef struct { @@ -242,4 +237,8 @@ typedef struct { } VkIcdSurfaceScreen; #endif // VK_USE_PLATFORM_SCREEN_QNX -#endif // VKICD_H +#ifdef VK_USE_PLATFORM_FUCHSIA +typedef struct { + VkIcdSurfaceBase base; +} VkIcdSurfaceImagePipe; +#endif // VK_USE_PLATFORM_FUCHSIA diff --git a/src/libraries/vulkanheaders/vk_layer.h b/src/libraries/vulkanheaders/vk_layer.h index 0651870c7..19d88fce4 100644 --- a/src/libraries/vulkanheaders/vk_layer.h +++ b/src/libraries/vulkanheaders/vk_layer.h @@ -1,39 +1,18 @@ -// -// File: vk_layer.h -// /* - * Copyright (c) 2015-2017 The Khronos Group Inc. - * Copyright (c) 2015-2017 Valve Corporation - * Copyright (c) 2015-2017 LunarG, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
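
With loader/ICD interface version 7 described above, the three vk_icd entry points no longer have to be exported symbols as long as they can be resolved through vk_icdGetInstanceProcAddr. A hedged sketch of that lookup from a loader's point of view follows; the local typedefs, the NULL-instance query, and the surrounding flow are assumptions made for illustration, not loader code from this patch.

#include <cstdint>
#include <vulkan/vulkan.h>

// Local stand-in function-pointer types matching the declarations in vk_icd.h.
typedef VkResult (VKAPI_PTR *PFN_NegotiateVersion)(uint32_t *pVersion);
typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_GetPhysDevProcAddr)(VkInstance instance, const char *pName);

// icdGetInstanceProcAddr is assumed to have been obtained when the driver library was loaded.
void negotiateWithIcd(PFN_vkGetInstanceProcAddr icdGetInstanceProcAddr) {
    // Since interface version 7 these may be queried instead of being exported directly.
    auto negotiate = reinterpret_cast<PFN_NegotiateVersion>(
        icdGetInstanceProcAddr(VK_NULL_HANDLE, "vk_icdNegotiateLoaderICDInterfaceVersion"));
    auto getPhysDevProc = reinterpret_cast<PFN_GetPhysDevProcAddr>(
        icdGetInstanceProcAddr(VK_NULL_HANDLE, "vk_icdGetPhysicalDeviceProcAddr"));

    uint32_t version = 7;  // the loader proposes its highest supported interface version
    if (negotiate && negotiate(&version) == VK_SUCCESS) {
        // version now holds the value agreed with the driver (at most 7).
    }
    (void)getPhysDevProc;
}
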
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Copyright 2015-2023 The Khronos Group Inc. + * Copyright 2015-2023 Valve Corporation + * Copyright 2015-2023 LunarG, Inc. * + * SPDX-License-Identifier: Apache-2.0 */ +#pragma once /* Need to define dispatch table * Core struct can then have ptr to dispatch table at the top * Along with object ptrs for current and next OBJ */ -#pragma once -#include "vulkan.h" -#if defined(__GNUC__) && __GNUC__ >= 4 -#define VK_LAYER_EXPORT __attribute__((visibility("default"))) -#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590) -#define VK_LAYER_EXPORT __attribute__((visibility("default"))) -#else -#define VK_LAYER_EXPORT -#endif +#include "vulkan_core.h" #define MAX_NUM_UNKNOWN_EXTS 250 diff --git a/src/libraries/vulkanheaders/vk_platform.h b/src/libraries/vulkanheaders/vk_platform.h index 3ff8c5d14..18e5ca34c 100644 --- a/src/libraries/vulkanheaders/vk_platform.h +++ b/src/libraries/vulkanheaders/vk_platform.h @@ -2,7 +2,7 @@ // File: vk_platform.h // /* -** Copyright 2014-2022 The Khronos Group Inc. +** Copyright 2014-2025 The Khronos Group Inc. ** ** SPDX-License-Identifier: Apache-2.0 */ diff --git a/src/libraries/vulkanheaders/vk_sdk_platform.h b/src/libraries/vulkanheaders/vk_sdk_platform.h deleted file mode 100644 index 96d867694..000000000 --- a/src/libraries/vulkanheaders/vk_sdk_platform.h +++ /dev/null @@ -1,69 +0,0 @@ -// -// File: vk_sdk_platform.h -// -/* - * Copyright (c) 2015-2016 The Khronos Group Inc. - * Copyright (c) 2015-2016 Valve Corporation - * Copyright (c) 2015-2016 LunarG, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef VK_SDK_PLATFORM_H -#define VK_SDK_PLATFORM_H - -#if defined(_WIN32) -#define NOMINMAX -#ifndef __cplusplus -#undef inline -#define inline __inline -#endif // __cplusplus - -#if (defined(_MSC_VER) && _MSC_VER < 1900 /*vs2015*/) -// C99: -// Microsoft didn't implement C99 in Visual Studio; but started adding it with -// VS2013. However, VS2013 still didn't have snprintf(). The following is a -// work-around (Note: The _CRT_SECURE_NO_WARNINGS macro must be set in the -// "CMakeLists.txt" file). -// NOTE: This is fixed in Visual Studio 2015. 
-#define snprintf _snprintf -#endif - -#define strdup _strdup - -#endif // _WIN32 - -// Check for noexcept support using clang, with fallback to Windows or GCC version numbers -#ifndef NOEXCEPT -#if defined(__clang__) -#if __has_feature(cxx_noexcept) -#define HAS_NOEXCEPT -#endif -#else -#if defined(__GXX_EXPERIMENTAL_CXX0X__) && __GNUC__ * 10 + __GNUC_MINOR__ >= 46 -#define HAS_NOEXCEPT -#else -#if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023026 && defined(_HAS_EXCEPTIONS) && _HAS_EXCEPTIONS -#define HAS_NOEXCEPT -#endif -#endif -#endif - -#ifdef HAS_NOEXCEPT -#define NOEXCEPT noexcept -#else -#define NOEXCEPT -#endif -#endif - -#endif // VK_SDK_PLATFORM_H diff --git a/src/libraries/vulkanheaders/vulkan.h b/src/libraries/vulkanheaders/vulkan.h index 3510ac912..906058192 100644 --- a/src/libraries/vulkanheaders/vulkan.h +++ b/src/libraries/vulkanheaders/vulkan.h @@ -2,7 +2,7 @@ #define VULKAN_H_ 1 /* -** Copyright 2015-2022 The Khronos Group Inc. +** Copyright 2015-2025 The Khronos Group Inc. ** ** SPDX-License-Identifier: Apache-2.0 */ @@ -84,6 +84,14 @@ #include "vulkan_screen.h" #endif + +#ifdef VK_USE_PLATFORM_SCI +#include +#include +#include "vulkan_sci.h" +#endif + + #ifdef VK_ENABLE_BETA_EXTENSIONS #include "vulkan_beta.h" #endif diff --git a/src/libraries/vulkanheaders/vulkan.hpp b/src/libraries/vulkanheaders/vulkan.hpp index 18afe6d9a..909a45c49 100644 --- a/src/libraries/vulkanheaders/vulkan.hpp +++ b/src/libraries/vulkanheaders/vulkan.hpp @@ -1,4 +1,4 @@ -// Copyright 2015-2022 The Khronos Group Inc. +// Copyright 2015-2025 The Khronos Group Inc. // // SPDX-License-Identifier: Apache-2.0 OR MIT // @@ -8,89 +8,46 @@ #ifndef VULKAN_HPP #define VULKAN_HPP -#if defined( _MSVC_LANG ) -# define VULKAN_HPP_CPLUSPLUS _MSVC_LANG -#else -# define VULKAN_HPP_CPLUSPLUS __cplusplus -#endif +#include -#if 201703L < VULKAN_HPP_CPLUSPLUS -# define VULKAN_HPP_CPP_VERSION 20 -#elif 201402L < VULKAN_HPP_CPLUSPLUS -# define VULKAN_HPP_CPP_VERSION 17 -#elif 201103L < VULKAN_HPP_CPLUSPLUS -# define VULKAN_HPP_CPP_VERSION 14 -#elif 199711L < VULKAN_HPP_CPLUSPLUS -# define VULKAN_HPP_CPP_VERSION 11 +#if defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) +# include +import VULKAN_HPP_STD_MODULE; #else -# error "vulkan.hpp needs at least c++ standard version 11" +# include +# include // ArrayWrapperND +# include // strnlen +# include // std::string +# include // std::exchange #endif - -#include -#include // ArrayWrapperND -#include // std::string #include -#if 17 <= VULKAN_HPP_CPP_VERSION -# include // std::string_view + +#if 17 <= VULKAN_HPP_CPP_VERSION && !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) ) +# include #endif -#if defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) -# if !defined( VULKAN_HPP_NO_SMART_HANDLE ) -# define VULKAN_HPP_NO_SMART_HANDLE -# endif -#else +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) && !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) ) # include // std::tie # include // std::vector #endif -#if !defined( VULKAN_HPP_NO_EXCEPTIONS ) +#if !defined( VULKAN_HPP_NO_EXCEPTIONS ) && !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) ) # include // std::is_error_code_enum #endif -#if defined( VULKAN_HPP_NO_CONSTRUCTORS ) -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) -# define VULKAN_HPP_NO_STRUCT_CONSTRUCTORS -# endif -# if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) -# define VULKAN_HPP_NO_UNION_CONSTRUCTORS -# endif -#endif - -#if defined( 
VULKAN_HPP_NO_SETTERS ) -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) -# define VULKAN_HPP_NO_STRUCT_SETTERS -# endif -# if !defined( VULKAN_HPP_NO_UNION_SETTERS ) -# define VULKAN_HPP_NO_UNION_SETTERS -# endif -#endif - -#if !defined( VULKAN_HPP_ASSERT ) +#if ( VULKAN_HPP_ASSERT == assert ) # include -# define VULKAN_HPP_ASSERT assert -#endif - -#if !defined( VULKAN_HPP_ASSERT_ON_RESULT ) -# define VULKAN_HPP_ASSERT_ON_RESULT VULKAN_HPP_ASSERT -#endif - -#if !defined( VULKAN_HPP_STATIC_ASSERT ) -# define VULKAN_HPP_STATIC_ASSERT static_assert -#endif - -#if !defined( VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL ) -# define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1 #endif #if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL == 1 -# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ ) +# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) # include -# elif defined( _WIN32 ) -typedef struct HINSTANCE__ * HINSTANCE; +# elif defined( _WIN32 ) && !defined( VULKAN_HPP_NO_WIN32_PROTOTYPES ) +using HINSTANCE = struct HINSTANCE__ *; # if defined( _WIN64 ) -typedef int64_t( __stdcall * FARPROC )(); +using FARPROC = int64_t( __stdcall * )(); # else -typedef int( __stdcall * FARPROC )(); +using FARPROC = int( __stdcall * )(); # endif extern "C" __declspec( dllimport ) HINSTANCE __stdcall LoadLibraryA( char const * lpLibFileName ); extern "C" __declspec( dllimport ) int __stdcall FreeLibrary( HINSTANCE hLibModule ); @@ -98,31 +55,15 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h # endif #endif -#if !defined( __has_include ) -# define __has_include( x ) false -#endif - -#if ( 201907 <= __cpp_lib_three_way_comparison ) && __has_include( ) && !defined( VULKAN_HPP_NO_SPACESHIP_OPERATOR ) -# define VULKAN_HPP_HAS_SPACESHIP_OPERATOR -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) && !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) ) # include #endif -#if ( 201803 <= __cpp_lib_span ) -# define VULKAN_HPP_SUPPORT_SPAN +#if defined( VULKAN_HPP_SUPPORT_SPAN ) && !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) ) # include #endif -static_assert( VK_HEADER_VERSION == 228, "Wrong VK_HEADER_VERSION!" ); - -// 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default. -// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION -#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) -# if !defined( VULKAN_HPP_TYPESAFE_CONVERSION ) -# define VULKAN_HPP_TYPESAFE_CONVERSION -# endif -#endif +static_assert( VK_HEADER_VERSION == 307, "Wrong VK_HEADER_VERSION!" ); // includes through some other header // this results in major(x) being resolved to gnu_dev_major(x) @@ -140,99 +81,17 @@ static_assert( VK_HEADER_VERSION == 228, "Wrong VK_HEADER_VERSION!" 
); # undef MemoryBarrier #endif -#if !defined( VULKAN_HPP_HAS_UNRESTRICTED_UNIONS ) -# if defined( __clang__ ) -# if __has_feature( cxx_unrestricted_unions ) -# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS -# endif -# elif defined( __GNUC__ ) -# define GCC_VERSION ( __GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__ ) -# if 40600 <= GCC_VERSION -# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS -# endif -# elif defined( _MSC_VER ) -# if 1900 <= _MSC_VER -# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS -# endif -# endif -#endif - -#if !defined( VULKAN_HPP_INLINE ) -# if defined( __clang__ ) -# if __has_attribute( always_inline ) -# define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__ -# else -# define VULKAN_HPP_INLINE inline -# endif -# elif defined( __GNUC__ ) -# define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__ -# elif defined( _MSC_VER ) -# define VULKAN_HPP_INLINE inline -# else -# define VULKAN_HPP_INLINE inline -# endif -#endif - -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) -# define VULKAN_HPP_TYPESAFE_EXPLICIT -#else -# define VULKAN_HPP_TYPESAFE_EXPLICIT explicit -#endif - -#if defined( __cpp_constexpr ) -# define VULKAN_HPP_CONSTEXPR constexpr -# if __cpp_constexpr >= 201304 -# define VULKAN_HPP_CONSTEXPR_14 constexpr -# else -# define VULKAN_HPP_CONSTEXPR_14 -# endif -# define VULKAN_HPP_CONST_OR_CONSTEXPR constexpr -#else -# define VULKAN_HPP_CONSTEXPR -# define VULKAN_HPP_CONSTEXPR_14 -# define VULKAN_HPP_CONST_OR_CONSTEXPR const -#endif - -#if !defined( VULKAN_HPP_NOEXCEPT ) -# if defined( _MSC_VER ) && ( _MSC_VER <= 1800 ) -# define VULKAN_HPP_NOEXCEPT -# else -# define VULKAN_HPP_NOEXCEPT noexcept -# define VULKAN_HPP_HAS_NOEXCEPT 1 -# if defined( VULKAN_HPP_NO_EXCEPTIONS ) -# define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS noexcept -# else -# define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS -# endif -# endif -#endif - -#if 14 <= VULKAN_HPP_CPP_VERSION -# define VULKAN_HPP_DEPRECATED( msg ) [[deprecated( msg )]] -#else -# define VULKAN_HPP_DEPRECATED( msg ) -#endif - -#if ( 17 <= VULKAN_HPP_CPP_VERSION ) && !defined( VULKAN_HPP_NO_NODISCARD_WARNINGS ) -# define VULKAN_HPP_NODISCARD [[nodiscard]] -# if defined( VULKAN_HPP_NO_EXCEPTIONS ) -# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS [[nodiscard]] -# else -# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS -# endif -#else -# define VULKAN_HPP_NODISCARD -# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS +// XLib.h defines True/False, which collides with our vk::True/vk::False +// -> undef them and provide some namepace-secure constexpr +#if defined( True ) +# undef True +constexpr int True = 1; #endif - -#if !defined( VULKAN_HPP_NAMESPACE ) -# define VULKAN_HPP_NAMESPACE vk +#if defined( False ) +# undef False +constexpr int False = 0; #endif -#define VULKAN_HPP_STRINGIFY2( text ) #text -#define VULKAN_HPP_STRINGIFY( text ) VULKAN_HPP_STRINGIFY2( text ) -#define VULKAN_HPP_NAMESPACE_STRING VULKAN_HPP_STRINGIFY( VULKAN_HPP_NAMESPACE ) - namespace VULKAN_HPP_NAMESPACE { template @@ -243,6 +102,20 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR ArrayWrapper1D( std::array const & data ) VULKAN_HPP_NOEXCEPT : std::array( data ) {} + template ::value, int>::type = 0> + VULKAN_HPP_CONSTEXPR_14 ArrayWrapper1D( std::string const & data ) VULKAN_HPP_NOEXCEPT + { + copy( data.data(), data.length() ); + } + +#if 17 <= VULKAN_HPP_CPP_VERSION + template ::value, int>::type = 0> + VULKAN_HPP_CONSTEXPR_14 ArrayWrapper1D( std::string_view data ) VULKAN_HPP_NOEXCEPT + { + copy( data.data(), 
data.length() ); + } +#endif + #if ( VK_USE_64_BIT_PTR_DEFINES == 0 ) // on 32 bit compiles, needs overloads on index type int to resolve ambiguities VULKAN_HPP_CONSTEXPR T const & operator[]( int index ) const VULKAN_HPP_NOEXCEPT @@ -269,63 +142,83 @@ namespace VULKAN_HPP_NAMESPACE template ::value, int>::type = 0> operator std::string() const { - return std::string( this->data() ); + return std::string( this->data(), strnlen( this->data(), N ) ); } #if 17 <= VULKAN_HPP_CPP_VERSION template ::value, int>::type = 0> operator std::string_view() const { - return std::string_view( this->data() ); + return std::string_view( this->data(), strnlen( this->data(), N ) ); } #endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - template ::value, int>::type = 0> - std::strong_ordering operator<=>( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT + private: + VULKAN_HPP_CONSTEXPR_14 void copy( char const * data, size_t len ) VULKAN_HPP_NOEXCEPT { - return *static_cast const *>( this ) <=> *static_cast const *>( &rhs ); + size_t n = ( std::min )( N - 1, len ); + for ( size_t i = 0; i < n; ++i ) + { + ( *this )[i] = data[i]; + } + ( *this )[n] = 0; } + }; + +// relational operators between ArrayWrapper1D of chars with potentially different sizes +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + template + std::strong_ordering operator<=>( ArrayWrapper1D const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + int result = strcmp( lhs.data(), rhs.data() ); + return ( result < 0 ) ? std::strong_ordering::less : ( ( result > 0 ) ? std::strong_ordering::greater : std::strong_ordering::equal ); + } #else - template ::value, int>::type = 0> - bool operator<( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return *static_cast const *>( this ) < *static_cast const *>( &rhs ); - } + template + bool operator<( ArrayWrapper1D const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return strcmp( lhs.data(), rhs.data() ) < 0; + } - template ::value, int>::type = 0> - bool operator<=( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return *static_cast const *>( this ) <= *static_cast const *>( &rhs ); - } + template + bool operator<=( ArrayWrapper1D const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return strcmp( lhs.data(), rhs.data() ) <= 0; + } - template ::value, int>::type = 0> - bool operator>( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return *static_cast const *>( this ) > *static_cast const *>( &rhs ); - } + template + bool operator>( ArrayWrapper1D const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return strcmp( lhs.data(), rhs.data() ) > 0; + } - template ::value, int>::type = 0> - bool operator>=( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return *static_cast const *>( this ) >= *static_cast const *>( &rhs ); - } + template + bool operator>=( ArrayWrapper1D const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return strcmp( lhs.data(), rhs.data() ) >= 0; + } #endif - template ::value, int>::type = 0> - bool operator==( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return *static_cast const *>( this ) == *static_cast const *>( &rhs ); - } + template + bool operator==( ArrayWrapper1D const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return strcmp( lhs.data(), rhs.data() ) == 0; + } - template ::value, int>::type = 0> - bool operator!=( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return *static_cast const *>( 
this ) != *static_cast const *>( &rhs ); - } - }; + template + bool operator!=( ArrayWrapper1D const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return strcmp( lhs.data(), rhs.data() ) != 0; + } - // specialization of relational operators between std::string and arrays of chars +// specialization of relational operators between std::string and arrays of chars +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + template + std::strong_ordering operator<=>( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return lhs <=> rhs.data(); + } +#else template bool operator<( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -349,6 +242,7 @@ namespace VULKAN_HPP_NAMESPACE { return lhs >= rhs.data(); } +#endif template bool operator==( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT @@ -374,186 +268,6 @@ namespace VULKAN_HPP_NAMESPACE } }; - template - struct FlagTraits - { - }; - - template - class Flags - { - public: - using MaskType = typename std::underlying_type::type; - - // constructors - VULKAN_HPP_CONSTEXPR Flags() VULKAN_HPP_NOEXCEPT : m_mask( 0 ) {} - - VULKAN_HPP_CONSTEXPR Flags( BitType bit ) VULKAN_HPP_NOEXCEPT : m_mask( static_cast( bit ) ) {} - - VULKAN_HPP_CONSTEXPR Flags( Flags const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - VULKAN_HPP_CONSTEXPR explicit Flags( MaskType flags ) VULKAN_HPP_NOEXCEPT : m_mask( flags ) {} - - // relational operators -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( Flags const & ) const = default; -#else - VULKAN_HPP_CONSTEXPR bool operator<( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_mask < rhs.m_mask; - } - - VULKAN_HPP_CONSTEXPR bool operator<=( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_mask <= rhs.m_mask; - } - - VULKAN_HPP_CONSTEXPR bool operator>( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_mask > rhs.m_mask; - } - - VULKAN_HPP_CONSTEXPR bool operator>=( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_mask >= rhs.m_mask; - } - - VULKAN_HPP_CONSTEXPR bool operator==( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_mask == rhs.m_mask; - } - - VULKAN_HPP_CONSTEXPR bool operator!=( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_mask != rhs.m_mask; - } -#endif - - // logical operator - VULKAN_HPP_CONSTEXPR bool operator!() const VULKAN_HPP_NOEXCEPT - { - return !m_mask; - } - - // bitwise operators - VULKAN_HPP_CONSTEXPR Flags operator&( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return Flags( m_mask & rhs.m_mask ); - } - - VULKAN_HPP_CONSTEXPR Flags operator|( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return Flags( m_mask | rhs.m_mask ); - } - - VULKAN_HPP_CONSTEXPR Flags operator^( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return Flags( m_mask ^ rhs.m_mask ); - } - - VULKAN_HPP_CONSTEXPR Flags operator~() const VULKAN_HPP_NOEXCEPT - { - return Flags( m_mask ^ FlagTraits::allFlags ); - } - - // assignment operators - VULKAN_HPP_CONSTEXPR_14 Flags & operator=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - VULKAN_HPP_CONSTEXPR_14 Flags & operator|=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT - { - m_mask |= rhs.m_mask; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 Flags & operator&=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT - { - m_mask &= rhs.m_mask; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 Flags & operator^=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT - { - m_mask ^= rhs.m_mask; - return *this; - } 
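
The block removed above is the previous in-header definition of the Flags<BitType> bitmask wrapper. As a reminder of what that pattern provides over plain enums, here is a reduced standalone sketch of the same idea; the ShaderStage enum is invented for the example and is not a Vulkan type.

#include <cstdint>
#include <type_traits>

// Invented example enum; Vulkan's real bit enums follow the same shape.
enum class ShaderStage : uint32_t { Vertex = 0x1, Fragment = 0x10, Compute = 0x20 };

template <typename BitType>
class Flags {
public:
    using MaskType = typename std::underlying_type<BitType>::type;

    constexpr Flags() noexcept : m_mask(0) {}
    constexpr Flags(BitType bit) noexcept : m_mask(static_cast<MaskType>(bit)) {}

    constexpr Flags operator|(Flags rhs) const noexcept { return Flags(m_mask | rhs.m_mask); }
    constexpr Flags operator&(Flags rhs) const noexcept { return Flags(m_mask & rhs.m_mask); }
    constexpr explicit operator bool() const noexcept { return m_mask != 0; }

private:
    constexpr explicit Flags(MaskType mask) noexcept : m_mask(mask) {}
    MaskType m_mask;
};

// Combining two enum bits yields a Flags value rather than a raw integer, as in vulkan.hpp.
template <typename BitType>
constexpr Flags<BitType> operator|(BitType a, BitType b) noexcept {
    return Flags<BitType>(a) | Flags<BitType>(b);
}

int main() {
    Flags<ShaderStage> stages = ShaderStage::Vertex | ShaderStage::Fragment;
    bool usesCompute = static_cast<bool>(stages & ShaderStage::Compute);  // false
    (void)usesCompute;
}
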
- - // cast operators - explicit VULKAN_HPP_CONSTEXPR operator bool() const VULKAN_HPP_NOEXCEPT - { - return !!m_mask; - } - - explicit VULKAN_HPP_CONSTEXPR operator MaskType() const VULKAN_HPP_NOEXCEPT - { - return m_mask; - } - -#if defined( VULKAN_HPP_FLAGS_MASK_TYPE_AS_PUBLIC ) - public: -#else - private: -#endif - MaskType m_mask; - }; - -#if !defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - // relational operators only needed for pre C++20 - template - VULKAN_HPP_CONSTEXPR bool operator<( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT - { - return flags.operator>( bit ); - } - - template - VULKAN_HPP_CONSTEXPR bool operator<=( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT - { - return flags.operator>=( bit ); - } - - template - VULKAN_HPP_CONSTEXPR bool operator>( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT - { - return flags.operator<( bit ); - } - - template - VULKAN_HPP_CONSTEXPR bool operator>=( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT - { - return flags.operator<=( bit ); - } - - template - VULKAN_HPP_CONSTEXPR bool operator==( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT - { - return flags.operator==( bit ); - } - - template - VULKAN_HPP_CONSTEXPR bool operator!=( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT - { - return flags.operator!=( bit ); - } -#endif - - // bitwise operators - template - VULKAN_HPP_CONSTEXPR Flags operator&( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT - { - return flags.operator&( bit ); - } - - template - VULKAN_HPP_CONSTEXPR Flags operator|( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT - { - return flags.operator|( bit ); - } - - template - VULKAN_HPP_CONSTEXPR Flags operator^( BitType bit, Flags const & flags ) VULKAN_HPP_NOEXCEPT - { - return flags.operator^( bit ); - } - #if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template class ArrayProxy @@ -681,38 +395,19 @@ namespace VULKAN_HPP_NAMESPACE { } - ArrayProxyNoTemporaries( T & value ) VULKAN_HPP_NOEXCEPT - : m_count( 1 ) - , m_ptr( &value ) - { - } - - template - ArrayProxyNoTemporaries( V && value ) = delete; - - template ::value, int>::type = 0> - ArrayProxyNoTemporaries( typename std::remove_const::type & value ) VULKAN_HPP_NOEXCEPT + template ::value && std::is_lvalue_reference::value, int>::type = 0> + ArrayProxyNoTemporaries( B && value ) VULKAN_HPP_NOEXCEPT : m_count( 1 ) , m_ptr( &value ) { } - template ::value, int>::type = 0> - ArrayProxyNoTemporaries( typename std::remove_const::type && value ) = delete; - ArrayProxyNoTemporaries( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT : m_count( count ) , m_ptr( ptr ) { } - template ::value, int>::type = 0> - ArrayProxyNoTemporaries( uint32_t count, typename std::remove_const::type * ptr ) VULKAN_HPP_NOEXCEPT - : m_count( count ) - , m_ptr( ptr ) - { - } - template ArrayProxyNoTemporaries( T ( &ptr )[C] ) VULKAN_HPP_NOEXCEPT : m_count( C ) @@ -721,71 +416,37 @@ namespace VULKAN_HPP_NAMESPACE } template - ArrayProxyNoTemporaries( T( &&ptr )[C] ) = delete; + ArrayProxyNoTemporaries( T ( &&ptr )[C] ) = delete; - template ::value, int>::type = 0> - ArrayProxyNoTemporaries( typename std::remove_const::type ( &ptr )[C] ) VULKAN_HPP_NOEXCEPT - : m_count( C ) - , m_ptr( ptr ) + // Any l-value reference with a .data() return type implicitly convertible to T*, and a .size() return type implicitly convertible to size_t. 
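
The reworked constructors above accept any l-value range whose data()/size() (or begin()/size()) members match, while the is_lvalue_reference test keeps temporaries from binding. A reduced sketch of that SFINAE shape, using an invented View type rather than ArrayProxyNoTemporaries itself:

#include <cstddef>
#include <type_traits>
#include <utility>
#include <vector>

// Invented non-owning view that, like ArrayProxyNoTemporaries, must not bind to temporaries.
template <typename T>
class View {
public:
    // Accept only l-value ranges whose data() converts to T*; rvalues fail the is_lvalue_reference test.
    template <typename V,
              typename std::enable_if<
                  std::is_convertible<decltype(std::declval<V &>().data()), T *>::value &&
                  std::is_convertible<decltype(std::declval<V &>().size()), std::size_t>::value &&
                  std::is_lvalue_reference<V>::value, int>::type = 0>
    View(V && v) noexcept : m_ptr(v.data()), m_count(v.size()) {}

    const T * data() const noexcept { return m_ptr; }
    std::size_t size() const noexcept { return m_count; }

private:
    const T * m_ptr;
    std::size_t m_count;
};

int main() {
    std::vector<int> values{1, 2, 3};
    View<const int> ok(values);                       // binds to an l-value: fine
    // View<const int> bad(std::vector<int>{4, 5});   // would not compile: temporary rejected
    (void)ok;
}
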
+ template ().begin() ), T *>::value && + std::is_convertible().data() ), T *>::value && + std::is_convertible().size() ), std::size_t>::value && std::is_lvalue_reference::value, + int>::type = 0> + ArrayProxyNoTemporaries( V && v ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( v.size() ) ) + , m_ptr( v.data() ) { } - template ::value, int>::type = 0> - ArrayProxyNoTemporaries( typename std::remove_const::type( &&ptr )[C] ) = delete; - - ArrayProxyNoTemporaries( std::initializer_list const & list ) VULKAN_HPP_NOEXCEPT - : m_count( static_cast( list.size() ) ) - , m_ptr( list.begin() ) + // Any l-value reference with a .begin() return type implicitly convertible to T*, and a .size() return type implicitly convertible to size_t. + template ().begin() ), T *>::value && + std::is_convertible().size() ), std::size_t>::value && std::is_lvalue_reference::value, + int>::type = 0> + ArrayProxyNoTemporaries( V && v ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( v.size() ) ) + , m_ptr( v.begin() ) { } - ArrayProxyNoTemporaries( std::initializer_list const && list ) = delete; - - template ::value, int>::type = 0> - ArrayProxyNoTemporaries( std::initializer_list::type> const & list ) VULKAN_HPP_NOEXCEPT - : m_count( static_cast( list.size() ) ) - , m_ptr( list.begin() ) + const T * begin() const VULKAN_HPP_NOEXCEPT { + return m_ptr; } - template ::value, int>::type = 0> - ArrayProxyNoTemporaries( std::initializer_list::type> const && list ) = delete; - - ArrayProxyNoTemporaries( std::initializer_list & list ) VULKAN_HPP_NOEXCEPT - : m_count( static_cast( list.size() ) ) - , m_ptr( list.begin() ) - { - } - - ArrayProxyNoTemporaries( std::initializer_list && list ) = delete; - - template ::value, int>::type = 0> - ArrayProxyNoTemporaries( std::initializer_list::type> & list ) VULKAN_HPP_NOEXCEPT - : m_count( static_cast( list.size() ) ) - , m_ptr( list.begin() ) - { - } - - template ::value, int>::type = 0> - ArrayProxyNoTemporaries( std::initializer_list::type> && list ) = delete; - - // Any type with a .data() return type implicitly convertible to T*, and a // .size() return type implicitly - // convertible to size_t. - template ().data() ), T *>::value && - std::is_convertible().size() ), std::size_t>::value>::type * = nullptr> - ArrayProxyNoTemporaries( V & v ) VULKAN_HPP_NOEXCEPT - : m_count( static_cast( v.size() ) ) - , m_ptr( v.data() ) - { - } - - const T * begin() const VULKAN_HPP_NOEXCEPT - { - return m_ptr; - } - - const T * end() const VULKAN_HPP_NOEXCEPT + const T * end() const VULKAN_HPP_NOEXCEPT { return m_ptr + m_count; } @@ -871,10 +532,12 @@ namespace VULKAN_HPP_NAMESPACE { m_ptr = &reference; } + Optional( RefType * ptr ) VULKAN_HPP_NOEXCEPT { m_ptr = ptr; } + Optional( std::nullptr_t ) VULKAN_HPP_NOEXCEPT { m_ptr = nullptr; @@ -884,10 +547,12 @@ namespace VULKAN_HPP_NAMESPACE { return m_ptr; } + RefType const * operator->() const VULKAN_HPP_NOEXCEPT { return m_ptr; } + explicit operator bool() const VULKAN_HPP_NOEXCEPT { return !!m_ptr; @@ -949,6 +614,8 @@ namespace VULKAN_HPP_NAMESPACE template class StructureChain : public std::tuple { + // Note: StructureChain has no move constructor or move assignment operator, as it is not supposed to contain movable containers. + // In order to get a copy-operation on a move-operations, those functions are neither deleted nor defaulted. 
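
The note above explains why StructureChain intentionally copies where a move might be expected. For context, a short usage sketch follows: default construction links the elements' pNext pointers, and get<>() returns a typed reference into the chain. The feature structures used here are ordinary vulkan.hpp types, assumed for the example since they do not appear in this patch.

#include <vulkan/vulkan.hpp>

int main() {
    // Elements are linked via pNext as soon as the chain is constructed.
    vk::StructureChain<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan12Features> chain;

    vk::PhysicalDeviceFeatures2 &        features2 = chain.get<vk::PhysicalDeviceFeatures2>();
    vk::PhysicalDeviceVulkan12Features & vk12      = chain.get<vk::PhysicalDeviceVulkan12Features>();

    // features2.pNext already points at the Vulkan 1.2 features element of the chain.
    bool linked = (features2.pNext == &vk12);
    (void)linked;

    // A typical next step would be passing &features2 (or the whole chain) to
    // vkGetPhysicalDeviceFeatures2 / physicalDevice.getFeatures2().
}
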
public: StructureChain() VULKAN_HPP_NOEXCEPT { @@ -965,15 +632,6 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &std::get<0>( rhs ) ) ); } - StructureChain( StructureChain && rhs ) VULKAN_HPP_NOEXCEPT : std::tuple( std::forward>( rhs ) ) - { - static_assert( StructureChainValidation::valid, "The structure chain is not valid!" ); - link( &std::get<0>( *this ), - &std::get<0>( rhs ), - reinterpret_cast( &std::get<0>( *this ) ), - reinterpret_cast( &std::get<0>( rhs ) ) ); - } - StructureChain( ChainElements const &... elems ) VULKAN_HPP_NOEXCEPT : std::tuple( elems... ) { static_assert( StructureChainValidation::valid, "The structure chain is not valid!" ); @@ -990,8 +648,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - StructureChain & operator=( StructureChain && rhs ) = delete; - template >::type, size_t Which = 0> T & get() VULKAN_HPP_NOEXCEPT { @@ -1016,6 +672,17 @@ namespace VULKAN_HPP_NAMESPACE return std::tie( get(), get(), get()... ); } + // assign a complete structure to the StructureChain without modifying the chaining + template >::type, size_t Which = 0> + StructureChain & assign( const T & rhs ) VULKAN_HPP_NOEXCEPT + { + T & lhs = get(); + auto pNext = lhs.pNext; + lhs = rhs; + lhs.pNext = pNext; + return *this; + } + template typename std::enable_if>::type>::value && ( Which == 0 ), bool>::type isLinked() const VULKAN_HPP_NOEXCEPT @@ -1133,6 +800,27 @@ namespace VULKAN_HPP_NAMESPACE } }; + // interupt the VULKAN_HPP_NAMESPACE for a moment to add specializations of std::tuple_size and std::tuple_element for the StructureChain! +} + +namespace std +{ + template + struct tuple_size> + { + static constexpr size_t value = std::tuple_size>::value; + }; + + template + struct tuple_element> + { + using type = typename std::tuple_element>::type; + }; +} // namespace std + +namespace VULKAN_HPP_NAMESPACE +{ + # if !defined( VULKAN_HPP_NO_SMART_HANDLE ) template class UniqueHandleTraits; @@ -1184,6 +872,13 @@ namespace VULKAN_HPP_NAMESPACE return m_value.operator bool(); } +# if defined( VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST ) + operator Type() const VULKAN_HPP_NOEXCEPT + { + return m_value; + } +# endif + Type const * operator->() const VULKAN_HPP_NOEXCEPT { return &m_value; @@ -1259,4970 +954,11137 @@ namespace VULKAN_HPP_NAMESPACE # endif #endif // VULKAN_HPP_DISABLE_ENHANCED_MODE - class DispatchLoaderBase + namespace detail { - public: - DispatchLoaderBase() = default; - DispatchLoaderBase( std::nullptr_t ) + class DispatchLoaderBase + { + public: + DispatchLoaderBase() = default; + DispatchLoaderBase( std::nullptr_t ) #if !defined( NDEBUG ) - : m_valid( false ) + : m_valid( false ) #endif - { - } + { + } #if !defined( NDEBUG ) - size_t getVkHeaderVersion() const - { - VULKAN_HPP_ASSERT( m_valid ); - return vkHeaderVersion; - } + size_t getVkHeaderVersion() const + { + VULKAN_HPP_ASSERT( m_valid ); + return vkHeaderVersion; + } - private: - size_t vkHeaderVersion = VK_HEADER_VERSION; - bool m_valid = true; + private: + size_t vkHeaderVersion = VK_HEADER_VERSION; + bool m_valid = true; #endif - }; - -#if !defined( VK_NO_PROTOTYPES ) - class DispatchLoaderStatic : public DispatchLoaderBase - { - public: - //=== VK_VERSION_1_0 === + }; - VkResult - vkCreateInstance( const VkInstanceCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkInstance * pInstance ) const VULKAN_HPP_NOEXCEPT +#if !defined( VK_NO_PROTOTYPES ) || ( defined( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC ) && ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 0 ) ) + class DispatchLoaderStatic : public 
DispatchLoaderBase { - return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance ); - } + public: + //=== VK_VERSION_1_0 === - void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyInstance( instance, pAllocator ); - } + VkResult + vkCreateInstance( const VkInstanceCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkInstance * pInstance ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance ); + } - VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t * pPhysicalDeviceCount, VkPhysicalDevice * pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices ); - } + void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyInstance( instance, pAllocator ); + } - void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures * pFeatures ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures ); - } + VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t * pPhysicalDeviceCount, VkPhysicalDevice * pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices ); + } - void - vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties * pFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties ); - } + void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures * pFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures ); + } - VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, - VkFormat format, - VkImageType type, - VkImageTiling tiling, - VkImageUsageFlags usage, - VkImageCreateFlags flags, - VkImageFormatProperties * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties ); - } + void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties * pFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties ); + } - void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties ); - } + VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkImageFormatProperties * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties ); + } - void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, - uint32_t * pQueueFamilyPropertyCount, - VkQueueFamilyProperties * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, 
pQueueFamilyPropertyCount, pQueueFamilyProperties ); - } + void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties ); + } - void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties ); - } + void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + } - PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char * pName ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetInstanceProcAddr( instance, pName ); - } + void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties ); + } - PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char * pName ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceProcAddr( device, pName ); - } + PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char * pName ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetInstanceProcAddr( instance, pName ); + } - VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, - const VkDeviceCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDevice * pDevice ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice ); - } + PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char * pName ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceProcAddr( device, pName ); + } - void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDevice( device, pAllocator ); - } + VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, + const VkDeviceCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDevice * pDevice ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice ); + } + + void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDevice( device, pAllocator ); + } - VkResult vkEnumerateInstanceExtensionProperties( const char * pLayerName, + VkResult vkEnumerateInstanceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties ); + } + + VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, + const char * pLayerName, uint32_t * pPropertyCount, VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties ); - } + { + return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties ); + } - VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, - const char * 
pLayerName, - uint32_t * pPropertyCount, - VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties ); - } + VkResult vkEnumerateInstanceLayerProperties( uint32_t * pPropertyCount, VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties ); + } - VkResult vkEnumerateInstanceLayerProperties( uint32_t * pPropertyCount, VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties ); - } + VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties ); + } - VkResult - vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties ); - } + void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue ); + } - void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue ); - } + VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSubmit( queue, submitCount, pSubmits, fence ); + } - VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueSubmit( queue, submitCount, pSubmits, fence ); - } + VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueWaitIdle( queue ); + } - VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueWaitIdle( queue ); - } + VkResult vkDeviceWaitIdle( VkDevice device ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDeviceWaitIdle( device ); + } - VkResult vkDeviceWaitIdle( VkDevice device ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDeviceWaitIdle( device ); - } + VkResult vkAllocateMemory( VkDevice device, + const VkMemoryAllocateInfo * pAllocateInfo, + const VkAllocationCallbacks * pAllocator, + VkDeviceMemory * pMemory ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory ); + } - VkResult vkAllocateMemory( VkDevice device, - const VkMemoryAllocateInfo * pAllocateInfo, - const VkAllocationCallbacks * pAllocator, - VkDeviceMemory * pMemory ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory ); - } + void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFreeMemory( device, memory, pAllocator ); + } - void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkFreeMemory( device, memory, pAllocator ); - } + VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, 
VkDeviceSize size, VkMemoryMapFlags flags, void ** ppData ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkMapMemory( device, memory, offset, size, flags, ppData ); + } - VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void ** ppData ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkMapMemory( device, memory, offset, size, flags, ppData ); - } + void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUnmapMemory( device, memory ); + } - void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUnmapMemory( device, memory ); - } + VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); + } - VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT - { - return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); - } + VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); + } - VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT - { - return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); - } + void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize * pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes ); + } - void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize * pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes ); - } + VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindBufferMemory( device, buffer, memory, memoryOffset ); + } - VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindBufferMemory( device, buffer, memory, memoryOffset ); - } + VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory( device, image, memory, memoryOffset ); + } - VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindImageMemory( device, image, memory, memoryOffset ); - } + void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements ); + } - void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements ); - } + void vkGetImageMemoryRequirements( VkDevice device, VkImage 
image, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements ); + } - void vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements ); - } + void vkGetImageSparseMemoryRequirements( VkDevice device, + VkImage image, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSparseMemoryRequirements( device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } - void vkGetImageSparseMemoryRequirements( VkDevice device, - VkImage image, - uint32_t * pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSparseMemoryRequirements( device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); - } + void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkSampleCountFlagBits samples, + VkImageUsageFlags usage, + VkImageTiling tiling, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties ); + } - void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, - VkFormat format, - VkImageType type, - VkSampleCountFlagBits samples, - VkImageUsageFlags usage, - VkImageTiling tiling, - uint32_t * pPropertyCount, - VkSparseImageFormatProperties * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties ); - } + VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo * pBindInfo, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence ); + } - VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo * pBindInfo, VkFence fence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence ); - } + VkResult vkCreateFence( VkDevice device, + const VkFenceCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence ); + } - VkResult vkCreateFence( VkDevice device, - const VkFenceCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkFence * pFence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence ); - } + void vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyFence( device, fence, pAllocator ); + } - void vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyFence( device, fence, pAllocator ); - } + VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetFences( device, fenceCount, pFences ); + } - VkResult vkResetFences( 
VkDevice device, uint32_t fenceCount, const VkFence * pFences ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetFences( device, fenceCount, pFences ); - } + VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFenceStatus( device, fence ); + } - VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetFenceStatus( device, fence ); - } + VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences, VkBool32 waitAll, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout ); + } - VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences, VkBool32 waitAll, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout ); - } + VkResult vkCreateSemaphore( VkDevice device, + const VkSemaphoreCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSemaphore * pSemaphore ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore ); + } - VkResult vkCreateSemaphore( VkDevice device, - const VkSemaphoreCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSemaphore * pSemaphore ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore ); - } + void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySemaphore( device, semaphore, pAllocator ); + } - void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySemaphore( device, semaphore, pAllocator ); - } + VkResult vkCreateEvent( VkDevice device, + const VkEventCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkEvent * pEvent ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent ); + } - VkResult vkCreateEvent( VkDevice device, - const VkEventCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkEvent * pEvent ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent ); - } - - void vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyEvent( device, event, pAllocator ); - } + void vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyEvent( device, event, pAllocator ); + } - VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetEventStatus( device, event ); - } + VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetEventStatus( device, event ); + } - VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetEvent( device, event ); - } + VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetEvent( device, event ); + } - VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetEvent( device, event ); - } + VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkResetEvent( device, event ); + } - VkResult vkCreateQueryPool( VkDevice device, - const VkQueryPoolCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkQueryPool * pQueryPool ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool ); - } + VkResult vkCreateQueryPool( VkDevice device, + const VkQueryPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkQueryPool * pQueryPool ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool ); + } - void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyQueryPool( device, queryPool, pAllocator ); - } + void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyQueryPool( device, queryPool, pAllocator ); + } - VkResult vkGetQueryPoolResults( VkDevice device, - VkQueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount, - size_t dataSize, - void * pData, - VkDeviceSize stride, - VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags ); - } + VkResult vkGetQueryPoolResults( VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void * pData, + VkDeviceSize stride, + VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags ); + } - VkResult vkCreateBuffer( VkDevice device, - const VkBufferCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkBuffer * pBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer ); - } + VkResult vkCreateBuffer( VkDevice device, + const VkBufferCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBuffer * pBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer ); + } - void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyBuffer( device, buffer, pAllocator ); - } + void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyBuffer( device, buffer, pAllocator ); + } - VkResult vkCreateBufferView( VkDevice device, - const VkBufferViewCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkBufferView * pView ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView ); - } + VkResult vkCreateBufferView( VkDevice device, + const VkBufferViewCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBufferView * pView ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView ); + } - void vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyBufferView( device, bufferView, pAllocator ); - } + void vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyBufferView( device, 
bufferView, pAllocator ); + } - VkResult vkCreateImage( VkDevice device, - const VkImageCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkImage * pImage ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage ); - } + VkResult vkCreateImage( VkDevice device, + const VkImageCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkImage * pImage ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage ); + } - void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyImage( device, image, pAllocator ); - } + void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyImage( device, image, pAllocator ); + } - void vkGetImageSubresourceLayout( VkDevice device, - VkImage image, - const VkImageSubresource * pSubresource, - VkSubresourceLayout * pLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout ); - } + void vkGetImageSubresourceLayout( VkDevice device, + VkImage image, + const VkImageSubresource * pSubresource, + VkSubresourceLayout * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout ); + } - VkResult vkCreateImageView( VkDevice device, - const VkImageViewCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkImageView * pView ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView ); - } + VkResult vkCreateImageView( VkDevice device, + const VkImageViewCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkImageView * pView ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView ); + } - void vkDestroyImageView( VkDevice device, VkImageView imageView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyImageView( device, imageView, pAllocator ); - } + void vkDestroyImageView( VkDevice device, VkImageView imageView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyImageView( device, imageView, pAllocator ); + } - VkResult vkCreateShaderModule( VkDevice device, - const VkShaderModuleCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkShaderModule * pShaderModule ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule ); - } + VkResult vkCreateShaderModule( VkDevice device, + const VkShaderModuleCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkShaderModule * pShaderModule ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule ); + } - void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyShaderModule( device, shaderModule, pAllocator ); - } + void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyShaderModule( device, shaderModule, pAllocator ); + } - VkResult vkCreatePipelineCache( VkDevice device, - const VkPipelineCacheCreateInfo * pCreateInfo, - const VkAllocationCallbacks * 
pAllocator, - VkPipelineCache * pPipelineCache ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache ); - } + VkResult vkCreatePipelineCache( VkDevice device, + const VkPipelineCacheCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPipelineCache * pPipelineCache ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache ); + } - void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator ); - } + void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator ); + } - VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData ); - } + VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData ); + } - VkResult - vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache * pSrcCaches ) const VULKAN_HPP_NOEXCEPT - { - return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches ); - } + VkResult + vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache * pSrcCaches ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches ); + } - VkResult vkCreateGraphicsPipelines( VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkGraphicsPipelineCreateInfo * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); - } + VkResult vkCreateGraphicsPipelines( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkGraphicsPipelineCreateInfo * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } - VkResult vkCreateComputePipelines( VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkComputePipelineCreateInfo * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); - } + VkResult vkCreateComputePipelines( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkComputePipelineCreateInfo * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } - void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks * 
pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPipeline( device, pipeline, pAllocator ); - } + void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipeline( device, pipeline, pAllocator ); + } - VkResult vkCreatePipelineLayout( VkDevice device, - const VkPipelineLayoutCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkPipelineLayout * pPipelineLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout ); - } + VkResult vkCreatePipelineLayout( VkDevice device, + const VkPipelineLayoutCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPipelineLayout * pPipelineLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout ); + } - void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator ); - } + void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator ); + } - VkResult vkCreateSampler( VkDevice device, - const VkSamplerCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSampler * pSampler ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler ); - } + VkResult vkCreateSampler( VkDevice device, + const VkSamplerCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSampler * pSampler ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler ); + } - void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySampler( device, sampler, pAllocator ); - } + void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySampler( device, sampler, pAllocator ); + } - VkResult vkCreateDescriptorSetLayout( VkDevice device, - const VkDescriptorSetLayoutCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDescriptorSetLayout * pSetLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout ); - } + VkResult vkCreateDescriptorSetLayout( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorSetLayout * pSetLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout ); + } - void vkDestroyDescriptorSetLayout( VkDevice device, - VkDescriptorSetLayout descriptorSetLayout, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator ); - } + void vkDestroyDescriptorSetLayout( VkDevice device, + VkDescriptorSetLayout descriptorSetLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator ); + } - VkResult vkCreateDescriptorPool( VkDevice device, - const 
VkDescriptorPoolCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDescriptorPool * pDescriptorPool ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool ); - } + VkResult vkCreateDescriptorPool( VkDevice device, + const VkDescriptorPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorPool * pDescriptorPool ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool ); + } - void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator ); - } + void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator ); + } - VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetDescriptorPool( device, descriptorPool, flags ); - } + VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetDescriptorPool( device, descriptorPool, flags ); + } - VkResult vkAllocateDescriptorSets( VkDevice device, - const VkDescriptorSetAllocateInfo * pAllocateInfo, - VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets ); - } + VkResult vkAllocateDescriptorSets( VkDevice device, + const VkDescriptorSetAllocateInfo * pAllocateInfo, + VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets ); + } - VkResult vkFreeDescriptorSets( VkDevice device, - VkDescriptorPool descriptorPool, - uint32_t descriptorSetCount, - const VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets ); - } + VkResult vkFreeDescriptorSets( VkDevice device, + VkDescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets ); + } - void vkUpdateDescriptorSets( VkDevice device, - uint32_t descriptorWriteCount, - const VkWriteDescriptorSet * pDescriptorWrites, - uint32_t descriptorCopyCount, - const VkCopyDescriptorSet * pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies ); - } + void vkUpdateDescriptorSets( VkDevice device, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet * pDescriptorWrites, + uint32_t descriptorCopyCount, + const VkCopyDescriptorSet * pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies ); + } - VkResult vkCreateFramebuffer( VkDevice device, - const VkFramebufferCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkFramebuffer * pFramebuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateFramebuffer( device, 
pCreateInfo, pAllocator, pFramebuffer ); - } + VkResult vkCreateFramebuffer( VkDevice device, + const VkFramebufferCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkFramebuffer * pFramebuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer ); + } - void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyFramebuffer( device, framebuffer, pAllocator ); - } + void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyFramebuffer( device, framebuffer, pAllocator ); + } - VkResult vkCreateRenderPass( VkDevice device, - const VkRenderPassCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass ); - } + VkResult vkCreateRenderPass( VkDevice device, + const VkRenderPassCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass ); + } - void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyRenderPass( device, renderPass, pAllocator ); - } + void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyRenderPass( device, renderPass, pAllocator ); + } - void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity ); - } + void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity ); + } - VkResult vkCreateCommandPool( VkDevice device, - const VkCommandPoolCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkCommandPool * pCommandPool ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool ); - } + VkResult vkCreateCommandPool( VkDevice device, + const VkCommandPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCommandPool * pCommandPool ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool ); + } - void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyCommandPool( device, commandPool, pAllocator ); - } + void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCommandPool( device, commandPool, pAllocator ); + } - VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetCommandPool( device, commandPool, flags ); - } + VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkResetCommandPool( device, commandPool, flags ); + } - VkResult vkAllocateCommandBuffers( VkDevice device, - const VkCommandBufferAllocateInfo * pAllocateInfo, - VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers ); - } + VkResult vkAllocateCommandBuffers( VkDevice device, + const VkCommandBufferAllocateInfo * pAllocateInfo, + VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers ); + } - void vkFreeCommandBuffers( VkDevice device, - VkCommandPool commandPool, - uint32_t commandBufferCount, - const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers ); - } + void vkFreeCommandBuffers( VkDevice device, + VkCommandPool commandPool, + uint32_t commandBufferCount, + const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers ); + } - VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo * pBeginInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo ); - } + VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo * pBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo ); + } - VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEndCommandBuffer( commandBuffer ); - } + VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEndCommandBuffer( commandBuffer ); + } - VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetCommandBuffer( commandBuffer, flags ); - } + VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetCommandBuffer( commandBuffer, flags ); + } - void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline ); - } + void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline ); + } - void - vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports ); - } + void vkCmdSetViewport( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports ); + } - void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors ); - } + void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D * 
pScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors ); + } - void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetLineWidth( commandBuffer, lineWidth ); - } + void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineWidth( commandBuffer, lineWidth ); + } - void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, - float depthBiasConstantFactor, - float depthBiasClamp, - float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); - } + void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, + float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + } - void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetBlendConstants( commandBuffer, blendConstants ); - } + void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetBlendConstants( commandBuffer, blendConstants ); + } - void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds ); - } + void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds ); + } - void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask ); - } + void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask ); + } - void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask ); - } + void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask ); + } - void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference ); - } + void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference ); + } - void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipelineLayout layout, - uint32_t firstSet, - uint32_t descriptorSetCount, - const VkDescriptorSet * pDescriptorSets, - uint32_t dynamicOffsetCount, - const uint32_t * pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT - { - 
return ::vkCmdBindDescriptorSets( - commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets ); - } + void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VkDescriptorSet * pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t * pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorSets( + commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets ); + } - void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType ); - } + void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType ); + } - void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, - uint32_t firstBinding, - uint32_t bindingCount, - const VkBuffer * pBuffers, - const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets ); - } - - void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); - } - - void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, - uint32_t indexCount, - uint32_t instanceCount, - uint32_t firstIndex, - int32_t vertexOffset, - uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); - } + void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets ); + } - void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride ); - } + void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); + } - void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride ); - } + void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, + uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + } - void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t 
groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ ); - } + void + vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride ); + } - void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset ); - } + void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride ); + } - void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy * pRegions ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions ); - } + void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ ); + } - void vkCmdCopyImage( VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); - } + void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset ); + } - void vkCmdBlitImage( VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkImageBlit * pRegions, - VkFilter filter ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); - } + void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy * pRegions ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions ); + } - void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, - VkBuffer srcBuffer, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions ); - } + void vkCmdCopyImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + } - void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkBuffer dstBuffer, - uint32_t regionCount, - const VkBufferImageCopy * pRegions ) const 
VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); - } + void vkCmdBlitImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageBlit * pRegions, + VkFilter filter ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); + } - void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void * pData ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData ); - } + void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions ); + } - void - vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data ); - } + void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); + } - void vkCmdClearColorImage( VkCommandBuffer commandBuffer, - VkImage image, - VkImageLayout imageLayout, - const VkClearColorValue * pColor, - uint32_t rangeCount, - const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges ); - } + void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void * pData ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData ); + } - void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, - VkImage image, - VkImageLayout imageLayout, - const VkClearDepthStencilValue * pDepthStencil, - uint32_t rangeCount, - const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges ); - } + void + vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data ); + } - void vkCmdClearAttachments( VkCommandBuffer commandBuffer, - uint32_t attachmentCount, - const VkClearAttachment * pAttachments, - uint32_t rectCount, - const VkClearRect * pRects ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects ); - } + void vkCmdClearColorImage( VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearColorValue * pColor, + uint32_t rangeCount, + const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges ); + } - void vkCmdResolveImage( VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkImageResolve * pRegions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); - } + void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearDepthStencilValue * pDepthStencil, + uint32_t rangeCount, + const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges ); + } - void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetEvent( commandBuffer, event, stageMask ); - } + void vkCmdClearAttachments( VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkClearAttachment * pAttachments, + uint32_t rectCount, + const VkClearRect * pRects ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects ); + } - void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResetEvent( commandBuffer, event, stageMask ); - } + void vkCmdResolveImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageResolve * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + } - void vkCmdWaitEvents( VkCommandBuffer commandBuffer, - uint32_t eventCount, - const VkEvent * pEvents, - VkPipelineStageFlags srcStageMask, - VkPipelineStageFlags dstStageMask, - uint32_t memoryBarrierCount, - const VkMemoryBarrier * pMemoryBarriers, - uint32_t bufferMemoryBarrierCount, - const VkBufferMemoryBarrier * pBufferMemoryBarriers, - uint32_t imageMemoryBarrierCount, - const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWaitEvents( commandBuffer, - eventCount, - pEvents, - srcStageMask, - dstStageMask, - memoryBarrierCount, - pMemoryBarriers, - bufferMemoryBarrierCount, - pBufferMemoryBarriers, - imageMemoryBarrierCount, - pImageMemoryBarriers ); - } + void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetEvent( commandBuffer, event, stageMask ); + } - void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, - VkPipelineStageFlags srcStageMask, - VkPipelineStageFlags dstStageMask, - VkDependencyFlags dependencyFlags, - uint32_t memoryBarrierCount, - const VkMemoryBarrier * pMemoryBarriers, - uint32_t bufferMemoryBarrierCount, - const VkBufferMemoryBarrier * pBufferMemoryBarriers, - uint32_t imageMemoryBarrierCount, - const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPipelineBarrier( commandBuffer, - srcStageMask, - dstStageMask, - dependencyFlags, - memoryBarrierCount, - pMemoryBarriers, - bufferMemoryBarrierCount, - pBufferMemoryBarriers, - imageMemoryBarrierCount, - pImageMemoryBarriers ); 
- } + void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetEvent( commandBuffer, event, stageMask ); + } - void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags ); - } + void vkCmdWaitEvents( VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent * pEvents, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VkMemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWaitEvents( commandBuffer, + eventCount, + pEvents, + srcStageMask, + dstStageMask, + memoryBarrierCount, + pMemoryBarriers, + bufferMemoryBarrierCount, + pBufferMemoryBarriers, + imageMemoryBarrierCount, + pImageMemoryBarriers ); + } - void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndQuery( commandBuffer, queryPool, query ); - } + void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + VkDependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VkMemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPipelineBarrier( commandBuffer, + srcStageMask, + dstStageMask, + dependencyFlags, + memoryBarrierCount, + pMemoryBarriers, + bufferMemoryBarrierCount, + pBufferMemoryBarriers, + imageMemoryBarrierCount, + pImageMemoryBarriers ); + } - void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount ); - } + void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags ); + } - void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, - VkPipelineStageFlagBits pipelineStage, - VkQueryPool queryPool, - uint32_t query ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query ); - } + void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndQuery( commandBuffer, queryPool, query ); + } - void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, - VkQueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount, - VkBuffer dstBuffer, - VkDeviceSize dstOffset, - VkDeviceSize stride, - VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags ); - } + void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetQueryPool( 
commandBuffer, queryPool, firstQuery, queryCount ); + } - void vkCmdPushConstants( VkCommandBuffer commandBuffer, - VkPipelineLayout layout, - VkShaderStageFlags stageFlags, - uint32_t offset, - uint32_t size, - const void * pValues ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues ); - } + void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkQueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query ); + } - void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, - const VkRenderPassBeginInfo * pRenderPassBegin, - VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents ); - } + void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize stride, + VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags ); + } - void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdNextSubpass( commandBuffer, contents ); - } + void vkCmdPushConstants( VkCommandBuffer commandBuffer, + VkPipelineLayout layout, + VkShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void * pValues ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues ); + } - void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndRenderPass( commandBuffer ); - } + void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents ); + } - void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers ); - } + void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdNextSubpass( commandBuffer, contents ); + } - //=== VK_VERSION_1_1 === + void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderPass( commandBuffer ); + } - VkResult vkEnumerateInstanceVersion( uint32_t * pApiVersion ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumerateInstanceVersion( pApiVersion ); - } + void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers ); + } - VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos ); - } + //=== VK_VERSION_1_1 === - VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return 
::vkBindImageMemory2( device, bindInfoCount, pBindInfos ); - } + VkResult vkEnumerateInstanceVersion( uint32_t * pApiVersion ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateInstanceVersion( pApiVersion ); + } - void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, - uint32_t heapIndex, - uint32_t localDeviceIndex, - uint32_t remoteDeviceIndex, - VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); - } + VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos ); + } - void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDeviceMask( commandBuffer, deviceMask ); - } + VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos ); + } - void vkCmdDispatchBase( VkCommandBuffer commandBuffer, - uint32_t baseGroupX, - uint32_t baseGroupY, - uint32_t baseGroupZ, - uint32_t groupCountX, - uint32_t groupCountY, - uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); - } + void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); + } - VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, - uint32_t * pPhysicalDeviceGroupCount, - VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); - } + void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDeviceMask( commandBuffer, deviceMask ); + } - void vkGetImageMemoryRequirements2( VkDevice device, - const VkImageMemoryRequirementsInfo2 * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements ); - } + void vkCmdDispatchBase( VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } - void vkGetBufferMemoryRequirements2( VkDevice device, - const VkBufferMemoryRequirementsInfo2 * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements ); - } + VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, + uint32_t * pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, 
pPhysicalDeviceGroupProperties ); + } - void vkGetImageSparseMemoryRequirements2( VkDevice device, - const VkImageSparseMemoryRequirementsInfo2 * pInfo, - uint32_t * pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); - } + void vkGetImageMemoryRequirements2( VkDevice device, + const VkImageMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements ); + } - void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures ); - } + void vkGetBufferMemoryRequirements2( VkDevice device, + const VkBufferMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements ); + } - void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties ); - } + void vkGetImageSparseMemoryRequirements2( VkDevice device, + const VkImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } - void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, - VkFormat format, - VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties ); - } + void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures ); + } - VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, - VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties ); - } + void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties ); + } - void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, - uint32_t * pQueueFamilyPropertyCount, - VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); - } + void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties ); + } - void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceMemoryProperties2 * 
pMemoryProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties ); - } + VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties ); + } - void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, - uint32_t * pPropertyCount, - VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); - } + void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + } - void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkTrimCommandPool( device, commandPool, flags ); - } + void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties ); + } - void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2 * pQueueInfo, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue ); - } + void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); + } - VkResult vkCreateSamplerYcbcrConversion( VkDevice device, - const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion ); - } + void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkTrimCommandPool( device, commandPool, flags ); + } - void vkDestroySamplerYcbcrConversion( VkDevice device, - VkSamplerYcbcrConversion ycbcrConversion, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator ); - } + void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2 * pQueueInfo, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue ); + } - VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, - const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, 
pAllocator, pDescriptorUpdateTemplate ); - } + VkResult vkCreateSamplerYcbcrConversion( VkDevice device, + const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion ); + } - void vkDestroyDescriptorUpdateTemplate( VkDevice device, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, + void vkDestroySamplerYcbcrConversion( VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator ); - } + { + return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator ); + } - void vkUpdateDescriptorSetWithTemplate( VkDevice device, - VkDescriptorSet descriptorSet, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - const void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData ); - } + VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); + } - void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, - VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); - } + void vkDestroyDescriptorUpdateTemplate( VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator ); + } - void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, - VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); - } + void vkUpdateDescriptorSetWithTemplate( VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData ); + } - void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, - VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); - } + void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalBufferProperties( 
physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); + } - void vkGetDescriptorSetLayoutSupport( VkDevice device, - const VkDescriptorSetLayoutCreateInfo * pCreateInfo, - VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport ); - } + void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); + } - //=== VK_VERSION_1_2 === + void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); + } - void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } + void vkGetDescriptorSetLayoutSupport( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport ); + } - void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndexedIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } + //=== VK_VERSION_1_2 === - VkResult vkCreateRenderPass2( VkDevice device, - const VkRenderPassCreateInfo2 * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateRenderPass2( device, pCreateInfo, pAllocator, pRenderPass ); - } + void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } - void vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, - const VkRenderPassBeginInfo * pRenderPassBegin, - const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginRenderPass2( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); - } + void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } - void vkCmdNextSubpass2( VkCommandBuffer commandBuffer, - const VkSubpassBeginInfo * 
pSubpassBeginInfo, - const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); - } - - void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo ); - } + VkResult vkCreateRenderPass2( VkDevice device, + const VkRenderPassCreateInfo2 * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRenderPass2( device, pCreateInfo, pAllocator, pRenderPass ); + } - void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount ); - } + void vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderPass2( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); + } - VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreCounterValue( device, semaphore, pValue ); - } + void vkCmdNextSubpass2( VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo * pSubpassBeginInfo, + const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); + } - VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWaitSemaphores( device, pWaitInfo, timeout ); - } + void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo ); + } - VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSignalSemaphore( device, pSignalInfo ); - } + void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount ); + } - VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferDeviceAddress( device, pInfo ); - } + VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreCounterValue( device, semaphore, pValue ); + } - uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferOpaqueCaptureAddress( device, pInfo ); - } + VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitSemaphores( device, pWaitInfo, timeout ); + } - uint64_t vkGetDeviceMemoryOpaqueCaptureAddress( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceMemoryOpaqueCaptureAddress( device, pInfo ); - } + VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkSignalSemaphore( device, pSignalInfo ); + } - //=== VK_VERSION_1_3 === + VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferDeviceAddress( device, pInfo ); + } - VkResult vkGetPhysicalDeviceToolProperties( VkPhysicalDevice physicalDevice, - uint32_t * pToolCount, - VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceToolProperties( physicalDevice, pToolCount, pToolProperties ); - } + uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferOpaqueCaptureAddress( device, pInfo ); + } - VkResult vkCreatePrivateDataSlot( VkDevice device, - const VkPrivateDataSlotCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreatePrivateDataSlot( device, pCreateInfo, pAllocator, pPrivateDataSlot ); - } + uint64_t vkGetDeviceMemoryOpaqueCaptureAddress( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMemoryOpaqueCaptureAddress( device, pInfo ); + } - void vkDestroyPrivateDataSlot( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPrivateDataSlot( device, privateDataSlot, pAllocator ); - } + //=== VK_VERSION_1_3 === - VkResult vkSetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkSetPrivateData( device, objectType, objectHandle, privateDataSlot, data ); - } + VkResult vkGetPhysicalDeviceToolProperties( VkPhysicalDevice physicalDevice, + uint32_t * pToolCount, + VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceToolProperties( physicalDevice, pToolCount, pToolProperties ); + } - void vkGetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkGetPrivateData( device, objectType, objectHandle, privateDataSlot, pData ); - } + VkResult vkCreatePrivateDataSlot( VkDevice device, + const VkPrivateDataSlotCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePrivateDataSlot( device, pCreateInfo, pAllocator, pPrivateDataSlot ); + } - void vkCmdSetEvent2( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetEvent2( commandBuffer, event, pDependencyInfo ); - } + void vkDestroyPrivateDataSlot( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPrivateDataSlot( device, privateDataSlot, pAllocator ); + } - void vkCmdResetEvent2( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResetEvent2( commandBuffer, event, stageMask ); - } + VkResult vkSetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const + VULKAN_HPP_NOEXCEPT + { + 
return ::vkSetPrivateData( device, objectType, objectHandle, privateDataSlot, data ); + } - void vkCmdWaitEvents2( VkCommandBuffer commandBuffer, - uint32_t eventCount, - const VkEvent * pEvents, - const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWaitEvents2( commandBuffer, eventCount, pEvents, pDependencyInfos ); - } + void vkGetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPrivateData( device, objectType, objectHandle, privateDataSlot, pData ); + } - void vkCmdPipelineBarrier2( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPipelineBarrier2( commandBuffer, pDependencyInfo ); - } + void vkCmdSetEvent2( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetEvent2( commandBuffer, event, pDependencyInfo ); + } - void vkCmdWriteTimestamp2( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteTimestamp2( commandBuffer, stage, queryPool, query ); - } + void vkCmdResetEvent2( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetEvent2( commandBuffer, event, stageMask ); + } - VkResult vkQueueSubmit2( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueSubmit2( queue, submitCount, pSubmits, fence ); - } + void vkCmdWaitEvents2( VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent * pEvents, + const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWaitEvents2( commandBuffer, eventCount, pEvents, pDependencyInfos ); + } - void vkCmdCopyBuffer2( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBuffer2( commandBuffer, pCopyBufferInfo ); - } + void vkCmdPipelineBarrier2( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPipelineBarrier2( commandBuffer, pDependencyInfo ); + } - void vkCmdCopyImage2( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImage2( commandBuffer, pCopyImageInfo ); - } + void vkCmdWriteTimestamp2( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteTimestamp2( commandBuffer, stage, queryPool, query ); + } - void vkCmdCopyBufferToImage2( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBufferToImage2( commandBuffer, pCopyBufferToImageInfo ); - } + VkResult vkQueueSubmit2( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSubmit2( queue, submitCount, pSubmits, fence ); + } - void vkCmdCopyImageToBuffer2( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImageToBuffer2( commandBuffer, pCopyImageToBufferInfo ); - } + void vkCmdCopyBuffer2( VkCommandBuffer commandBuffer, const 
VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBuffer2( commandBuffer, pCopyBufferInfo ); + } - void vkCmdBlitImage2( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBlitImage2( commandBuffer, pBlitImageInfo ); - } + void vkCmdCopyImage2( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImage2( commandBuffer, pCopyImageInfo ); + } - void vkCmdResolveImage2( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResolveImage2( commandBuffer, pResolveImageInfo ); - } + void vkCmdCopyBufferToImage2( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBufferToImage2( commandBuffer, pCopyBufferToImageInfo ); + } - void vkCmdBeginRendering( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginRendering( commandBuffer, pRenderingInfo ); - } + void vkCmdCopyImageToBuffer2( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImageToBuffer2( commandBuffer, pCopyImageToBufferInfo ); + } - void vkCmdEndRendering( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndRendering( commandBuffer ); - } + void vkCmdBlitImage2( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBlitImage2( commandBuffer, pBlitImageInfo ); + } - void vkCmdSetCullMode( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCullMode( commandBuffer, cullMode ); - } + void vkCmdResolveImage2( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResolveImage2( commandBuffer, pResolveImageInfo ); + } - void vkCmdSetFrontFace( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetFrontFace( commandBuffer, frontFace ); - } + void vkCmdBeginRendering( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRendering( commandBuffer, pRenderingInfo ); + } - void vkCmdSetPrimitiveTopology( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPrimitiveTopology( commandBuffer, primitiveTopology ); - } + void vkCmdEndRendering( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRendering( commandBuffer ); + } - void vkCmdSetViewportWithCount( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportWithCount( commandBuffer, viewportCount, pViewports ); - } + void vkCmdSetCullMode( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCullMode( commandBuffer, cullMode ); + } - void vkCmdSetScissorWithCount( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetScissorWithCount( commandBuffer, scissorCount, pScissors ); - } + void vkCmdSetFrontFace( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFrontFace( commandBuffer, frontFace ); + } - void vkCmdBindVertexBuffers2( VkCommandBuffer commandBuffer, - uint32_t firstBinding, - uint32_t bindingCount, - const VkBuffer * pBuffers, - const VkDeviceSize * pOffsets, - const VkDeviceSize * pSizes, - const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindVertexBuffers2( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); - } + void vkCmdSetPrimitiveTopology( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveTopology( commandBuffer, primitiveTopology ); + } - void vkCmdSetDepthTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthTestEnable( commandBuffer, depthTestEnable ); - } + void vkCmdSetViewportWithCount( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWithCount( commandBuffer, viewportCount, pViewports ); + } - void vkCmdSetDepthWriteEnable( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthWriteEnable( commandBuffer, depthWriteEnable ); - } + void vkCmdSetScissorWithCount( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetScissorWithCount( commandBuffer, scissorCount, pScissors ); + } - void vkCmdSetDepthCompareOp( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthCompareOp( commandBuffer, depthCompareOp ); - } + void vkCmdBindVertexBuffers2( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets, + const VkDeviceSize * pSizes, + const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindVertexBuffers2( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); + } - void vkCmdSetDepthBoundsTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBoundsTestEnable( commandBuffer, depthBoundsTestEnable ); - } + void vkCmdSetDepthTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthTestEnable( commandBuffer, depthTestEnable ); + } - void vkCmdSetStencilTestEnable( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilTestEnable( commandBuffer, stencilTestEnable ); - } + void vkCmdSetDepthWriteEnable( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthWriteEnable( commandBuffer, depthWriteEnable ); + } - void vkCmdSetStencilOp( VkCommandBuffer commandBuffer, - VkStencilFaceFlags faceMask, - VkStencilOp failOp, - VkStencilOp passOp, - VkStencilOp depthFailOp, - VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilOp( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); - } + void vkCmdSetDepthCompareOp( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthCompareOp( commandBuffer, depthCompareOp ); + } - void vkCmdSetRasterizerDiscardEnable( VkCommandBuffer commandBuffer, VkBool32 
rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRasterizerDiscardEnable( commandBuffer, rasterizerDiscardEnable ); - } + void vkCmdSetDepthBoundsTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBoundsTestEnable( commandBuffer, depthBoundsTestEnable ); + } - void vkCmdSetDepthBiasEnable( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBiasEnable( commandBuffer, depthBiasEnable ); - } + void vkCmdSetStencilTestEnable( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilTestEnable( commandBuffer, stencilTestEnable ); + } - void vkCmdSetPrimitiveRestartEnable( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPrimitiveRestartEnable( commandBuffer, primitiveRestartEnable ); - } + void vkCmdSetStencilOp( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilOp( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); + } - void vkGetDeviceBufferMemoryRequirements( VkDevice device, - const VkDeviceBufferMemoryRequirements * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceBufferMemoryRequirements( device, pInfo, pMemoryRequirements ); - } + void vkCmdSetRasterizerDiscardEnable( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRasterizerDiscardEnable( commandBuffer, rasterizerDiscardEnable ); + } - void vkGetDeviceImageMemoryRequirements( VkDevice device, - const VkDeviceImageMemoryRequirements * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceImageMemoryRequirements( device, pInfo, pMemoryRequirements ); - } + void vkCmdSetDepthBiasEnable( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBiasEnable( commandBuffer, depthBiasEnable ); + } - void vkGetDeviceImageSparseMemoryRequirements( VkDevice device, - const VkDeviceImageMemoryRequirements * pInfo, - uint32_t * pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceImageSparseMemoryRequirements( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); - } + void vkCmdSetPrimitiveRestartEnable( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveRestartEnable( commandBuffer, primitiveRestartEnable ); + } - //=== VK_KHR_surface === + void vkGetDeviceBufferMemoryRequirements( VkDevice device, + const VkDeviceBufferMemoryRequirements * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceBufferMemoryRequirements( device, pInfo, pMemoryRequirements ); + } - void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySurfaceKHR( instance, surface, pAllocator ); - } + void vkGetDeviceImageMemoryRequirements( VkDevice device, + const VkDeviceImageMemoryRequirements * pInfo, + VkMemoryRequirements2 * pMemoryRequirements 
) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageMemoryRequirements( device, pInfo, pMemoryRequirements ); + } - VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - VkSurfaceKHR surface, - VkBool32 * pSupported ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported ); - } + void vkGetDeviceImageSparseMemoryRequirements( VkDevice device, + const VkDeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageSparseMemoryRequirements( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } - VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - VkSurfaceCapabilitiesKHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities ); - } + //=== VK_VERSION_1_4 === - VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t * pSurfaceFormatCount, - VkSurfaceFormatKHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats ); - } + void vkCmdSetLineStipple( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineStipple( commandBuffer, lineStippleFactor, lineStipplePattern ); + } - VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t * pPresentModeCount, - VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes ); - } + VkResult vkMapMemory2( VkDevice device, const VkMemoryMapInfo * pMemoryMapInfo, void ** ppData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMapMemory2( device, pMemoryMapInfo, ppData ); + } - //=== VK_KHR_swapchain === + VkResult vkUnmapMemory2( VkDevice device, const VkMemoryUnmapInfo * pMemoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUnmapMemory2( device, pMemoryUnmapInfo ); + } - VkResult vkCreateSwapchainKHR( VkDevice device, - const VkSwapchainCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSwapchainKHR * pSwapchain ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain ); - } + void vkCmdBindIndexBuffer2( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindIndexBuffer2( commandBuffer, buffer, offset, size, indexType ); + } - void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySwapchainKHR( device, swapchain, pAllocator ); - } + void vkGetRenderingAreaGranularity( VkDevice device, const VkRenderingAreaInfo * pRenderingAreaInfo, VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRenderingAreaGranularity( device, pRenderingAreaInfo, pGranularity ); + } - VkResult vkGetSwapchainImagesKHR( VkDevice device, - VkSwapchainKHR swapchain, - uint32_t * 
pSwapchainImageCount, - VkImage * pSwapchainImages ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages ); - } + void vkGetDeviceImageSubresourceLayout( VkDevice device, + const VkDeviceImageSubresourceInfo * pInfo, + VkSubresourceLayout2 * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageSubresourceLayout( device, pInfo, pLayout ); + } - VkResult vkAcquireNextImageKHR( - VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex ); - } + void vkGetImageSubresourceLayout2( VkDevice device, + VkImage image, + const VkImageSubresource2 * pSubresource, + VkSubresourceLayout2 * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSubresourceLayout2( device, image, pSubresource, pLayout ); + } - VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR * pPresentInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueuePresentKHR( queue, pPresentInfo ); - } + void vkCmdPushDescriptorSet( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet * pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSet( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites ); + } - VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, - VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities ); - } + void vkCmdPushDescriptorSetWithTemplate( VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSetWithTemplate( commandBuffer, descriptorUpdateTemplate, layout, set, pData ); + } - VkResult - vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes ); - } + void vkCmdSetRenderingAttachmentLocations( VkCommandBuffer commandBuffer, + const VkRenderingAttachmentLocationInfo * pLocationInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRenderingAttachmentLocations( commandBuffer, pLocationInfo ); + } - VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t * pRectCount, - VkRect2D * pRects ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects ); - } + void vkCmdSetRenderingInputAttachmentIndices( VkCommandBuffer commandBuffer, + const VkRenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRenderingInputAttachmentIndices( commandBuffer, pInputAttachmentIndexInfo ); + } - VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR * pAcquireInfo, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex ); - } + void vkCmdBindDescriptorSets2( VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfo * 
pBindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorSets2( commandBuffer, pBindDescriptorSetsInfo ); + } - //=== VK_KHR_display === + void vkCmdPushConstants2( VkCommandBuffer commandBuffer, const VkPushConstantsInfo * pPushConstantsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushConstants2( commandBuffer, pPushConstantsInfo ); + } - VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, - uint32_t * pPropertyCount, - VkDisplayPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); - } + void vkCmdPushDescriptorSet2( VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfo * pPushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSet2( commandBuffer, pPushDescriptorSetInfo ); + } - VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, - uint32_t * pPropertyCount, - VkDisplayPlanePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties ); - } + void vkCmdPushDescriptorSetWithTemplate2( VkCommandBuffer commandBuffer, + const VkPushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSetWithTemplate2( commandBuffer, pPushDescriptorSetWithTemplateInfo ); + } - VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, - uint32_t planeIndex, - uint32_t * pDisplayCount, - VkDisplayKHR * pDisplays ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays ); - } + VkResult vkCopyMemoryToImage( VkDevice device, const VkCopyMemoryToImageInfo * pCopyMemoryToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToImage( device, pCopyMemoryToImageInfo ); + } - VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - uint32_t * pPropertyCount, - VkDisplayModePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties ); - } + VkResult vkCopyImageToMemory( VkDevice device, const VkCopyImageToMemoryInfo * pCopyImageToMemoryInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyImageToMemory( device, pCopyImageToMemoryInfo ); + } - VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - const VkDisplayModeCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDisplayModeKHR * pMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode ); - } + VkResult vkCopyImageToImage( VkDevice device, const VkCopyImageToImageInfo * pCopyImageToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyImageToImage( device, pCopyImageToImageInfo ); + } - VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, - VkDisplayModeKHR mode, - uint32_t planeIndex, - VkDisplayPlaneCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities ); - } + VkResult + vkTransitionImageLayout( VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfo * pTransitions ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkTransitionImageLayout( device, transitionCount, pTransitions ); + } - VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, - const VkDisplaySurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } + //=== VK_KHR_surface === + + void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySurfaceKHR( instance, surface, pAllocator ); + } - //=== VK_KHR_display_swapchain === + VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + VkSurfaceKHR surface, + VkBool32 * pSupported ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported ); + } - VkResult vkCreateSharedSwapchainsKHR( VkDevice device, - uint32_t swapchainCount, - const VkSwapchainCreateInfoKHR * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkSwapchainKHR * pSwapchains ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains ); - } + VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilitiesKHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities ); + } + + VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t * pSurfaceFormatCount, + VkSurfaceFormatKHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats ); + } + + VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t * pPresentModeCount, + VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes ); + } + + //=== VK_KHR_swapchain === + + VkResult vkCreateSwapchainKHR( VkDevice device, + const VkSwapchainCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSwapchainKHR * pSwapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain ); + } + + void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySwapchainKHR( device, swapchain, pAllocator ); + } + + VkResult vkGetSwapchainImagesKHR( VkDevice device, + VkSwapchainKHR swapchain, + uint32_t * pSwapchainImageCount, + VkImage * pSwapchainImages ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages ); + } + + VkResult vkAcquireNextImageKHR( + VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex ); + } + + VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR * pPresentInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueuePresentKHR( queue, pPresentInfo 
); + } + + VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, + VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities ); + } + + VkResult + vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes ); + } + + VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t * pRectCount, + VkRect2D * pRects ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects ); + } + + VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR * pAcquireInfo, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex ); + } + + //=== VK_KHR_display === + + VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPlanePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, + uint32_t planeIndex, + uint32_t * pDisplayCount, + VkDisplayKHR * pDisplays ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays ); + } + + VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t * pPropertyCount, + VkDisplayModePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties ); + } + + VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + const VkDisplayModeCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDisplayModeKHR * pMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode ); + } + + VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, + VkDisplayModeKHR mode, + uint32_t planeIndex, + VkDisplayPlaneCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities ); + } + + VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, + const VkDisplaySurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } + + //=== VK_KHR_display_swapchain === + + VkResult vkCreateSharedSwapchainsKHR( VkDevice device, + uint32_t swapchainCount, + const VkSwapchainCreateInfoKHR * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkSwapchainKHR * pSwapchains ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains ); + } # if defined( VK_USE_PLATFORM_XLIB_KHR ) - //=== VK_KHR_xlib_surface === + //=== VK_KHR_xlib_surface === - VkResult vkCreateXlibSurfaceKHR( VkInstance instance, - const VkXlibSurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkCreateXlibSurfaceKHR( VkInstance instance, + const VkXlibSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } - VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - Display * dpy, - VisualID visualID ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID ); - } + VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + Display * dpy, + VisualID visualID ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID ); + } # endif /*VK_USE_PLATFORM_XLIB_KHR*/ # if defined( VK_USE_PLATFORM_XCB_KHR ) - //=== VK_KHR_xcb_surface === + //=== VK_KHR_xcb_surface === - VkResult vkCreateXcbSurfaceKHR( VkInstance instance, - const VkXcbSurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkCreateXcbSurfaceKHR( VkInstance instance, + const VkXcbSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } - VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - xcb_connection_t * connection, - xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id ); - } + VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + xcb_connection_t * connection, + xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id ); + } # endif /*VK_USE_PLATFORM_XCB_KHR*/ # if defined( VK_USE_PLATFORM_WAYLAND_KHR ) - //=== VK_KHR_wayland_surface === + //=== VK_KHR_wayland_surface === - VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, - const VkWaylandSurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, + const VkWaylandSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } - VkBool32 
vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - struct wl_display * display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display ); - } + VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct wl_display * display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display ); + } # endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ # if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_KHR_android_surface === + //=== VK_KHR_android_surface === - VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, - const VkAndroidSurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, + const VkAndroidSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ # if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_win32_surface === + //=== VK_KHR_win32_surface === - VkResult vkCreateWin32SurfaceKHR( VkInstance instance, - const VkWin32SurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkCreateWin32SurfaceKHR( VkInstance instance, + const VkWin32SurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } - VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex ); - } + VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex ); + } # endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_EXT_debug_report === + //=== VK_EXT_debug_report === - VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, - const VkDebugReportCallbackCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDebugReportCallbackEXT * pCallback ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback ); - } + VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, + const VkDebugReportCallbackCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDebugReportCallbackEXT * pCallback ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback ); + } - void vkDestroyDebugReportCallbackEXT( VkInstance instance, - VkDebugReportCallbackEXT callback, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return 
::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator ); - } + void vkDestroyDebugReportCallbackEXT( VkInstance instance, + VkDebugReportCallbackEXT callback, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator ); + } - void vkDebugReportMessageEXT( VkInstance instance, - VkDebugReportFlagsEXT flags, - VkDebugReportObjectTypeEXT objectType, - uint64_t object, - size_t location, - int32_t messageCode, - const char * pLayerPrefix, - const char * pMessage ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage ); - } + void vkDebugReportMessageEXT( VkInstance instance, + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage ); + } - //=== VK_EXT_debug_marker === + //=== VK_EXT_debug_marker === - VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo ); - } + VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo ); + } - VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo ); - } + VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo ); + } - void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo ); - } + void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo ); + } - void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDebugMarkerEndEXT( commandBuffer ); - } + void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDebugMarkerEndEXT( commandBuffer ); + } - void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo ); - } + void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo ); + } -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_queue === + //=== VK_KHR_video_queue === - VkResult vkGetPhysicalDeviceVideoCapabilitiesKHR( VkPhysicalDevice physicalDevice, - const VkVideoProfileInfoKHR * pVideoProfile, - VkVideoCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceVideoCapabilitiesKHR( physicalDevice, pVideoProfile, 
pCapabilities ); - } + VkResult vkGetPhysicalDeviceVideoCapabilitiesKHR( VkPhysicalDevice physicalDevice, + const VkVideoProfileInfoKHR * pVideoProfile, + VkVideoCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceVideoCapabilitiesKHR( physicalDevice, pVideoProfile, pCapabilities ); + } - VkResult vkGetPhysicalDeviceVideoFormatPropertiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, - uint32_t * pVideoFormatPropertyCount, - VkVideoFormatPropertiesKHR * pVideoFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceVideoFormatPropertiesKHR( physicalDevice, pVideoFormatInfo, pVideoFormatPropertyCount, pVideoFormatProperties ); - } + VkResult vkGetPhysicalDeviceVideoFormatPropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, + uint32_t * pVideoFormatPropertyCount, + VkVideoFormatPropertiesKHR * pVideoFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceVideoFormatPropertiesKHR( physicalDevice, pVideoFormatInfo, pVideoFormatPropertyCount, pVideoFormatProperties ); + } - VkResult vkCreateVideoSessionKHR( VkDevice device, - const VkVideoSessionCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkVideoSessionKHR * pVideoSession ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateVideoSessionKHR( device, pCreateInfo, pAllocator, pVideoSession ); - } + VkResult vkCreateVideoSessionKHR( VkDevice device, + const VkVideoSessionCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkVideoSessionKHR * pVideoSession ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateVideoSessionKHR( device, pCreateInfo, pAllocator, pVideoSession ); + } - void vkDestroyVideoSessionKHR( VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyVideoSessionKHR( device, videoSession, pAllocator ); - } + void vkDestroyVideoSessionKHR( VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyVideoSessionKHR( device, videoSession, pAllocator ); + } - VkResult vkGetVideoSessionMemoryRequirementsKHR( VkDevice device, - VkVideoSessionKHR videoSession, - uint32_t * pMemoryRequirementsCount, - VkVideoSessionMemoryRequirementsKHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetVideoSessionMemoryRequirementsKHR( device, videoSession, pMemoryRequirementsCount, pMemoryRequirements ); - } + VkResult vkGetVideoSessionMemoryRequirementsKHR( VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t * pMemoryRequirementsCount, + VkVideoSessionMemoryRequirementsKHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetVideoSessionMemoryRequirementsKHR( device, videoSession, pMemoryRequirementsCount, pMemoryRequirements ); + } - VkResult vkBindVideoSessionMemoryKHR( VkDevice device, - VkVideoSessionKHR videoSession, - uint32_t bindSessionMemoryInfoCount, - const VkBindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindVideoSessionMemoryKHR( device, videoSession, bindSessionMemoryInfoCount, pBindSessionMemoryInfos ); - } + VkResult vkBindVideoSessionMemoryKHR( VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t bindSessionMemoryInfoCount, + const VkBindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos ) const VULKAN_HPP_NOEXCEPT + { + 
return ::vkBindVideoSessionMemoryKHR( device, videoSession, bindSessionMemoryInfoCount, pBindSessionMemoryInfos ); + } - VkResult vkCreateVideoSessionParametersKHR( VkDevice device, - const VkVideoSessionParametersCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkVideoSessionParametersKHR * pVideoSessionParameters ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateVideoSessionParametersKHR( device, pCreateInfo, pAllocator, pVideoSessionParameters ); - } + VkResult vkCreateVideoSessionParametersKHR( VkDevice device, + const VkVideoSessionParametersCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkVideoSessionParametersKHR * pVideoSessionParameters ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateVideoSessionParametersKHR( device, pCreateInfo, pAllocator, pVideoSessionParameters ); + } - VkResult vkUpdateVideoSessionParametersKHR( VkDevice device, - VkVideoSessionParametersKHR videoSessionParameters, - const VkVideoSessionParametersUpdateInfoKHR * pUpdateInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateVideoSessionParametersKHR( device, videoSessionParameters, pUpdateInfo ); - } + VkResult vkUpdateVideoSessionParametersKHR( VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkVideoSessionParametersUpdateInfoKHR * pUpdateInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateVideoSessionParametersKHR( device, videoSessionParameters, pUpdateInfo ); + } - void vkDestroyVideoSessionParametersKHR( VkDevice device, - VkVideoSessionParametersKHR videoSessionParameters, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyVideoSessionParametersKHR( device, videoSessionParameters, pAllocator ); - } + void vkDestroyVideoSessionParametersKHR( VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyVideoSessionParametersKHR( device, videoSessionParameters, pAllocator ); + } - void vkCmdBeginVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR * pBeginInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginVideoCodingKHR( commandBuffer, pBeginInfo ); - } + void vkCmdBeginVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR * pBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginVideoCodingKHR( commandBuffer, pBeginInfo ); + } - void vkCmdEndVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR * pEndCodingInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndVideoCodingKHR( commandBuffer, pEndCodingInfo ); - } + void vkCmdEndVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR * pEndCodingInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndVideoCodingKHR( commandBuffer, pEndCodingInfo ); + } - void vkCmdControlVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR * pCodingControlInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdControlVideoCodingKHR( commandBuffer, pCodingControlInfo ); - } -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + void vkCmdControlVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR * pCodingControlInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdControlVideoCodingKHR( commandBuffer, pCodingControlInfo ); + } -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_decode_queue === + //=== VK_KHR_video_decode_queue === - void vkCmdDecodeVideoKHR( VkCommandBuffer 
commandBuffer, const VkVideoDecodeInfoKHR * pFrameInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDecodeVideoKHR( commandBuffer, pFrameInfo ); - } -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + void vkCmdDecodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR * pDecodeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDecodeVideoKHR( commandBuffer, pDecodeInfo ); + } + + //=== VK_EXT_transform_feedback === - //=== VK_EXT_transform_feedback === + void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets, + const VkDeviceSize * pSizes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes ); + } - void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, - uint32_t firstBinding, - uint32_t bindingCount, - const VkBuffer * pBuffers, - const VkDeviceSize * pOffsets, - const VkDeviceSize * pSizes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes ); - } + void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer * pCounterBuffers, + const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); + } - void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, + void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer * pCounterBuffers, const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); - } + { + return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); + } - void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, - uint32_t firstCounterBuffer, - uint32_t counterBufferCount, - const VkBuffer * pCounterBuffers, - const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); - } + void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index ); + } - void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index ); - } + void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index ); + } - void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndQueryIndexedEXT( 
commandBuffer, queryPool, query, index ); - } + void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, + uint32_t instanceCount, + uint32_t firstInstance, + VkBuffer counterBuffer, + VkDeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride ); + } - void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, - uint32_t instanceCount, - uint32_t firstInstance, - VkBuffer counterBuffer, - VkDeviceSize counterBufferOffset, - uint32_t counterOffset, - uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride ); - } + //=== VK_NVX_binary_import === - //=== VK_NVX_binary_import === + VkResult vkCreateCuModuleNVX( VkDevice device, + const VkCuModuleCreateInfoNVX * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCuModuleNVX * pModule ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCuModuleNVX( device, pCreateInfo, pAllocator, pModule ); + } - VkResult vkCreateCuModuleNVX( VkDevice device, - const VkCuModuleCreateInfoNVX * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkCuModuleNVX * pModule ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateCuModuleNVX( device, pCreateInfo, pAllocator, pModule ); - } + VkResult vkCreateCuFunctionNVX( VkDevice device, + const VkCuFunctionCreateInfoNVX * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCuFunctionNVX * pFunction ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCuFunctionNVX( device, pCreateInfo, pAllocator, pFunction ); + } - VkResult vkCreateCuFunctionNVX( VkDevice device, - const VkCuFunctionCreateInfoNVX * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkCuFunctionNVX * pFunction ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateCuFunctionNVX( device, pCreateInfo, pAllocator, pFunction ); - } + void vkDestroyCuModuleNVX( VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCuModuleNVX( device, module, pAllocator ); + } - void vkDestroyCuModuleNVX( VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyCuModuleNVX( device, module, pAllocator ); - } + void vkDestroyCuFunctionNVX( VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCuFunctionNVX( device, function, pAllocator ); + } - void vkDestroyCuFunctionNVX( VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyCuFunctionNVX( device, function, pAllocator ); - } + void vkCmdCuLaunchKernelNVX( VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCuLaunchKernelNVX( commandBuffer, pLaunchInfo ); + } - void vkCmdCuLaunchKernelNVX( VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCuLaunchKernelNVX( commandBuffer, pLaunchInfo ); - } + //=== VK_NVX_image_view_handle === - //=== VK_NVX_image_view_handle === + uint32_t vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkGetImageViewHandleNVX( device, pInfo ); + } - uint32_t vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageViewHandleNVX( device, pInfo ); - } + uint64_t vkGetImageViewHandle64NVX( VkDevice device, const VkImageViewHandleInfoNVX * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageViewHandle64NVX( device, pInfo ); + } - VkResult vkGetImageViewAddressNVX( VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageViewAddressNVX( device, imageView, pProperties ); - } + VkResult vkGetImageViewAddressNVX( VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageViewAddressNVX( device, imageView, pProperties ); + } - //=== VK_AMD_draw_indirect_count === + //=== VK_AMD_draw_indirect_count === - void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } + void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } - void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } + void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } - //=== VK_AMD_shader_info === + //=== VK_AMD_shader_info === - VkResult vkGetShaderInfoAMD( VkDevice device, - VkPipeline pipeline, - VkShaderStageFlagBits shaderStage, - VkShaderInfoTypeAMD infoType, - size_t * pInfoSize, - void * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo ); - } + VkResult vkGetShaderInfoAMD( VkDevice device, + VkPipeline pipeline, + VkShaderStageFlagBits shaderStage, + VkShaderInfoTypeAMD infoType, + size_t * pInfoSize, + void * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo ); + } - //=== VK_KHR_dynamic_rendering === + //=== VK_KHR_dynamic_rendering === - void vkCmdBeginRenderingKHR( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginRenderingKHR( commandBuffer, pRenderingInfo ); - } + void vkCmdBeginRenderingKHR( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkCmdBeginRenderingKHR( commandBuffer, pRenderingInfo ); + } - void vkCmdEndRenderingKHR( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndRenderingKHR( commandBuffer ); - } + void vkCmdEndRenderingKHR( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderingKHR( commandBuffer ); + } # if defined( VK_USE_PLATFORM_GGP ) - //=== VK_GGP_stream_descriptor_surface === + //=== VK_GGP_stream_descriptor_surface === - VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, - const VkStreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, + const VkStreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface ); + } # endif /*VK_USE_PLATFORM_GGP*/ - //=== VK_NV_external_memory_capabilities === + //=== VK_NV_external_memory_capabilities === - VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, - VkFormat format, - VkImageType type, - VkImageTiling tiling, - VkImageUsageFlags usage, - VkImageCreateFlags flags, - VkExternalMemoryHandleTypeFlagsNV externalHandleType, - VkExternalImageFormatPropertiesNV * pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( - physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties ); - } + VkResult + vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkExternalMemoryHandleTypeFlagsNV externalHandleType, + VkExternalImageFormatPropertiesNV * pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties ); + } # if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_external_memory_win32 === + //=== VK_NV_external_memory_win32 === - VkResult vkGetMemoryWin32HandleNV( VkDevice device, - VkDeviceMemory memory, - VkExternalMemoryHandleTypeFlagsNV handleType, - HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle ); - } + VkResult vkGetMemoryWin32HandleNV( VkDevice device, + VkDeviceMemory memory, + VkExternalMemoryHandleTypeFlagsNV handleType, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle ); + } # endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_get_physical_device_properties2 === + //=== VK_KHR_get_physical_device_properties2 === - void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures ); - } + void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFeatures2KHR( 
physicalDevice, pFeatures ); + } - void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties ); - } + void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties ); + } - void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, - VkFormat format, - VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties ); - } + void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties ); + } - VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, - VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties ); - } + VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties ); + } - void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, - uint32_t * pQueueFamilyPropertyCount, - VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); - } + void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + } - void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties ); - } + void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties ); + } - void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, - uint32_t * pPropertyCount, - VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); - } + void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + 
VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); + } - //=== VK_KHR_device_group === + //=== VK_KHR_device_group === - void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, - uint32_t heapIndex, - uint32_t localDeviceIndex, - uint32_t remoteDeviceIndex, - VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); - } + void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); + } - void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask ); - } + void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask ); + } - void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, - uint32_t baseGroupX, - uint32_t baseGroupY, - uint32_t baseGroupZ, - uint32_t groupCountX, - uint32_t groupCountY, - uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); - } + void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } # if defined( VK_USE_PLATFORM_VI_NN ) - //=== VK_NN_vi_surface === + //=== VK_NN_vi_surface === - VkResult vkCreateViSurfaceNN( VkInstance instance, - const VkViSurfaceCreateInfoNN * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkCreateViSurfaceNN( VkInstance instance, + const VkViSurfaceCreateInfoNN * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface ); + } # endif /*VK_USE_PLATFORM_VI_NN*/ - //=== VK_KHR_maintenance1 === + //=== VK_KHR_maintenance1 === - void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkTrimCommandPoolKHR( device, commandPool, flags ); - } + void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkTrimCommandPoolKHR( device, commandPool, flags ); + } - //=== VK_KHR_device_group_creation === + //=== VK_KHR_device_group_creation === - VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, - uint32_t * pPhysicalDeviceGroupCount, - VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT - { - return 
::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); - } + VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, + uint32_t * pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); + } - //=== VK_KHR_external_memory_capabilities === + //=== VK_KHR_external_memory_capabilities === - void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, - VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); - } + void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); + } # if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_memory_win32 === + //=== VK_KHR_external_memory_win32 === - VkResult vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); - } + VkResult + vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + } - VkResult vkGetMemoryWin32HandlePropertiesKHR( VkDevice device, - VkExternalMemoryHandleTypeFlagBits handleType, - HANDLE handle, - VkMemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties ); - } + VkResult vkGetMemoryWin32HandlePropertiesKHR( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VkMemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties ); + } # endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_external_memory_fd === + //=== VK_KHR_external_memory_fd === - VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd ); - } + VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd ); + } - VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, - VkExternalMemoryHandleTypeFlagBits handleType, - int fd, - VkMemoryFdPropertiesKHR * pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties ); - } + VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + int fd, + VkMemoryFdPropertiesKHR * pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT + { + 
return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties ); + } - //=== VK_KHR_external_semaphore_capabilities === + //=== VK_KHR_external_semaphore_capabilities === - void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, - VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); - } + void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); + } # if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_semaphore_win32 === + //=== VK_KHR_external_semaphore_win32 === - VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, - const VkImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo ); - } + VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, + const VkImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo ); + } - VkResult - vkGetSemaphoreWin32HandleKHR( VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); - } + VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, + const VkSemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + } # endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_external_semaphore_fd === + //=== VK_KHR_external_semaphore_fd === - VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo ); - } + VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo ); + } - VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd ); - } + VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd ); + } - //=== VK_KHR_push_descriptor === + //=== VK_KHR_push_descriptor === - void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipelineLayout layout, - uint32_t set, - uint32_t descriptorWriteCount, - const VkWriteDescriptorSet * pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites 
); - } + void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet * pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites ); + } - void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - VkPipelineLayout layout, - uint32_t set, - const void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData ); - } + void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData ); + } - //=== VK_EXT_conditional_rendering === + //=== VK_EXT_conditional_rendering === - void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, - const VkConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin ); - } + void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, + const VkConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin ); + } - void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndConditionalRenderingEXT( commandBuffer ); - } + void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndConditionalRenderingEXT( commandBuffer ); + } - //=== VK_KHR_descriptor_update_template === + //=== VK_KHR_descriptor_update_template === - VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, - const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); - } + VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); + } - void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator ); - } + void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator ); + } - void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, - VkDescriptorSet 
descriptorSet, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - const void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData ); - } + void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData ); + } - //=== VK_NV_clip_space_w_scaling === + //=== VK_NV_clip_space_w_scaling === - void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, - uint32_t firstViewport, - uint32_t viewportCount, - const VkViewportWScalingNV * pViewportWScalings ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings ); - } + void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportWScalingNV * pViewportWScalings ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings ); + } - //=== VK_EXT_direct_mode_display === + //=== VK_EXT_direct_mode_display === - VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleaseDisplayEXT( physicalDevice, display ); - } + VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseDisplayEXT( physicalDevice, display ); + } # if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) - //=== VK_EXT_acquire_xlib_display === + //=== VK_EXT_acquire_xlib_display === - VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display ); - } + VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display ); + } - VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, RROutput rrOutput, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay ); - } + VkResult + vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, RROutput rrOutput, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay ); + } # endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - //=== VK_EXT_display_surface_counter === + //=== VK_EXT_display_surface_counter === - VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - VkSurfaceCapabilities2EXT * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities ); - } + VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilities2EXT * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities ); + } - //=== VK_EXT_display_control === + //=== VK_EXT_display_control === - VkResult 
vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT * pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo ); - } + VkResult vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT * pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo ); + } - VkResult vkRegisterDeviceEventEXT( VkDevice device, - const VkDeviceEventInfoEXT * pDeviceEventInfo, - const VkAllocationCallbacks * pAllocator, - VkFence * pFence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence ); - } + VkResult vkRegisterDeviceEventEXT( VkDevice device, + const VkDeviceEventInfoEXT * pDeviceEventInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence ); + } - VkResult vkRegisterDisplayEventEXT( VkDevice device, - VkDisplayKHR display, - const VkDisplayEventInfoEXT * pDisplayEventInfo, - const VkAllocationCallbacks * pAllocator, - VkFence * pFence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence ); - } + VkResult vkRegisterDisplayEventEXT( VkDevice device, + VkDisplayKHR display, + const VkDisplayEventInfoEXT * pDisplayEventInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence ); + } - VkResult vkGetSwapchainCounterEXT( VkDevice device, - VkSwapchainKHR swapchain, - VkSurfaceCounterFlagBitsEXT counter, - uint64_t * pCounterValue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue ); - } + VkResult vkGetSwapchainCounterEXT( VkDevice device, + VkSwapchainKHR swapchain, + VkSurfaceCounterFlagBitsEXT counter, + uint64_t * pCounterValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue ); + } - //=== VK_GOOGLE_display_timing === + //=== VK_GOOGLE_display_timing === - VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, - VkSwapchainKHR swapchain, - VkRefreshCycleDurationGOOGLE * pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties ); - } + VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, + VkSwapchainKHR swapchain, + VkRefreshCycleDurationGOOGLE * pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties ); + } - VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, - VkSwapchainKHR swapchain, - uint32_t * pPresentationTimingCount, - VkPastPresentationTimingGOOGLE * pPresentationTimings ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings ); - } + VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, + VkSwapchainKHR swapchain, + uint32_t * pPresentationTimingCount, + VkPastPresentationTimingGOOGLE * pPresentationTimings ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings ); + } - //=== VK_EXT_discard_rectangles === + 
//=== VK_EXT_discard_rectangles === - void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, - uint32_t firstDiscardRectangle, - uint32_t discardRectangleCount, - const VkRect2D * pDiscardRectangles ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles ); - } + void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, + uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VkRect2D * pDiscardRectangles ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles ); + } - //=== VK_EXT_hdr_metadata === + void vkCmdSetDiscardRectangleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 discardRectangleEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDiscardRectangleEnableEXT( commandBuffer, discardRectangleEnable ); + } - void vkSetHdrMetadataEXT( VkDevice device, - uint32_t swapchainCount, - const VkSwapchainKHR * pSwapchains, - const VkHdrMetadataEXT * pMetadata ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata ); - } + void vkCmdSetDiscardRectangleModeEXT( VkCommandBuffer commandBuffer, VkDiscardRectangleModeEXT discardRectangleMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDiscardRectangleModeEXT( commandBuffer, discardRectangleMode ); + } - //=== VK_KHR_create_renderpass2 === + //=== VK_EXT_hdr_metadata === - VkResult vkCreateRenderPass2KHR( VkDevice device, - const VkRenderPassCreateInfo2 * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass ); - } + void vkSetHdrMetadataEXT( VkDevice device, + uint32_t swapchainCount, + const VkSwapchainKHR * pSwapchains, + const VkHdrMetadataEXT * pMetadata ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata ); + } - void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, - const VkRenderPassBeginInfo * pRenderPassBegin, - const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); - } + //=== VK_KHR_create_renderpass2 === - void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, - const VkSubpassBeginInfo * pSubpassBeginInfo, - const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); - } + VkResult vkCreateRenderPass2KHR( VkDevice device, + const VkRenderPassCreateInfo2 * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass ); + } - void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo ); - } + void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); + } - //=== VK_KHR_shared_presentable_image === + void vkCmdNextSubpass2KHR( 
VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo * pSubpassBeginInfo, + const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); + } - VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSwapchainStatusKHR( device, swapchain ); - } + void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo ); + } - //=== VK_KHR_external_fence_capabilities === + //=== VK_KHR_shared_presentable_image === - void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, - VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); - } + VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSwapchainStatusKHR( device, swapchain ); + } + + //=== VK_KHR_external_fence_capabilities === + + void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); + } # if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_fence_win32 === + //=== VK_KHR_external_fence_win32 === - VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo ); - } + VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo ); + } - VkResult vkGetFenceWin32HandleKHR( VkDevice device, const VkFenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); - } + VkResult vkGetFenceWin32HandleKHR( VkDevice device, const VkFenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + } # endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_external_fence_fd === + //=== VK_KHR_external_fence_fd === - VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR * pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportFenceFdKHR( device, pImportFenceFdInfo ); - } + VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR * pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportFenceFdKHR( device, pImportFenceFdInfo ); + } - VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd ); - } + VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR * pGetFdInfo, int * pFd ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd ); + } - //=== VK_KHR_performance_query === + //=== VK_KHR_performance_query === - VkResult - vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - uint32_t * pCounterCount, - VkPerformanceCounterKHR * pCounters, - VkPerformanceCounterDescriptionKHR * pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( - physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions ); - } + VkResult + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + uint32_t * pCounterCount, + VkPerformanceCounterKHR * pCounters, + VkPerformanceCounterDescriptionKHR * pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions ); + } - void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice, - const VkQueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, - uint32_t * pNumPasses ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( physicalDevice, pPerformanceQueryCreateInfo, pNumPasses ); - } + void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice, + const VkQueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, + uint32_t * pNumPasses ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( physicalDevice, pPerformanceQueryCreateInfo, pNumPasses ); + } - VkResult vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireProfilingLockKHR( device, pInfo ); - } + VkResult vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireProfilingLockKHR( device, pInfo ); + } - void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleaseProfilingLockKHR( device ); - } + void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseProfilingLockKHR( device ); + } - //=== VK_KHR_get_surface_capabilities2 === + //=== VK_KHR_get_surface_capabilities2 === - VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, - VkSurfaceCapabilities2KHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities ); - } + VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VkSurfaceCapabilities2KHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities ); + } - VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, - uint32_t * pSurfaceFormatCount, - VkSurfaceFormat2KHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, 
pSurfaceFormatCount, pSurfaceFormats ); - } + VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pSurfaceFormatCount, + VkSurfaceFormat2KHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats ); + } - //=== VK_KHR_get_display_properties2 === + //=== VK_KHR_get_display_properties2 === - VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, - uint32_t * pPropertyCount, - VkDisplayProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties ); - } + VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties ); + } - VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, - uint32_t * pPropertyCount, - VkDisplayPlaneProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties ); - } + VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPlaneProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties ); + } - VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - uint32_t * pPropertyCount, - VkDisplayModeProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties ); - } + VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t * pPropertyCount, + VkDisplayModeProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties ); + } - VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, - const VkDisplayPlaneInfo2KHR * pDisplayPlaneInfo, - VkDisplayPlaneCapabilities2KHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities ); - } + VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, + const VkDisplayPlaneInfo2KHR * pDisplayPlaneInfo, + VkDisplayPlaneCapabilities2KHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities ); + } # if defined( VK_USE_PLATFORM_IOS_MVK ) - //=== VK_MVK_ios_surface === + //=== VK_MVK_ios_surface === - VkResult vkCreateIOSSurfaceMVK( VkInstance instance, - const VkIOSSurfaceCreateInfoMVK * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkCreateIOSSurfaceMVK( VkInstance instance, + const VkIOSSurfaceCreateInfoMVK * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); + } # endif /*VK_USE_PLATFORM_IOS_MVK*/ # if defined( VK_USE_PLATFORM_MACOS_MVK ) - //=== VK_MVK_macos_surface === + //=== VK_MVK_macos_surface === - VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, - const VkMacOSSurfaceCreateInfoMVK * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, + const VkMacOSSurfaceCreateInfoMVK * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); + } # endif /*VK_USE_PLATFORM_MACOS_MVK*/ - //=== VK_EXT_debug_utils === + //=== VK_EXT_debug_utils === - VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo ); - } + VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo ); + } - VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo ); - } + VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo ); + } - void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo ); - } + void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo ); + } - void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueEndDebugUtilsLabelEXT( queue ); - } + void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueEndDebugUtilsLabelEXT( queue ); + } - void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo ); - } + void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo ); + } - void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); - } + void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); + } - void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer ); - } + void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer ); + } - void vkCmdInsertDebugUtilsLabelEXT( 
VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); - } + void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); + } - VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, - const VkDebugUtilsMessengerCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDebugUtilsMessengerEXT * pMessenger ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger ); - } + VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, + const VkDebugUtilsMessengerCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDebugUtilsMessengerEXT * pMessenger ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger ); + } - void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, - VkDebugUtilsMessengerEXT messenger, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator ); - } + void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, + VkDebugUtilsMessengerEXT messenger, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator ); + } - void vkSubmitDebugUtilsMessageEXT( VkInstance instance, - VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, - VkDebugUtilsMessageTypeFlagsEXT messageTypes, - const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData ); - } + void vkSubmitDebugUtilsMessageEXT( VkInstance instance, + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData ); + } # if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_ANDROID_external_memory_android_hardware_buffer === + //=== VK_ANDROID_external_memory_android_hardware_buffer === - VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, - const struct AHardwareBuffer * buffer, - VkAndroidHardwareBufferPropertiesANDROID * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties ); - } + VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, + const struct AHardwareBuffer * buffer, + VkAndroidHardwareBufferPropertiesANDROID * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties ); + } - VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, - const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo, - struct AHardwareBuffer ** pBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer ); - } + VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, + const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo, + struct AHardwareBuffer ** pBuffer ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer ); + } # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - //=== VK_EXT_sample_locations === +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + VkResult vkCreateExecutionGraphPipelinesAMDX( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkExecutionGraphPipelineCreateInfoAMDX * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateExecutionGraphPipelinesAMDX( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } - void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo ); - } + VkResult vkGetExecutionGraphPipelineScratchSizeAMDX( VkDevice device, + VkPipeline executionGraph, + VkExecutionGraphPipelineScratchSizeAMDX * pSizeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetExecutionGraphPipelineScratchSizeAMDX( device, executionGraph, pSizeInfo ); + } - void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, - VkSampleCountFlagBits samples, - VkMultisamplePropertiesEXT * pMultisampleProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties ); - } + VkResult vkGetExecutionGraphPipelineNodeIndexAMDX( VkDevice device, + VkPipeline executionGraph, + const VkPipelineShaderStageNodeCreateInfoAMDX * pNodeInfo, + uint32_t * pNodeIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetExecutionGraphPipelineNodeIndexAMDX( device, executionGraph, pNodeInfo, pNodeIndex ); + } - //=== VK_KHR_get_memory_requirements2 === + void vkCmdInitializeGraphScratchMemoryAMDX( VkCommandBuffer commandBuffer, + VkPipeline executionGraph, + VkDeviceAddress scratch, + VkDeviceSize scratchSize ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdInitializeGraphScratchMemoryAMDX( commandBuffer, executionGraph, scratch, scratchSize ); + } - void vkGetImageMemoryRequirements2KHR( VkDevice device, - const VkImageMemoryRequirementsInfo2 * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); - } + void vkCmdDispatchGraphAMDX( VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + VkDeviceSize scratchSize, + const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchGraphAMDX( commandBuffer, scratch, scratchSize, pCountInfo ); + } - void vkGetBufferMemoryRequirements2KHR( VkDevice device, - const VkBufferMemoryRequirementsInfo2 * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); - } + void vkCmdDispatchGraphIndirectAMDX( VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + VkDeviceSize scratchSize, + const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchGraphIndirectAMDX( commandBuffer, scratch, scratchSize, pCountInfo ); + } - void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, - const VkImageSparseMemoryRequirementsInfo2 * pInfo, - uint32_t * pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - 
return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); - } + void vkCmdDispatchGraphIndirectCountAMDX( VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + VkDeviceSize scratchSize, + VkDeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchGraphIndirectCountAMDX( commandBuffer, scratch, scratchSize, countInfo ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - //=== VK_KHR_acceleration_structure === + //=== VK_EXT_sample_locations === - VkResult vkCreateAccelerationStructureKHR( VkDevice device, - const VkAccelerationStructureCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkAccelerationStructureKHR * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure ); - } + void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo ); + } - void vkDestroyAccelerationStructureKHR( VkDevice device, - VkAccelerationStructureKHR accelerationStructure, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyAccelerationStructureKHR( device, accelerationStructure, pAllocator ); - } + void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, + VkSampleCountFlagBits samples, + VkMultisamplePropertiesEXT * pMultisampleProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties ); + } - void vkCmdBuildAccelerationStructuresKHR( VkCommandBuffer commandBuffer, - uint32_t infoCount, - const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, - const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBuildAccelerationStructuresKHR( commandBuffer, infoCount, pInfos, ppBuildRangeInfos ); - } + //=== VK_KHR_get_memory_requirements2 === - void vkCmdBuildAccelerationStructuresIndirectKHR( VkCommandBuffer commandBuffer, - uint32_t infoCount, - const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, - const VkDeviceAddress * pIndirectDeviceAddresses, - const uint32_t * pIndirectStrides, - const uint32_t * const * ppMaxPrimitiveCounts ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBuildAccelerationStructuresIndirectKHR( - commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts ); - } + void vkGetImageMemoryRequirements2KHR( VkDevice device, + const VkImageMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); + } - VkResult vkBuildAccelerationStructuresKHR( VkDevice device, - VkDeferredOperationKHR deferredOperation, - uint32_t infoCount, - const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, - const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBuildAccelerationStructuresKHR( device, deferredOperation, infoCount, pInfos, ppBuildRangeInfos ); - } + void vkGetBufferMemoryRequirements2KHR( VkDevice device, + const VkBufferMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); + } - VkResult vkCopyAccelerationStructureKHR( VkDevice device, - VkDeferredOperationKHR deferredOperation, - const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyAccelerationStructureKHR( device, deferredOperation, pInfo ); - } + void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, + const VkImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } - VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice device, - VkDeferredOperationKHR deferredOperation, - const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyAccelerationStructureToMemoryKHR( device, deferredOperation, pInfo ); - } + //=== VK_KHR_acceleration_structure === - VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice device, - VkDeferredOperationKHR deferredOperation, - const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyMemoryToAccelerationStructureKHR( device, deferredOperation, pInfo ); - } + VkResult vkCreateAccelerationStructureKHR( VkDevice device, + const VkAccelerationStructureCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkAccelerationStructureKHR * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure ); + } - VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, - uint32_t accelerationStructureCount, - const VkAccelerationStructureKHR * pAccelerationStructures, - VkQueryType queryType, - size_t dataSize, - void * pData, - size_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWriteAccelerationStructuresPropertiesKHR( device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride ); - } + void vkDestroyAccelerationStructureKHR( VkDevice device, + VkAccelerationStructureKHR accelerationStructure, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyAccelerationStructureKHR( device, accelerationStructure, pAllocator ); + } - void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo ); - } + void vkCmdBuildAccelerationStructuresKHR( VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructuresKHR( commandBuffer, infoCount, pInfos, ppBuildRangeInfos ); + } - void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, - const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo ); - } + void vkCmdBuildAccelerationStructuresIndirectKHR( VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkDeviceAddress * pIndirectDeviceAddresses, + const uint32_t * 
pIndirectStrides, + const uint32_t * const * ppMaxPrimitiveCounts ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructuresIndirectKHR( + commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts ); + } - void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, - const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo ); - } + VkResult vkBuildAccelerationStructuresKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBuildAccelerationStructuresKHR( device, deferredOperation, infoCount, pInfos, ppBuildRangeInfos ); + } - VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( VkDevice device, - const VkAccelerationStructureDeviceAddressInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo ); - } + VkResult vkCopyAccelerationStructureKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyAccelerationStructureKHR( device, deferredOperation, pInfo ); + } - void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, - uint32_t accelerationStructureCount, - const VkAccelerationStructureKHR * pAccelerationStructures, - VkQueryType queryType, - VkQueryPool queryPool, - uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteAccelerationStructuresPropertiesKHR( - commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); - } + VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyAccelerationStructureToMemoryKHR( device, deferredOperation, pInfo ); + } - void vkGetDeviceAccelerationStructureCompatibilityKHR( VkDevice device, - const VkAccelerationStructureVersionInfoKHR * pVersionInfo, - VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, pVersionInfo, pCompatibility ); - } + VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToAccelerationStructureKHR( device, deferredOperation, pInfo ); + } - void vkGetAccelerationStructureBuildSizesKHR( VkDevice device, - VkAccelerationStructureBuildTypeKHR buildType, - const VkAccelerationStructureBuildGeometryInfoKHR * pBuildInfo, - const uint32_t * pMaxPrimitiveCounts, - VkAccelerationStructureBuildSizesInfoKHR * pSizeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAccelerationStructureBuildSizesKHR( device, buildType, pBuildInfo, pMaxPrimitiveCounts, pSizeInfo ); - } + VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR * pAccelerationStructures, + VkQueryType queryType, + size_t dataSize, + void * pData, + size_t stride ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkWriteAccelerationStructuresPropertiesKHR( device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride ); + } - //=== VK_KHR_sampler_ycbcr_conversion === + void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo ); + } - VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, - const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion ); - } + void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo ); + } - void vkDestroySamplerYcbcrConversionKHR( VkDevice device, - VkSamplerYcbcrConversion ycbcrConversion, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator ); - } + void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, + const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo ); + } - //=== VK_KHR_bind_memory2 === + VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( VkDevice device, + const VkAccelerationStructureDeviceAddressInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo ); + } - VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos ); - } + void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR * pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteAccelerationStructuresPropertiesKHR( + commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); + } - VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos ); - } + void vkGetDeviceAccelerationStructureCompatibilityKHR( VkDevice device, + const VkAccelerationStructureVersionInfoKHR * pVersionInfo, + VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, pVersionInfo, pCompatibility ); + } - //=== VK_EXT_image_drm_format_modifier === + void vkGetAccelerationStructureBuildSizesKHR( VkDevice device, + VkAccelerationStructureBuildTypeKHR buildType, + const VkAccelerationStructureBuildGeometryInfoKHR * pBuildInfo, + const uint32_t * pMaxPrimitiveCounts, + VkAccelerationStructureBuildSizesInfoKHR * pSizeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureBuildSizesKHR( device, buildType, pBuildInfo, pMaxPrimitiveCounts, pSizeInfo ); + } - VkResult - 
vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties ); - } + //=== VK_KHR_ray_tracing_pipeline === - //=== VK_EXT_validation_cache === + void vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysKHR( + commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth ); + } - VkResult vkCreateValidationCacheEXT( VkDevice device, - const VkValidationCacheCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkValidationCacheEXT * pValidationCache ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache ); - } + VkResult vkCreateRayTracingPipelinesKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoKHR * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRayTracingPipelinesKHR( device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } - void - vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator ); - } + VkResult vkGetRayTracingShaderGroupHandlesKHR( + VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } - VkResult vkMergeValidationCachesEXT( VkDevice device, - VkValidationCacheEXT dstCache, - uint32_t srcCacheCount, - const VkValidationCacheEXT * pSrcCaches ) const VULKAN_HPP_NOEXCEPT - { - return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches ); - } + VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } - VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData ); - } + void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkCmdTraceRaysIndirectKHR( + commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, indirectDeviceAddress ); + } - //=== VK_NV_shading_rate_image === + VkDeviceSize vkGetRayTracingShaderGroupStackSizeKHR( VkDevice device, + VkPipeline pipeline, + uint32_t group, + VkShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupStackSizeKHR( device, pipeline, group, groupShader ); + } - void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout ); - } + void vkCmdSetRayTracingPipelineStackSizeKHR( VkCommandBuffer commandBuffer, uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRayTracingPipelineStackSizeKHR( commandBuffer, pipelineStackSize ); + } - void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, - uint32_t firstViewport, - uint32_t viewportCount, - const VkShadingRatePaletteNV * pShadingRatePalettes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportShadingRatePaletteNV( commandBuffer, firstViewport, viewportCount, pShadingRatePalettes ); - } + //=== VK_KHR_sampler_ycbcr_conversion === - void vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, - VkCoarseSampleOrderTypeNV sampleOrderType, - uint32_t customSampleOrderCount, - const VkCoarseSampleOrderCustomNV * pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders ); - } + VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, + const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion ); + } - //=== VK_NV_ray_tracing === + void vkDestroySamplerYcbcrConversionKHR( VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator ); + } - VkResult vkCreateAccelerationStructureNV( VkDevice device, - const VkAccelerationStructureCreateInfoNV * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkAccelerationStructureNV * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure ); - } + //=== VK_KHR_bind_memory2 === - void vkDestroyAccelerationStructureNV( VkDevice device, - VkAccelerationStructureNV accelerationStructure, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator ); - } + VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos ); + } - void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, - const VkAccelerationStructureMemoryRequirementsInfoNV * pInfo, - VkMemoryRequirements2KHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); - } + 
VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos ); + } - VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, - uint32_t bindInfoCount, - const VkBindAccelerationStructureMemoryInfoNV * pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos ); - } + //=== VK_EXT_image_drm_format_modifier === - void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, - const VkAccelerationStructureInfoNV * pInfo, - VkBuffer instanceData, - VkDeviceSize instanceOffset, - VkBool32 update, - VkAccelerationStructureNV dst, - VkAccelerationStructureNV src, - VkBuffer scratch, - VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset ); - } + VkResult vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, + VkImage image, + VkImageDrmFormatModifierPropertiesEXT * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties ); + } - void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, - VkAccelerationStructureNV dst, - VkAccelerationStructureNV src, - VkCopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode ); - } + //=== VK_EXT_validation_cache === - void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, - VkBuffer raygenShaderBindingTableBuffer, - VkDeviceSize raygenShaderBindingOffset, - VkBuffer missShaderBindingTableBuffer, - VkDeviceSize missShaderBindingOffset, - VkDeviceSize missShaderBindingStride, - VkBuffer hitShaderBindingTableBuffer, - VkDeviceSize hitShaderBindingOffset, - VkDeviceSize hitShaderBindingStride, - VkBuffer callableShaderBindingTableBuffer, - VkDeviceSize callableShaderBindingOffset, - VkDeviceSize callableShaderBindingStride, - uint32_t width, - uint32_t height, - uint32_t depth ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdTraceRaysNV( commandBuffer, - raygenShaderBindingTableBuffer, - raygenShaderBindingOffset, - missShaderBindingTableBuffer, - missShaderBindingOffset, - missShaderBindingStride, - hitShaderBindingTableBuffer, - hitShaderBindingOffset, - hitShaderBindingStride, - callableShaderBindingTableBuffer, - callableShaderBindingOffset, - callableShaderBindingStride, - width, - height, - depth ); - } + VkResult vkCreateValidationCacheEXT( VkDevice device, + const VkValidationCacheCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkValidationCacheEXT * pValidationCache ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache ); + } - VkResult vkCreateRayTracingPipelinesNV( VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkRayTracingPipelineCreateInfoNV * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); - } + void + vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkDestroyValidationCacheEXT( device, validationCache, pAllocator ); + } - VkResult vkGetRayTracingShaderGroupHandlesNV( - VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData ); - } + VkResult vkMergeValidationCachesEXT( VkDevice device, + VkValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VkValidationCacheEXT * pSrcCaches ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches ); + } - VkResult vkGetAccelerationStructureHandleNV( VkDevice device, - VkAccelerationStructureNV accelerationStructure, - size_t dataSize, - void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData ); - } + VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData ); + } - void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, - uint32_t accelerationStructureCount, - const VkAccelerationStructureNV * pAccelerationStructures, - VkQueryType queryType, - VkQueryPool queryPool, - uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteAccelerationStructuresPropertiesNV( - commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); - } + //=== VK_NV_shading_rate_image === - VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCompileDeferredNV( device, pipeline, shader ); - } + void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout ); + } - //=== VK_KHR_maintenance3 === + void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkShadingRatePaletteNV * pShadingRatePalettes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportShadingRatePaletteNV( commandBuffer, firstViewport, viewportCount, pShadingRatePalettes ); + } - void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, - const VkDescriptorSetLayoutCreateInfo * pCreateInfo, - VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport ); - } + void vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, + VkCoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VkCoarseSampleOrderCustomNV * pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders ); + } - //=== VK_KHR_draw_indirect_count === + //=== VK_NV_ray_tracing === - void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } + VkResult 
vkCreateAccelerationStructureNV( VkDevice device, + const VkAccelerationStructureCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkAccelerationStructureNV * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure ); + } - void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndexedIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } + void vkDestroyAccelerationStructureNV( VkDevice device, + VkAccelerationStructureNV accelerationStructure, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator ); + } - //=== VK_EXT_external_memory_host === + void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, + const VkAccelerationStructureMemoryRequirementsInfoNV * pInfo, + VkMemoryRequirements2KHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); + } - VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device, - VkExternalMemoryHandleTypeFlagBits handleType, - const void * pHostPointer, - VkMemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties ); - } + VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, + uint32_t bindInfoCount, + const VkBindAccelerationStructureMemoryInfoNV * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos ); + } - //=== VK_AMD_buffer_marker === + void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, + const VkAccelerationStructureInfoNV * pInfo, + VkBuffer instanceData, + VkDeviceSize instanceOffset, + VkBool32 update, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkBuffer scratch, + VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset ); + } - void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, - VkPipelineStageFlagBits pipelineStage, - VkBuffer dstBuffer, - VkDeviceSize dstOffset, - uint32_t marker ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker ); - } + void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkCopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode ); + } - //=== VK_EXT_calibrated_timestamps === + void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, + VkBuffer raygenShaderBindingTableBuffer, + VkDeviceSize raygenShaderBindingOffset, + VkBuffer missShaderBindingTableBuffer, + VkDeviceSize missShaderBindingOffset, + VkDeviceSize missShaderBindingStride, + VkBuffer hitShaderBindingTableBuffer, + VkDeviceSize hitShaderBindingOffset, + VkDeviceSize hitShaderBindingStride, + VkBuffer 
callableShaderBindingTableBuffer, + VkDeviceSize callableShaderBindingOffset, + VkDeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysNV( commandBuffer, + raygenShaderBindingTableBuffer, + raygenShaderBindingOffset, + missShaderBindingTableBuffer, + missShaderBindingOffset, + missShaderBindingStride, + hitShaderBindingTableBuffer, + hitShaderBindingOffset, + hitShaderBindingStride, + callableShaderBindingTableBuffer, + callableShaderBindingOffset, + callableShaderBindingStride, + width, + height, + depth ); + } - VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, - uint32_t * pTimeDomainCount, - VkTimeDomainEXT * pTimeDomains ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains ); - } + VkResult vkCreateRayTracingPipelinesNV( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoNV * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } - VkResult vkGetCalibratedTimestampsEXT( VkDevice device, - uint32_t timestampCount, - const VkCalibratedTimestampInfoEXT * pTimestampInfos, - uint64_t * pTimestamps, - uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); - } + VkResult vkGetRayTracingShaderGroupHandlesNV( + VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } - //=== VK_NV_mesh_shader === + VkResult vkGetAccelerationStructureHandleNV( VkDevice device, + VkAccelerationStructureNV accelerationStructure, + size_t dataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData ); + } - void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask ); - } + void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureNV * pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteAccelerationStructuresPropertiesNV( + commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); + } - void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride ); - } + VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCompileDeferredNV( device, pipeline, shader ); + } - void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize 
countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksIndirectCountNV( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } + //=== VK_KHR_maintenance3 === - //=== VK_NV_scissor_exclusive === + void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport ); + } - void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, - uint32_t firstExclusiveScissor, - uint32_t exclusiveScissorCount, - const VkRect2D * pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors ); - } + //=== VK_KHR_draw_indirect_count === - //=== VK_NV_device_diagnostic_checkpoints === + void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } - void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void * pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker ); - } + void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } - void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointDataNV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData ); - } + //=== VK_EXT_external_memory_host === - //=== VK_KHR_timeline_semaphore === + VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + VkMemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties ); + } - VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue ); - } + //=== VK_AMD_buffer_marker === - VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout ); - } + void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker ); + } - VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSignalSemaphoreKHR( device, pSignalInfo ); - } + 
void vkCmdWriteBufferMarker2AMD( + VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteBufferMarker2AMD( commandBuffer, stage, dstBuffer, dstOffset, marker ); + } - //=== VK_INTEL_performance_query === + //=== VK_EXT_calibrated_timestamps === - VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL * pInitializeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo ); - } + VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, + uint32_t * pTimeDomainCount, + VkTimeDomainKHR * pTimeDomains ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains ); + } - void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUninitializePerformanceApiINTEL( device ); - } + VkResult vkGetCalibratedTimestampsEXT( VkDevice device, + uint32_t timestampCount, + const VkCalibratedTimestampInfoKHR * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); + } - VkResult vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo ); - } + //=== VK_NV_mesh_shader === - VkResult vkCmdSetPerformanceStreamMarkerINTEL( VkCommandBuffer commandBuffer, - const VkPerformanceStreamMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo ); - } + void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask ); + } - VkResult vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL * pOverrideInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo ); - } + void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride ); + } - VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, - const VkPerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, - VkPerformanceConfigurationINTEL * pConfiguration ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration ); - } + void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectCountNV( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } - VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleasePerformanceConfigurationINTEL( device, configuration ); - } + //=== 
VK_NV_scissor_exclusive === - VkResult vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration ); - } + void vkCmdSetExclusiveScissorEnableNV( VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkBool32 * pExclusiveScissorEnables ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetExclusiveScissorEnableNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissorEnables ); + } - VkResult - vkGetPerformanceParameterINTEL( VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL * pValue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPerformanceParameterINTEL( device, parameter, pValue ); - } + void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkRect2D * pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors ); + } - //=== VK_AMD_display_native_hdr === + //=== VK_NV_device_diagnostic_checkpoints === - void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable ); - } + void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void * pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker ); + } -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_imagepipe_surface === + void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointDataNV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData ); + } - VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, - const VkImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface ); - } -# endif /*VK_USE_PLATFORM_FUCHSIA*/ + void vkGetQueueCheckpointData2NV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointData2NV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueueCheckpointData2NV( queue, pCheckpointDataCount, pCheckpointData ); + } -# if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_surface === + //=== VK_KHR_timeline_semaphore === - VkResult vkCreateMetalSurfaceEXT( VkInstance instance, - const VkMetalSurfaceCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); - } -# endif /*VK_USE_PLATFORM_METAL_EXT*/ + VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue ); + } - //=== VK_KHR_fragment_shading_rate === + VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout ); + } - VkResult 
vkGetPhysicalDeviceFragmentShadingRatesKHR( VkPhysicalDevice physicalDevice, - uint32_t * pFragmentShadingRateCount, - VkPhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFragmentShadingRatesKHR( physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates ); - } + VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSignalSemaphoreKHR( device, pSignalInfo ); + } - void vkCmdSetFragmentShadingRateKHR( VkCommandBuffer commandBuffer, - const VkExtent2D * pFragmentSize, - const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetFragmentShadingRateKHR( commandBuffer, pFragmentSize, combinerOps ); - } + //=== VK_INTEL_performance_query === - //=== VK_EXT_buffer_device_address === + VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL * pInitializeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo ); + } - VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferDeviceAddressEXT( device, pInfo ); - } + void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUninitializePerformanceApiINTEL( device ); + } - //=== VK_EXT_tooling_info === + VkResult vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo ); + } - VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, - uint32_t * pToolCount, - VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties ); - } + VkResult vkCmdSetPerformanceStreamMarkerINTEL( VkCommandBuffer commandBuffer, + const VkPerformanceStreamMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo ); + } - //=== VK_KHR_present_wait === + VkResult vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL * pOverrideInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo ); + } - VkResult vkWaitForPresentKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWaitForPresentKHR( device, swapchain, presentId, timeout ); - } + VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, + const VkPerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, + VkPerformanceConfigurationINTEL * pConfiguration ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration ); + } - //=== VK_NV_cooperative_matrix === + VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleasePerformanceConfigurationINTEL( device, configuration ); + } - VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, - uint32_t * pPropertyCount, - VkCooperativeMatrixPropertiesNV * pProperties ) const VULKAN_HPP_NOEXCEPT - { - 
return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties ); - } + VkResult vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration ); + } - //=== VK_NV_coverage_reduction_mode === + VkResult + vkGetPerformanceParameterINTEL( VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL * pValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPerformanceParameterINTEL( device, parameter, pValue ); + } - VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( - VkPhysicalDevice physicalDevice, uint32_t * pCombinationCount, VkFramebufferMixedSamplesCombinationNV * pCombinations ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( physicalDevice, pCombinationCount, pCombinations ); - } + //=== VK_AMD_display_native_hdr === -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_EXT_full_screen_exclusive === + void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable ); + } - VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, - uint32_t * pPresentModeCount, - VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes ); - } +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === - VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain ); - } + VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, + const VkImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ - VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain ); - } +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === - VkResult vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device, - const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, - VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes ); - } -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + VkResult vkCreateMetalSurfaceEXT( VkInstance instance, + const VkMetalSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_EXT_headless_surface === + //=== VK_KHR_fragment_shading_rate === - VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, - const VkHeadlessSurfaceCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - 
return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkGetPhysicalDeviceFragmentShadingRatesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pFragmentShadingRateCount, + VkPhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFragmentShadingRatesKHR( physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates ); + } - //=== VK_KHR_buffer_device_address === + void vkCmdSetFragmentShadingRateKHR( VkCommandBuffer commandBuffer, + const VkExtent2D * pFragmentSize, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFragmentShadingRateKHR( commandBuffer, pFragmentSize, combinerOps ); + } - VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferDeviceAddressKHR( device, pInfo ); - } + //=== VK_KHR_dynamic_rendering_local_read === - uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferOpaqueCaptureAddressKHR( device, pInfo ); - } + void vkCmdSetRenderingAttachmentLocationsKHR( VkCommandBuffer commandBuffer, + const VkRenderingAttachmentLocationInfo * pLocationInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRenderingAttachmentLocationsKHR( commandBuffer, pLocationInfo ); + } - uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceMemoryOpaqueCaptureAddressKHR( device, pInfo ); - } + void vkCmdSetRenderingInputAttachmentIndicesKHR( VkCommandBuffer commandBuffer, + const VkRenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRenderingInputAttachmentIndicesKHR( commandBuffer, pInputAttachmentIndexInfo ); + } - //=== VK_EXT_line_rasterization === + //=== VK_EXT_buffer_device_address === - void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern ); - } + VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferDeviceAddressEXT( device, pInfo ); + } - //=== VK_EXT_host_query_reset === + //=== VK_EXT_tooling_info === - void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount ); - } + VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, + uint32_t * pToolCount, + VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties ); + } - //=== VK_EXT_extended_dynamic_state === + //=== VK_KHR_present_wait === - void vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCullModeEXT( commandBuffer, cullMode ); - } + VkResult vkWaitForPresentKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitForPresentKHR( device, swapchain, presentId, 
timeout ); + } - void vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetFrontFaceEXT( commandBuffer, frontFace ); - } + //=== VK_NV_cooperative_matrix === - void vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPrimitiveTopologyEXT( commandBuffer, primitiveTopology ); - } + VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkCooperativeMatrixPropertiesNV * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties ); + } - void vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportWithCountEXT( commandBuffer, viewportCount, pViewports ); - } + //=== VK_NV_coverage_reduction_mode === - void vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetScissorWithCountEXT( commandBuffer, scissorCount, pScissors ); - } + VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + VkPhysicalDevice physicalDevice, uint32_t * pCombinationCount, VkFramebufferMixedSamplesCombinationNV * pCombinations ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( physicalDevice, pCombinationCount, pCombinations ); + } - void vkCmdBindVertexBuffers2EXT( VkCommandBuffer commandBuffer, - uint32_t firstBinding, - uint32_t bindingCount, - const VkBuffer * pBuffers, - const VkDeviceSize * pOffsets, - const VkDeviceSize * pSizes, - const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindVertexBuffers2EXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); - } +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === - void vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthTestEnableEXT( commandBuffer, depthTestEnable ); - } + VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pPresentModeCount, + VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes ); + } - void vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthWriteEnableEXT( commandBuffer, depthWriteEnable ); - } + VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain ); + } - void vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthCompareOpEXT( commandBuffer, depthCompareOp ); - } + VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain ); + } - void vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer 
commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBoundsTestEnableEXT( commandBuffer, depthBoundsTestEnable ); - } + VkResult vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - void vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilTestEnableEXT( commandBuffer, stencilTestEnable ); - } + //=== VK_EXT_headless_surface === - void vkCmdSetStencilOpEXT( VkCommandBuffer commandBuffer, - VkStencilFaceFlags faceMask, - VkStencilOp failOp, - VkStencilOp passOp, - VkStencilOp depthFailOp, - VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilOpEXT( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); - } + VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, + const VkHeadlessSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } - //=== VK_KHR_deferred_host_operations === + //=== VK_KHR_buffer_device_address === - VkResult vkCreateDeferredOperationKHR( VkDevice device, - const VkAllocationCallbacks * pAllocator, - VkDeferredOperationKHR * pDeferredOperation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation ); - } + VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferDeviceAddressKHR( device, pInfo ); + } - void vkDestroyDeferredOperationKHR( VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator ); - } + uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferOpaqueCaptureAddressKHR( device, pInfo ); + } - uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation ); - } + uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMemoryOpaqueCaptureAddressKHR( device, pInfo ); + } - VkResult vkGetDeferredOperationResultKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeferredOperationResultKHR( device, operation ); - } + //=== VK_EXT_line_rasterization === - VkResult vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDeferredOperationJoinKHR( device, operation ); - } + void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern ); + } - //=== VK_KHR_pipeline_executable_properties === + //=== VK_EXT_host_query_reset === - 
VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device, - const VkPipelineInfoKHR * pPipelineInfo, - uint32_t * pExecutableCount, - VkPipelineExecutablePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties ); - } + void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount ); + } - VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device, - const VkPipelineExecutableInfoKHR * pExecutableInfo, - uint32_t * pStatisticCount, - VkPipelineExecutableStatisticKHR * pStatistics ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics ); - } + //=== VK_EXT_extended_dynamic_state === - VkResult - vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, - const VkPipelineExecutableInfoKHR * pExecutableInfo, - uint32_t * pInternalRepresentationCount, - VkPipelineExecutableInternalRepresentationKHR * pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations ); - } + void vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCullModeEXT( commandBuffer, cullMode ); + } - //=== VK_NV_device_generated_commands === + void vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFrontFaceEXT( commandBuffer, frontFace ); + } - void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device, - const VkGeneratedCommandsMemoryRequirementsInfoNV * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); - } + void vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveTopologyEXT( commandBuffer, primitiveTopology ); + } - void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo ); - } + void vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWithCountEXT( commandBuffer, viewportCount, pViewports ); + } - void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer, - VkBool32 isPreprocessed, - const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, pGeneratedCommandsInfo ); - } + void vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetScissorWithCountEXT( commandBuffer, scissorCount, pScissors ); + } - void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipeline pipeline, - uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindPipelineShaderGroupNV( 
commandBuffer, pipelineBindPoint, pipeline, groupIndex ); - } + void vkCmdBindVertexBuffers2EXT( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets, + const VkDeviceSize * pSizes, + const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindVertexBuffers2EXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); + } - VkResult vkCreateIndirectCommandsLayoutNV( VkDevice device, - const VkIndirectCommandsLayoutCreateInfoNV * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkIndirectCommandsLayoutNV * pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); - } + void vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthTestEnableEXT( commandBuffer, depthTestEnable ); + } - void vkDestroyIndirectCommandsLayoutNV( VkDevice device, - VkIndirectCommandsLayoutNV indirectCommandsLayout, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyIndirectCommandsLayoutNV( device, indirectCommandsLayout, pAllocator ); - } + void vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthWriteEnableEXT( commandBuffer, depthWriteEnable ); + } - //=== VK_EXT_acquire_drm_display === + void vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthCompareOpEXT( commandBuffer, depthCompareOp ); + } - VkResult vkAcquireDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireDrmDisplayEXT( physicalDevice, drmFd, display ); - } + void vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBoundsTestEnableEXT( commandBuffer, depthBoundsTestEnable ); + } - VkResult vkGetDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, uint32_t connectorId, VkDisplayKHR * display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDrmDisplayEXT( physicalDevice, drmFd, connectorId, display ); - } + void vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilTestEnableEXT( commandBuffer, stencilTestEnable ); + } - //=== VK_EXT_private_data === + void vkCmdSetStencilOpEXT( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilOpEXT( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); + } - VkResult vkCreatePrivateDataSlotEXT( VkDevice device, - const VkPrivateDataSlotCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreatePrivateDataSlotEXT( device, pCreateInfo, pAllocator, pPrivateDataSlot ); - } + //=== VK_KHR_deferred_host_operations === - void vkDestroyPrivateDataSlotEXT( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPrivateDataSlotEXT( device, 
privateDataSlot, pAllocator ); - } + VkResult vkCreateDeferredOperationKHR( VkDevice device, + const VkAllocationCallbacks * pAllocator, + VkDeferredOperationKHR * pDeferredOperation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation ); + } - VkResult vkSetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkSetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, data ); - } + void + vkDestroyDeferredOperationKHR( VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator ); + } - void vkGetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkGetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, pData ); - } + uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation ); + } -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_encode_queue === + VkResult vkGetDeferredOperationResultKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeferredOperationResultKHR( device, operation ); + } - void vkCmdEncodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR * pEncodeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEncodeVideoKHR( commandBuffer, pEncodeInfo ); - } -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + VkResult vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDeferredOperationJoinKHR( device, operation ); + } -# if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_objects === + //=== VK_KHR_pipeline_executable_properties === - void vkExportMetalObjectsEXT( VkDevice device, VkExportMetalObjectsInfoEXT * pMetalObjectsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkExportMetalObjectsEXT( device, pMetalObjectsInfo ); - } -# endif /*VK_USE_PLATFORM_METAL_EXT*/ + VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device, + const VkPipelineInfoKHR * pPipelineInfo, + uint32_t * pExecutableCount, + VkPipelineExecutablePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties ); + } - //=== VK_KHR_synchronization2 === + VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device, + const VkPipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pStatisticCount, + VkPipelineExecutableStatisticKHR * pStatistics ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics ); + } - void vkCmdSetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetEvent2KHR( commandBuffer, event, pDependencyInfo ); - } + VkResult + vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, + const VkPipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pInternalRepresentationCount, + VkPipelineExecutableInternalRepresentationKHR * pInternalRepresentations ) 
const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations ); + } - void vkCmdResetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResetEvent2KHR( commandBuffer, event, stageMask ); - } + //=== VK_EXT_host_image_copy === - void vkCmdWaitEvents2KHR( VkCommandBuffer commandBuffer, - uint32_t eventCount, - const VkEvent * pEvents, - const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWaitEvents2KHR( commandBuffer, eventCount, pEvents, pDependencyInfos ); - } + VkResult vkCopyMemoryToImageEXT( VkDevice device, const VkCopyMemoryToImageInfo * pCopyMemoryToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToImageEXT( device, pCopyMemoryToImageInfo ); + } - void vkCmdPipelineBarrier2KHR( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPipelineBarrier2KHR( commandBuffer, pDependencyInfo ); - } + VkResult vkCopyImageToMemoryEXT( VkDevice device, const VkCopyImageToMemoryInfo * pCopyImageToMemoryInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyImageToMemoryEXT( device, pCopyImageToMemoryInfo ); + } - void vkCmdWriteTimestamp2KHR( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteTimestamp2KHR( commandBuffer, stage, queryPool, query ); - } + VkResult vkCopyImageToImageEXT( VkDevice device, const VkCopyImageToImageInfo * pCopyImageToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyImageToImageEXT( device, pCopyImageToImageInfo ); + } - VkResult vkQueueSubmit2KHR( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueSubmit2KHR( queue, submitCount, pSubmits, fence ); - } + VkResult + vkTransitionImageLayoutEXT( VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfo * pTransitions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkTransitionImageLayoutEXT( device, transitionCount, pTransitions ); + } - void vkCmdWriteBufferMarker2AMD( - VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteBufferMarker2AMD( commandBuffer, stage, dstBuffer, dstOffset, marker ); - } + void vkGetImageSubresourceLayout2EXT( VkDevice device, + VkImage image, + const VkImageSubresource2 * pSubresource, + VkSubresourceLayout2 * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSubresourceLayout2EXT( device, image, pSubresource, pLayout ); + } - void vkGetQueueCheckpointData2NV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointData2NV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetQueueCheckpointData2NV( queue, pCheckpointDataCount, pCheckpointData ); - } + //=== VK_KHR_map_memory2 === - //=== VK_NV_fragment_shading_rate_enums === - - void vkCmdSetFragmentShadingRateEnumNV( VkCommandBuffer commandBuffer, - VkFragmentShadingRateNV shadingRate, - const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetFragmentShadingRateEnumNV( commandBuffer, shadingRate, combinerOps ); - } - - //=== VK_EXT_mesh_shader === + VkResult vkMapMemory2KHR( VkDevice device, const VkMemoryMapInfo * pMemoryMapInfo, 
void ** ppData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMapMemory2KHR( device, pMemoryMapInfo, ppData ); + } - void vkCmdDrawMeshTasksEXT( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksEXT( commandBuffer, groupCountX, groupCountY, groupCountZ ); - } + VkResult vkUnmapMemory2KHR( VkDevice device, const VkMemoryUnmapInfo * pMemoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUnmapMemory2KHR( device, pMemoryUnmapInfo ); + } - void vkCmdDrawMeshTasksIndirectEXT( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksIndirectEXT( commandBuffer, buffer, offset, drawCount, stride ); - } + //=== VK_EXT_swapchain_maintenance1 === - void vkCmdDrawMeshTasksIndirectCountEXT( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksIndirectCountEXT( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } + VkResult vkReleaseSwapchainImagesEXT( VkDevice device, const VkReleaseSwapchainImagesInfoEXT * pReleaseInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseSwapchainImagesEXT( device, pReleaseInfo ); + } - //=== VK_KHR_copy_commands2 === + //=== VK_NV_device_generated_commands === - void vkCmdCopyBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBuffer2KHR( commandBuffer, pCopyBufferInfo ); - } + void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoNV * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); + } - void vkCmdCopyImage2KHR( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImage2KHR( commandBuffer, pCopyImageInfo ); - } + void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo ); + } - void vkCmdCopyBufferToImage2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBufferToImage2KHR( commandBuffer, pCopyBufferToImageInfo ); - } + void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, pGeneratedCommandsInfo ); + } - void vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImageToBuffer2KHR( commandBuffer, pCopyImageToBufferInfo ); - } + void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline, + uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindPipelineShaderGroupNV( commandBuffer, pipelineBindPoint, pipeline, 
groupIndex ); + } - void vkCmdBlitImage2KHR( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBlitImage2KHR( commandBuffer, pBlitImageInfo ); - } + VkResult vkCreateIndirectCommandsLayoutNV( VkDevice device, + const VkIndirectCommandsLayoutCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkIndirectCommandsLayoutNV * pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); + } - void vkCmdResolveImage2KHR( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResolveImage2KHR( commandBuffer, pResolveImageInfo ); - } + void vkDestroyIndirectCommandsLayoutNV( VkDevice device, + VkIndirectCommandsLayoutNV indirectCommandsLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyIndirectCommandsLayoutNV( device, indirectCommandsLayout, pAllocator ); + } - //=== VK_EXT_image_compression_control === + //=== VK_EXT_depth_bias_control === - void vkGetImageSubresourceLayout2EXT( VkDevice device, - VkImage image, - const VkImageSubresource2EXT * pSubresource, - VkSubresourceLayout2EXT * pLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSubresourceLayout2EXT( device, image, pSubresource, pLayout ); - } + void vkCmdSetDepthBias2EXT( VkCommandBuffer commandBuffer, const VkDepthBiasInfoEXT * pDepthBiasInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBias2EXT( commandBuffer, pDepthBiasInfo ); + } -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_acquire_winrt_display === + //=== VK_EXT_acquire_drm_display === - VkResult vkAcquireWinrtDisplayNV( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireWinrtDisplayNV( physicalDevice, display ); - } + VkResult vkAcquireDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireDrmDisplayEXT( physicalDevice, drmFd, display ); + } - VkResult vkGetWinrtDisplayNV( VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetWinrtDisplayNV( physicalDevice, deviceRelativeId, pDisplay ); - } -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + VkResult vkGetDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, uint32_t connectorId, VkDisplayKHR * display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDrmDisplayEXT( physicalDevice, drmFd, connectorId, display ); + } -# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) - //=== VK_EXT_directfb_surface === + //=== VK_EXT_private_data === - VkResult vkCreateDirectFBSurfaceEXT( VkInstance instance, - const VkDirectFBSurfaceCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDirectFBSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkCreatePrivateDataSlotEXT( VkDevice device, + const VkPrivateDataSlotCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePrivateDataSlotEXT( device, pCreateInfo, pAllocator, pPrivateDataSlot ); + } - VkBool32 - vkGetPhysicalDeviceDirectFBPresentationSupportEXT( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB * dfb ) const 
VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDirectFBPresentationSupportEXT( physicalDevice, queueFamilyIndex, dfb ); - } -# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + void vkDestroyPrivateDataSlotEXT( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPrivateDataSlotEXT( device, privateDataSlot, pAllocator ); + } - //=== VK_KHR_ray_tracing_pipeline === + VkResult vkSetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkSetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, data ); + } - void vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, - const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, - uint32_t width, - uint32_t height, - uint32_t depth ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdTraceRaysKHR( - commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth ); - } + void vkGetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, pData ); + } - VkResult vkCreateRayTracingPipelinesKHR( VkDevice device, - VkDeferredOperationKHR deferredOperation, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkRayTracingPipelineCreateInfoKHR * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateRayTracingPipelinesKHR( device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); - } + //=== VK_KHR_video_encode_queue === - VkResult vkGetRayTracingShaderGroupHandlesKHR( - VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); - } + VkResult + vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo, + VkVideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( physicalDevice, pQualityLevelInfo, pQualityLevelProperties ); + } - VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( - VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); - } + VkResult vkGetEncodedVideoSessionParametersKHR( VkDevice device, + const VkVideoEncodeSessionParametersGetInfoKHR * pVideoSessionParametersInfo, + VkVideoEncodeSessionParametersFeedbackInfoKHR * pFeedbackInfo, + size_t * pDataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetEncodedVideoSessionParametersKHR( device, pVideoSessionParametersInfo, pFeedbackInfo, 
pDataSize, pData ); + } - void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, - const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, - VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdTraceRaysIndirectKHR( - commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, indirectDeviceAddress ); - } + void vkCmdEncodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR * pEncodeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEncodeVideoKHR( commandBuffer, pEncodeInfo ); + } - VkDeviceSize vkGetRayTracingShaderGroupStackSizeKHR( VkDevice device, - VkPipeline pipeline, - uint32_t group, - VkShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRayTracingShaderGroupStackSizeKHR( device, pipeline, group, groupShader ); - } +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === - void vkCmdSetRayTracingPipelineStackSizeKHR( VkCommandBuffer commandBuffer, uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRayTracingPipelineStackSizeKHR( commandBuffer, pipelineStackSize ); - } + VkResult vkCreateCudaModuleNV( VkDevice device, + const VkCudaModuleCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCudaModuleNV * pModule ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCudaModuleNV( device, pCreateInfo, pAllocator, pModule ); + } - //=== VK_EXT_vertex_input_dynamic_state === + VkResult vkGetCudaModuleCacheNV( VkDevice device, VkCudaModuleNV module, size_t * pCacheSize, void * pCacheData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetCudaModuleCacheNV( device, module, pCacheSize, pCacheData ); + } - void vkCmdSetVertexInputEXT( VkCommandBuffer commandBuffer, - uint32_t vertexBindingDescriptionCount, - const VkVertexInputBindingDescription2EXT * pVertexBindingDescriptions, - uint32_t vertexAttributeDescriptionCount, - const VkVertexInputAttributeDescription2EXT * pVertexAttributeDescriptions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetVertexInputEXT( - commandBuffer, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions ); - } + VkResult vkCreateCudaFunctionNV( VkDevice device, + const VkCudaFunctionCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCudaFunctionNV * pFunction ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCudaFunctionNV( device, pCreateInfo, pAllocator, pFunction ); + } -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_memory === + void vkDestroyCudaModuleNV( VkDevice device, VkCudaModuleNV module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCudaModuleNV( device, module, pAllocator ); + } - VkResult vkGetMemoryZirconHandleFUCHSIA( VkDevice device, - const VkMemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, - zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); - } + void vkDestroyCudaFunctionNV( VkDevice device, VkCudaFunctionNV function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCudaFunctionNV( device, function, pAllocator 
); + } - VkResult vkGetMemoryZirconHandlePropertiesFUCHSIA( VkDevice device, - VkExternalMemoryHandleTypeFlagBits handleType, - zx_handle_t zirconHandle, - VkMemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryZirconHandlePropertiesFUCHSIA( device, handleType, zirconHandle, pMemoryZirconHandleProperties ); - } -# endif /*VK_USE_PLATFORM_FUCHSIA*/ + void vkCmdCudaLaunchKernelNV( VkCommandBuffer commandBuffer, const VkCudaLaunchInfoNV * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCudaLaunchKernelNV( commandBuffer, pLaunchInfo ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_semaphore === +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === - VkResult vkImportSemaphoreZirconHandleFUCHSIA( VkDevice device, - const VkImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportSemaphoreZirconHandleFUCHSIA( device, pImportSemaphoreZirconHandleInfo ); - } + void vkExportMetalObjectsEXT( VkDevice device, VkExportMetalObjectsInfoEXT * pMetalObjectsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkExportMetalObjectsEXT( device, pMetalObjectsInfo ); + } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ - VkResult vkGetSemaphoreZirconHandleFUCHSIA( VkDevice device, - const VkSemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, - zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); - } -# endif /*VK_USE_PLATFORM_FUCHSIA*/ + //=== VK_KHR_synchronization2 === -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_buffer_collection === + void vkCmdSetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetEvent2KHR( commandBuffer, event, pDependencyInfo ); + } - VkResult vkCreateBufferCollectionFUCHSIA( VkDevice device, - const VkBufferCollectionCreateInfoFUCHSIA * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkBufferCollectionFUCHSIA * pCollection ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateBufferCollectionFUCHSIA( device, pCreateInfo, pAllocator, pCollection ); - } + void vkCmdResetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetEvent2KHR( commandBuffer, event, stageMask ); + } - VkResult vkSetBufferCollectionImageConstraintsFUCHSIA( VkDevice device, - VkBufferCollectionFUCHSIA collection, - const VkImageConstraintsInfoFUCHSIA * pImageConstraintsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetBufferCollectionImageConstraintsFUCHSIA( device, collection, pImageConstraintsInfo ); - } + void vkCmdWaitEvents2KHR( VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent * pEvents, + const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWaitEvents2KHR( commandBuffer, eventCount, pEvents, pDependencyInfos ); + } - VkResult vkSetBufferCollectionBufferConstraintsFUCHSIA( VkDevice device, - VkBufferCollectionFUCHSIA collection, - const VkBufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetBufferCollectionBufferConstraintsFUCHSIA( device, collection, pBufferConstraintsInfo ); - } + void vkCmdPipelineBarrier2KHR( VkCommandBuffer commandBuffer, const VkDependencyInfo * 
pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPipelineBarrier2KHR( commandBuffer, pDependencyInfo ); + } - void vkDestroyBufferCollectionFUCHSIA( VkDevice device, - VkBufferCollectionFUCHSIA collection, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyBufferCollectionFUCHSIA( device, collection, pAllocator ); - } + void + vkCmdWriteTimestamp2KHR( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteTimestamp2KHR( commandBuffer, stage, queryPool, query ); + } - VkResult vkGetBufferCollectionPropertiesFUCHSIA( VkDevice device, - VkBufferCollectionFUCHSIA collection, - VkBufferCollectionPropertiesFUCHSIA * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferCollectionPropertiesFUCHSIA( device, collection, pProperties ); - } -# endif /*VK_USE_PLATFORM_FUCHSIA*/ + VkResult vkQueueSubmit2KHR( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSubmit2KHR( queue, submitCount, pSubmits, fence ); + } - //=== VK_HUAWEI_subpass_shading === + //=== VK_EXT_descriptor_buffer === - VkResult - vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( VkDevice device, VkRenderPass renderpass, VkExtent2D * pMaxWorkgroupSize ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( device, renderpass, pMaxWorkgroupSize ); - } + void vkGetDescriptorSetLayoutSizeEXT( VkDevice device, VkDescriptorSetLayout layout, VkDeviceSize * pLayoutSizeInBytes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutSizeEXT( device, layout, pLayoutSizeInBytes ); + } - void vkCmdSubpassShadingHUAWEI( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSubpassShadingHUAWEI( commandBuffer ); - } + void vkGetDescriptorSetLayoutBindingOffsetEXT( VkDevice device, + VkDescriptorSetLayout layout, + uint32_t binding, + VkDeviceSize * pOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutBindingOffsetEXT( device, layout, binding, pOffset ); + } - //=== VK_HUAWEI_invocation_mask === + void vkGetDescriptorEXT( VkDevice device, const VkDescriptorGetInfoEXT * pDescriptorInfo, size_t dataSize, void * pDescriptor ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorEXT( device, pDescriptorInfo, dataSize, pDescriptor ); + } - void vkCmdBindInvocationMaskHUAWEI( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindInvocationMaskHUAWEI( commandBuffer, imageView, imageLayout ); - } + void vkCmdBindDescriptorBuffersEXT( VkCommandBuffer commandBuffer, + uint32_t bufferCount, + const VkDescriptorBufferBindingInfoEXT * pBindingInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorBuffersEXT( commandBuffer, bufferCount, pBindingInfos ); + } - //=== VK_NV_external_memory_rdma === + void vkCmdSetDescriptorBufferOffsetsEXT( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t setCount, + const uint32_t * pBufferIndices, + const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDescriptorBufferOffsetsEXT( commandBuffer, pipelineBindPoint, layout, firstSet, setCount, pBufferIndices, pOffsets ); + } - VkResult vkGetMemoryRemoteAddressNV( VkDevice device, - const VkMemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, - 
VkRemoteAddressNV * pAddress ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryRemoteAddressNV( device, pMemoryGetRemoteAddressInfo, pAddress ); - } + void vkCmdBindDescriptorBufferEmbeddedSamplersEXT( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorBufferEmbeddedSamplersEXT( commandBuffer, pipelineBindPoint, layout, set ); + } - //=== VK_EXT_pipeline_properties === + VkResult + vkGetBufferOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkBufferCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } - VkResult - vkGetPipelinePropertiesEXT( VkDevice device, const VkPipelineInfoEXT * pPipelineInfo, VkBaseOutStructure * pPipelineProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelinePropertiesEXT( device, pPipelineInfo, pPipelineProperties ); - } + VkResult + vkGetImageOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkImageCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } - //=== VK_EXT_extended_dynamic_state2 === + VkResult vkGetImageViewOpaqueCaptureDescriptorDataEXT( VkDevice device, + const VkImageViewCaptureDescriptorDataInfoEXT * pInfo, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageViewOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } - void vkCmdSetPatchControlPointsEXT( VkCommandBuffer commandBuffer, uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPatchControlPointsEXT( commandBuffer, patchControlPoints ); - } + VkResult vkGetSamplerOpaqueCaptureDescriptorDataEXT( VkDevice device, + const VkSamplerCaptureDescriptorDataInfoEXT * pInfo, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSamplerOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } - void vkCmdSetRasterizerDiscardEnableEXT( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRasterizerDiscardEnableEXT( commandBuffer, rasterizerDiscardEnable ); - } + VkResult vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( VkDevice device, + const VkAccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } - void vkCmdSetDepthBiasEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBiasEnableEXT( commandBuffer, depthBiasEnable ); - } + //=== VK_NV_fragment_shading_rate_enums === - void vkCmdSetLogicOpEXT( VkCommandBuffer commandBuffer, VkLogicOp logicOp ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetLogicOpEXT( commandBuffer, logicOp ); - } + void vkCmdSetFragmentShadingRateEnumNV( VkCommandBuffer commandBuffer, + VkFragmentShadingRateNV shadingRate, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFragmentShadingRateEnumNV( commandBuffer, shadingRate, combinerOps ); + } - void vkCmdSetPrimitiveRestartEnableEXT( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPrimitiveRestartEnableEXT( commandBuffer, primitiveRestartEnable ); - } + //=== VK_EXT_mesh_shader === -# if defined( 
VK_USE_PLATFORM_SCREEN_QNX ) - //=== VK_QNX_screen_surface === + void vkCmdDrawMeshTasksEXT( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksEXT( commandBuffer, groupCountX, groupCountY, groupCountZ ); + } - VkResult vkCreateScreenSurfaceQNX( VkInstance instance, - const VkScreenSurfaceCreateInfoQNX * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateScreenSurfaceQNX( instance, pCreateInfo, pAllocator, pSurface ); - } + void vkCmdDrawMeshTasksIndirectEXT( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectEXT( commandBuffer, buffer, offset, drawCount, stride ); + } - VkBool32 vkGetPhysicalDeviceScreenPresentationSupportQNX( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - struct _screen_window * window ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceScreenPresentationSupportQNX( physicalDevice, queueFamilyIndex, window ); - } -# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + void vkCmdDrawMeshTasksIndirectCountEXT( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectCountEXT( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } - //=== VK_EXT_color_write_enable === + //=== VK_KHR_copy_commands2 === - void vkCmdSetColorWriteEnableEXT( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32 * pColorWriteEnables ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetColorWriteEnableEXT( commandBuffer, attachmentCount, pColorWriteEnables ); - } + void vkCmdCopyBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBuffer2KHR( commandBuffer, pCopyBufferInfo ); + } - //=== VK_KHR_ray_tracing_maintenance1 === + void vkCmdCopyImage2KHR( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImage2KHR( commandBuffer, pCopyImageInfo ); + } - void vkCmdTraceRaysIndirect2KHR( VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdTraceRaysIndirect2KHR( commandBuffer, indirectDeviceAddress ); - } + void vkCmdCopyBufferToImage2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBufferToImage2KHR( commandBuffer, pCopyBufferToImageInfo ); + } - //=== VK_EXT_multi_draw === + void vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImageToBuffer2KHR( commandBuffer, pCopyImageToBufferInfo ); + } - void vkCmdDrawMultiEXT( VkCommandBuffer commandBuffer, - uint32_t drawCount, - const VkMultiDrawInfoEXT * pVertexInfo, - uint32_t instanceCount, - uint32_t firstInstance, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMultiEXT( commandBuffer, drawCount, pVertexInfo, instanceCount, firstInstance, stride ); - } + void vkCmdBlitImage2KHR( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBlitImage2KHR( commandBuffer, pBlitImageInfo ); + } - void vkCmdDrawMultiIndexedEXT( VkCommandBuffer commandBuffer, - uint32_t drawCount, - const VkMultiDrawIndexedInfoEXT * pIndexInfo, - uint32_t instanceCount, - uint32_t firstInstance, - uint32_t stride, - const int32_t * pVertexOffset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMultiIndexedEXT( commandBuffer, drawCount, pIndexInfo, instanceCount, firstInstance, stride, pVertexOffset ); - } + void vkCmdResolveImage2KHR( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResolveImage2KHR( commandBuffer, pResolveImageInfo ); + } - //=== VK_EXT_pageable_device_local_memory === + //=== VK_EXT_device_fault === - void vkSetDeviceMemoryPriorityEXT( VkDevice device, VkDeviceMemory memory, float priority ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetDeviceMemoryPriorityEXT( device, memory, priority ); - } + VkResult vkGetDeviceFaultInfoEXT( VkDevice device, VkDeviceFaultCountsEXT * pFaultCounts, VkDeviceFaultInfoEXT * pFaultInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceFaultInfoEXT( device, pFaultCounts, pFaultInfo ); + } - //=== VK_KHR_maintenance4 === +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === - void vkGetDeviceBufferMemoryRequirementsKHR( VkDevice device, - const VkDeviceBufferMemoryRequirements * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceBufferMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); - } + VkResult vkAcquireWinrtDisplayNV( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireWinrtDisplayNV( physicalDevice, display ); + } - void vkGetDeviceImageMemoryRequirementsKHR( VkDevice device, - const VkDeviceImageMemoryRequirements * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceImageMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); - } + VkResult vkGetWinrtDisplayNV( VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetWinrtDisplayNV( physicalDevice, deviceRelativeId, pDisplay ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - void vkGetDeviceImageSparseMemoryRequirementsKHR( VkDevice device, - const VkDeviceImageMemoryRequirements * pInfo, - uint32_t * pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceImageSparseMemoryRequirementsKHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); - } +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === - //=== VK_VALVE_descriptor_set_host_mapping === + VkResult vkCreateDirectFBSurfaceEXT( VkInstance instance, + const VkDirectFBSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDirectFBSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } - void vkGetDescriptorSetLayoutHostMappingInfoVALVE( VkDevice device, - const VkDescriptorSetBindingReferenceVALVE * pBindingReference, - VkDescriptorSetLayoutHostMappingInfoVALVE * pHostMapping ) const VULKAN_HPP_NOEXCEPT + VkBool32 vkGetPhysicalDeviceDirectFBPresentationSupportEXT( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + IDirectFB * dfb ) 
const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDirectFBPresentationSupportEXT( physicalDevice, queueFamilyIndex, dfb ); + } +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_vertex_input_dynamic_state === + + void vkCmdSetVertexInputEXT( VkCommandBuffer commandBuffer, + uint32_t vertexBindingDescriptionCount, + const VkVertexInputBindingDescription2EXT * pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VkVertexInputAttributeDescription2EXT * pVertexAttributeDescriptions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetVertexInputEXT( + commandBuffer, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions ); + } + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + + VkResult vkGetMemoryZirconHandleFUCHSIA( VkDevice device, + const VkMemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); + } + + VkResult vkGetMemoryZirconHandlePropertiesFUCHSIA( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + VkMemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryZirconHandlePropertiesFUCHSIA( device, handleType, zirconHandle, pMemoryZirconHandleProperties ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + + VkResult + vkImportSemaphoreZirconHandleFUCHSIA( VkDevice device, + const VkImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportSemaphoreZirconHandleFUCHSIA( device, pImportSemaphoreZirconHandleInfo ); + } + + VkResult vkGetSemaphoreZirconHandleFUCHSIA( VkDevice device, + const VkSemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + + VkResult vkCreateBufferCollectionFUCHSIA( VkDevice device, + const VkBufferCollectionCreateInfoFUCHSIA * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBufferCollectionFUCHSIA * pCollection ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateBufferCollectionFUCHSIA( device, pCreateInfo, pAllocator, pCollection ); + } + + VkResult vkSetBufferCollectionImageConstraintsFUCHSIA( VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkImageConstraintsInfoFUCHSIA * pImageConstraintsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetBufferCollectionImageConstraintsFUCHSIA( device, collection, pImageConstraintsInfo ); + } + + VkResult vkSetBufferCollectionBufferConstraintsFUCHSIA( VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkBufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetBufferCollectionBufferConstraintsFUCHSIA( device, collection, pBufferConstraintsInfo ); + } + + void vkDestroyBufferCollectionFUCHSIA( VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyBufferCollectionFUCHSIA( device, collection, pAllocator ); + } + + VkResult 
vkGetBufferCollectionPropertiesFUCHSIA( VkDevice device, + VkBufferCollectionFUCHSIA collection, + VkBufferCollectionPropertiesFUCHSIA * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferCollectionPropertiesFUCHSIA( device, collection, pProperties ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + + VkResult + vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( VkDevice device, VkRenderPass renderpass, VkExtent2D * pMaxWorkgroupSize ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( device, renderpass, pMaxWorkgroupSize ); + } + + void vkCmdSubpassShadingHUAWEI( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSubpassShadingHUAWEI( commandBuffer ); + } + + //=== VK_HUAWEI_invocation_mask === + + void vkCmdBindInvocationMaskHUAWEI( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindInvocationMaskHUAWEI( commandBuffer, imageView, imageLayout ); + } + + //=== VK_NV_external_memory_rdma === + + VkResult vkGetMemoryRemoteAddressNV( VkDevice device, + const VkMemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, + VkRemoteAddressNV * pAddress ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryRemoteAddressNV( device, pMemoryGetRemoteAddressInfo, pAddress ); + } + + //=== VK_EXT_pipeline_properties === + + VkResult vkGetPipelinePropertiesEXT( VkDevice device, + const VkPipelineInfoEXT * pPipelineInfo, + VkBaseOutStructure * pPipelineProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelinePropertiesEXT( device, pPipelineInfo, pPipelineProperties ); + } + + //=== VK_EXT_extended_dynamic_state2 === + + void vkCmdSetPatchControlPointsEXT( VkCommandBuffer commandBuffer, uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPatchControlPointsEXT( commandBuffer, patchControlPoints ); + } + + void vkCmdSetRasterizerDiscardEnableEXT( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRasterizerDiscardEnableEXT( commandBuffer, rasterizerDiscardEnable ); + } + + void vkCmdSetDepthBiasEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBiasEnableEXT( commandBuffer, depthBiasEnable ); + } + + void vkCmdSetLogicOpEXT( VkCommandBuffer commandBuffer, VkLogicOp logicOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLogicOpEXT( commandBuffer, logicOp ); + } + + void vkCmdSetPrimitiveRestartEnableEXT( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveRestartEnableEXT( commandBuffer, primitiveRestartEnable ); + } + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + VkResult vkCreateScreenSurfaceQNX( VkInstance instance, + const VkScreenSurfaceCreateInfoQNX * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateScreenSurfaceQNX( instance, pCreateInfo, pAllocator, pSurface ); + } + + VkBool32 vkGetPhysicalDeviceScreenPresentationSupportQNX( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct _screen_window * window ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceScreenPresentationSupportQNX( physicalDevice, queueFamilyIndex, window ); + } +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + + void 
vkCmdSetColorWriteEnableEXT( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32 * pColorWriteEnables ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorWriteEnableEXT( commandBuffer, attachmentCount, pColorWriteEnables ); + } + + //=== VK_KHR_ray_tracing_maintenance1 === + + void vkCmdTraceRaysIndirect2KHR( VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysIndirect2KHR( commandBuffer, indirectDeviceAddress ); + } + + //=== VK_EXT_multi_draw === + + void vkCmdDrawMultiEXT( VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawInfoEXT * pVertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMultiEXT( commandBuffer, drawCount, pVertexInfo, instanceCount, firstInstance, stride ); + } + + void vkCmdDrawMultiIndexedEXT( VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawIndexedInfoEXT * pIndexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + const int32_t * pVertexOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMultiIndexedEXT( commandBuffer, drawCount, pIndexInfo, instanceCount, firstInstance, stride, pVertexOffset ); + } + + //=== VK_EXT_opacity_micromap === + + VkResult vkCreateMicromapEXT( VkDevice device, + const VkMicromapCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkMicromapEXT * pMicromap ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateMicromapEXT( device, pCreateInfo, pAllocator, pMicromap ); + } + + void vkDestroyMicromapEXT( VkDevice device, VkMicromapEXT micromap, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyMicromapEXT( device, micromap, pAllocator ); + } + + void vkCmdBuildMicromapsEXT( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildMicromapsEXT( commandBuffer, infoCount, pInfos ); + } + + VkResult vkBuildMicromapsEXT( VkDevice device, + VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBuildMicromapsEXT( device, deferredOperation, infoCount, pInfos ); + } + + VkResult vkCopyMicromapEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMicromapEXT( device, deferredOperation, pInfo ); + } + + VkResult vkCopyMicromapToMemoryEXT( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMicromapToMemoryEXT( device, deferredOperation, pInfo ); + } + + VkResult vkCopyMemoryToMicromapEXT( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToMicromapEXT( device, deferredOperation, pInfo ); + } + + VkResult vkWriteMicromapsPropertiesEXT( VkDevice device, + uint32_t micromapCount, + const VkMicromapEXT * pMicromaps, + VkQueryType queryType, + size_t dataSize, + void * pData, + size_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWriteMicromapsPropertiesEXT( device, micromapCount, pMicromaps, queryType, dataSize, pData, stride ); + } + + void vkCmdCopyMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { 
+ return ::vkCmdCopyMicromapEXT( commandBuffer, pInfo ); + } + + void vkCmdCopyMicromapToMemoryEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMicromapToMemoryEXT( commandBuffer, pInfo ); + } + + void vkCmdCopyMemoryToMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryToMicromapEXT( commandBuffer, pInfo ); + } + + void vkCmdWriteMicromapsPropertiesEXT( VkCommandBuffer commandBuffer, + uint32_t micromapCount, + const VkMicromapEXT * pMicromaps, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteMicromapsPropertiesEXT( commandBuffer, micromapCount, pMicromaps, queryType, queryPool, firstQuery ); + } + + void vkGetDeviceMicromapCompatibilityEXT( VkDevice device, + const VkMicromapVersionInfoEXT * pVersionInfo, + VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMicromapCompatibilityEXT( device, pVersionInfo, pCompatibility ); + } + + void vkGetMicromapBuildSizesEXT( VkDevice device, + VkAccelerationStructureBuildTypeKHR buildType, + const VkMicromapBuildInfoEXT * pBuildInfo, + VkMicromapBuildSizesInfoEXT * pSizeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMicromapBuildSizesEXT( device, buildType, pBuildInfo, pSizeInfo ); + } + + //=== VK_HUAWEI_cluster_culling_shader === + + void vkCmdDrawClusterHUAWEI( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawClusterHUAWEI( commandBuffer, groupCountX, groupCountY, groupCountZ ); + } + + void vkCmdDrawClusterIndirectHUAWEI( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawClusterIndirectHUAWEI( commandBuffer, buffer, offset ); + } + + //=== VK_EXT_pageable_device_local_memory === + + void vkSetDeviceMemoryPriorityEXT( VkDevice device, VkDeviceMemory memory, float priority ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetDeviceMemoryPriorityEXT( device, memory, priority ); + } + + //=== VK_KHR_maintenance4 === + + void vkGetDeviceBufferMemoryRequirementsKHR( VkDevice device, + const VkDeviceBufferMemoryRequirements * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceBufferMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); + } + + void vkGetDeviceImageMemoryRequirementsKHR( VkDevice device, + const VkDeviceImageMemoryRequirements * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); + } + + void vkGetDeviceImageSparseMemoryRequirementsKHR( VkDevice device, + const VkDeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageSparseMemoryRequirementsKHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + //=== VK_VALVE_descriptor_set_host_mapping === + + void vkGetDescriptorSetLayoutHostMappingInfoVALVE( VkDevice device, + const VkDescriptorSetBindingReferenceVALVE * pBindingReference, + VkDescriptorSetLayoutHostMappingInfoVALVE * pHostMapping ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkGetDescriptorSetLayoutHostMappingInfoVALVE( device, pBindingReference, pHostMapping ); + } + + void vkGetDescriptorSetHostMappingVALVE( VkDevice device, VkDescriptorSet descriptorSet, void ** ppData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetHostMappingVALVE( device, descriptorSet, ppData ); + } + + //=== VK_NV_copy_memory_indirect === + + void vkCmdCopyMemoryIndirectNV( VkCommandBuffer commandBuffer, + VkDeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride ); + } + + void vkCmdCopyMemoryToImageIndirectNV( VkCommandBuffer commandBuffer, + VkDeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + VkImage dstImage, + VkImageLayout dstImageLayout, + const VkImageSubresourceLayers * pImageSubresources ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryToImageIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride, dstImage, dstImageLayout, pImageSubresources ); + } + + //=== VK_NV_memory_decompression === + + void vkCmdDecompressMemoryNV( VkCommandBuffer commandBuffer, + uint32_t decompressRegionCount, + const VkDecompressMemoryRegionNV * pDecompressMemoryRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDecompressMemoryNV( commandBuffer, decompressRegionCount, pDecompressMemoryRegions ); + } + + void vkCmdDecompressMemoryIndirectCountNV( VkCommandBuffer commandBuffer, + VkDeviceAddress indirectCommandsAddress, + VkDeviceAddress indirectCommandsCountAddress, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDecompressMemoryIndirectCountNV( commandBuffer, indirectCommandsAddress, indirectCommandsCountAddress, stride ); + } + + //=== VK_NV_device_generated_commands_compute === + + void vkGetPipelineIndirectMemoryRequirementsNV( VkDevice device, + const VkComputePipelineCreateInfo * pCreateInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineIndirectMemoryRequirementsNV( device, pCreateInfo, pMemoryRequirements ); + } + + void vkCmdUpdatePipelineIndirectBufferNV( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdUpdatePipelineIndirectBufferNV( commandBuffer, pipelineBindPoint, pipeline ); + } + + VkDeviceAddress vkGetPipelineIndirectDeviceAddressNV( VkDevice device, const VkPipelineIndirectDeviceAddressInfoNV * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineIndirectDeviceAddressNV( device, pInfo ); + } + + //=== VK_EXT_extended_dynamic_state3 === + + void vkCmdSetDepthClampEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthClampEnableEXT( commandBuffer, depthClampEnable ); + } + + void vkCmdSetPolygonModeEXT( VkCommandBuffer commandBuffer, VkPolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPolygonModeEXT( commandBuffer, polygonMode ); + } + + void vkCmdSetRasterizationSamplesEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRasterizationSamplesEXT( commandBuffer, rasterizationSamples ); + } + + void vkCmdSetSampleMaskEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask * pSampleMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetSampleMaskEXT( commandBuffer, samples, pSampleMask ); + } + + void 
vkCmdSetAlphaToCoverageEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetAlphaToCoverageEnableEXT( commandBuffer, alphaToCoverageEnable ); + } + + void vkCmdSetAlphaToOneEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetAlphaToOneEnableEXT( commandBuffer, alphaToOneEnable ); + } + + void vkCmdSetLogicOpEnableEXT( VkCommandBuffer commandBuffer, VkBool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLogicOpEnableEXT( commandBuffer, logicOpEnable ); + } + + void vkCmdSetColorBlendEnableEXT( VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkBool32 * pColorBlendEnables ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorBlendEnableEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEnables ); + } + + void vkCmdSetColorBlendEquationEXT( VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorBlendEquationEXT * pColorBlendEquations ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorBlendEquationEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEquations ); + } + + void vkCmdSetColorWriteMaskEXT( VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorComponentFlags * pColorWriteMasks ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorWriteMaskEXT( commandBuffer, firstAttachment, attachmentCount, pColorWriteMasks ); + } + + void vkCmdSetTessellationDomainOriginEXT( VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetTessellationDomainOriginEXT( commandBuffer, domainOrigin ); + } + + void vkCmdSetRasterizationStreamEXT( VkCommandBuffer commandBuffer, uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRasterizationStreamEXT( commandBuffer, rasterizationStream ); + } + + void vkCmdSetConservativeRasterizationModeEXT( VkCommandBuffer commandBuffer, + VkConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetConservativeRasterizationModeEXT( commandBuffer, conservativeRasterizationMode ); + } + + void vkCmdSetExtraPrimitiveOverestimationSizeEXT( VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetExtraPrimitiveOverestimationSizeEXT( commandBuffer, extraPrimitiveOverestimationSize ); + } + + void vkCmdSetDepthClipEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthClipEnableEXT( commandBuffer, depthClipEnable ); + } + + void vkCmdSetSampleLocationsEnableEXT( VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetSampleLocationsEnableEXT( commandBuffer, sampleLocationsEnable ); + } + + void vkCmdSetColorBlendAdvancedEXT( VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorBlendAdvancedEXT * pColorBlendAdvanced ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorBlendAdvancedEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendAdvanced ); + } + + void vkCmdSetProvokingVertexModeEXT( VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetProvokingVertexModeEXT( commandBuffer, 
provokingVertexMode ); + } + + void vkCmdSetLineRasterizationModeEXT( VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineRasterizationModeEXT( commandBuffer, lineRasterizationMode ); + } + + void vkCmdSetLineStippleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineStippleEnableEXT( commandBuffer, stippledLineEnable ); + } + + void vkCmdSetDepthClipNegativeOneToOneEXT( VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthClipNegativeOneToOneEXT( commandBuffer, negativeOneToOne ); + } + + void vkCmdSetViewportWScalingEnableNV( VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWScalingEnableNV( commandBuffer, viewportWScalingEnable ); + } + + void vkCmdSetViewportSwizzleNV( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportSwizzleNV * pViewportSwizzles ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportSwizzleNV( commandBuffer, firstViewport, viewportCount, pViewportSwizzles ); + } + + void vkCmdSetCoverageToColorEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageToColorEnableNV( commandBuffer, coverageToColorEnable ); + } + + void vkCmdSetCoverageToColorLocationNV( VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageToColorLocationNV( commandBuffer, coverageToColorLocation ); + } + + void vkCmdSetCoverageModulationModeNV( VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageModulationModeNV( commandBuffer, coverageModulationMode ); + } + + void vkCmdSetCoverageModulationTableEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageModulationTableEnableNV( commandBuffer, coverageModulationTableEnable ); + } + + void vkCmdSetCoverageModulationTableNV( VkCommandBuffer commandBuffer, + uint32_t coverageModulationTableCount, + const float * pCoverageModulationTable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageModulationTableNV( commandBuffer, coverageModulationTableCount, pCoverageModulationTable ); + } + + void vkCmdSetShadingRateImageEnableNV( VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetShadingRateImageEnableNV( commandBuffer, shadingRateImageEnable ); + } + + void vkCmdSetRepresentativeFragmentTestEnableNV( VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRepresentativeFragmentTestEnableNV( commandBuffer, representativeFragmentTestEnable ); + } + + void vkCmdSetCoverageReductionModeNV( VkCommandBuffer commandBuffer, VkCoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageReductionModeNV( commandBuffer, coverageReductionMode ); + } + + //=== VK_EXT_shader_module_identifier === + + void vkGetShaderModuleIdentifierEXT( VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetShaderModuleIdentifierEXT( device, shaderModule, pIdentifier ); + } + + void 
vkGetShaderModuleCreateInfoIdentifierEXT( VkDevice device, + const VkShaderModuleCreateInfo * pCreateInfo, + VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetShaderModuleCreateInfoIdentifierEXT( device, pCreateInfo, pIdentifier ); + } + + //=== VK_NV_optical_flow === + + VkResult vkGetPhysicalDeviceOpticalFlowImageFormatsNV( VkPhysicalDevice physicalDevice, + const VkOpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo, + uint32_t * pFormatCount, + VkOpticalFlowImageFormatPropertiesNV * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceOpticalFlowImageFormatsNV( physicalDevice, pOpticalFlowImageFormatInfo, pFormatCount, pImageFormatProperties ); + } + + VkResult vkCreateOpticalFlowSessionNV( VkDevice device, + const VkOpticalFlowSessionCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkOpticalFlowSessionNV * pSession ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateOpticalFlowSessionNV( device, pCreateInfo, pAllocator, pSession ); + } + + void vkDestroyOpticalFlowSessionNV( VkDevice device, VkOpticalFlowSessionNV session, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyOpticalFlowSessionNV( device, session, pAllocator ); + } + + VkResult vkBindOpticalFlowSessionImageNV( VkDevice device, + VkOpticalFlowSessionNV session, + VkOpticalFlowSessionBindingPointNV bindingPoint, + VkImageView view, + VkImageLayout layout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindOpticalFlowSessionImageNV( device, session, bindingPoint, view, layout ); + } + + void vkCmdOpticalFlowExecuteNV( VkCommandBuffer commandBuffer, + VkOpticalFlowSessionNV session, + const VkOpticalFlowExecuteInfoNV * pExecuteInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdOpticalFlowExecuteNV( commandBuffer, session, pExecuteInfo ); + } + + //=== VK_KHR_maintenance5 === + + void vkCmdBindIndexBuffer2KHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindIndexBuffer2KHR( commandBuffer, buffer, offset, size, indexType ); + } + + void + vkGetRenderingAreaGranularityKHR( VkDevice device, const VkRenderingAreaInfo * pRenderingAreaInfo, VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRenderingAreaGranularityKHR( device, pRenderingAreaInfo, pGranularity ); + } + + void vkGetDeviceImageSubresourceLayoutKHR( VkDevice device, + const VkDeviceImageSubresourceInfo * pInfo, + VkSubresourceLayout2 * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageSubresourceLayoutKHR( device, pInfo, pLayout ); + } + + void vkGetImageSubresourceLayout2KHR( VkDevice device, + VkImage image, + const VkImageSubresource2 * pSubresource, + VkSubresourceLayout2 * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSubresourceLayout2KHR( device, image, pSubresource, pLayout ); + } + + //=== VK_AMD_anti_lag === + + void vkAntiLagUpdateAMD( VkDevice device, const VkAntiLagDataAMD * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAntiLagUpdateAMD( device, pData ); + } + + //=== VK_EXT_shader_object === + + VkResult vkCreateShadersEXT( VkDevice device, + uint32_t createInfoCount, + const VkShaderCreateInfoEXT * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkShaderEXT * pShaders ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateShadersEXT( device, createInfoCount, pCreateInfos, pAllocator, pShaders ); + } + + void vkDestroyShaderEXT( 
VkDevice device, VkShaderEXT shader, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyShaderEXT( device, shader, pAllocator ); + } + + VkResult vkGetShaderBinaryDataEXT( VkDevice device, VkShaderEXT shader, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetShaderBinaryDataEXT( device, shader, pDataSize, pData ); + } + + void vkCmdBindShadersEXT( VkCommandBuffer commandBuffer, + uint32_t stageCount, + const VkShaderStageFlagBits * pStages, + const VkShaderEXT * pShaders ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindShadersEXT( commandBuffer, stageCount, pStages, pShaders ); + } + + void vkCmdSetDepthClampRangeEXT( VkCommandBuffer commandBuffer, + VkDepthClampModeEXT depthClampMode, + const VkDepthClampRangeEXT * pDepthClampRange ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthClampRangeEXT( commandBuffer, depthClampMode, pDepthClampRange ); + } + + //=== VK_KHR_pipeline_binary === + + VkResult vkCreatePipelineBinariesKHR( VkDevice device, + const VkPipelineBinaryCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPipelineBinaryHandlesInfoKHR * pBinaries ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePipelineBinariesKHR( device, pCreateInfo, pAllocator, pBinaries ); + } + + void vkDestroyPipelineBinaryKHR( VkDevice device, VkPipelineBinaryKHR pipelineBinary, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipelineBinaryKHR( device, pipelineBinary, pAllocator ); + } + + VkResult vkGetPipelineKeyKHR( VkDevice device, + const VkPipelineCreateInfoKHR * pPipelineCreateInfo, + VkPipelineBinaryKeyKHR * pPipelineKey ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineKeyKHR( device, pPipelineCreateInfo, pPipelineKey ); + } + + VkResult vkGetPipelineBinaryDataKHR( VkDevice device, + const VkPipelineBinaryDataInfoKHR * pInfo, + VkPipelineBinaryKeyKHR * pPipelineBinaryKey, + size_t * pPipelineBinaryDataSize, + void * pPipelineBinaryData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineBinaryDataKHR( device, pInfo, pPipelineBinaryKey, pPipelineBinaryDataSize, pPipelineBinaryData ); + } + + VkResult vkReleaseCapturedPipelineDataKHR( VkDevice device, + const VkReleaseCapturedPipelineDataInfoKHR * pInfo, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseCapturedPipelineDataKHR( device, pInfo, pAllocator ); + } + + //=== VK_QCOM_tile_properties === + + VkResult vkGetFramebufferTilePropertiesQCOM( VkDevice device, + VkFramebuffer framebuffer, + uint32_t * pPropertiesCount, + VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFramebufferTilePropertiesQCOM( device, framebuffer, pPropertiesCount, pProperties ); + } + + VkResult vkGetDynamicRenderingTilePropertiesQCOM( VkDevice device, + const VkRenderingInfo * pRenderingInfo, + VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDynamicRenderingTilePropertiesQCOM( device, pRenderingInfo, pProperties ); + } + + //=== VK_NV_cooperative_vector === + + VkResult vkGetPhysicalDeviceCooperativeVectorPropertiesNV( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkCooperativeVectorPropertiesNV * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCooperativeVectorPropertiesNV( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkConvertCooperativeVectorMatrixNV( VkDevice device, const VkConvertCooperativeVectorMatrixInfoNV * pInfo ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkConvertCooperativeVectorMatrixNV( device, pInfo ); + } + + void vkCmdConvertCooperativeVectorMatrixNV( VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkConvertCooperativeVectorMatrixInfoNV * pInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdConvertCooperativeVectorMatrixNV( commandBuffer, infoCount, pInfos ); + } + + //=== VK_NV_low_latency2 === + + VkResult vkSetLatencySleepModeNV( VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepModeInfoNV * pSleepModeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetLatencySleepModeNV( device, swapchain, pSleepModeInfo ); + } + + VkResult vkLatencySleepNV( VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepInfoNV * pSleepInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkLatencySleepNV( device, swapchain, pSleepInfo ); + } + + void vkSetLatencyMarkerNV( VkDevice device, VkSwapchainKHR swapchain, const VkSetLatencyMarkerInfoNV * pLatencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetLatencyMarkerNV( device, swapchain, pLatencyMarkerInfo ); + } + + void vkGetLatencyTimingsNV( VkDevice device, VkSwapchainKHR swapchain, VkGetLatencyMarkerInfoNV * pLatencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetLatencyTimingsNV( device, swapchain, pLatencyMarkerInfo ); + } + + void vkQueueNotifyOutOfBandNV( VkQueue queue, const VkOutOfBandQueueTypeInfoNV * pQueueTypeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueNotifyOutOfBandNV( queue, pQueueTypeInfo ); + } + + //=== VK_KHR_cooperative_matrix === + + VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkCooperativeMatrixPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); + } + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + + void vkCmdSetAttachmentFeedbackLoopEnableEXT( VkCommandBuffer commandBuffer, VkImageAspectFlags aspectMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetAttachmentFeedbackLoopEnableEXT( commandBuffer, aspectMask ); + } + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + + VkResult vkGetScreenBufferPropertiesQNX( VkDevice device, + const struct _screen_buffer * buffer, + VkScreenBufferPropertiesQNX * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetScreenBufferPropertiesQNX( device, buffer, pProperties ); + } +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_line_rasterization === + + void vkCmdSetLineStippleKHR( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineStippleKHR( commandBuffer, lineStippleFactor, lineStipplePattern ); + } + + //=== VK_KHR_calibrated_timestamps === + + VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( VkPhysicalDevice physicalDevice, + uint32_t * pTimeDomainCount, + VkTimeDomainKHR * pTimeDomains ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( physicalDevice, pTimeDomainCount, pTimeDomains ); + } + + VkResult vkGetCalibratedTimestampsKHR( VkDevice device, + uint32_t timestampCount, + const VkCalibratedTimestampInfoKHR * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetCalibratedTimestampsKHR( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); + } + + //=== 
VK_KHR_maintenance6 === + + void vkCmdBindDescriptorSets2KHR( VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfo * pBindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorSets2KHR( commandBuffer, pBindDescriptorSetsInfo ); + } + + void vkCmdPushConstants2KHR( VkCommandBuffer commandBuffer, const VkPushConstantsInfo * pPushConstantsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushConstants2KHR( commandBuffer, pPushConstantsInfo ); + } + + void vkCmdPushDescriptorSet2KHR( VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfo * pPushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSet2KHR( commandBuffer, pPushDescriptorSetInfo ); + } + + void vkCmdPushDescriptorSetWithTemplate2KHR( VkCommandBuffer commandBuffer, + const VkPushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSetWithTemplate2KHR( commandBuffer, pPushDescriptorSetWithTemplateInfo ); + } + + void vkCmdSetDescriptorBufferOffsets2EXT( VkCommandBuffer commandBuffer, + const VkSetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDescriptorBufferOffsets2EXT( commandBuffer, pSetDescriptorBufferOffsetsInfo ); + } + + void vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( + VkCommandBuffer commandBuffer, + const VkBindDescriptorBufferEmbeddedSamplersInfoEXT * pBindDescriptorBufferEmbeddedSamplersInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( commandBuffer, pBindDescriptorBufferEmbeddedSamplersInfo ); + } + + //=== VK_NV_cluster_acceleration_structure === + + void vkGetClusterAccelerationStructureBuildSizesNV( VkDevice device, + const VkClusterAccelerationStructureInputInfoNV * pInfo, + VkAccelerationStructureBuildSizesInfoKHR * pSizeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetClusterAccelerationStructureBuildSizesNV( device, pInfo, pSizeInfo ); + } + + void vkCmdBuildClusterAccelerationStructureIndirectNV( VkCommandBuffer commandBuffer, + const VkClusterAccelerationStructureCommandsInfoNV * pCommandInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildClusterAccelerationStructureIndirectNV( commandBuffer, pCommandInfos ); + } + + //=== VK_NV_partitioned_acceleration_structure === + + void vkGetPartitionedAccelerationStructuresBuildSizesNV( VkDevice device, + const VkPartitionedAccelerationStructureInstancesInputNV * pInfo, + VkAccelerationStructureBuildSizesInfoKHR * pSizeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPartitionedAccelerationStructuresBuildSizesNV( device, pInfo, pSizeInfo ); + } + + void vkCmdBuildPartitionedAccelerationStructuresNV( VkCommandBuffer commandBuffer, + const VkBuildPartitionedAccelerationStructureInfoNV * pBuildInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildPartitionedAccelerationStructuresNV( commandBuffer, pBuildInfo ); + } + + //=== VK_EXT_device_generated_commands === + + void vkGetGeneratedCommandsMemoryRequirementsEXT( VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoEXT * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetGeneratedCommandsMemoryRequirementsEXT( device, pInfo, pMemoryRequirements ); + } + + void vkCmdPreprocessGeneratedCommandsEXT( VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoEXT * pGeneratedCommandsInfo, + VkCommandBuffer stateCommandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkCmdPreprocessGeneratedCommandsEXT( commandBuffer, pGeneratedCommandsInfo, stateCommandBuffer ); + } + + void vkCmdExecuteGeneratedCommandsEXT( VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoEXT * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdExecuteGeneratedCommandsEXT( commandBuffer, isPreprocessed, pGeneratedCommandsInfo ); + } + + VkResult vkCreateIndirectCommandsLayoutEXT( VkDevice device, + const VkIndirectCommandsLayoutCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkIndirectCommandsLayoutEXT * pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateIndirectCommandsLayoutEXT( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); + } + + void vkDestroyIndirectCommandsLayoutEXT( VkDevice device, + VkIndirectCommandsLayoutEXT indirectCommandsLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyIndirectCommandsLayoutEXT( device, indirectCommandsLayout, pAllocator ); + } + + VkResult vkCreateIndirectExecutionSetEXT( VkDevice device, + const VkIndirectExecutionSetCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkIndirectExecutionSetEXT * pIndirectExecutionSet ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateIndirectExecutionSetEXT( device, pCreateInfo, pAllocator, pIndirectExecutionSet ); + } + + void vkDestroyIndirectExecutionSetEXT( VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyIndirectExecutionSetEXT( device, indirectExecutionSet, pAllocator ); + } + + void vkUpdateIndirectExecutionSetPipelineEXT( VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VkWriteIndirectExecutionSetPipelineEXT * pExecutionSetWrites ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateIndirectExecutionSetPipelineEXT( device, indirectExecutionSet, executionSetWriteCount, pExecutionSetWrites ); + } + + void vkUpdateIndirectExecutionSetShaderEXT( VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VkWriteIndirectExecutionSetShaderEXT * pExecutionSetWrites ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateIndirectExecutionSetShaderEXT( device, indirectExecutionSet, executionSetWriteCount, pExecutionSetWrites ); + } + + //=== VK_NV_cooperative_matrix2 === + + VkResult vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkCooperativeMatrixFlexibleDimensionsPropertiesNV * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( physicalDevice, pPropertyCount, pProperties ); + } + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + + VkResult + vkGetMemoryMetalHandleEXT( VkDevice device, const VkMemoryGetMetalHandleInfoEXT * pGetMetalHandleInfo, void ** pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryMetalHandleEXT( device, pGetMetalHandleInfo, pHandle ); + } + + VkResult vkGetMemoryMetalHandlePropertiesEXT( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + const void * pHandle, + VkMemoryMetalHandlePropertiesEXT * pMemoryMetalHandleProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryMetalHandlePropertiesEXT( device, handleType, pHandle, pMemoryMetalHandleProperties ); + } 
+# endif /*VK_USE_PLATFORM_METAL_EXT*/
+    };
+
+    inline DispatchLoaderStatic & getDispatchLoaderStatic()
+    {
+      static DispatchLoaderStatic dls;
+      return dls;
+    }
+#endif
+
+  }  // namespace detail
+#if ( 14 <= VULKAN_HPP_CPP_VERSION )
+  using std::exchange;
+#else
+  template <class T, class U = T>
+  VULKAN_HPP_CONSTEXPR_14 VULKAN_HPP_INLINE T exchange( T & obj, U && newValue )
+  {
+    T oldValue = std::move( obj );
+    obj        = std::forward<U>( newValue );
+    return oldValue;
+  }
+#endif
+
+#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+  struct AllocationCallbacks;
+
+  namespace detail
+  {
+    template <typename OwnerType, typename Dispatch>
+    class ObjectDestroy
+    {
+    public:
+      ObjectDestroy() = default;
+
+      ObjectDestroy( OwnerType owner,
+                     Optional<const AllocationCallbacks> allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                     Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+        : m_owner( owner )
+        , m_allocationCallbacks( allocationCallbacks )
+        , m_dispatch( &dispatch )
+      {
+      }
+
+      OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_owner;
+      }
+
+      Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_allocationCallbacks;
+      }
+
+      Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT
+      {
+        return *m_dispatch;
+      }
+
+    protected:
+      template <typename T>
+      void destroy( T t ) VULKAN_HPP_NOEXCEPT
+      {
+        VULKAN_HPP_ASSERT( m_owner && m_dispatch );
+        m_owner.destroy( t, m_allocationCallbacks, *m_dispatch );
+      }
+
+    private:
+      OwnerType m_owner = {};
+      Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
+      Dispatch const * m_dispatch = nullptr;
+    };
+
+    class NoParent;
+
+    template <typename Dispatch>
+    class ObjectDestroy<NoParent, Dispatch>
+    {
+    public:
+      ObjectDestroy() = default;
+
+      ObjectDestroy( Optional<const AllocationCallbacks> allocationCallbacks,
+                     Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+        : m_allocationCallbacks( allocationCallbacks )
+        , m_dispatch( &dispatch )
+      {
+      }
+
+      Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_allocationCallbacks;
+      }
+
+      Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT
+      {
+        return *m_dispatch;
+      }
+
+    protected:
+      template <typename T>
+      void destroy( T t ) VULKAN_HPP_NOEXCEPT
+      {
+        VULKAN_HPP_ASSERT( m_dispatch );
+        t.destroy( m_allocationCallbacks, *m_dispatch );
+      }
+
+    private:
+      Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
+      Dispatch const * m_dispatch = nullptr;
+    };
+
+    template <typename OwnerType, typename Dispatch>
+    class ObjectFree
+    {
+    public:
+      ObjectFree() = default;
+
+      ObjectFree( OwnerType owner,
+                  Optional<const AllocationCallbacks> allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                  Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+        : m_owner( owner )
+        , m_allocationCallbacks( allocationCallbacks )
+        , m_dispatch( &dispatch )
+      {
+      }
+
+      OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_owner;
+      }
+
+      Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_allocationCallbacks;
+      }
+
+      Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT
+      {
+        return *m_dispatch;
+      }
+
+    protected:
+      template <typename T>
+      void destroy( T t ) VULKAN_HPP_NOEXCEPT
+      {
+        VULKAN_HPP_ASSERT( m_owner && m_dispatch );
+        ( m_owner.free )( t, m_allocationCallbacks, *m_dispatch );
+      }
+
+    private:
+      OwnerType m_owner = {};
+      Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
+      Dispatch const * m_dispatch = nullptr;
+    };
+
+    template <typename OwnerType, typename Dispatch>
+    class ObjectRelease
+    {
+    public:
+      ObjectRelease() = default;
+
+      ObjectRelease( OwnerType owner, Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+        : m_owner( owner )
+        , m_dispatch( &dispatch )
+      {
+      }
+
+      OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_owner;
+      }
+
+      Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT
+      {
+        return *m_dispatch;
+      }
+
+    protected:
+      template <typename T>
+      void destroy( T t ) VULKAN_HPP_NOEXCEPT
+      {
+        VULKAN_HPP_ASSERT( m_owner && m_dispatch );
+        m_owner.release( t, *m_dispatch );
+      }
+
+    private:
+      OwnerType m_owner = {};
+      Dispatch const * m_dispatch = nullptr;
+    };
+
+    template <typename OwnerType, typename PoolType, typename Dispatch>
+    class PoolFree
+    {
+    public:
+      PoolFree() = default;
+
+      PoolFree( OwnerType owner, PoolType pool, Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+        : m_owner( owner )
+        , m_pool( pool )
+        , m_dispatch( &dispatch )
+      {
+      }
+
+      OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_owner;
+      }
+
+      PoolType getPool() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_pool;
+      }
+
+      Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT
+      {
+        return *m_dispatch;
+      }
+
+    protected:
+      template <typename T>
+      void destroy( T t ) VULKAN_HPP_NOEXCEPT
+      {
+        ( m_owner.free )( m_pool, t, *m_dispatch );
+      }
+
+    private:
+      OwnerType m_owner = OwnerType();
+      PoolType m_pool = PoolType();
+      Dispatch const * m_dispatch = nullptr;
+    };
+
+  }  // namespace detail
+#endif  // !VULKAN_HPP_NO_SMART_HANDLE
+
+  //==================
+  //=== BASE TYPEs ===
+  //==================
+
+  using Bool32 = uint32_t;
+  using DeviceAddress = uint64_t;
+  using DeviceSize = uint64_t;
+  using RemoteAddressNV = void *;
+  using SampleMask = uint32_t;
+
+  template <typename EnumType, EnumType value>
+  struct CppType
+  {
+  };
+}  // namespace VULKAN_HPP_NAMESPACE
+
+#include
+#if !defined( VULKAN_HPP_NO_TO_STRING )
+# include
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+namespace std
+{
+  template <>
+  struct is_error_code_enum<VULKAN_HPP_NAMESPACE::Result> : public true_type
+  {
+  };
+}  // namespace std
+#endif
+
+namespace VULKAN_HPP_NAMESPACE
+{
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  class ErrorCategoryImpl : public std::error_category
+  {
+  public:
+    virtual const char * name() const VULKAN_HPP_NOEXCEPT override
+    {
+      return VULKAN_HPP_NAMESPACE_STRING "::Result";
+    }
+
+    virtual std::string message( int ev ) const override
+    {
+# if defined( VULKAN_HPP_NO_TO_STRING )
+      return std::to_string( ev );
+# else
+      return VULKAN_HPP_NAMESPACE::to_string( static_cast<VULKAN_HPP_NAMESPACE::Result>( ev ) );
+# endif
+    }
+  };
+
+  class Error
+  {
+  public:
+    Error() VULKAN_HPP_NOEXCEPT = default;
+    Error( const Error & ) VULKAN_HPP_NOEXCEPT = default;
+    virtual ~Error() VULKAN_HPP_NOEXCEPT = default;
+
+    virtual const char * what() const VULKAN_HPP_NOEXCEPT = 0;
+  };
+
+  class LogicError
+    : public Error
+    , public std::logic_error
+  {
+  public:
+    explicit LogicError( const std::string & what ) : Error(), std::logic_error( what ) {}
+
+    explicit LogicError( char const * what ) : Error(), std::logic_error( what ) {}
+
+    virtual const char * what() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::logic_error::what();
+    }
+  };
+
+  class SystemError
+    : public Error
+    , public std::system_error
+  {
+  public:
+    SystemError( std::error_code ec ) : Error(), std::system_error( ec ) {}
+
+    SystemError( std::error_code ec, std::string const & what ) : Error(), std::system_error( ec, what ) {}
+
+    SystemError( std::error_code ec, char const * what ) : Error(), std::system_error( ec, what ) {}
+
+    SystemError( int ev, std::error_category const & ecat ) : Error(), std::system_error( ev, ecat ) {}
+
+    SystemError( int ev, std::error_category const & ecat, std::string const & what ) : Error(), std::system_error( ev, ecat, what ) {}
+
+    SystemError( int ev, std::error_category const & ecat, char const * what ) : Error(), std::system_error( ev, ecat, what ) {}
+
+    virtual const char * what() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::system_error::what();
+    }
+  };
+
+  VULKAN_HPP_INLINE const std::error_category & errorCategory() VULKAN_HPP_NOEXCEPT
+  {
+    static ErrorCategoryImpl instance;
+    return instance;
+  }
+
+  VULKAN_HPP_INLINE std::error_code make_error_code( Result e ) VULKAN_HPP_NOEXCEPT
+  {
+    return std::error_code( static_cast<int>( e ), errorCategory() );
+  }
+
+  VULKAN_HPP_INLINE std::error_condition make_error_condition( Result e ) VULKAN_HPP_NOEXCEPT
+  {
+    return std::error_condition( static_cast<int>( e ), errorCategory() );
+  }
+
+  class OutOfHostMemoryError : public SystemError
+  {
+  public:
+    OutOfHostMemoryError( std::string const & message ) : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
+
+    OutOfHostMemoryError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
+  };
+
+  class OutOfDeviceMemoryError : public SystemError
+  {
+  public:
+    OutOfDeviceMemoryError( std::string const & message ) : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
+
+    OutOfDeviceMemoryError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
+  };
+
+  class InitializationFailedError : public SystemError
+  {
+  public:
+    InitializationFailedError( std::string const & message ) : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
+
+    InitializationFailedError( char const * message ) : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
+  };
+
+  class DeviceLostError : public SystemError
+  {
+  public:
+    DeviceLostError( std::string const & message ) : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
+
+    DeviceLostError( char const * message ) : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
+  };
+
+  class MemoryMapFailedError : public SystemError
+  {
+  public:
+    MemoryMapFailedError( std::string const & message ) : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
+
+    MemoryMapFailedError( char const * message ) : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
+  };
+
+  class LayerNotPresentError : public SystemError
+  {
+  public:
+    LayerNotPresentError( std::string const & message ) : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
+
+    LayerNotPresentError( char const * message ) : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
+  };
+
+  class ExtensionNotPresentError : public SystemError
+  {
+  public:
+    ExtensionNotPresentError( std::string const & message ) : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
+
+    ExtensionNotPresentError( char const * message ) : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
+  };
+
+  class FeatureNotPresentError : public SystemError
+  {
+  public:
+    FeatureNotPresentError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
+
+    FeatureNotPresentError( char const * message ) : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
+  };
+
+  class IncompatibleDriverError : public SystemError
+  {
+  public:
+    IncompatibleDriverError( std::string const & message ) : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {}
+
+    IncompatibleDriverError( char const * message ) : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {}
+  };
+
+  class TooManyObjectsError : public SystemError
+  {
+  public:
+    TooManyObjectsError( std::string const & message ) : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {}
+
+    TooManyObjectsError( char const * message ) : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {}
+  };
+
+  class FormatNotSupportedError : public SystemError
+  {
+  public:
+    FormatNotSupportedError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {}
+
+    FormatNotSupportedError( char const * message ) : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {}
+  };
+
+  class FragmentedPoolError : public SystemError
+  {
+  public:
+    FragmentedPoolError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {}
+
+    FragmentedPoolError( char const * message ) : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {}
+  };
+
+  class UnknownError : public SystemError
+  {
+  public:
+    UnknownError( std::string const & message ) : SystemError( make_error_code( Result::eErrorUnknown ), message ) {}
+
+    UnknownError( char const * message ) : SystemError( make_error_code( Result::eErrorUnknown ), message ) {}
+  };
+
+  class OutOfPoolMemoryError : public SystemError
+  {
+  public:
+    OutOfPoolMemoryError( std::string const & message ) : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {}
+
+    OutOfPoolMemoryError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {}
+  };
+
+  class InvalidExternalHandleError : public SystemError
+  {
+  public:
+    InvalidExternalHandleError( std::string const & message ) : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {}
+
+    InvalidExternalHandleError( char const * message ) : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {}
+  };
+
+  class FragmentationError : public SystemError
+  {
+  public:
+    FragmentationError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {}
+
+    FragmentationError( char const * message ) : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {}
+  };
+
+  class InvalidOpaqueCaptureAddressError : public SystemError
+  {
+  public:
+    InvalidOpaqueCaptureAddressError( std::string const & message ) : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {}
+
+    InvalidOpaqueCaptureAddressError( char const * message ) : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {}
+  };
+
+  class NotPermittedError : public SystemError
+  {
+  public:
+    NotPermittedError( std::string const & message ) : SystemError( make_error_code( Result::eErrorNotPermitted ), message ) {}
+
+    NotPermittedError( char const * message ) : SystemError( make_error_code( Result::eErrorNotPermitted ), message ) {}
+  };
+
+  class SurfaceLostKHRError : public SystemError
+  {
+  public:
+    SurfaceLostKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {}
+
+    SurfaceLostKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {}
+  };
+
+  class NativeWindowInUseKHRError : public SystemError
+  {
+  public:
+    NativeWindowInUseKHRError( std::string const & message ) : SystemError( 
make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {} + + NativeWindowInUseKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {} + }; + + class OutOfDateKHRError : public SystemError + { + public: + OutOfDateKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} + + OutOfDateKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} + }; + + class IncompatibleDisplayKHRError : public SystemError + { + public: + IncompatibleDisplayKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {} + + IncompatibleDisplayKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {} + }; + + class ValidationFailedEXTError : public SystemError + { + public: + ValidationFailedEXTError( std::string const & message ) : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {} + + ValidationFailedEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {} + }; + + class InvalidShaderNVError : public SystemError + { + public: + InvalidShaderNVError( std::string const & message ) : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {} + + InvalidShaderNVError( char const * message ) : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {} + }; + + class ImageUsageNotSupportedKHRError : public SystemError + { + public: + ImageUsageNotSupportedKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorImageUsageNotSupportedKHR ), message ) {} + + ImageUsageNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorImageUsageNotSupportedKHR ), message ) {} + }; + + class VideoPictureLayoutNotSupportedKHRError : public SystemError + { + public: + VideoPictureLayoutNotSupportedKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorVideoPictureLayoutNotSupportedKHR ), message ) + { + } + + VideoPictureLayoutNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorVideoPictureLayoutNotSupportedKHR ), message ) + { + } + }; + + class VideoProfileOperationNotSupportedKHRError : public SystemError + { + public: + VideoProfileOperationNotSupportedKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorVideoProfileOperationNotSupportedKHR ), message ) + { + } + + VideoProfileOperationNotSupportedKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorVideoProfileOperationNotSupportedKHR ), message ) + { + } + }; + + class VideoProfileFormatNotSupportedKHRError : public SystemError + { + public: + VideoProfileFormatNotSupportedKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorVideoProfileFormatNotSupportedKHR ), message ) + { + } + + VideoProfileFormatNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorVideoProfileFormatNotSupportedKHR ), message ) + { + } + }; + + class VideoProfileCodecNotSupportedKHRError : public SystemError + { + public: + VideoProfileCodecNotSupportedKHRError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorVideoProfileCodecNotSupportedKHR ), message ) + { + } + + 
VideoProfileCodecNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorVideoProfileCodecNotSupportedKHR ), message ) {} + }; + + class VideoStdVersionNotSupportedKHRError : public SystemError + { + public: + VideoStdVersionNotSupportedKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorVideoStdVersionNotSupportedKHR ), message ) + { + } + + VideoStdVersionNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorVideoStdVersionNotSupportedKHR ), message ) {} + }; + + class InvalidDrmFormatModifierPlaneLayoutEXTError : public SystemError + { + public: + InvalidDrmFormatModifierPlaneLayoutEXTError( std::string const & message ) + : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) + { + } + + InvalidDrmFormatModifierPlaneLayoutEXTError( char const * message ) + : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) + { + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + class FullScreenExclusiveModeLostEXTError : public SystemError + { + public: + FullScreenExclusiveModeLostEXTError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) + { + } + + FullScreenExclusiveModeLostEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {} + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + class InvalidVideoStdParametersKHRError : public SystemError + { + public: + InvalidVideoStdParametersKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorInvalidVideoStdParametersKHR ), message ) {} + + InvalidVideoStdParametersKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorInvalidVideoStdParametersKHR ), message ) {} + }; + + class CompressionExhaustedEXTError : public SystemError + { + public: + CompressionExhaustedEXTError( std::string const & message ) : SystemError( make_error_code( Result::eErrorCompressionExhaustedEXT ), message ) {} + + CompressionExhaustedEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorCompressionExhaustedEXT ), message ) {} + }; + + class NotEnoughSpaceKHRError : public SystemError + { + public: + NotEnoughSpaceKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorNotEnoughSpaceKHR ), message ) {} + + NotEnoughSpaceKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorNotEnoughSpaceKHR ), message ) {} + }; + + namespace detail + { + [[noreturn]] VULKAN_HPP_INLINE void throwResultException( Result result, char const * message ) + { + switch ( result ) + { + case Result::eErrorOutOfHostMemory: throw OutOfHostMemoryError( message ); + case Result::eErrorOutOfDeviceMemory: throw OutOfDeviceMemoryError( message ); + case Result::eErrorInitializationFailed: throw InitializationFailedError( message ); + case Result::eErrorDeviceLost: throw DeviceLostError( message ); + case Result::eErrorMemoryMapFailed: throw MemoryMapFailedError( message ); + case Result::eErrorLayerNotPresent: throw LayerNotPresentError( message ); + case Result::eErrorExtensionNotPresent: throw ExtensionNotPresentError( message ); + case Result::eErrorFeatureNotPresent: throw FeatureNotPresentError( message ); + case Result::eErrorIncompatibleDriver: throw IncompatibleDriverError( message ); + case Result::eErrorTooManyObjects: throw 
TooManyObjectsError( message ); + case Result::eErrorFormatNotSupported: throw FormatNotSupportedError( message ); + case Result::eErrorFragmentedPool: throw FragmentedPoolError( message ); + case Result::eErrorUnknown: throw UnknownError( message ); + case Result::eErrorOutOfPoolMemory: throw OutOfPoolMemoryError( message ); + case Result::eErrorInvalidExternalHandle: throw InvalidExternalHandleError( message ); + case Result::eErrorFragmentation: throw FragmentationError( message ); + case Result::eErrorInvalidOpaqueCaptureAddress: throw InvalidOpaqueCaptureAddressError( message ); + case Result::eErrorNotPermitted: throw NotPermittedError( message ); + case Result::eErrorSurfaceLostKHR: throw SurfaceLostKHRError( message ); + case Result::eErrorNativeWindowInUseKHR: throw NativeWindowInUseKHRError( message ); + case Result::eErrorOutOfDateKHR: throw OutOfDateKHRError( message ); + case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError( message ); + case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError( message ); + case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError( message ); + case Result::eErrorImageUsageNotSupportedKHR: throw ImageUsageNotSupportedKHRError( message ); + case Result::eErrorVideoPictureLayoutNotSupportedKHR: throw VideoPictureLayoutNotSupportedKHRError( message ); + case Result::eErrorVideoProfileOperationNotSupportedKHR: throw VideoProfileOperationNotSupportedKHRError( message ); + case Result::eErrorVideoProfileFormatNotSupportedKHR: throw VideoProfileFormatNotSupportedKHRError( message ); + case Result::eErrorVideoProfileCodecNotSupportedKHR: throw VideoProfileCodecNotSupportedKHRError( message ); + case Result::eErrorVideoStdVersionNotSupportedKHR: throw VideoStdVersionNotSupportedKHRError( message ); + case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: throw InvalidDrmFormatModifierPlaneLayoutEXTError( message ); +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + case Result::eErrorFullScreenExclusiveModeLostEXT: throw FullScreenExclusiveModeLostEXTError( message ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + case Result::eErrorInvalidVideoStdParametersKHR: throw InvalidVideoStdParametersKHRError( message ); + case Result::eErrorCompressionExhaustedEXT: throw CompressionExhaustedEXTError( message ); + case Result::eErrorNotEnoughSpaceKHR: throw NotEnoughSpaceKHRError( message ); + default: throw SystemError( make_error_code( result ), message ); + } + } + } // namespace detail +#endif + + template <typename T> + struct ResultValue + { +#ifdef VULKAN_HPP_HAS_NOEXCEPT + ResultValue( Result r, T & v ) VULKAN_HPP_NOEXCEPT( VULKAN_HPP_NOEXCEPT( T( v ) ) ) +#else + ResultValue( Result r, T & v ) +#endif + : result( r ), value( v ) + { + } + +#ifdef VULKAN_HPP_HAS_NOEXCEPT + ResultValue( Result r, T && v ) VULKAN_HPP_NOEXCEPT( VULKAN_HPP_NOEXCEPT( T( std::move( v ) ) ) ) +#else + ResultValue( Result r, T && v ) +#endif + : result( r ), value( std::move( v ) ) + { + } + + Result result; + T value; + + operator std::tuple<Result &, T &>() VULKAN_HPP_NOEXCEPT + { + return std::tuple<Result &, T &>( result, value ); + } + }; + +#if !defined( VULKAN_HPP_NO_SMART_HANDLE ) + template <typename Type, typename Dispatch> + struct ResultValue<UniqueHandle<Type, Dispatch>> + { +# ifdef VULKAN_HPP_HAS_NOEXCEPT + ResultValue( Result r, UniqueHandle<Type, Dispatch> && v ) VULKAN_HPP_NOEXCEPT +# else + ResultValue( Result r, UniqueHandle<Type, Dispatch> && v ) +# endif + : result( r ) + , value( std::move( v ) ) + { + } + + VULKAN_HPP_DEPRECATED( + "asTuple() on an l-value is deprecated, as it implicitly moves the UniqueHandle out of the ResultValue.
Use asTuple() on an r-value instead, requiring to explicitly move the UniqueHandle." ) + + std::tuple<Result, UniqueHandle<Type, Dispatch>> asTuple() & + { + return std::make_tuple( result, std::move( value ) ); + } + + std::tuple<Result, UniqueHandle<Type, Dispatch>> asTuple() && + { + return std::make_tuple( result, std::move( value ) ); + } + + Result result; + UniqueHandle<Type, Dispatch> value; + }; + + template <typename Type, typename Dispatch> + struct ResultValue<std::vector<UniqueHandle<Type, Dispatch>>> + { +# ifdef VULKAN_HPP_HAS_NOEXCEPT + ResultValue( Result r, std::vector<UniqueHandle<Type, Dispatch>> && v ) VULKAN_HPP_NOEXCEPT +# else + ResultValue( Result r, std::vector<UniqueHandle<Type, Dispatch>> && v ) +# endif + : result( r ) + , value( std::move( v ) ) + { + } + + VULKAN_HPP_DEPRECATED( + "asTuple() on an l-value is deprecated, as it implicitly moves the UniqueHandle out of the ResultValue. Use asTuple() on an r-value instead, requiring to explicitly move the UniqueHandle." ) + + std::tuple<Result, std::vector<UniqueHandle<Type, Dispatch>>> asTuple() & + { + return std::make_tuple( result, std::move( value ) ); + } + + std::tuple<Result, std::vector<UniqueHandle<Type, Dispatch>>> asTuple() && + { + return std::make_tuple( result, std::move( value ) ); + } + + Result result; + std::vector<UniqueHandle<Type, Dispatch>> value; + }; +#endif + + template <typename T> + struct ResultValueType + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + using type = ResultValue<T>; +#else + using type = T; +#endif + }; + + template <> + struct ResultValueType<void> + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + using type = Result; +#else + using type = void; +#endif + }; + + namespace detail + { + template <typename T> + void ignore( T const & ) VULKAN_HPP_NOEXCEPT + { + } + + VULKAN_HPP_INLINE typename VULKAN_HPP_NAMESPACE::ResultValueType<void>::type createResultValueType( VULKAN_HPP_NAMESPACE::Result result ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + return result; +#else + VULKAN_HPP_NAMESPACE::detail::ignore( result ); +#endif + } + + template <typename T> + VULKAN_HPP_INLINE typename VULKAN_HPP_NAMESPACE::ResultValueType<T>::type createResultValueType( VULKAN_HPP_NAMESPACE::Result result, T & data ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + return ResultValue<T>( result, data ); +#else + VULKAN_HPP_NAMESPACE::detail::ignore( result ); + return data; +#endif + } + + template <typename T> + VULKAN_HPP_INLINE typename VULKAN_HPP_NAMESPACE::ResultValueType<T>::type createResultValueType( VULKAN_HPP_NAMESPACE::Result result, T && data ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + return ResultValue<T>( result, std::move( data ) ); +#else + VULKAN_HPP_NAMESPACE::detail::ignore( result ); + return std::move( data ); +#endif + } + } // namespace detail + + namespace detail + { + VULKAN_HPP_INLINE void resultCheck( Result result, char const * message ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_NAMESPACE::detail::ignore( result ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty + VULKAN_HPP_NAMESPACE::detail::ignore( message ); + VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess ); +#else + if ( result != Result::eSuccess ) + { + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, message ); + } +#endif + } + + VULKAN_HPP_INLINE void resultCheck( Result result, char const * message, std::initializer_list<Result> successCodes ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_NAMESPACE::detail::ignore( result ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty + VULKAN_HPP_NAMESPACE::detail::ignore( message ); + VULKAN_HPP_NAMESPACE::detail::ignore( successCodes ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty + VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); +#else + if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) + { + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, message ); + }
+#endif + } + } // namespace detail + + //=========================== + //=== CONSTEXPR CONSTANTs === + //=========================== + + //=== VK_VERSION_1_0 === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t AttachmentUnused = VK_ATTACHMENT_UNUSED; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t False = VK_FALSE; + VULKAN_HPP_CONSTEXPR_INLINE float LodClampNone = VK_LOD_CLAMP_NONE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyIgnored = VK_QUEUE_FAMILY_IGNORED; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t RemainingArrayLayers = VK_REMAINING_ARRAY_LAYERS; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t RemainingMipLevels = VK_REMAINING_MIP_LEVELS; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t SubpassExternal = VK_SUBPASS_EXTERNAL; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t True = VK_TRUE; + VULKAN_HPP_CONSTEXPR_INLINE uint64_t WholeSize = VK_WHOLE_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxMemoryTypes = VK_MAX_MEMORY_TYPES; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxPhysicalDeviceNameSize = VK_MAX_PHYSICAL_DEVICE_NAME_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t UuidSize = VK_UUID_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxExtensionNameSize = VK_MAX_EXTENSION_NAME_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDescriptionSize = VK_MAX_DESCRIPTION_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxMemoryHeaps = VK_MAX_MEMORY_HEAPS; + + //=== VK_VERSION_1_1 === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDeviceGroupSize = VK_MAX_DEVICE_GROUP_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t LuidSize = VK_LUID_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyExternal = VK_QUEUE_FAMILY_EXTERNAL; + + //=== VK_VERSION_1_2 === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverNameSize = VK_MAX_DRIVER_NAME_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverInfoSize = VK_MAX_DRIVER_INFO_SIZE; + + //=== VK_VERSION_1_4 === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxGlobalPrioritySize = VK_MAX_GLOBAL_PRIORITY_SIZE; + + //=== VK_KHR_device_group_creation === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDeviceGroupSizeKHR = VK_MAX_DEVICE_GROUP_SIZE_KHR; + + //=== VK_KHR_external_memory_capabilities === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t LuidSizeKHR = VK_LUID_SIZE_KHR; + + //=== VK_KHR_external_memory === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyExternalKHR = VK_QUEUE_FAMILY_EXTERNAL_KHR; + + //=== VK_EXT_queue_family_foreign === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyForeignEXT = VK_QUEUE_FAMILY_FOREIGN_EXT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderIndexUnusedAMDX = VK_SHADER_INDEX_UNUSED_AMDX; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_KHR_ray_tracing_pipeline === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderUnusedKHR = VK_SHADER_UNUSED_KHR; + + //=== VK_NV_ray_tracing === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderUnusedNV = VK_SHADER_UNUSED_NV; + + //=== VK_KHR_global_priority === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxGlobalPrioritySizeKHR = VK_MAX_GLOBAL_PRIORITY_SIZE_KHR; + + //=== VK_KHR_driver_properties === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverNameSizeKHR = VK_MAX_DRIVER_NAME_SIZE_KHR; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverInfoSizeKHR = VK_MAX_DRIVER_INFO_SIZE_KHR; + + //=== VK_EXT_global_priority_query === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxGlobalPrioritySizeEXT = VK_MAX_GLOBAL_PRIORITY_SIZE_EXT; + + //=== VK_EXT_image_sliced_view_of_3d === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t Remaining3DSlicesEXT = VK_REMAINING_3D_SLICES_EXT; + + //=== VK_EXT_shader_module_identifier === + 
VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxShaderModuleIdentifierSizeEXT = VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT; + + //=== VK_KHR_pipeline_binary === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxPipelineBinaryKeySizeKHR = VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR; + + //=== VK_KHR_video_decode_av1 === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxVideoAv1ReferencesPerFrameKHR = VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR; + + //=== VK_NV_partitioned_acceleration_structure === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t PartitionedAccelerationStructurePartitionIndexGlobalNV = VK_PARTITIONED_ACCELERATION_STRUCTURE_PARTITION_INDEX_GLOBAL_NV; + + //======================== + //=== CONSTEXPR VALUEs === + //======================== + VULKAN_HPP_CONSTEXPR_INLINE uint32_t HeaderVersion = VK_HEADER_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t Use64BitPtrDefines = VK_USE_64_BIT_PTR_DEFINES; + + //========================= + //=== CONSTEXPR CALLEEs === + //========================= + template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type> + VULKAN_HPP_CONSTEXPR uint32_t apiVersionMajor( T const version ) + { + return ( ( (uint32_t)( version ) >> 22U ) & 0x7FU ); + } + + template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type> + VULKAN_HPP_CONSTEXPR uint32_t apiVersionMinor( T const version ) + { + return ( ( (uint32_t)( version ) >> 12U ) & 0x3FFU ); + } + + template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type> + VULKAN_HPP_CONSTEXPR uint32_t apiVersionPatch( T const version ) + { + return ( (uint32_t)(version)&0xFFFU ); + } + + template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type> + VULKAN_HPP_CONSTEXPR uint32_t apiVersionVariant( T const version ) + { + return ( (uint32_t)( version ) >> 29U ); + } + + template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type> + VULKAN_HPP_CONSTEXPR uint32_t makeApiVersion( T const variant, T const major, T const minor, T const patch ) + { + return ( ( ( (uint32_t)( variant ) ) << 29U ) | ( ( (uint32_t)( major ) ) << 22U ) | ( ( (uint32_t)( minor ) ) << 12U ) | ( (uint32_t)( patch ) ) ); + } + + template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type> + VULKAN_HPP_DEPRECATED( "This define is deprecated. VK_MAKE_API_VERSION should be used instead." ) + VULKAN_HPP_CONSTEXPR uint32_t makeVersion( T const major, T const minor, T const patch ) + { + return ( ( ( (uint32_t)( major ) ) << 22U ) | ( ( (uint32_t)( minor ) ) << 12U ) | ( (uint32_t)( patch ) ) ); + } + + template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type> + VULKAN_HPP_DEPRECATED( "This define is deprecated. VK_API_VERSION_MAJOR should be used instead." ) + VULKAN_HPP_CONSTEXPR uint32_t versionMajor( T const version ) + { + return ( (uint32_t)( version ) >> 22U ); + } + + template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type> + VULKAN_HPP_DEPRECATED( "This define is deprecated. VK_API_VERSION_MINOR should be used instead." ) + VULKAN_HPP_CONSTEXPR uint32_t versionMinor( T const version ) + { + return ( ( (uint32_t)( version ) >> 12U ) & 0x3FFU ); + } + + template <typename T, typename = typename std::enable_if<std::is_integral<T>::value>::type> + VULKAN_HPP_DEPRECATED( "This define is deprecated. VK_API_VERSION_PATCH should be used instead."
) + VULKAN_HPP_CONSTEXPR uint32_t versionPatch( T const version ) + { + return ( (uint32_t)(version)&0xFFFU ); + } + + //========================= + //=== CONSTEXPR CALLERs === + //========================= + VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion = makeApiVersion( 0, 1, 0, 0 ); + VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion10 = makeApiVersion( 0, 1, 0, 0 ); + VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion11 = makeApiVersion( 0, 1, 1, 0 ); + VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion12 = makeApiVersion( 0, 1, 2, 0 ); + VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion13 = makeApiVersion( 0, 1, 3, 0 ); + VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion14 = makeApiVersion( 0, 1, 4, 0 ); + VULKAN_HPP_CONSTEXPR_INLINE auto HeaderVersionComplete = makeApiVersion( 0, 1, 4, VK_HEADER_VERSION ); + + //================================= + //=== CONSTEXPR EXTENSION NAMEs === + //================================= + + //=== VK_KHR_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSurfaceExtensionName = VK_KHR_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSurfaceSpecVersion = VK_KHR_SURFACE_SPEC_VERSION; + + //=== VK_KHR_swapchain === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSwapchainExtensionName = VK_KHR_SWAPCHAIN_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSwapchainSpecVersion = VK_KHR_SWAPCHAIN_SPEC_VERSION; + + //=== VK_KHR_display === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDisplayExtensionName = VK_KHR_DISPLAY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDisplaySpecVersion = VK_KHR_DISPLAY_SPEC_VERSION; + + //=== VK_KHR_display_swapchain === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDisplaySwapchainExtensionName = VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDisplaySwapchainSpecVersion = VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRXlibSurfaceExtensionName = VK_KHR_XLIB_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRXlibSurfaceSpecVersion = VK_KHR_XLIB_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRXcbSurfaceExtensionName = VK_KHR_XCB_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRXcbSurfaceSpecVersion = VK_KHR_XCB_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRWaylandSurfaceExtensionName = VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRWaylandSurfaceSpecVersion = VK_KHR_WAYLAND_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRAndroidSurfaceExtensionName = VK_KHR_ANDROID_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRAndroidSurfaceSpecVersion = VK_KHR_ANDROID_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRWin32SurfaceExtensionName = VK_KHR_WIN32_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRWin32SurfaceSpecVersion = VK_KHR_WIN32_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + VULKAN_HPP_DEPRECATED( "The VK_EXT_debug_report extension has been deprecated by VK_EXT_debug_utils." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDebugReportExtensionName = VK_EXT_DEBUG_REPORT_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_debug_report extension has been deprecated by VK_EXT_debug_utils." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDebugReportSpecVersion = VK_EXT_DEBUG_REPORT_SPEC_VERSION; + + //=== VK_NV_glsl_shader === + VULKAN_HPP_DEPRECATED( "The VK_NV_glsl_shader extension has been deprecated." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVGlslShaderExtensionName = VK_NV_GLSL_SHADER_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_NV_glsl_shader extension has been deprecated." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVGlslShaderSpecVersion = VK_NV_GLSL_SHADER_SPEC_VERSION; + + //=== VK_EXT_depth_range_unrestricted === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthRangeUnrestrictedExtensionName = VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthRangeUnrestrictedSpecVersion = VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION; + + //=== VK_KHR_sampler_mirror_clamp_to_edge === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSamplerMirrorClampToEdgeExtensionName = VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSamplerMirrorClampToEdgeSpecVersion = VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION; + + //=== VK_IMG_filter_cubic === + VULKAN_HPP_CONSTEXPR_INLINE auto IMGFilterCubicExtensionName = VK_IMG_FILTER_CUBIC_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto IMGFilterCubicSpecVersion = VK_IMG_FILTER_CUBIC_SPEC_VERSION; + + //=== VK_AMD_rasterization_order === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDRasterizationOrderExtensionName = VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDRasterizationOrderSpecVersion = VK_AMD_RASTERIZATION_ORDER_SPEC_VERSION; + + //=== VK_AMD_shader_trinary_minmax === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderTrinaryMinmaxExtensionName = VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderTrinaryMinmaxSpecVersion = VK_AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION; + + //=== VK_AMD_shader_explicit_vertex_parameter === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderExplicitVertexParameterExtensionName = VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderExplicitVertexParameterSpecVersion = VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION; + + //=== VK_EXT_debug_marker === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDebugMarkerExtensionName = VK_EXT_DEBUG_MARKER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDebugMarkerSpecVersion = VK_EXT_DEBUG_MARKER_SPEC_VERSION; + + //=== VK_KHR_video_queue === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoQueueExtensionName = VK_KHR_VIDEO_QUEUE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoQueueSpecVersion = VK_KHR_VIDEO_QUEUE_SPEC_VERSION; + + //=== VK_KHR_video_decode_queue === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeQueueExtensionName = VK_KHR_VIDEO_DECODE_QUEUE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeQueueSpecVersion = VK_KHR_VIDEO_DECODE_QUEUE_SPEC_VERSION; + + //=== VK_AMD_gcn_shader === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDGcnShaderExtensionName = VK_AMD_GCN_SHADER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDGcnShaderSpecVersion = VK_AMD_GCN_SHADER_SPEC_VERSION; + + //=== VK_NV_dedicated_allocation === + VULKAN_HPP_DEPRECATED( "The VK_NV_dedicated_allocation extension has been deprecated by VK_KHR_dedicated_allocation." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto NVDedicatedAllocationExtensionName = VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_NV_dedicated_allocation extension has been deprecated by VK_KHR_dedicated_allocation." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVDedicatedAllocationSpecVersion = VK_NV_DEDICATED_ALLOCATION_SPEC_VERSION; + + //=== VK_EXT_transform_feedback === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTTransformFeedbackExtensionName = VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTTransformFeedbackSpecVersion = VK_EXT_TRANSFORM_FEEDBACK_SPEC_VERSION; + + //=== VK_NVX_binary_import === + VULKAN_HPP_CONSTEXPR_INLINE auto NVXBinaryImportExtensionName = VK_NVX_BINARY_IMPORT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVXBinaryImportSpecVersion = VK_NVX_BINARY_IMPORT_SPEC_VERSION; + + //=== VK_NVX_image_view_handle === + VULKAN_HPP_CONSTEXPR_INLINE auto NVXImageViewHandleExtensionName = VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVXImageViewHandleSpecVersion = VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION; + + //=== VK_AMD_draw_indirect_count === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDDrawIndirectCountExtensionName = VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDDrawIndirectCountSpecVersion = VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION; + + //=== VK_AMD_negative_viewport_height === + VULKAN_HPP_DEPRECATED( "The VK_AMD_negative_viewport_height extension has been obsoleted by VK_KHR_maintenance1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto AMDNegativeViewportHeightExtensionName = VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_AMD_negative_viewport_height extension has been obsoleted by VK_KHR_maintenance1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto AMDNegativeViewportHeightSpecVersion = VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION; + + //=== VK_AMD_gpu_shader_half_float === + VULKAN_HPP_DEPRECATED( "The VK_AMD_gpu_shader_half_float extension has been deprecated by VK_KHR_shader_float16_int8." ) + VULKAN_HPP_CONSTEXPR_INLINE auto AMDGpuShaderHalfFloatExtensionName = VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_AMD_gpu_shader_half_float extension has been deprecated by VK_KHR_shader_float16_int8." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto AMDGpuShaderHalfFloatSpecVersion = VK_AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION; + + //=== VK_AMD_shader_ballot === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderBallotExtensionName = VK_AMD_SHADER_BALLOT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderBallotSpecVersion = VK_AMD_SHADER_BALLOT_SPEC_VERSION; + + //=== VK_KHR_video_encode_h264 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeH264ExtensionName = VK_KHR_VIDEO_ENCODE_H264_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeH264SpecVersion = VK_KHR_VIDEO_ENCODE_H264_SPEC_VERSION; + + //=== VK_KHR_video_encode_h265 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeH265ExtensionName = VK_KHR_VIDEO_ENCODE_H265_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeH265SpecVersion = VK_KHR_VIDEO_ENCODE_H265_SPEC_VERSION; + + //=== VK_KHR_video_decode_h264 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeH264ExtensionName = VK_KHR_VIDEO_DECODE_H264_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeH264SpecVersion = VK_KHR_VIDEO_DECODE_H264_SPEC_VERSION; + + //=== VK_AMD_texture_gather_bias_lod === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDTextureGatherBiasLodExtensionName = VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDTextureGatherBiasLodSpecVersion = VK_AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION; + + //=== VK_AMD_shader_info === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderInfoExtensionName = VK_AMD_SHADER_INFO_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderInfoSpecVersion = VK_AMD_SHADER_INFO_SPEC_VERSION; + + //=== VK_KHR_dynamic_rendering === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDynamicRenderingExtensionName = VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDynamicRenderingSpecVersion = VK_KHR_DYNAMIC_RENDERING_SPEC_VERSION; + + //=== VK_AMD_shader_image_load_store_lod === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderImageLoadStoreLodExtensionName = VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderImageLoadStoreLodSpecVersion = VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto GGPStreamDescriptorSurfaceExtensionName = VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto GGPStreamDescriptorSurfaceSpecVersion = VK_GGP_STREAM_DESCRIPTOR_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_corner_sampled_image === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCornerSampledImageExtensionName = VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVCornerSampledImageSpecVersion = VK_NV_CORNER_SAMPLED_IMAGE_SPEC_VERSION; + + //=== VK_KHR_multiview === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMultiviewExtensionName = VK_KHR_MULTIVIEW_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMultiviewSpecVersion = VK_KHR_MULTIVIEW_SPEC_VERSION; + + //=== VK_IMG_format_pvrtc === + VULKAN_HPP_DEPRECATED( "The VK_IMG_format_pvrtc extension has been deprecated." ) + VULKAN_HPP_CONSTEXPR_INLINE auto IMGFormatPvrtcExtensionName = VK_IMG_FORMAT_PVRTC_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_IMG_format_pvrtc extension has been deprecated." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto IMGFormatPvrtcSpecVersion = VK_IMG_FORMAT_PVRTC_SPEC_VERSION; + + //=== VK_NV_external_memory_capabilities === + VULKAN_HPP_DEPRECATED( "The VK_NV_external_memory_capabilities extension has been deprecated by VK_KHR_external_memory_capabilities." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemoryCapabilitiesExtensionName = VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_NV_external_memory_capabilities extension has been deprecated by VK_KHR_external_memory_capabilities." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemoryCapabilitiesSpecVersion = VK_NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION; + + //=== VK_NV_external_memory === + VULKAN_HPP_DEPRECATED( "The VK_NV_external_memory extension has been deprecated by VK_KHR_external_memory." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemoryExtensionName = VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_NV_external_memory extension has been deprecated by VK_KHR_external_memory." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemorySpecVersion = VK_NV_EXTERNAL_MEMORY_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + VULKAN_HPP_DEPRECATED( "The VK_NV_external_memory_win32 extension has been deprecated by VK_KHR_external_memory_win32." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemoryWin32ExtensionName = VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_NV_external_memory_win32 extension has been deprecated by VK_KHR_external_memory_win32." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemoryWin32SpecVersion = VK_NV_EXTERNAL_MEMORY_WIN32_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_win32_keyed_mutex === + VULKAN_HPP_CONSTEXPR_INLINE auto NVWin32KeyedMutexExtensionName = VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVWin32KeyedMutexSpecVersion = VK_NV_WIN32_KEYED_MUTEX_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_get_physical_device_properties2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetPhysicalDeviceProperties2ExtensionName = VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetPhysicalDeviceProperties2SpecVersion = VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION; + + //=== VK_KHR_device_group === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeviceGroupExtensionName = VK_KHR_DEVICE_GROUP_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeviceGroupSpecVersion = VK_KHR_DEVICE_GROUP_SPEC_VERSION; + + //=== VK_EXT_validation_flags === + VULKAN_HPP_DEPRECATED( "The VK_EXT_validation_flags extension has been deprecated by VK_EXT_layer_settings." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTValidationFlagsExtensionName = VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_validation_flags extension has been deprecated by VK_EXT_layer_settings." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTValidationFlagsSpecVersion = VK_EXT_VALIDATION_FLAGS_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto NNViSurfaceExtensionName = VK_NN_VI_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NNViSurfaceSpecVersion = VK_NN_VI_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_shader_draw_parameters === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderDrawParametersExtensionName = VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderDrawParametersSpecVersion = VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION; + + //=== VK_EXT_shader_subgroup_ballot === + VULKAN_HPP_DEPRECATED( "The VK_EXT_shader_subgroup_ballot extension has been deprecated by VK_VERSION_1_2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderSubgroupBallotExtensionName = VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_shader_subgroup_ballot extension has been deprecated by VK_VERSION_1_2." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderSubgroupBallotSpecVersion = VK_EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION; + + //=== VK_EXT_shader_subgroup_vote === + VULKAN_HPP_DEPRECATED( "The VK_EXT_shader_subgroup_vote extension has been deprecated by VK_VERSION_1_1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderSubgroupVoteExtensionName = VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_shader_subgroup_vote extension has been deprecated by VK_VERSION_1_1." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderSubgroupVoteSpecVersion = VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION; + + //=== VK_EXT_texture_compression_astc_hdr === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTTextureCompressionAstcHdrExtensionName = VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTTextureCompressionAstcHdrSpecVersion = VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION; + + //=== VK_EXT_astc_decode_mode === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAstcDecodeModeExtensionName = VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAstcDecodeModeSpecVersion = VK_EXT_ASTC_DECODE_MODE_SPEC_VERSION; + + //=== VK_EXT_pipeline_robustness === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineRobustnessExtensionName = VK_EXT_PIPELINE_ROBUSTNESS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineRobustnessSpecVersion = VK_EXT_PIPELINE_ROBUSTNESS_SPEC_VERSION; + + //=== VK_KHR_maintenance1 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance1ExtensionName = VK_KHR_MAINTENANCE_1_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance1SpecVersion = VK_KHR_MAINTENANCE_1_SPEC_VERSION; + + //=== VK_KHR_device_group_creation === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeviceGroupCreationExtensionName = VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeviceGroupCreationSpecVersion = VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION; + + //=== VK_KHR_external_memory_capabilities === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryCapabilitiesExtensionName = VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryCapabilitiesSpecVersion = VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION; + + //=== VK_KHR_external_memory === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryExtensionName = VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemorySpecVersion = 
VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryWin32ExtensionName = VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryWin32SpecVersion = VK_KHR_EXTERNAL_MEMORY_WIN32_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryFdExtensionName = VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryFdSpecVersion = VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_keyed_mutex === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRWin32KeyedMutexExtensionName = VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRWin32KeyedMutexSpecVersion = VK_KHR_WIN32_KEYED_MUTEX_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_capabilities === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreCapabilitiesExtensionName = VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreCapabilitiesSpecVersion = VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION; + + //=== VK_KHR_external_semaphore === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreExtensionName = VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreSpecVersion = VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreWin32ExtensionName = VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreWin32SpecVersion = VK_KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreFdExtensionName = VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreFdSpecVersion = VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION; + + //=== VK_KHR_push_descriptor === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPushDescriptorExtensionName = VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPushDescriptorSpecVersion = VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION; + + //=== VK_EXT_conditional_rendering === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTConditionalRenderingExtensionName = VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTConditionalRenderingSpecVersion = VK_EXT_CONDITIONAL_RENDERING_SPEC_VERSION; + + //=== VK_KHR_shader_float16_int8 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloat16Int8ExtensionName = VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloat16Int8SpecVersion = VK_KHR_SHADER_FLOAT16_INT8_SPEC_VERSION; + + //=== VK_KHR_16bit_storage === + VULKAN_HPP_CONSTEXPR_INLINE auto KHR16BitStorageExtensionName = VK_KHR_16BIT_STORAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHR16BitStorageSpecVersion = VK_KHR_16BIT_STORAGE_SPEC_VERSION; + + //=== VK_KHR_incremental_present === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRIncrementalPresentExtensionName = VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRIncrementalPresentSpecVersion = VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION; + + //=== 
VK_KHR_descriptor_update_template === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDescriptorUpdateTemplateExtensionName = VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDescriptorUpdateTemplateSpecVersion = VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION; + + //=== VK_NV_clip_space_w_scaling === + VULKAN_HPP_CONSTEXPR_INLINE auto NVClipSpaceWScalingExtensionName = VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVClipSpaceWScalingSpecVersion = VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION; + + //=== VK_EXT_direct_mode_display === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDirectModeDisplayExtensionName = VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDirectModeDisplaySpecVersion = VK_EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAcquireXlibDisplayExtensionName = VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAcquireXlibDisplaySpecVersion = VK_EXT_ACQUIRE_XLIB_DISPLAY_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDisplaySurfaceCounterExtensionName = VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDisplaySurfaceCounterSpecVersion = VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION; + + //=== VK_EXT_display_control === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDisplayControlExtensionName = VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDisplayControlSpecVersion = VK_EXT_DISPLAY_CONTROL_SPEC_VERSION; + + //=== VK_GOOGLE_display_timing === + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEDisplayTimingExtensionName = VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEDisplayTimingSpecVersion = VK_GOOGLE_DISPLAY_TIMING_SPEC_VERSION; + + //=== VK_NV_sample_mask_override_coverage === + VULKAN_HPP_CONSTEXPR_INLINE auto NVSampleMaskOverrideCoverageExtensionName = VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVSampleMaskOverrideCoverageSpecVersion = VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION; + + //=== VK_NV_geometry_shader_passthrough === + VULKAN_HPP_CONSTEXPR_INLINE auto NVGeometryShaderPassthroughExtensionName = VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVGeometryShaderPassthroughSpecVersion = VK_NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION; + + //=== VK_NV_viewport_array2 === + VULKAN_HPP_CONSTEXPR_INLINE auto NVViewportArray2ExtensionName = VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVViewportArray2SpecVersion = VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION; + + //=== VK_NVX_multiview_per_view_attributes === + VULKAN_HPP_CONSTEXPR_INLINE auto NVXMultiviewPerViewAttributesExtensionName = VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVXMultiviewPerViewAttributesSpecVersion = VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION; + + //=== VK_NV_viewport_swizzle === + VULKAN_HPP_CONSTEXPR_INLINE auto NVViewportSwizzleExtensionName = VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVViewportSwizzleSpecVersion = VK_NV_VIEWPORT_SWIZZLE_SPEC_VERSION; + + //=== VK_EXT_discard_rectangles === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDiscardRectanglesExtensionName = VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME; + 
VULKAN_HPP_CONSTEXPR_INLINE auto EXTDiscardRectanglesSpecVersion = VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION; + + //=== VK_EXT_conservative_rasterization === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTConservativeRasterizationExtensionName = VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTConservativeRasterizationSpecVersion = VK_EXT_CONSERVATIVE_RASTERIZATION_SPEC_VERSION; + + //=== VK_EXT_depth_clip_enable === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClipEnableExtensionName = VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClipEnableSpecVersion = VK_EXT_DEPTH_CLIP_ENABLE_SPEC_VERSION; + + //=== VK_EXT_swapchain_colorspace === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSwapchainColorSpaceExtensionName = VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSwapchainColorSpaceSpecVersion = VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION; + + //=== VK_EXT_hdr_metadata === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTHdrMetadataExtensionName = VK_EXT_HDR_METADATA_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTHdrMetadataSpecVersion = VK_EXT_HDR_METADATA_SPEC_VERSION; + + //=== VK_KHR_imageless_framebuffer === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRImagelessFramebufferExtensionName = VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRImagelessFramebufferSpecVersion = VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION; + + //=== VK_KHR_create_renderpass2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCreateRenderpass2ExtensionName = VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCreateRenderpass2SpecVersion = VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION; + + //=== VK_IMG_relaxed_line_rasterization === + VULKAN_HPP_CONSTEXPR_INLINE auto IMGRelaxedLineRasterizationExtensionName = VK_IMG_RELAXED_LINE_RASTERIZATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto IMGRelaxedLineRasterizationSpecVersion = VK_IMG_RELAXED_LINE_RASTERIZATION_SPEC_VERSION; + + //=== VK_KHR_shared_presentable_image === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSharedPresentableImageExtensionName = VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSharedPresentableImageSpecVersion = VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION; + + //=== VK_KHR_external_fence_capabilities === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceCapabilitiesExtensionName = VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceCapabilitiesSpecVersion = VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION; + + //=== VK_KHR_external_fence === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceExtensionName = VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceSpecVersion = VK_KHR_EXTERNAL_FENCE_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceWin32ExtensionName = VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceWin32SpecVersion = VK_KHR_EXTERNAL_FENCE_WIN32_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceFdExtensionName = VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceFdSpecVersion = VK_KHR_EXTERNAL_FENCE_FD_SPEC_VERSION; + + //=== VK_KHR_performance_query === + VULKAN_HPP_CONSTEXPR_INLINE auto 
KHRPerformanceQueryExtensionName = VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPerformanceQuerySpecVersion = VK_KHR_PERFORMANCE_QUERY_SPEC_VERSION; + + //=== VK_KHR_maintenance2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance2ExtensionName = VK_KHR_MAINTENANCE_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance2SpecVersion = VK_KHR_MAINTENANCE_2_SPEC_VERSION; + + //=== VK_KHR_get_surface_capabilities2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetSurfaceCapabilities2ExtensionName = VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetSurfaceCapabilities2SpecVersion = VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION; + + //=== VK_KHR_variable_pointers === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVariablePointersExtensionName = VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVariablePointersSpecVersion = VK_KHR_VARIABLE_POINTERS_SPEC_VERSION; + + //=== VK_KHR_get_display_properties2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetDisplayProperties2ExtensionName = VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetDisplayProperties2SpecVersion = VK_KHR_GET_DISPLAY_PROPERTIES_2_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + VULKAN_HPP_DEPRECATED( "The VK_MVK_ios_surface extension has been deprecated by VK_EXT_metal_surface." ) + VULKAN_HPP_CONSTEXPR_INLINE auto MVKIosSurfaceExtensionName = VK_MVK_IOS_SURFACE_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_MVK_ios_surface extension has been deprecated by VK_EXT_metal_surface." ) + VULKAN_HPP_CONSTEXPR_INLINE auto MVKIosSurfaceSpecVersion = VK_MVK_IOS_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + VULKAN_HPP_DEPRECATED( "The VK_MVK_macos_surface extension has been deprecated by VK_EXT_metal_surface." ) + VULKAN_HPP_CONSTEXPR_INLINE auto MVKMacosSurfaceExtensionName = VK_MVK_MACOS_SURFACE_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_MVK_macos_surface extension has been deprecated by VK_EXT_metal_surface." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto MVKMacosSurfaceSpecVersion = VK_MVK_MACOS_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_external_memory_dma_buf === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExternalMemoryDmaBufExtensionName = VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExternalMemoryDmaBufSpecVersion = VK_EXT_EXTERNAL_MEMORY_DMA_BUF_SPEC_VERSION; + + //=== VK_EXT_queue_family_foreign === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTQueueFamilyForeignExtensionName = VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTQueueFamilyForeignSpecVersion = VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION; + + //=== VK_KHR_dedicated_allocation === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDedicatedAllocationExtensionName = VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDedicatedAllocationSpecVersion = VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION; + + //=== VK_EXT_debug_utils === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDebugUtilsExtensionName = VK_EXT_DEBUG_UTILS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDebugUtilsSpecVersion = VK_EXT_DEBUG_UTILS_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + VULKAN_HPP_CONSTEXPR_INLINE auto ANDROIDExternalMemoryAndroidHardwareBufferExtensionName = VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto ANDROIDExternalMemoryAndroidHardwareBufferSpecVersion = VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_EXT_sampler_filter_minmax === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSamplerFilterMinmaxExtensionName = VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSamplerFilterMinmaxSpecVersion = VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION; + + //=== VK_KHR_storage_buffer_storage_class === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRStorageBufferStorageClassExtensionName = VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRStorageBufferStorageClassSpecVersion = VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION; + + //=== VK_AMD_gpu_shader_int16 === + VULKAN_HPP_DEPRECATED( "The VK_AMD_gpu_shader_int16 extension has been deprecated by VK_KHR_shader_float16_int8." ) + VULKAN_HPP_CONSTEXPR_INLINE auto AMDGpuShaderInt16ExtensionName = VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_AMD_gpu_shader_int16 extension has been deprecated by VK_KHR_shader_float16_int8." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto AMDGpuShaderInt16SpecVersion = VK_AMD_GPU_SHADER_INT16_SPEC_VERSION; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDXShaderEnqueueExtensionName = VK_AMDX_SHADER_ENQUEUE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDXShaderEnqueueSpecVersion = VK_AMDX_SHADER_ENQUEUE_SPEC_VERSION; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_AMD_mixed_attachment_samples === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDMixedAttachmentSamplesExtensionName = VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDMixedAttachmentSamplesSpecVersion = VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION; + + //=== VK_AMD_shader_fragment_mask === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderFragmentMaskExtensionName = VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderFragmentMaskSpecVersion = VK_AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION; + + //=== VK_EXT_inline_uniform_block === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTInlineUniformBlockExtensionName = VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTInlineUniformBlockSpecVersion = VK_EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION; + + //=== VK_EXT_shader_stencil_export === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderStencilExportExtensionName = VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderStencilExportSpecVersion = VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION; + + //=== VK_EXT_sample_locations === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSampleLocationsExtensionName = VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSampleLocationsSpecVersion = VK_EXT_SAMPLE_LOCATIONS_SPEC_VERSION; + + //=== VK_KHR_relaxed_block_layout === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRelaxedBlockLayoutExtensionName = VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRelaxedBlockLayoutSpecVersion = VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION; + + //=== VK_KHR_get_memory_requirements2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetMemoryRequirements2ExtensionName = VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetMemoryRequirements2SpecVersion = VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION; + + //=== VK_KHR_image_format_list === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRImageFormatListExtensionName = VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRImageFormatListSpecVersion = VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION; + + //=== VK_EXT_blend_operation_advanced === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTBlendOperationAdvancedExtensionName = VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTBlendOperationAdvancedSpecVersion = VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION; + + //=== VK_NV_fragment_coverage_to_color === + VULKAN_HPP_CONSTEXPR_INLINE auto NVFragmentCoverageToColorExtensionName = VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVFragmentCoverageToColorSpecVersion = VK_NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION; + + //=== VK_KHR_acceleration_structure === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRAccelerationStructureExtensionName = VK_KHR_ACCELERATION_STRUCTURE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRAccelerationStructureSpecVersion = VK_KHR_ACCELERATION_STRUCTURE_SPEC_VERSION; + + //=== VK_KHR_ray_tracing_pipeline === + VULKAN_HPP_CONSTEXPR_INLINE auto 
KHRRayTracingPipelineExtensionName = VK_KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayTracingPipelineSpecVersion = VK_KHR_RAY_TRACING_PIPELINE_SPEC_VERSION; + + //=== VK_KHR_ray_query === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayQueryExtensionName = VK_KHR_RAY_QUERY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayQuerySpecVersion = VK_KHR_RAY_QUERY_SPEC_VERSION; + + //=== VK_NV_framebuffer_mixed_samples === + VULKAN_HPP_CONSTEXPR_INLINE auto NVFramebufferMixedSamplesExtensionName = VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVFramebufferMixedSamplesSpecVersion = VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION; + + //=== VK_NV_fill_rectangle === + VULKAN_HPP_CONSTEXPR_INLINE auto NVFillRectangleExtensionName = VK_NV_FILL_RECTANGLE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVFillRectangleSpecVersion = VK_NV_FILL_RECTANGLE_SPEC_VERSION; + + //=== VK_NV_shader_sm_builtins === + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderSmBuiltinsExtensionName = VK_NV_SHADER_SM_BUILTINS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderSmBuiltinsSpecVersion = VK_NV_SHADER_SM_BUILTINS_SPEC_VERSION; + + //=== VK_EXT_post_depth_coverage === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPostDepthCoverageExtensionName = VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPostDepthCoverageSpecVersion = VK_EXT_POST_DEPTH_COVERAGE_SPEC_VERSION; + + //=== VK_KHR_sampler_ycbcr_conversion === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSamplerYcbcrConversionExtensionName = VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSamplerYcbcrConversionSpecVersion = VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION; + + //=== VK_KHR_bind_memory2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRBindMemory2ExtensionName = VK_KHR_BIND_MEMORY_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRBindMemory2SpecVersion = VK_KHR_BIND_MEMORY_2_SPEC_VERSION; + + //=== VK_EXT_image_drm_format_modifier === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageDrmFormatModifierExtensionName = VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageDrmFormatModifierSpecVersion = VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_SPEC_VERSION; + + //=== VK_EXT_validation_cache === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTValidationCacheExtensionName = VK_EXT_VALIDATION_CACHE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTValidationCacheSpecVersion = VK_EXT_VALIDATION_CACHE_SPEC_VERSION; + + //=== VK_EXT_descriptor_indexing === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDescriptorIndexingExtensionName = VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDescriptorIndexingSpecVersion = VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION; + + //=== VK_EXT_shader_viewport_index_layer === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderViewportIndexLayerExtensionName = VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderViewportIndexLayerSpecVersion = VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_portability_subset === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPortabilitySubsetExtensionName = VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPortabilitySubsetSpecVersion = VK_KHR_PORTABILITY_SUBSET_SPEC_VERSION; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_shading_rate_image === + VULKAN_HPP_CONSTEXPR_INLINE auto NVShadingRateImageExtensionName = 
VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVShadingRateImageSpecVersion = VK_NV_SHADING_RATE_IMAGE_SPEC_VERSION; + + //=== VK_NV_ray_tracing === + VULKAN_HPP_DEPRECATED( "The VK_NV_ray_tracing extension has been deprecated by VK_KHR_ray_tracing_pipeline." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingExtensionName = VK_NV_RAY_TRACING_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_NV_ray_tracing extension has been deprecated by VK_KHR_ray_tracing_pipeline." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingSpecVersion = VK_NV_RAY_TRACING_SPEC_VERSION; + + //=== VK_NV_representative_fragment_test === + VULKAN_HPP_CONSTEXPR_INLINE auto NVRepresentativeFragmentTestExtensionName = VK_NV_REPRESENTATIVE_FRAGMENT_TEST_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVRepresentativeFragmentTestSpecVersion = VK_NV_REPRESENTATIVE_FRAGMENT_TEST_SPEC_VERSION; + + //=== VK_KHR_maintenance3 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance3ExtensionName = VK_KHR_MAINTENANCE_3_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance3SpecVersion = VK_KHR_MAINTENANCE_3_SPEC_VERSION; + + //=== VK_KHR_draw_indirect_count === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDrawIndirectCountExtensionName = VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDrawIndirectCountSpecVersion = VK_KHR_DRAW_INDIRECT_COUNT_SPEC_VERSION; + + //=== VK_EXT_filter_cubic === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFilterCubicExtensionName = VK_EXT_FILTER_CUBIC_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFilterCubicSpecVersion = VK_EXT_FILTER_CUBIC_SPEC_VERSION; + + //=== VK_QCOM_render_pass_shader_resolve === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRenderPassShaderResolveExtensionName = VK_QCOM_RENDER_PASS_SHADER_RESOLVE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRenderPassShaderResolveSpecVersion = VK_QCOM_RENDER_PASS_SHADER_RESOLVE_SPEC_VERSION; + + //=== VK_EXT_global_priority === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTGlobalPriorityExtensionName = VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTGlobalPrioritySpecVersion = VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION; + + //=== VK_KHR_shader_subgroup_extended_types === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderSubgroupExtendedTypesExtensionName = VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderSubgroupExtendedTypesSpecVersion = VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION; + + //=== VK_KHR_8bit_storage === + VULKAN_HPP_CONSTEXPR_INLINE auto KHR8BitStorageExtensionName = VK_KHR_8BIT_STORAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHR8BitStorageSpecVersion = VK_KHR_8BIT_STORAGE_SPEC_VERSION; + + //=== VK_EXT_external_memory_host === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExternalMemoryHostExtensionName = VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExternalMemoryHostSpecVersion = VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION; + + //=== VK_AMD_buffer_marker === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDBufferMarkerExtensionName = VK_AMD_BUFFER_MARKER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDBufferMarkerSpecVersion = VK_AMD_BUFFER_MARKER_SPEC_VERSION; + + //=== VK_KHR_shader_atomic_int64 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderAtomicInt64ExtensionName = VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderAtomicInt64SpecVersion = VK_KHR_SHADER_ATOMIC_INT64_SPEC_VERSION; + + //=== VK_KHR_shader_clock === + 
VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderClockExtensionName = VK_KHR_SHADER_CLOCK_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderClockSpecVersion = VK_KHR_SHADER_CLOCK_SPEC_VERSION; + + //=== VK_AMD_pipeline_compiler_control === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDPipelineCompilerControlExtensionName = VK_AMD_PIPELINE_COMPILER_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDPipelineCompilerControlSpecVersion = VK_AMD_PIPELINE_COMPILER_CONTROL_SPEC_VERSION; + + //=== VK_EXT_calibrated_timestamps === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTCalibratedTimestampsExtensionName = VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTCalibratedTimestampsSpecVersion = VK_EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION; + + //=== VK_AMD_shader_core_properties === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderCorePropertiesExtensionName = VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderCorePropertiesSpecVersion = VK_AMD_SHADER_CORE_PROPERTIES_SPEC_VERSION; + + //=== VK_KHR_video_decode_h265 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeH265ExtensionName = VK_KHR_VIDEO_DECODE_H265_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeH265SpecVersion = VK_KHR_VIDEO_DECODE_H265_SPEC_VERSION; + + //=== VK_KHR_global_priority === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGlobalPriorityExtensionName = VK_KHR_GLOBAL_PRIORITY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGlobalPrioritySpecVersion = VK_KHR_GLOBAL_PRIORITY_SPEC_VERSION; + + //=== VK_AMD_memory_overallocation_behavior === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDMemoryOverallocationBehaviorExtensionName = VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDMemoryOverallocationBehaviorSpecVersion = VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_SPEC_VERSION; + + //=== VK_EXT_vertex_attribute_divisor === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTVertexAttributeDivisorExtensionName = VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTVertexAttributeDivisorSpecVersion = VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_frame_token === + VULKAN_HPP_CONSTEXPR_INLINE auto GGPFrameTokenExtensionName = VK_GGP_FRAME_TOKEN_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto GGPFrameTokenSpecVersion = VK_GGP_FRAME_TOKEN_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_EXT_pipeline_creation_feedback === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineCreationFeedbackExtensionName = VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineCreationFeedbackSpecVersion = VK_EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION; + + //=== VK_KHR_driver_properties === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDriverPropertiesExtensionName = VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDriverPropertiesSpecVersion = VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION; + + //=== VK_KHR_shader_float_controls === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloatControlsExtensionName = VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloatControlsSpecVersion = VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION; + + //=== VK_NV_shader_subgroup_partitioned === + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderSubgroupPartitionedExtensionName = VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderSubgroupPartitionedSpecVersion = 
VK_NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION; + + //=== VK_KHR_depth_stencil_resolve === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDepthStencilResolveExtensionName = VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDepthStencilResolveSpecVersion = VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION; + + //=== VK_KHR_swapchain_mutable_format === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSwapchainMutableFormatExtensionName = VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSwapchainMutableFormatSpecVersion = VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION; + + //=== VK_NV_compute_shader_derivatives === + VULKAN_HPP_CONSTEXPR_INLINE auto NVComputeShaderDerivativesExtensionName = VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVComputeShaderDerivativesSpecVersion = VK_NV_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION; + + //=== VK_NV_mesh_shader === + VULKAN_HPP_CONSTEXPR_INLINE auto NVMeshShaderExtensionName = VK_NV_MESH_SHADER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVMeshShaderSpecVersion = VK_NV_MESH_SHADER_SPEC_VERSION; + + //=== VK_NV_fragment_shader_barycentric === + VULKAN_HPP_CONSTEXPR_INLINE auto NVFragmentShaderBarycentricExtensionName = VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVFragmentShaderBarycentricSpecVersion = VK_NV_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION; + + //=== VK_NV_shader_image_footprint === + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderImageFootprintExtensionName = VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderImageFootprintSpecVersion = VK_NV_SHADER_IMAGE_FOOTPRINT_SPEC_VERSION; + + //=== VK_NV_scissor_exclusive === + VULKAN_HPP_CONSTEXPR_INLINE auto NVScissorExclusiveExtensionName = VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVScissorExclusiveSpecVersion = VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION; + + //=== VK_NV_device_diagnostic_checkpoints === + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceDiagnosticCheckpointsExtensionName = VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceDiagnosticCheckpointsSpecVersion = VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_SPEC_VERSION; + + //=== VK_KHR_timeline_semaphore === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRTimelineSemaphoreExtensionName = VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRTimelineSemaphoreSpecVersion = VK_KHR_TIMELINE_SEMAPHORE_SPEC_VERSION; + + //=== VK_INTEL_shader_integer_functions2 === + VULKAN_HPP_CONSTEXPR_INLINE auto INTELShaderIntegerFunctions2ExtensionName = VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto INTELShaderIntegerFunctions2SpecVersion = VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_SPEC_VERSION; + + //=== VK_INTEL_performance_query === + VULKAN_HPP_CONSTEXPR_INLINE auto INTELPerformanceQueryExtensionName = VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto INTELPerformanceQuerySpecVersion = VK_INTEL_PERFORMANCE_QUERY_SPEC_VERSION; + + //=== VK_KHR_vulkan_memory_model === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVulkanMemoryModelExtensionName = VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVulkanMemoryModelSpecVersion = VK_KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION; + + //=== VK_EXT_pci_bus_info === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPciBusInfoExtensionName = VK_EXT_PCI_BUS_INFO_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto 
EXTPciBusInfoSpecVersion = VK_EXT_PCI_BUS_INFO_SPEC_VERSION; + + //=== VK_AMD_display_native_hdr === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDDisplayNativeHdrExtensionName = VK_AMD_DISPLAY_NATIVE_HDR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDDisplayNativeHdrSpecVersion = VK_AMD_DISPLAY_NATIVE_HDR_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIAImagepipeSurfaceExtensionName = VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIAImagepipeSurfaceSpecVersion = VK_FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_KHR_shader_terminate_invocation === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderTerminateInvocationExtensionName = VK_KHR_SHADER_TERMINATE_INVOCATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderTerminateInvocationSpecVersion = VK_KHR_SHADER_TERMINATE_INVOCATION_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMetalSurfaceExtensionName = VK_EXT_METAL_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMetalSurfaceSpecVersion = VK_EXT_METAL_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_fragment_density_map === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFragmentDensityMapExtensionName = VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFragmentDensityMapSpecVersion = VK_EXT_FRAGMENT_DENSITY_MAP_SPEC_VERSION; + + //=== VK_EXT_scalar_block_layout === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTScalarBlockLayoutExtensionName = VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTScalarBlockLayoutSpecVersion = VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION; + + //=== VK_GOOGLE_hlsl_functionality1 === + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEHlslFunctionality1ExtensionName = VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEHlslFunctionality1SpecVersion = VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION; + + //=== VK_GOOGLE_decorate_string === + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEDecorateStringExtensionName = VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEDecorateStringSpecVersion = VK_GOOGLE_DECORATE_STRING_SPEC_VERSION; + + //=== VK_EXT_subgroup_size_control === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSubgroupSizeControlExtensionName = VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSubgroupSizeControlSpecVersion = VK_EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION; + + //=== VK_KHR_fragment_shading_rate === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRFragmentShadingRateExtensionName = VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRFragmentShadingRateSpecVersion = VK_KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION; + + //=== VK_AMD_shader_core_properties2 === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderCoreProperties2ExtensionName = VK_AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderCoreProperties2SpecVersion = VK_AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION; + + //=== VK_AMD_device_coherent_memory === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDDeviceCoherentMemoryExtensionName = VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDDeviceCoherentMemorySpecVersion = VK_AMD_DEVICE_COHERENT_MEMORY_SPEC_VERSION; + + //=== VK_KHR_dynamic_rendering_local_read === + 
VULKAN_HPP_CONSTEXPR_INLINE auto KHRDynamicRenderingLocalReadExtensionName = VK_KHR_DYNAMIC_RENDERING_LOCAL_READ_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDynamicRenderingLocalReadSpecVersion = VK_KHR_DYNAMIC_RENDERING_LOCAL_READ_SPEC_VERSION; + + //=== VK_EXT_shader_image_atomic_int64 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderImageAtomicInt64ExtensionName = VK_EXT_SHADER_IMAGE_ATOMIC_INT64_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderImageAtomicInt64SpecVersion = VK_EXT_SHADER_IMAGE_ATOMIC_INT64_SPEC_VERSION; + + //=== VK_KHR_shader_quad_control === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderQuadControlExtensionName = VK_KHR_SHADER_QUAD_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderQuadControlSpecVersion = VK_KHR_SHADER_QUAD_CONTROL_SPEC_VERSION; + + //=== VK_KHR_spirv_1_4 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSpirv14ExtensionName = VK_KHR_SPIRV_1_4_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSpirv14SpecVersion = VK_KHR_SPIRV_1_4_SPEC_VERSION; + + //=== VK_EXT_memory_budget === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMemoryBudgetExtensionName = VK_EXT_MEMORY_BUDGET_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMemoryBudgetSpecVersion = VK_EXT_MEMORY_BUDGET_SPEC_VERSION; + + //=== VK_EXT_memory_priority === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMemoryPriorityExtensionName = VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMemoryPrioritySpecVersion = VK_EXT_MEMORY_PRIORITY_SPEC_VERSION; + + //=== VK_KHR_surface_protected_capabilities === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSurfaceProtectedCapabilitiesExtensionName = VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSurfaceProtectedCapabilitiesSpecVersion = VK_KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION; + + //=== VK_NV_dedicated_allocation_image_aliasing === + VULKAN_HPP_CONSTEXPR_INLINE auto NVDedicatedAllocationImageAliasingExtensionName = VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVDedicatedAllocationImageAliasingSpecVersion = VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_SPEC_VERSION; + + //=== VK_KHR_separate_depth_stencil_layouts === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSeparateDepthStencilLayoutsExtensionName = VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSeparateDepthStencilLayoutsSpecVersion = VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION; + + //=== VK_EXT_buffer_device_address === + VULKAN_HPP_DEPRECATED( "The VK_EXT_buffer_device_address extension has been deprecated by VK_KHR_buffer_device_address." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTBufferDeviceAddressExtensionName = VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_buffer_device_address extension has been deprecated by VK_KHR_buffer_device_address." 
) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTBufferDeviceAddressSpecVersion = VK_EXT_BUFFER_DEVICE_ADDRESS_SPEC_VERSION; + + //=== VK_EXT_tooling_info === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTToolingInfoExtensionName = VK_EXT_TOOLING_INFO_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTToolingInfoSpecVersion = VK_EXT_TOOLING_INFO_SPEC_VERSION; + + //=== VK_EXT_separate_stencil_usage === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSeparateStencilUsageExtensionName = VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSeparateStencilUsageSpecVersion = VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION; + + //=== VK_EXT_validation_features === + VULKAN_HPP_DEPRECATED( "The VK_EXT_validation_features extension has been deprecated by VK_EXT_layer_settings." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTValidationFeaturesExtensionName = VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME; + VULKAN_HPP_DEPRECATED( "The VK_EXT_validation_features extension has been deprecated by VK_EXT_layer_settings." ) + VULKAN_HPP_CONSTEXPR_INLINE auto EXTValidationFeaturesSpecVersion = VK_EXT_VALIDATION_FEATURES_SPEC_VERSION; + + //=== VK_KHR_present_wait === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPresentWaitExtensionName = VK_KHR_PRESENT_WAIT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPresentWaitSpecVersion = VK_KHR_PRESENT_WAIT_SPEC_VERSION; + + //=== VK_NV_cooperative_matrix === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCooperativeMatrixExtensionName = VK_NV_COOPERATIVE_MATRIX_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVCooperativeMatrixSpecVersion = VK_NV_COOPERATIVE_MATRIX_SPEC_VERSION; + + //=== VK_NV_coverage_reduction_mode === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCoverageReductionModeExtensionName = VK_NV_COVERAGE_REDUCTION_MODE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVCoverageReductionModeSpecVersion = VK_NV_COVERAGE_REDUCTION_MODE_SPEC_VERSION; + + //=== VK_EXT_fragment_shader_interlock === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFragmentShaderInterlockExtensionName = VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFragmentShaderInterlockSpecVersion = VK_EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION; + + //=== VK_EXT_ycbcr_image_arrays === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTYcbcrImageArraysExtensionName = VK_EXT_YCBCR_IMAGE_ARRAYS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTYcbcrImageArraysSpecVersion = VK_EXT_YCBCR_IMAGE_ARRAYS_SPEC_VERSION; + + //=== VK_KHR_uniform_buffer_standard_layout === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRUniformBufferStandardLayoutExtensionName = VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRUniformBufferStandardLayoutSpecVersion = VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION; + + //=== VK_EXT_provoking_vertex === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTProvokingVertexExtensionName = VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTProvokingVertexSpecVersion = VK_EXT_PROVOKING_VERTEX_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFullScreenExclusiveExtensionName = VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFullScreenExclusiveSpecVersion = VK_EXT_FULL_SCREEN_EXCLUSIVE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTHeadlessSurfaceExtensionName = VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE 
auto EXTHeadlessSurfaceSpecVersion = VK_EXT_HEADLESS_SURFACE_SPEC_VERSION; + + //=== VK_KHR_buffer_device_address === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRBufferDeviceAddressExtensionName = VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRBufferDeviceAddressSpecVersion = VK_KHR_BUFFER_DEVICE_ADDRESS_SPEC_VERSION; + + //=== VK_EXT_line_rasterization === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLineRasterizationExtensionName = VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLineRasterizationSpecVersion = VK_EXT_LINE_RASTERIZATION_SPEC_VERSION; + + //=== VK_EXT_shader_atomic_float === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderAtomicFloatExtensionName = VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderAtomicFloatSpecVersion = VK_EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION; + + //=== VK_EXT_host_query_reset === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTHostQueryResetExtensionName = VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTHostQueryResetSpecVersion = VK_EXT_HOST_QUERY_RESET_SPEC_VERSION; + + //=== VK_EXT_index_type_uint8 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTIndexTypeUint8ExtensionName = VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTIndexTypeUint8SpecVersion = VK_EXT_INDEX_TYPE_UINT8_SPEC_VERSION; + + //=== VK_EXT_extended_dynamic_state === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicStateExtensionName = VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicStateSpecVersion = VK_EXT_EXTENDED_DYNAMIC_STATE_SPEC_VERSION; + + //=== VK_KHR_deferred_host_operations === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeferredHostOperationsExtensionName = VK_KHR_DEFERRED_HOST_OPERATIONS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeferredHostOperationsSpecVersion = VK_KHR_DEFERRED_HOST_OPERATIONS_SPEC_VERSION; + + //=== VK_KHR_pipeline_executable_properties === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPipelineExecutablePropertiesExtensionName = VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPipelineExecutablePropertiesSpecVersion = VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION; + + //=== VK_EXT_host_image_copy === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTHostImageCopyExtensionName = VK_EXT_HOST_IMAGE_COPY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTHostImageCopySpecVersion = VK_EXT_HOST_IMAGE_COPY_SPEC_VERSION; + + //=== VK_KHR_map_memory2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMapMemory2ExtensionName = VK_KHR_MAP_MEMORY_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMapMemory2SpecVersion = VK_KHR_MAP_MEMORY_2_SPEC_VERSION; + + //=== VK_EXT_map_memory_placed === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMapMemoryPlacedExtensionName = VK_EXT_MAP_MEMORY_PLACED_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMapMemoryPlacedSpecVersion = VK_EXT_MAP_MEMORY_PLACED_SPEC_VERSION; + + //=== VK_EXT_shader_atomic_float2 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderAtomicFloat2ExtensionName = VK_EXT_SHADER_ATOMIC_FLOAT_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderAtomicFloat2SpecVersion = VK_EXT_SHADER_ATOMIC_FLOAT_2_SPEC_VERSION; + + //=== VK_EXT_surface_maintenance1 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSurfaceMaintenance1ExtensionName = VK_EXT_SURFACE_MAINTENANCE_1_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSurfaceMaintenance1SpecVersion = VK_EXT_SURFACE_MAINTENANCE_1_SPEC_VERSION; + + //=== 
VK_EXT_swapchain_maintenance1 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSwapchainMaintenance1ExtensionName = VK_EXT_SWAPCHAIN_MAINTENANCE_1_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSwapchainMaintenance1SpecVersion = VK_EXT_SWAPCHAIN_MAINTENANCE_1_SPEC_VERSION; + + //=== VK_EXT_shader_demote_to_helper_invocation === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderDemoteToHelperInvocationExtensionName = VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderDemoteToHelperInvocationSpecVersion = VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION; + + //=== VK_NV_device_generated_commands === + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceGeneratedCommandsExtensionName = VK_NV_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceGeneratedCommandsSpecVersion = VK_NV_DEVICE_GENERATED_COMMANDS_SPEC_VERSION; + + //=== VK_NV_inherited_viewport_scissor === + VULKAN_HPP_CONSTEXPR_INLINE auto NVInheritedViewportScissorExtensionName = VK_NV_INHERITED_VIEWPORT_SCISSOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVInheritedViewportScissorSpecVersion = VK_NV_INHERITED_VIEWPORT_SCISSOR_SPEC_VERSION; + + //=== VK_KHR_shader_integer_dot_product === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderIntegerDotProductExtensionName = VK_KHR_SHADER_INTEGER_DOT_PRODUCT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderIntegerDotProductSpecVersion = VK_KHR_SHADER_INTEGER_DOT_PRODUCT_SPEC_VERSION; + + //=== VK_EXT_texel_buffer_alignment === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTTexelBufferAlignmentExtensionName = VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTTexelBufferAlignmentSpecVersion = VK_EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION; + + //=== VK_QCOM_render_pass_transform === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRenderPassTransformExtensionName = VK_QCOM_RENDER_PASS_TRANSFORM_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRenderPassTransformSpecVersion = VK_QCOM_RENDER_PASS_TRANSFORM_SPEC_VERSION; + + //=== VK_EXT_depth_bias_control === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthBiasControlExtensionName = VK_EXT_DEPTH_BIAS_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthBiasControlSpecVersion = VK_EXT_DEPTH_BIAS_CONTROL_SPEC_VERSION; + + //=== VK_EXT_device_memory_report === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceMemoryReportExtensionName = VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceMemoryReportSpecVersion = VK_EXT_DEVICE_MEMORY_REPORT_SPEC_VERSION; + + //=== VK_EXT_acquire_drm_display === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAcquireDrmDisplayExtensionName = VK_EXT_ACQUIRE_DRM_DISPLAY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAcquireDrmDisplaySpecVersion = VK_EXT_ACQUIRE_DRM_DISPLAY_SPEC_VERSION; + + //=== VK_EXT_robustness2 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTRobustness2ExtensionName = VK_EXT_ROBUSTNESS_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTRobustness2SpecVersion = VK_EXT_ROBUSTNESS_2_SPEC_VERSION; + + //=== VK_EXT_custom_border_color === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTCustomBorderColorExtensionName = VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTCustomBorderColorSpecVersion = VK_EXT_CUSTOM_BORDER_COLOR_SPEC_VERSION; + + //=== VK_GOOGLE_user_type === + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEUserTypeExtensionName = VK_GOOGLE_USER_TYPE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEUserTypeSpecVersion = 
VK_GOOGLE_USER_TYPE_SPEC_VERSION; + + //=== VK_KHR_pipeline_library === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPipelineLibraryExtensionName = VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPipelineLibrarySpecVersion = VK_KHR_PIPELINE_LIBRARY_SPEC_VERSION; + + //=== VK_NV_present_barrier === + VULKAN_HPP_CONSTEXPR_INLINE auto NVPresentBarrierExtensionName = VK_NV_PRESENT_BARRIER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVPresentBarrierSpecVersion = VK_NV_PRESENT_BARRIER_SPEC_VERSION; + + //=== VK_KHR_shader_non_semantic_info === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderNonSemanticInfoExtensionName = VK_KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderNonSemanticInfoSpecVersion = VK_KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION; + + //=== VK_KHR_present_id === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPresentIdExtensionName = VK_KHR_PRESENT_ID_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPresentIdSpecVersion = VK_KHR_PRESENT_ID_SPEC_VERSION; + + //=== VK_EXT_private_data === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPrivateDataExtensionName = VK_EXT_PRIVATE_DATA_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPrivateDataSpecVersion = VK_EXT_PRIVATE_DATA_SPEC_VERSION; + + //=== VK_EXT_pipeline_creation_cache_control === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineCreationCacheControlExtensionName = VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineCreationCacheControlSpecVersion = VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION; + + //=== VK_KHR_video_encode_queue === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeQueueExtensionName = VK_KHR_VIDEO_ENCODE_QUEUE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeQueueSpecVersion = VK_KHR_VIDEO_ENCODE_QUEUE_SPEC_VERSION; + + //=== VK_NV_device_diagnostics_config === + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceDiagnosticsConfigExtensionName = VK_NV_DEVICE_DIAGNOSTICS_CONFIG_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceDiagnosticsConfigSpecVersion = VK_NV_DEVICE_DIAGNOSTICS_CONFIG_SPEC_VERSION; + + //=== VK_QCOM_render_pass_store_ops === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRenderPassStoreOpsExtensionName = VK_QCOM_RENDER_PASS_STORE_OPS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRenderPassStoreOpsSpecVersion = VK_QCOM_RENDER_PASS_STORE_OPS_SPEC_VERSION; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCudaKernelLaunchExtensionName = VK_NV_CUDA_KERNEL_LAUNCH_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVCudaKernelLaunchSpecVersion = VK_NV_CUDA_KERNEL_LAUNCH_SPEC_VERSION; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_low_latency === + VULKAN_HPP_CONSTEXPR_INLINE auto NVLowLatencyExtensionName = VK_NV_LOW_LATENCY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVLowLatencySpecVersion = VK_NV_LOW_LATENCY_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMetalObjectsExtensionName = VK_EXT_METAL_OBJECTS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMetalObjectsSpecVersion = VK_EXT_METAL_OBJECTS_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSynchronization2ExtensionName = VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSynchronization2SpecVersion = VK_KHR_SYNCHRONIZATION_2_SPEC_VERSION; + + 
//=== VK_EXT_descriptor_buffer === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDescriptorBufferExtensionName = VK_EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDescriptorBufferSpecVersion = VK_EXT_DESCRIPTOR_BUFFER_SPEC_VERSION; + + //=== VK_EXT_graphics_pipeline_library === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTGraphicsPipelineLibraryExtensionName = VK_EXT_GRAPHICS_PIPELINE_LIBRARY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTGraphicsPipelineLibrarySpecVersion = VK_EXT_GRAPHICS_PIPELINE_LIBRARY_SPEC_VERSION; + + //=== VK_AMD_shader_early_and_late_fragment_tests === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderEarlyAndLateFragmentTestsExtensionName = VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderEarlyAndLateFragmentTestsSpecVersion = VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_SPEC_VERSION; + + //=== VK_KHR_fragment_shader_barycentric === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRFragmentShaderBarycentricExtensionName = VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRFragmentShaderBarycentricSpecVersion = VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION; + + //=== VK_KHR_shader_subgroup_uniform_control_flow === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderSubgroupUniformControlFlowExtensionName = VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderSubgroupUniformControlFlowSpecVersion = VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_SPEC_VERSION; + + //=== VK_KHR_zero_initialize_workgroup_memory === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRZeroInitializeWorkgroupMemoryExtensionName = VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRZeroInitializeWorkgroupMemorySpecVersion = VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION; + + //=== VK_NV_fragment_shading_rate_enums === + VULKAN_HPP_CONSTEXPR_INLINE auto NVFragmentShadingRateEnumsExtensionName = VK_NV_FRAGMENT_SHADING_RATE_ENUMS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVFragmentShadingRateEnumsSpecVersion = VK_NV_FRAGMENT_SHADING_RATE_ENUMS_SPEC_VERSION; + + //=== VK_NV_ray_tracing_motion_blur === + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingMotionBlurExtensionName = VK_NV_RAY_TRACING_MOTION_BLUR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingMotionBlurSpecVersion = VK_NV_RAY_TRACING_MOTION_BLUR_SPEC_VERSION; + + //=== VK_EXT_mesh_shader === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMeshShaderExtensionName = VK_EXT_MESH_SHADER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMeshShaderSpecVersion = VK_EXT_MESH_SHADER_SPEC_VERSION; + + //=== VK_EXT_ycbcr_2plane_444_formats === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTYcbcr2Plane444FormatsExtensionName = VK_EXT_YCBCR_2PLANE_444_FORMATS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTYcbcr2Plane444FormatsSpecVersion = VK_EXT_YCBCR_2PLANE_444_FORMATS_SPEC_VERSION; + + //=== VK_EXT_fragment_density_map2 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFragmentDensityMap2ExtensionName = VK_EXT_FRAGMENT_DENSITY_MAP_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFragmentDensityMap2SpecVersion = VK_EXT_FRAGMENT_DENSITY_MAP_2_SPEC_VERSION; + + //=== VK_QCOM_rotated_copy_commands === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRotatedCopyCommandsExtensionName = VK_QCOM_ROTATED_COPY_COMMANDS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRotatedCopyCommandsSpecVersion = VK_QCOM_ROTATED_COPY_COMMANDS_SPEC_VERSION; + + //=== VK_EXT_image_robustness 
=== + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageRobustnessExtensionName = VK_EXT_IMAGE_ROBUSTNESS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageRobustnessSpecVersion = VK_EXT_IMAGE_ROBUSTNESS_SPEC_VERSION; + + //=== VK_KHR_workgroup_memory_explicit_layout === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRWorkgroupMemoryExplicitLayoutExtensionName = VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRWorkgroupMemoryExplicitLayoutSpecVersion = VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_SPEC_VERSION; + + //=== VK_KHR_copy_commands2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCopyCommands2ExtensionName = VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCopyCommands2SpecVersion = VK_KHR_COPY_COMMANDS_2_SPEC_VERSION; + + //=== VK_EXT_image_compression_control === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageCompressionControlExtensionName = VK_EXT_IMAGE_COMPRESSION_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageCompressionControlSpecVersion = VK_EXT_IMAGE_COMPRESSION_CONTROL_SPEC_VERSION; + + //=== VK_EXT_attachment_feedback_loop_layout === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAttachmentFeedbackLoopLayoutExtensionName = VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAttachmentFeedbackLoopLayoutSpecVersion = VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_SPEC_VERSION; + + //=== VK_EXT_4444_formats === + VULKAN_HPP_CONSTEXPR_INLINE auto EXT4444FormatsExtensionName = VK_EXT_4444_FORMATS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXT4444FormatsSpecVersion = VK_EXT_4444_FORMATS_SPEC_VERSION; + + //=== VK_EXT_device_fault === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceFaultExtensionName = VK_EXT_DEVICE_FAULT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceFaultSpecVersion = VK_EXT_DEVICE_FAULT_SPEC_VERSION; + + //=== VK_ARM_rasterization_order_attachment_access === + VULKAN_HPP_CONSTEXPR_INLINE auto ARMRasterizationOrderAttachmentAccessExtensionName = VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto ARMRasterizationOrderAttachmentAccessSpecVersion = VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION; + + //=== VK_EXT_rgba10x6_formats === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTRgba10X6FormatsExtensionName = VK_EXT_RGBA10X6_FORMATS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTRgba10X6FormatsSpecVersion = VK_EXT_RGBA10X6_FORMATS_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + VULKAN_HPP_CONSTEXPR_INLINE auto NVAcquireWinrtDisplayExtensionName = VK_NV_ACQUIRE_WINRT_DISPLAY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVAcquireWinrtDisplaySpecVersion = VK_NV_ACQUIRE_WINRT_DISPLAY_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDirectfbSurfaceExtensionName = VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDirectfbSurfaceSpecVersion = VK_EXT_DIRECTFB_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_VALVE_mutable_descriptor_type === + VULKAN_HPP_CONSTEXPR_INLINE auto VALVEMutableDescriptorTypeExtensionName = VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto VALVEMutableDescriptorTypeSpecVersion = VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION; + + //=== VK_EXT_vertex_input_dynamic_state === + VULKAN_HPP_CONSTEXPR_INLINE auto 
EXTVertexInputDynamicStateExtensionName = VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTVertexInputDynamicStateSpecVersion = VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_SPEC_VERSION; + + //=== VK_EXT_physical_device_drm === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPhysicalDeviceDrmExtensionName = VK_EXT_PHYSICAL_DEVICE_DRM_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPhysicalDeviceDrmSpecVersion = VK_EXT_PHYSICAL_DEVICE_DRM_SPEC_VERSION; + + //=== VK_EXT_device_address_binding_report === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceAddressBindingReportExtensionName = VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceAddressBindingReportSpecVersion = VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_SPEC_VERSION; + + //=== VK_EXT_depth_clip_control === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClipControlExtensionName = VK_EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClipControlSpecVersion = VK_EXT_DEPTH_CLIP_CONTROL_SPEC_VERSION; + + //=== VK_EXT_primitive_topology_list_restart === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPrimitiveTopologyListRestartExtensionName = VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPrimitiveTopologyListRestartSpecVersion = VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_SPEC_VERSION; + + //=== VK_KHR_format_feature_flags2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRFormatFeatureFlags2ExtensionName = VK_KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRFormatFeatureFlags2SpecVersion = VK_KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION; + + //=== VK_EXT_present_mode_fifo_latest_ready === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPresentModeFifoLatestReadyExtensionName = VK_EXT_PRESENT_MODE_FIFO_LATEST_READY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPresentModeFifoLatestReadySpecVersion = VK_EXT_PRESENT_MODE_FIFO_LATEST_READY_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIAExternalMemoryExtensionName = VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIAExternalMemorySpecVersion = VK_FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIAExternalSemaphoreExtensionName = VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIAExternalSemaphoreSpecVersion = VK_FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIABufferCollectionExtensionName = VK_FUCHSIA_BUFFER_COLLECTION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIABufferCollectionSpecVersion = VK_FUCHSIA_BUFFER_COLLECTION_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEISubpassShadingExtensionName = VK_HUAWEI_SUBPASS_SHADING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEISubpassShadingSpecVersion = VK_HUAWEI_SUBPASS_SHADING_SPEC_VERSION; + + //=== VK_HUAWEI_invocation_mask === + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEIInvocationMaskExtensionName = VK_HUAWEI_INVOCATION_MASK_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEIInvocationMaskSpecVersion = VK_HUAWEI_INVOCATION_MASK_SPEC_VERSION; + + 
//=== VK_NV_external_memory_rdma === + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemoryRdmaExtensionName = VK_NV_EXTERNAL_MEMORY_RDMA_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVExternalMemoryRdmaSpecVersion = VK_NV_EXTERNAL_MEMORY_RDMA_SPEC_VERSION; + + //=== VK_EXT_pipeline_properties === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelinePropertiesExtensionName = VK_EXT_PIPELINE_PROPERTIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelinePropertiesSpecVersion = VK_EXT_PIPELINE_PROPERTIES_SPEC_VERSION; + + //=== VK_EXT_frame_boundary === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFrameBoundaryExtensionName = VK_EXT_FRAME_BOUNDARY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTFrameBoundarySpecVersion = VK_EXT_FRAME_BOUNDARY_SPEC_VERSION; + + //=== VK_EXT_multisampled_render_to_single_sampled === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMultisampledRenderToSingleSampledExtensionName = VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMultisampledRenderToSingleSampledSpecVersion = VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_SPEC_VERSION; + + //=== VK_EXT_extended_dynamic_state2 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicState2ExtensionName = VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicState2SpecVersion = VK_EXT_EXTENDED_DYNAMIC_STATE_2_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + VULKAN_HPP_CONSTEXPR_INLINE auto QNXScreenSurfaceExtensionName = VK_QNX_SCREEN_SURFACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QNXScreenSurfaceSpecVersion = VK_QNX_SCREEN_SURFACE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTColorWriteEnableExtensionName = VK_EXT_COLOR_WRITE_ENABLE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTColorWriteEnableSpecVersion = VK_EXT_COLOR_WRITE_ENABLE_SPEC_VERSION; + + //=== VK_EXT_primitives_generated_query === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPrimitivesGeneratedQueryExtensionName = VK_EXT_PRIMITIVES_GENERATED_QUERY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPrimitivesGeneratedQuerySpecVersion = VK_EXT_PRIMITIVES_GENERATED_QUERY_SPEC_VERSION; + + //=== VK_KHR_ray_tracing_maintenance1 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayTracingMaintenance1ExtensionName = VK_KHR_RAY_TRACING_MAINTENANCE_1_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayTracingMaintenance1SpecVersion = VK_KHR_RAY_TRACING_MAINTENANCE_1_SPEC_VERSION; + + //=== VK_EXT_global_priority_query === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTGlobalPriorityQueryExtensionName = VK_EXT_GLOBAL_PRIORITY_QUERY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTGlobalPriorityQuerySpecVersion = VK_EXT_GLOBAL_PRIORITY_QUERY_SPEC_VERSION; + + //=== VK_EXT_image_view_min_lod === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageViewMinLodExtensionName = VK_EXT_IMAGE_VIEW_MIN_LOD_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageViewMinLodSpecVersion = VK_EXT_IMAGE_VIEW_MIN_LOD_SPEC_VERSION; + + //=== VK_EXT_multi_draw === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMultiDrawExtensionName = VK_EXT_MULTI_DRAW_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMultiDrawSpecVersion = VK_EXT_MULTI_DRAW_SPEC_VERSION; + + //=== VK_EXT_image_2d_view_of_3d === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImage2DViewOf3DExtensionName = VK_EXT_IMAGE_2D_VIEW_OF_3D_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto 
EXTImage2DViewOf3DSpecVersion = VK_EXT_IMAGE_2D_VIEW_OF_3D_SPEC_VERSION; + + //=== VK_KHR_portability_enumeration === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPortabilityEnumerationExtensionName = VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPortabilityEnumerationSpecVersion = VK_KHR_PORTABILITY_ENUMERATION_SPEC_VERSION; + + //=== VK_EXT_shader_tile_image === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderTileImageExtensionName = VK_EXT_SHADER_TILE_IMAGE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderTileImageSpecVersion = VK_EXT_SHADER_TILE_IMAGE_SPEC_VERSION; + + //=== VK_EXT_opacity_micromap === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTOpacityMicromapExtensionName = VK_EXT_OPACITY_MICROMAP_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTOpacityMicromapSpecVersion = VK_EXT_OPACITY_MICROMAP_SPEC_VERSION; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_displacement_micromap === + VULKAN_HPP_CONSTEXPR_INLINE auto NVDisplacementMicromapExtensionName = VK_NV_DISPLACEMENT_MICROMAP_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVDisplacementMicromapSpecVersion = VK_NV_DISPLACEMENT_MICROMAP_SPEC_VERSION; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_load_store_op_none === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLoadStoreOpNoneExtensionName = VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLoadStoreOpNoneSpecVersion = VK_EXT_LOAD_STORE_OP_NONE_SPEC_VERSION; + + //=== VK_HUAWEI_cluster_culling_shader === + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEIClusterCullingShaderExtensionName = VK_HUAWEI_CLUSTER_CULLING_SHADER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEIClusterCullingShaderSpecVersion = VK_HUAWEI_CLUSTER_CULLING_SHADER_SPEC_VERSION; + + //=== VK_EXT_border_color_swizzle === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTBorderColorSwizzleExtensionName = VK_EXT_BORDER_COLOR_SWIZZLE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTBorderColorSwizzleSpecVersion = VK_EXT_BORDER_COLOR_SWIZZLE_SPEC_VERSION; + + //=== VK_EXT_pageable_device_local_memory === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPageableDeviceLocalMemoryExtensionName = VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPageableDeviceLocalMemorySpecVersion = VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_SPEC_VERSION; + + //=== VK_KHR_maintenance4 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance4ExtensionName = VK_KHR_MAINTENANCE_4_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance4SpecVersion = VK_KHR_MAINTENANCE_4_SPEC_VERSION; + + //=== VK_ARM_shader_core_properties === + VULKAN_HPP_CONSTEXPR_INLINE auto ARMShaderCorePropertiesExtensionName = VK_ARM_SHADER_CORE_PROPERTIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto ARMShaderCorePropertiesSpecVersion = VK_ARM_SHADER_CORE_PROPERTIES_SPEC_VERSION; + + //=== VK_KHR_shader_subgroup_rotate === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderSubgroupRotateExtensionName = VK_KHR_SHADER_SUBGROUP_ROTATE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderSubgroupRotateSpecVersion = VK_KHR_SHADER_SUBGROUP_ROTATE_SPEC_VERSION; + + //=== VK_ARM_scheduling_controls === + VULKAN_HPP_CONSTEXPR_INLINE auto ARMSchedulingControlsExtensionName = VK_ARM_SCHEDULING_CONTROLS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto ARMSchedulingControlsSpecVersion = VK_ARM_SCHEDULING_CONTROLS_SPEC_VERSION; + + //=== VK_EXT_image_sliced_view_of_3d === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageSlicedViewOf3DExtensionName = 
VK_EXT_IMAGE_SLICED_VIEW_OF_3D_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageSlicedViewOf3DSpecVersion = VK_EXT_IMAGE_SLICED_VIEW_OF_3D_SPEC_VERSION; + + //=== VK_VALVE_descriptor_set_host_mapping === + VULKAN_HPP_CONSTEXPR_INLINE auto VALVEDescriptorSetHostMappingExtensionName = VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto VALVEDescriptorSetHostMappingSpecVersion = VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_SPEC_VERSION; + + //=== VK_EXT_depth_clamp_zero_one === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClampZeroOneExtensionName = VK_EXT_DEPTH_CLAMP_ZERO_ONE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClampZeroOneSpecVersion = VK_EXT_DEPTH_CLAMP_ZERO_ONE_SPEC_VERSION; + + //=== VK_EXT_non_seamless_cube_map === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTNonSeamlessCubeMapExtensionName = VK_EXT_NON_SEAMLESS_CUBE_MAP_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTNonSeamlessCubeMapSpecVersion = VK_EXT_NON_SEAMLESS_CUBE_MAP_SPEC_VERSION; + + //=== VK_ARM_render_pass_striped === + VULKAN_HPP_CONSTEXPR_INLINE auto ARMRenderPassStripedExtensionName = VK_ARM_RENDER_PASS_STRIPED_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto ARMRenderPassStripedSpecVersion = VK_ARM_RENDER_PASS_STRIPED_SPEC_VERSION; + + //=== VK_QCOM_fragment_density_map_offset === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMFragmentDensityMapOffsetExtensionName = VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMFragmentDensityMapOffsetSpecVersion = VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_SPEC_VERSION; + + //=== VK_NV_copy_memory_indirect === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCopyMemoryIndirectExtensionName = VK_NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVCopyMemoryIndirectSpecVersion = VK_NV_COPY_MEMORY_INDIRECT_SPEC_VERSION; + + //=== VK_NV_memory_decompression === + VULKAN_HPP_CONSTEXPR_INLINE auto NVMemoryDecompressionExtensionName = VK_NV_MEMORY_DECOMPRESSION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVMemoryDecompressionSpecVersion = VK_NV_MEMORY_DECOMPRESSION_SPEC_VERSION; + + //=== VK_NV_device_generated_commands_compute === + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceGeneratedCommandsComputeExtensionName = VK_NV_DEVICE_GENERATED_COMMANDS_COMPUTE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceGeneratedCommandsComputeSpecVersion = VK_NV_DEVICE_GENERATED_COMMANDS_COMPUTE_SPEC_VERSION; + + //=== VK_NV_ray_tracing_linear_swept_spheres === + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingLinearSweptSpheresExtensionName = VK_NV_RAY_TRACING_LINEAR_SWEPT_SPHERES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingLinearSweptSpheresSpecVersion = VK_NV_RAY_TRACING_LINEAR_SWEPT_SPHERES_SPEC_VERSION; + + //=== VK_NV_linear_color_attachment === + VULKAN_HPP_CONSTEXPR_INLINE auto NVLinearColorAttachmentExtensionName = VK_NV_LINEAR_COLOR_ATTACHMENT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVLinearColorAttachmentSpecVersion = VK_NV_LINEAR_COLOR_ATTACHMENT_SPEC_VERSION; + + //=== VK_GOOGLE_surfaceless_query === + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLESurfacelessQueryExtensionName = VK_GOOGLE_SURFACELESS_QUERY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLESurfacelessQuerySpecVersion = VK_GOOGLE_SURFACELESS_QUERY_SPEC_VERSION; + + //=== VK_KHR_shader_maximal_reconvergence === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderMaximalReconvergenceExtensionName = VK_KHR_SHADER_MAXIMAL_RECONVERGENCE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto 
KHRShaderMaximalReconvergenceSpecVersion = VK_KHR_SHADER_MAXIMAL_RECONVERGENCE_SPEC_VERSION; + + //=== VK_EXT_image_compression_control_swapchain === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageCompressionControlSwapchainExtensionName = VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageCompressionControlSwapchainSpecVersion = VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_SPEC_VERSION; + + //=== VK_QCOM_image_processing === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMImageProcessingExtensionName = VK_QCOM_IMAGE_PROCESSING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMImageProcessingSpecVersion = VK_QCOM_IMAGE_PROCESSING_SPEC_VERSION; + + //=== VK_EXT_nested_command_buffer === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTNestedCommandBufferExtensionName = VK_EXT_NESTED_COMMAND_BUFFER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTNestedCommandBufferSpecVersion = VK_EXT_NESTED_COMMAND_BUFFER_SPEC_VERSION; + + //=== VK_EXT_external_memory_acquire_unmodified === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExternalMemoryAcquireUnmodifiedExtensionName = VK_EXT_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExternalMemoryAcquireUnmodifiedSpecVersion = VK_EXT_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_SPEC_VERSION; + + //=== VK_EXT_extended_dynamic_state3 === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicState3ExtensionName = VK_EXT_EXTENDED_DYNAMIC_STATE_3_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicState3SpecVersion = VK_EXT_EXTENDED_DYNAMIC_STATE_3_SPEC_VERSION; + + //=== VK_EXT_subpass_merge_feedback === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSubpassMergeFeedbackExtensionName = VK_EXT_SUBPASS_MERGE_FEEDBACK_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSubpassMergeFeedbackSpecVersion = VK_EXT_SUBPASS_MERGE_FEEDBACK_SPEC_VERSION; + + //=== VK_LUNARG_direct_driver_loading === + VULKAN_HPP_CONSTEXPR_INLINE auto LUNARGDirectDriverLoadingExtensionName = VK_LUNARG_DIRECT_DRIVER_LOADING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto LUNARGDirectDriverLoadingSpecVersion = VK_LUNARG_DIRECT_DRIVER_LOADING_SPEC_VERSION; + + //=== VK_EXT_shader_module_identifier === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderModuleIdentifierExtensionName = VK_EXT_SHADER_MODULE_IDENTIFIER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderModuleIdentifierSpecVersion = VK_EXT_SHADER_MODULE_IDENTIFIER_SPEC_VERSION; + + //=== VK_EXT_rasterization_order_attachment_access === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTRasterizationOrderAttachmentAccessExtensionName = VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTRasterizationOrderAttachmentAccessSpecVersion = VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION; + + //=== VK_NV_optical_flow === + VULKAN_HPP_CONSTEXPR_INLINE auto NVOpticalFlowExtensionName = VK_NV_OPTICAL_FLOW_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVOpticalFlowSpecVersion = VK_NV_OPTICAL_FLOW_SPEC_VERSION; + + //=== VK_EXT_legacy_dithering === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLegacyDitheringExtensionName = VK_EXT_LEGACY_DITHERING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLegacyDitheringSpecVersion = VK_EXT_LEGACY_DITHERING_SPEC_VERSION; + + //=== VK_EXT_pipeline_protected_access === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineProtectedAccessExtensionName = VK_EXT_PIPELINE_PROTECTED_ACCESS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineProtectedAccessSpecVersion = 
VK_EXT_PIPELINE_PROTECTED_ACCESS_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_format_resolve === + VULKAN_HPP_CONSTEXPR_INLINE auto ANDROIDExternalFormatResolveExtensionName = VK_ANDROID_EXTERNAL_FORMAT_RESOLVE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto ANDROIDExternalFormatResolveSpecVersion = VK_ANDROID_EXTERNAL_FORMAT_RESOLVE_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_KHR_maintenance5 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance5ExtensionName = VK_KHR_MAINTENANCE_5_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance5SpecVersion = VK_KHR_MAINTENANCE_5_SPEC_VERSION; + + //=== VK_AMD_anti_lag === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDAntiLagExtensionName = VK_AMD_ANTI_LAG_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDAntiLagSpecVersion = VK_AMD_ANTI_LAG_SPEC_VERSION; + + //=== VK_KHR_ray_tracing_position_fetch === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayTracingPositionFetchExtensionName = VK_KHR_RAY_TRACING_POSITION_FETCH_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayTracingPositionFetchSpecVersion = VK_KHR_RAY_TRACING_POSITION_FETCH_SPEC_VERSION; + + //=== VK_EXT_shader_object === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderObjectExtensionName = VK_EXT_SHADER_OBJECT_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderObjectSpecVersion = VK_EXT_SHADER_OBJECT_SPEC_VERSION; + + //=== VK_KHR_pipeline_binary === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPipelineBinaryExtensionName = VK_KHR_PIPELINE_BINARY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPipelineBinarySpecVersion = VK_KHR_PIPELINE_BINARY_SPEC_VERSION; + + //=== VK_QCOM_tile_properties === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMTilePropertiesExtensionName = VK_QCOM_TILE_PROPERTIES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMTilePropertiesSpecVersion = VK_QCOM_TILE_PROPERTIES_SPEC_VERSION; + + //=== VK_SEC_amigo_profiling === + VULKAN_HPP_CONSTEXPR_INLINE auto SECAmigoProfilingExtensionName = VK_SEC_AMIGO_PROFILING_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto SECAmigoProfilingSpecVersion = VK_SEC_AMIGO_PROFILING_SPEC_VERSION; + + //=== VK_QCOM_multiview_per_view_viewports === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMMultiviewPerViewViewportsExtensionName = VK_QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMMultiviewPerViewViewportsSpecVersion = VK_QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_SPEC_VERSION; + + //=== VK_NV_ray_tracing_invocation_reorder === + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingInvocationReorderExtensionName = VK_NV_RAY_TRACING_INVOCATION_REORDER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingInvocationReorderSpecVersion = VK_NV_RAY_TRACING_INVOCATION_REORDER_SPEC_VERSION; + + //=== VK_NV_cooperative_vector === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCooperativeVectorExtensionName = VK_NV_COOPERATIVE_VECTOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVCooperativeVectorSpecVersion = VK_NV_COOPERATIVE_VECTOR_SPEC_VERSION; + + //=== VK_NV_extended_sparse_address_space === + VULKAN_HPP_CONSTEXPR_INLINE auto NVExtendedSparseAddressSpaceExtensionName = VK_NV_EXTENDED_SPARSE_ADDRESS_SPACE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVExtendedSparseAddressSpaceSpecVersion = VK_NV_EXTENDED_SPARSE_ADDRESS_SPACE_SPEC_VERSION; + + //=== VK_EXT_mutable_descriptor_type === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTMutableDescriptorTypeExtensionName = VK_EXT_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE 
auto EXTMutableDescriptorTypeSpecVersion = VK_EXT_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION; + + //=== VK_EXT_legacy_vertex_attributes === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLegacyVertexAttributesExtensionName = VK_EXT_LEGACY_VERTEX_ATTRIBUTES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLegacyVertexAttributesSpecVersion = VK_EXT_LEGACY_VERTEX_ATTRIBUTES_SPEC_VERSION; + + //=== VK_EXT_layer_settings === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLayerSettingsExtensionName = VK_EXT_LAYER_SETTINGS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLayerSettingsSpecVersion = VK_EXT_LAYER_SETTINGS_SPEC_VERSION; + + //=== VK_ARM_shader_core_builtins === + VULKAN_HPP_CONSTEXPR_INLINE auto ARMShaderCoreBuiltinsExtensionName = VK_ARM_SHADER_CORE_BUILTINS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto ARMShaderCoreBuiltinsSpecVersion = VK_ARM_SHADER_CORE_BUILTINS_SPEC_VERSION; + + //=== VK_EXT_pipeline_library_group_handles === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineLibraryGroupHandlesExtensionName = VK_EXT_PIPELINE_LIBRARY_GROUP_HANDLES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineLibraryGroupHandlesSpecVersion = VK_EXT_PIPELINE_LIBRARY_GROUP_HANDLES_SPEC_VERSION; + + //=== VK_EXT_dynamic_rendering_unused_attachments === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDynamicRenderingUnusedAttachmentsExtensionName = VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDynamicRenderingUnusedAttachmentsSpecVersion = VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_SPEC_VERSION; + + //=== VK_NV_low_latency2 === + VULKAN_HPP_CONSTEXPR_INLINE auto NVLowLatency2ExtensionName = VK_NV_LOW_LATENCY_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVLowLatency2SpecVersion = VK_NV_LOW_LATENCY_2_SPEC_VERSION; + + //=== VK_KHR_cooperative_matrix === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCooperativeMatrixExtensionName = VK_KHR_COOPERATIVE_MATRIX_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCooperativeMatrixSpecVersion = VK_KHR_COOPERATIVE_MATRIX_SPEC_VERSION; + + //=== VK_QCOM_multiview_per_view_render_areas === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMMultiviewPerViewRenderAreasExtensionName = VK_QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMMultiviewPerViewRenderAreasSpecVersion = VK_QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_SPEC_VERSION; + + //=== VK_KHR_compute_shader_derivatives === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRComputeShaderDerivativesExtensionName = VK_KHR_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRComputeShaderDerivativesSpecVersion = VK_KHR_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION; + + //=== VK_KHR_video_decode_av1 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeAv1ExtensionName = VK_KHR_VIDEO_DECODE_AV1_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeAv1SpecVersion = VK_KHR_VIDEO_DECODE_AV1_SPEC_VERSION; + + //=== VK_KHR_video_encode_av1 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeAv1ExtensionName = VK_KHR_VIDEO_ENCODE_AV1_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeAv1SpecVersion = VK_KHR_VIDEO_ENCODE_AV1_SPEC_VERSION; + + //=== VK_KHR_video_maintenance1 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoMaintenance1ExtensionName = VK_KHR_VIDEO_MAINTENANCE_1_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoMaintenance1SpecVersion = VK_KHR_VIDEO_MAINTENANCE_1_SPEC_VERSION; + + //=== VK_NV_per_stage_descriptor_set === + VULKAN_HPP_CONSTEXPR_INLINE auto NVPerStageDescriptorSetExtensionName 
= VK_NV_PER_STAGE_DESCRIPTOR_SET_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVPerStageDescriptorSetSpecVersion = VK_NV_PER_STAGE_DESCRIPTOR_SET_SPEC_VERSION; + + //=== VK_QCOM_image_processing2 === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMImageProcessing2ExtensionName = VK_QCOM_IMAGE_PROCESSING_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMImageProcessing2SpecVersion = VK_QCOM_IMAGE_PROCESSING_2_SPEC_VERSION; + + //=== VK_QCOM_filter_cubic_weights === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMFilterCubicWeightsExtensionName = VK_QCOM_FILTER_CUBIC_WEIGHTS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMFilterCubicWeightsSpecVersion = VK_QCOM_FILTER_CUBIC_WEIGHTS_SPEC_VERSION; + + //=== VK_QCOM_ycbcr_degamma === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMYcbcrDegammaExtensionName = VK_QCOM_YCBCR_DEGAMMA_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMYcbcrDegammaSpecVersion = VK_QCOM_YCBCR_DEGAMMA_SPEC_VERSION; + + //=== VK_QCOM_filter_cubic_clamp === + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMFilterCubicClampExtensionName = VK_QCOM_FILTER_CUBIC_CLAMP_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QCOMFilterCubicClampSpecVersion = VK_QCOM_FILTER_CUBIC_CLAMP_SPEC_VERSION; + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAttachmentFeedbackLoopDynamicStateExtensionName = VK_EXT_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTAttachmentFeedbackLoopDynamicStateSpecVersion = VK_EXT_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_SPEC_VERSION; + + //=== VK_KHR_vertex_attribute_divisor === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVertexAttributeDivisorExtensionName = VK_KHR_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVertexAttributeDivisorSpecVersion = VK_KHR_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION; + + //=== VK_KHR_load_store_op_none === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRLoadStoreOpNoneExtensionName = VK_KHR_LOAD_STORE_OP_NONE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRLoadStoreOpNoneSpecVersion = VK_KHR_LOAD_STORE_OP_NONE_SPEC_VERSION; + + //=== VK_KHR_shader_float_controls2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloatControls2ExtensionName = VK_KHR_SHADER_FLOAT_CONTROLS_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloatControls2SpecVersion = VK_KHR_SHADER_FLOAT_CONTROLS_2_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + VULKAN_HPP_CONSTEXPR_INLINE auto QNXExternalMemoryScreenBufferExtensionName = VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto QNXExternalMemoryScreenBufferSpecVersion = VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_MSFT_layered_driver === + VULKAN_HPP_CONSTEXPR_INLINE auto MSFTLayeredDriverExtensionName = VK_MSFT_LAYERED_DRIVER_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto MSFTLayeredDriverSpecVersion = VK_MSFT_LAYERED_DRIVER_SPEC_VERSION; + + //=== VK_KHR_index_type_uint8 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRIndexTypeUint8ExtensionName = VK_KHR_INDEX_TYPE_UINT8_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRIndexTypeUint8SpecVersion = VK_KHR_INDEX_TYPE_UINT8_SPEC_VERSION; + + //=== VK_KHR_line_rasterization === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRLineRasterizationExtensionName = VK_KHR_LINE_RASTERIZATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRLineRasterizationSpecVersion = VK_KHR_LINE_RASTERIZATION_SPEC_VERSION; + 
+ //=== VK_KHR_calibrated_timestamps === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCalibratedTimestampsExtensionName = VK_KHR_CALIBRATED_TIMESTAMPS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCalibratedTimestampsSpecVersion = VK_KHR_CALIBRATED_TIMESTAMPS_SPEC_VERSION; + + //=== VK_KHR_shader_expect_assume === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderExpectAssumeExtensionName = VK_KHR_SHADER_EXPECT_ASSUME_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderExpectAssumeSpecVersion = VK_KHR_SHADER_EXPECT_ASSUME_SPEC_VERSION; + + //=== VK_KHR_maintenance6 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance6ExtensionName = VK_KHR_MAINTENANCE_6_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance6SpecVersion = VK_KHR_MAINTENANCE_6_SPEC_VERSION; + + //=== VK_NV_descriptor_pool_overallocation === + VULKAN_HPP_CONSTEXPR_INLINE auto NVDescriptorPoolOverallocationExtensionName = VK_NV_DESCRIPTOR_POOL_OVERALLOCATION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVDescriptorPoolOverallocationSpecVersion = VK_NV_DESCRIPTOR_POOL_OVERALLOCATION_SPEC_VERSION; + + //=== VK_NV_display_stereo === + VULKAN_HPP_CONSTEXPR_INLINE auto NVDisplayStereoExtensionName = VK_NV_DISPLAY_STEREO_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVDisplayStereoSpecVersion = VK_NV_DISPLAY_STEREO_SPEC_VERSION; + + //=== VK_KHR_video_encode_quantization_map === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeQuantizationMapExtensionName = VK_KHR_VIDEO_ENCODE_QUANTIZATION_MAP_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeQuantizationMapSpecVersion = VK_KHR_VIDEO_ENCODE_QUANTIZATION_MAP_SPEC_VERSION; + + //=== VK_NV_raw_access_chains === + VULKAN_HPP_CONSTEXPR_INLINE auto NVRawAccessChainsExtensionName = VK_NV_RAW_ACCESS_CHAINS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVRawAccessChainsSpecVersion = VK_NV_RAW_ACCESS_CHAINS_SPEC_VERSION; + + //=== VK_KHR_shader_relaxed_extended_instruction === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderRelaxedExtendedInstructionExtensionName = VK_KHR_SHADER_RELAXED_EXTENDED_INSTRUCTION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderRelaxedExtendedInstructionSpecVersion = VK_KHR_SHADER_RELAXED_EXTENDED_INSTRUCTION_SPEC_VERSION; + + //=== VK_NV_command_buffer_inheritance === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCommandBufferInheritanceExtensionName = VK_NV_COMMAND_BUFFER_INHERITANCE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVCommandBufferInheritanceSpecVersion = VK_NV_COMMAND_BUFFER_INHERITANCE_SPEC_VERSION; + + //=== VK_KHR_maintenance7 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance7ExtensionName = VK_KHR_MAINTENANCE_7_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance7SpecVersion = VK_KHR_MAINTENANCE_7_SPEC_VERSION; + + //=== VK_NV_shader_atomic_float16_vector === + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderAtomicFloat16VectorExtensionName = VK_NV_SHADER_ATOMIC_FLOAT16_VECTOR_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderAtomicFloat16VectorSpecVersion = VK_NV_SHADER_ATOMIC_FLOAT16_VECTOR_SPEC_VERSION; + + //=== VK_EXT_shader_replicated_composites === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderReplicatedCompositesExtensionName = VK_EXT_SHADER_REPLICATED_COMPOSITES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderReplicatedCompositesSpecVersion = VK_EXT_SHADER_REPLICATED_COMPOSITES_SPEC_VERSION; + + //=== VK_NV_ray_tracing_validation === + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingValidationExtensionName = VK_NV_RAY_TRACING_VALIDATION_EXTENSION_NAME; + 
VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingValidationSpecVersion = VK_NV_RAY_TRACING_VALIDATION_SPEC_VERSION; + + //=== VK_NV_cluster_acceleration_structure === + VULKAN_HPP_CONSTEXPR_INLINE auto NVClusterAccelerationStructureExtensionName = VK_NV_CLUSTER_ACCELERATION_STRUCTURE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVClusterAccelerationStructureSpecVersion = VK_NV_CLUSTER_ACCELERATION_STRUCTURE_SPEC_VERSION; + + //=== VK_NV_partitioned_acceleration_structure === + VULKAN_HPP_CONSTEXPR_INLINE auto NVPartitionedAccelerationStructureExtensionName = VK_NV_PARTITIONED_ACCELERATION_STRUCTURE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVPartitionedAccelerationStructureSpecVersion = VK_NV_PARTITIONED_ACCELERATION_STRUCTURE_SPEC_VERSION; + + //=== VK_EXT_device_generated_commands === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceGeneratedCommandsExtensionName = VK_EXT_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceGeneratedCommandsSpecVersion = VK_EXT_DEVICE_GENERATED_COMMANDS_SPEC_VERSION; + + //=== VK_KHR_maintenance8 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance8ExtensionName = VK_KHR_MAINTENANCE_8_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance8SpecVersion = VK_KHR_MAINTENANCE_8_SPEC_VERSION; + + //=== VK_MESA_image_alignment_control === + VULKAN_HPP_CONSTEXPR_INLINE auto MESAImageAlignmentControlExtensionName = VK_MESA_IMAGE_ALIGNMENT_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto MESAImageAlignmentControlSpecVersion = VK_MESA_IMAGE_ALIGNMENT_CONTROL_SPEC_VERSION; + + //=== VK_EXT_depth_clamp_control === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClampControlExtensionName = VK_EXT_DEPTH_CLAMP_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClampControlSpecVersion = VK_EXT_DEPTH_CLAMP_CONTROL_SPEC_VERSION; + + //=== VK_KHR_video_maintenance2 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoMaintenance2ExtensionName = VK_KHR_VIDEO_MAINTENANCE_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoMaintenance2SpecVersion = VK_KHR_VIDEO_MAINTENANCE_2_SPEC_VERSION; + + //=== VK_HUAWEI_hdr_vivid === + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEIHdrVividExtensionName = VK_HUAWEI_HDR_VIVID_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEIHdrVividSpecVersion = VK_HUAWEI_HDR_VIVID_SPEC_VERSION; + + //=== VK_NV_cooperative_matrix2 === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCooperativeMatrix2ExtensionName = VK_NV_COOPERATIVE_MATRIX_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVCooperativeMatrix2SpecVersion = VK_NV_COOPERATIVE_MATRIX_2_SPEC_VERSION; + + //=== VK_ARM_pipeline_opacity_micromap === + VULKAN_HPP_CONSTEXPR_INLINE auto ARMPipelineOpacityMicromapExtensionName = VK_ARM_PIPELINE_OPACITY_MICROMAP_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto ARMPipelineOpacityMicromapSpecVersion = VK_ARM_PIPELINE_OPACITY_MICROMAP_SPEC_VERSION; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExternalMemoryMetalExtensionName = VK_EXT_EXTERNAL_MEMORY_METAL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExternalMemoryMetalSpecVersion = VK_EXT_EXTERNAL_MEMORY_METAL_SPEC_VERSION; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_depth_clamp_zero_one === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDepthClampZeroOneExtensionName = VK_KHR_DEPTH_CLAMP_ZERO_ONE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDepthClampZeroOneSpecVersion = VK_KHR_DEPTH_CLAMP_ZERO_ONE_SPEC_VERSION; + + //=== 
VK_EXT_vertex_attribute_robustness === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTVertexAttributeRobustnessExtensionName = VK_EXT_VERTEX_ATTRIBUTE_ROBUSTNESS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTVertexAttributeRobustnessSpecVersion = VK_EXT_VERTEX_ATTRIBUTE_ROBUSTNESS_SPEC_VERSION; + +} // namespace VULKAN_HPP_NAMESPACE + +// clang-format off +#include +#include +#include + +// clang-format on + +namespace VULKAN_HPP_NAMESPACE +{ +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + + //======================= + //=== STRUCTS EXTENDS === + //======================= + + //=== VK_VERSION_1_0 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_VERSION_1_1 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; 
+ }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_VERSION_1_2 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + 
enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_VERSION_1_3 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + 
struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_VERSION_1_4 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true 
+ }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true 
+ }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_KHR_swapchain === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_KHR_display_swapchain === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_EXT_debug_report === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_AMD_rasterization_order === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_KHR_video_queue === + template <> + struct StructExtends + { + enum { - return ::vkGetDescriptorSetLayoutHostMappingInfoVALVE( device, pBindingReference, pHostMapping ); - } + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; - void vkGetDescriptorSetHostMappingVALVE( VkDevice device, VkDescriptorSet descriptorSet, void ** ppData ) const VULKAN_HPP_NOEXCEPT + template <> + struct StructExtends + { + enum { - return ::vkGetDescriptorSetHostMappingVALVE( device, descriptorSet, ppData ); - } + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_KHR_video_decode_queue === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_NV_dedicated_allocation === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_EXT_transform_feedback === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_NVX_binary_import === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_KHR_video_encode_h264 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct 
StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; - //=== VK_EXT_shader_module_identifier === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; - void vkGetShaderModuleIdentifierEXT( VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT + template <> + struct StructExtends + { + enum { - return ::vkGetShaderModuleIdentifierEXT( device, shaderModule, pIdentifier ); - } + value = true + }; + }; - void vkGetShaderModuleCreateInfoIdentifierEXT( VkDevice device, - const VkShaderModuleCreateInfo * pCreateInfo, - VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT + template <> + struct StructExtends + { + enum { - return ::vkGetShaderModuleCreateInfoIdentifierEXT( device, pCreateInfo, pIdentifier ); - } - - //=== VK_QCOM_tile_properties === + value = true + }; + }; - VkResult vkGetFramebufferTilePropertiesQCOM( VkDevice device, - VkFramebuffer framebuffer, - uint32_t * pPropertiesCount, - VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT + template <> + struct StructExtends + { + enum { - return ::vkGetFramebufferTilePropertiesQCOM( device, framebuffer, pPropertiesCount, pProperties ); - } + value = true + }; + }; - VkResult vkGetDynamicRenderingTilePropertiesQCOM( VkDevice device, - const VkRenderingInfo * pRenderingInfo, - VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT + template <> + struct StructExtends + { + enum { - return ::vkGetDynamicRenderingTilePropertiesQCOM( device, pRenderingInfo, pProperties ); - } + value = true + }; }; -#endif - - class DispatchLoaderDynamic; -#if !defined( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC ) -# if defined( VK_NO_PROTOTYPES ) -# define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1 -# else -# define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 0 -# endif -#endif - -#if !defined( VULKAN_HPP_STORAGE_API ) -# if defined( VULKAN_HPP_STORAGE_SHARED ) -# if defined( _MSC_VER ) -# if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT ) -# define VULKAN_HPP_STORAGE_API __declspec( dllexport ) -# else -# define VULKAN_HPP_STORAGE_API __declspec( dllimport ) -# endif -# elif defined( __clang__ ) || defined( __GNUC__ ) -# if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT ) -# define VULKAN_HPP_STORAGE_API __attribute__( ( visibility( "default" ) ) ) -# else -# define VULKAN_HPP_STORAGE_API -# endif -# else -# define VULKAN_HPP_STORAGE_API -# pragma warning Unknown import / export semantics -# endif -# else -# define VULKAN_HPP_STORAGE_API -# endif -#endif -#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER ) -# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 -# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::defaultDispatchLoaderDynamic -# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE \ - namespace VULKAN_HPP_NAMESPACE \ - { \ - VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic; \ - } - extern VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic; -# else - static inline ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic & getDispatchLoaderStatic() + //=== VK_KHR_video_encode_h265 === + template <> + struct StructExtends { - static ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic dls; - return dls; - } -# define 
VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::getDispatchLoaderStatic() -# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE -# endif -#endif - -#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ) -# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 -# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic -# else -# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic -# endif -#endif - -#if defined( VULKAN_HPP_NO_DEFAULT_DISPATCHER ) -# define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT -# define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT -# define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT -#else -# define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT = {} -# define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT = nullptr -# define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT = VULKAN_HPP_DEFAULT_DISPATCHER -#endif - -#if !defined( VULKAN_HPP_NO_SMART_HANDLE ) - struct AllocationCallbacks; + enum + { + value = true + }; + }; - template - class ObjectDestroy + template <> + struct StructExtends { - public: - ObjectDestroy() = default; - - ObjectDestroy( OwnerType owner, - Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT - : m_owner( owner ) - , m_allocationCallbacks( allocationCallbacks ) - , m_dispatch( &dispatch ) + enum { - } + value = true + }; + }; - OwnerType getOwner() const VULKAN_HPP_NOEXCEPT - { - return m_owner; - } - Optional getAllocator() const VULKAN_HPP_NOEXCEPT + template <> + struct StructExtends + { + enum { - return m_allocationCallbacks; - } + value = true + }; + }; - protected: - template - void destroy( T t ) VULKAN_HPP_NOEXCEPT + template <> + struct StructExtends + { + enum { - VULKAN_HPP_ASSERT( m_owner && m_dispatch ); - m_owner.destroy( t, m_allocationCallbacks, *m_dispatch ); - } - - private: - OwnerType m_owner = {}; - Optional m_allocationCallbacks = nullptr; - Dispatch const * m_dispatch = nullptr; + value = true + }; }; - class NoParent; - - template - class ObjectDestroy + template <> + struct StructExtends { - public: - ObjectDestroy() = default; - - ObjectDestroy( Optional allocationCallbacks, - Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT - : m_allocationCallbacks( allocationCallbacks ) - , m_dispatch( &dispatch ) + enum { - } + value = true + }; + }; - Optional getAllocator() const VULKAN_HPP_NOEXCEPT + template <> + struct StructExtends + { + enum { - return m_allocationCallbacks; - } + value = true + }; + }; - protected: - template - void destroy( T t ) VULKAN_HPP_NOEXCEPT + template <> + struct StructExtends + { + enum { - VULKAN_HPP_ASSERT( m_dispatch ); - t.destroy( m_allocationCallbacks, *m_dispatch ); - } - - private: - Optional m_allocationCallbacks = nullptr; - Dispatch const * m_dispatch = nullptr; + value = true + }; }; - template - class ObjectFree + template <> + struct StructExtends { - public: - ObjectFree() = default; - - ObjectFree( OwnerType owner, - Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT - : m_owner( owner ) - , m_allocationCallbacks( allocationCallbacks ) - , m_dispatch( &dispatch ) + enum { - } + value = true + }; + }; - OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + template <> + struct StructExtends + { + enum { - return m_owner; - } + value = true + }; + }; - Optional 
getAllocator() const VULKAN_HPP_NOEXCEPT + template <> + struct StructExtends + { + enum { - return m_allocationCallbacks; - } + value = true + }; + }; - protected: - template - void destroy( T t ) VULKAN_HPP_NOEXCEPT + template <> + struct StructExtends + { + enum { - VULKAN_HPP_ASSERT( m_owner && m_dispatch ); - ( m_owner.free )( t, m_allocationCallbacks, *m_dispatch ); - } - - private: - OwnerType m_owner = {}; - Optional m_allocationCallbacks = nullptr; - Dispatch const * m_dispatch = nullptr; + value = true + }; }; - template - class ObjectRelease + template <> + struct StructExtends { - public: - ObjectRelease() = default; - - ObjectRelease( OwnerType owner, Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT - : m_owner( owner ) - , m_dispatch( &dispatch ) + enum { - } + value = true + }; + }; - OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + template <> + struct StructExtends + { + enum { - return m_owner; - } + value = true + }; + }; - protected: - template - void destroy( T t ) VULKAN_HPP_NOEXCEPT + template <> + struct StructExtends + { + enum { - VULKAN_HPP_ASSERT( m_owner && m_dispatch ); - m_owner.release( t, *m_dispatch ); - } - - private: - OwnerType m_owner = {}; - Dispatch const * m_dispatch = nullptr; + value = true + }; }; - template - class PoolFree + template <> + struct StructExtends { - public: - PoolFree() = default; - - PoolFree( OwnerType owner, PoolType pool, Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT - : m_owner( owner ) - , m_pool( pool ) - , m_dispatch( &dispatch ) + enum { - } + value = true + }; + }; - OwnerType getOwner() const VULKAN_HPP_NOEXCEPT - { - return m_owner; - } - PoolType getPool() const VULKAN_HPP_NOEXCEPT + //=== VK_KHR_video_decode_h264 === + template <> + struct StructExtends + { + enum { - return m_pool; - } + value = true + }; + }; - protected: - template - void destroy( T t ) VULKAN_HPP_NOEXCEPT + template <> + struct StructExtends + { + enum { - ( m_owner.free )( m_pool, t, *m_dispatch ); - } - - private: - OwnerType m_owner = OwnerType(); - PoolType m_pool = PoolType(); - Dispatch const * m_dispatch = nullptr; + value = true + }; }; -#endif // !VULKAN_HPP_NO_SMART_HANDLE - - //================== - //=== BASE TYPEs === - //================== - - using Bool32 = uint32_t; - using DeviceAddress = uint64_t; - using DeviceSize = uint64_t; - using RemoteAddressNV = void *; - using SampleMask = uint32_t; - -} // namespace VULKAN_HPP_NAMESPACE - -#include -#if !defined( VULKAN_HPP_NO_TO_STRING ) -# include -#endif - -#ifndef VULKAN_HPP_NO_EXCEPTIONS -namespace std -{ template <> - struct is_error_code_enum : public true_type + struct StructExtends { + enum + { + value = true + }; }; -} // namespace std -#endif -namespace VULKAN_HPP_NAMESPACE -{ -#ifndef VULKAN_HPP_NO_EXCEPTIONS - class ErrorCategoryImpl : public std::error_category + template <> + struct StructExtends { - public: - virtual const char * name() const VULKAN_HPP_NOEXCEPT override - { - return VULKAN_HPP_NAMESPACE_STRING "::Result"; - } - virtual std::string message( int ev ) const override + enum { -# if defined( VULKAN_HPP_NO_TO_STRING ) - return std::to_string( ev ); -# else - return VULKAN_HPP_NAMESPACE::to_string( static_cast( ev ) ); -# endif - } + value = true + }; }; - class Error + template <> + struct StructExtends { - public: - Error() VULKAN_HPP_NOEXCEPT = default; - Error( const Error & ) VULKAN_HPP_NOEXCEPT = default; - virtual ~Error() VULKAN_HPP_NOEXCEPT = default; + enum + { + value 
= true + }; + }; - virtual const char * what() const VULKAN_HPP_NOEXCEPT = 0; + template <> + struct StructExtends + { + enum + { + value = true + }; }; - class LogicError - : public Error - , public std::logic_error + template <> + struct StructExtends { - public: - explicit LogicError( const std::string & what ) : Error(), std::logic_error( what ) {} - explicit LogicError( char const * what ) : Error(), std::logic_error( what ) {} - - virtual const char * what() const VULKAN_HPP_NOEXCEPT + enum { - return std::logic_error::what(); - } + value = true + }; }; - class SystemError - : public Error - , public std::system_error + //=== VK_AMD_texture_gather_bias_lod === + template <> + struct StructExtends { - public: - SystemError( std::error_code ec ) : Error(), std::system_error( ec ) {} - SystemError( std::error_code ec, std::string const & what ) : Error(), std::system_error( ec, what ) {} - SystemError( std::error_code ec, char const * what ) : Error(), std::system_error( ec, what ) {} - SystemError( int ev, std::error_category const & ecat ) : Error(), std::system_error( ev, ecat ) {} - SystemError( int ev, std::error_category const & ecat, std::string const & what ) : Error(), std::system_error( ev, ecat, what ) {} - SystemError( int ev, std::error_category const & ecat, char const * what ) : Error(), std::system_error( ev, ecat, what ) {} - - virtual const char * what() const VULKAN_HPP_NOEXCEPT + enum { - return std::system_error::what(); - } + value = true + }; }; - VULKAN_HPP_INLINE const std::error_category & errorCategory() VULKAN_HPP_NOEXCEPT + //=== VK_NV_corner_sampled_image === + template <> + struct StructExtends { - static ErrorCategoryImpl instance; - return instance; - } + enum + { + value = true + }; + }; - VULKAN_HPP_INLINE std::error_code make_error_code( Result e ) VULKAN_HPP_NOEXCEPT + template <> + struct StructExtends { - return std::error_code( static_cast( e ), errorCategory() ); - } + enum + { + value = true + }; + }; - VULKAN_HPP_INLINE std::error_condition make_error_condition( Result e ) VULKAN_HPP_NOEXCEPT + //=== VK_NV_external_memory === + template <> + struct StructExtends { - return std::error_condition( static_cast( e ), errorCategory() ); - } + enum + { + value = true + }; + }; - class OutOfHostMemoryError : public SystemError + template <> + struct StructExtends { - public: - OutOfHostMemoryError( std::string const & message ) : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {} - OutOfHostMemoryError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {} + enum + { + value = true + }; }; - class OutOfDeviceMemoryError : public SystemError +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + template <> + struct StructExtends { - public: - OutOfDeviceMemoryError( std::string const & message ) : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {} - OutOfDeviceMemoryError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {} + enum + { + value = true + }; }; - class InitializationFailedError : public SystemError + template <> + struct StructExtends { - public: - InitializationFailedError( std::string const & message ) : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {} - InitializationFailedError( char const * message ) : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {} + enum + { + value = true + }; }; +# endif 
/*VK_USE_PLATFORM_WIN32_KHR*/ - class DeviceLostError : public SystemError +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_win32_keyed_mutex === + template <> + struct StructExtends { - public: - DeviceLostError( std::string const & message ) : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {} - DeviceLostError( char const * message ) : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {} + enum + { + value = true + }; }; - class MemoryMapFailedError : public SystemError + template <> + struct StructExtends { - public: - MemoryMapFailedError( std::string const & message ) : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {} - MemoryMapFailedError( char const * message ) : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {} + enum + { + value = true + }; }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - class LayerNotPresentError : public SystemError + //=== VK_EXT_validation_flags === + template <> + struct StructExtends { - public: - LayerNotPresentError( std::string const & message ) : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {} - LayerNotPresentError( char const * message ) : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {} + enum + { + value = true + }; }; - class ExtensionNotPresentError : public SystemError + //=== VK_EXT_astc_decode_mode === + template <> + struct StructExtends { - public: - ExtensionNotPresentError( std::string const & message ) : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {} - ExtensionNotPresentError( char const * message ) : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {} + enum + { + value = true + }; }; - class FeatureNotPresentError : public SystemError + template <> + struct StructExtends { - public: - FeatureNotPresentError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {} - FeatureNotPresentError( char const * message ) : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {} + enum + { + value = true + }; }; - class IncompatibleDriverError : public SystemError + template <> + struct StructExtends { - public: - IncompatibleDriverError( std::string const & message ) : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {} - IncompatibleDriverError( char const * message ) : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {} + enum + { + value = true + }; }; - class TooManyObjectsError : public SystemError +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + template <> + struct StructExtends { - public: - TooManyObjectsError( std::string const & message ) : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {} - TooManyObjectsError( char const * message ) : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {} + enum + { + value = true + }; }; - class FormatNotSupportedError : public SystemError + template <> + struct StructExtends { - public: - FormatNotSupportedError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {} - FormatNotSupportedError( char const * message ) : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {} + enum + { + value = true + }; }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - class FragmentedPoolError : public SystemError + //=== 
VK_KHR_external_memory_fd === + template <> + struct StructExtends { - public: - FragmentedPoolError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {} - FragmentedPoolError( char const * message ) : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {} + enum + { + value = true + }; }; - class UnknownError : public SystemError +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_keyed_mutex === + template <> + struct StructExtends { - public: - UnknownError( std::string const & message ) : SystemError( make_error_code( Result::eErrorUnknown ), message ) {} - UnknownError( char const * message ) : SystemError( make_error_code( Result::eErrorUnknown ), message ) {} + enum + { + value = true + }; }; - class OutOfPoolMemoryError : public SystemError + template <> + struct StructExtends { - public: - OutOfPoolMemoryError( std::string const & message ) : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {} - OutOfPoolMemoryError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {} + enum + { + value = true + }; }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - class InvalidExternalHandleError : public SystemError +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + template <> + struct StructExtends { - public: - InvalidExternalHandleError( std::string const & message ) : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {} - InvalidExternalHandleError( char const * message ) : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {} + enum + { + value = true + }; }; - class FragmentationError : public SystemError + template <> + struct StructExtends { - public: - FragmentationError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {} - FragmentationError( char const * message ) : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {} + enum + { + value = true + }; }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - class InvalidOpaqueCaptureAddressError : public SystemError + //=== VK_EXT_conditional_rendering === + template <> + struct StructExtends { - public: - InvalidOpaqueCaptureAddressError( std::string const & message ) : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {} - InvalidOpaqueCaptureAddressError( char const * message ) : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {} + enum + { + value = true + }; }; - class SurfaceLostKHRError : public SystemError + template <> + struct StructExtends { - public: - SurfaceLostKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {} - SurfaceLostKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {} + enum + { + value = true + }; }; - class NativeWindowInUseKHRError : public SystemError + template <> + struct StructExtends { - public: - NativeWindowInUseKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {} - NativeWindowInUseKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {} + enum + { + value = true + }; }; - class OutOfDateKHRError : public SystemError + //=== VK_KHR_incremental_present === + template <> + struct 
StructExtends { - public: - OutOfDateKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} - OutOfDateKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} + enum + { + value = true + }; }; - class IncompatibleDisplayKHRError : public SystemError + //=== VK_NV_clip_space_w_scaling === + template <> + struct StructExtends { - public: - IncompatibleDisplayKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {} - IncompatibleDisplayKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {} + enum + { + value = true + }; }; - class ValidationFailedEXTError : public SystemError + //=== VK_EXT_display_control === + template <> + struct StructExtends { - public: - ValidationFailedEXTError( std::string const & message ) : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {} - ValidationFailedEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {} + enum + { + value = true + }; }; - class InvalidShaderNVError : public SystemError + //=== VK_GOOGLE_display_timing === + template <> + struct StructExtends { - public: - InvalidShaderNVError( std::string const & message ) : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {} - InvalidShaderNVError( char const * message ) : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {} + enum + { + value = true + }; }; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - class ImageUsageNotSupportedKHRError : public SystemError + //=== VK_NVX_multiview_per_view_attributes === + template <> + struct StructExtends { - public: - ImageUsageNotSupportedKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorImageUsageNotSupportedKHR ), message ) {} - ImageUsageNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorImageUsageNotSupportedKHR ), message ) {} + enum + { + value = true + }; }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - class VideoPictureLayoutNotSupportedKHRError : public SystemError + template <> + struct StructExtends { - public: - VideoPictureLayoutNotSupportedKHRError( std::string const & message ) - : SystemError( make_error_code( Result::eErrorVideoPictureLayoutNotSupportedKHR ), message ) - { - } - VideoPictureLayoutNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorVideoPictureLayoutNotSupportedKHR ), message ) + enum { - } + value = true + }; }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - class VideoProfileOperationNotSupportedKHRError : public SystemError + template <> + struct StructExtends { - public: - VideoProfileOperationNotSupportedKHRError( std::string const & message ) - : SystemError( make_error_code( Result::eErrorVideoProfileOperationNotSupportedKHR ), message ) - { - } - VideoProfileOperationNotSupportedKHRError( char const * message ) - : SystemError( make_error_code( Result::eErrorVideoProfileOperationNotSupportedKHR ), message ) + enum { - } + value = true + }; }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - class VideoProfileFormatNotSupportedKHRError : public SystemError + template <> + struct StructExtends { - public: - 
VideoProfileFormatNotSupportedKHRError( std::string const & message ) - : SystemError( make_error_code( Result::eErrorVideoProfileFormatNotSupportedKHR ), message ) - { - } - VideoProfileFormatNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorVideoProfileFormatNotSupportedKHR ), message ) + enum { - } + value = true + }; }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - class VideoProfileCodecNotSupportedKHRError : public SystemError + //=== VK_NV_viewport_swizzle === + template <> + struct StructExtends { - public: - VideoProfileCodecNotSupportedKHRError( std::string const & message ) - : SystemError( make_error_code( Result::eErrorVideoProfileCodecNotSupportedKHR ), message ) + enum { - } - VideoProfileCodecNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorVideoProfileCodecNotSupportedKHR ), message ) {} + value = true + }; }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - class VideoStdVersionNotSupportedKHRError : public SystemError + //=== VK_EXT_discard_rectangles === + template <> + struct StructExtends { - public: - VideoStdVersionNotSupportedKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorVideoStdVersionNotSupportedKHR ), message ) + enum { - } - VideoStdVersionNotSupportedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorVideoStdVersionNotSupportedKHR ), message ) {} + value = true + }; }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - class InvalidDrmFormatModifierPlaneLayoutEXTError : public SystemError + template <> + struct StructExtends { - public: - InvalidDrmFormatModifierPlaneLayoutEXTError( std::string const & message ) - : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) - { - } - InvalidDrmFormatModifierPlaneLayoutEXTError( char const * message ) - : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) + enum { - } + value = true + }; }; - class NotPermittedKHRError : public SystemError + //=== VK_EXT_conservative_rasterization === + template <> + struct StructExtends { - public: - NotPermittedKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorNotPermittedKHR ), message ) {} - NotPermittedKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorNotPermittedKHR ), message ) {} + enum + { + value = true + }; }; -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - class FullScreenExclusiveModeLostEXTError : public SystemError + template <> + struct StructExtends { - public: - FullScreenExclusiveModeLostEXTError( std::string const & message ) : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) + enum { - } - FullScreenExclusiveModeLostEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {} + value = true + }; }; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - class CompressionExhaustedEXTError : public SystemError + //=== VK_EXT_depth_clip_enable === + template <> + struct StructExtends { - public: - CompressionExhaustedEXTError( std::string const & message ) : SystemError( make_error_code( Result::eErrorCompressionExhaustedEXT ), message ) {} - CompressionExhaustedEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorCompressionExhaustedEXT ), message ) {} + enum + { + value = true + }; }; - 
namespace + template <> + struct StructExtends { - [[noreturn]] void throwResultException( Result result, char const * message ) + enum { - switch ( result ) - { - case Result::eErrorOutOfHostMemory: throw OutOfHostMemoryError( message ); - case Result::eErrorOutOfDeviceMemory: throw OutOfDeviceMemoryError( message ); - case Result::eErrorInitializationFailed: throw InitializationFailedError( message ); - case Result::eErrorDeviceLost: throw DeviceLostError( message ); - case Result::eErrorMemoryMapFailed: throw MemoryMapFailedError( message ); - case Result::eErrorLayerNotPresent: throw LayerNotPresentError( message ); - case Result::eErrorExtensionNotPresent: throw ExtensionNotPresentError( message ); - case Result::eErrorFeatureNotPresent: throw FeatureNotPresentError( message ); - case Result::eErrorIncompatibleDriver: throw IncompatibleDriverError( message ); - case Result::eErrorTooManyObjects: throw TooManyObjectsError( message ); - case Result::eErrorFormatNotSupported: throw FormatNotSupportedError( message ); - case Result::eErrorFragmentedPool: throw FragmentedPoolError( message ); - case Result::eErrorUnknown: throw UnknownError( message ); - case Result::eErrorOutOfPoolMemory: throw OutOfPoolMemoryError( message ); - case Result::eErrorInvalidExternalHandle: throw InvalidExternalHandleError( message ); - case Result::eErrorFragmentation: throw FragmentationError( message ); - case Result::eErrorInvalidOpaqueCaptureAddress: throw InvalidOpaqueCaptureAddressError( message ); - case Result::eErrorSurfaceLostKHR: throw SurfaceLostKHRError( message ); - case Result::eErrorNativeWindowInUseKHR: throw NativeWindowInUseKHRError( message ); - case Result::eErrorOutOfDateKHR: throw OutOfDateKHRError( message ); - case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError( message ); - case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError( message ); - case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError( message ); -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - case Result::eErrorImageUsageNotSupportedKHR: throw ImageUsageNotSupportedKHRError( message ); -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - case Result::eErrorVideoPictureLayoutNotSupportedKHR: throw VideoPictureLayoutNotSupportedKHRError( message ); -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - case Result::eErrorVideoProfileOperationNotSupportedKHR: throw VideoProfileOperationNotSupportedKHRError( message ); -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - case Result::eErrorVideoProfileFormatNotSupportedKHR: throw VideoProfileFormatNotSupportedKHRError( message ); -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - case Result::eErrorVideoProfileCodecNotSupportedKHR: throw VideoProfileCodecNotSupportedKHRError( message ); -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - case Result::eErrorVideoStdVersionNotSupportedKHR: throw VideoStdVersionNotSupportedKHRError( message ); -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: throw InvalidDrmFormatModifierPlaneLayoutEXTError( message ); - case Result::eErrorNotPermittedKHR: throw NotPermittedKHRError( message ); -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - case Result::eErrorFullScreenExclusiveModeLostEXT: throw FullScreenExclusiveModeLostEXTError( message ); -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - case 
Result::eErrorCompressionExhaustedEXT: throw CompressionExhaustedEXTError( message ); - default: throw SystemError( make_error_code( result ) ); - } - } - } // namespace -#endif - - template - void ignore( T const & ) VULKAN_HPP_NOEXCEPT - { - } + value = true + }; + }; - template - struct ResultValue + template <> + struct StructExtends { -#ifdef VULKAN_HPP_HAS_NOEXCEPT - ResultValue( Result r, T & v ) VULKAN_HPP_NOEXCEPT( VULKAN_HPP_NOEXCEPT( T( v ) ) ) -#else - ResultValue( Result r, T & v ) -#endif - : result( r ), value( v ) + enum { - } + value = true + }; + }; -#ifdef VULKAN_HPP_HAS_NOEXCEPT - ResultValue( Result r, T && v ) VULKAN_HPP_NOEXCEPT( VULKAN_HPP_NOEXCEPT( T( std::move( v ) ) ) ) -#else - ResultValue( Result r, T && v ) -#endif - : result( r ), value( std::move( v ) ) + //=== VK_IMG_relaxed_line_rasterization === + template <> + struct StructExtends + { + enum { - } - - Result result; - T value; + value = true + }; + }; - operator std::tuple() VULKAN_HPP_NOEXCEPT + template <> + struct StructExtends + { + enum { - return std::tuple( result, value ); - } + value = true + }; }; -#if !defined( VULKAN_HPP_NO_SMART_HANDLE ) - template - struct ResultValue> + //=== VK_KHR_shared_presentable_image === + template <> + struct StructExtends { -# ifdef VULKAN_HPP_HAS_NOEXCEPT - ResultValue( Result r, UniqueHandle && v ) VULKAN_HPP_NOEXCEPT -# else - ResultValue( Result r, UniqueHandle && v ) -# endif - : result( r ) - , value( std::move( v ) ) + enum { - } + value = true + }; + }; - std::tuple> asTuple() +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + template <> + struct StructExtends + { + enum { - return std::make_tuple( result, std::move( value ) ); - } - - Result result; - UniqueHandle value; + value = true + }; }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template - struct ResultValue>> + //=== VK_KHR_performance_query === + template <> + struct StructExtends { -# ifdef VULKAN_HPP_HAS_NOEXCEPT - ResultValue( Result r, std::vector> && v ) VULKAN_HPP_NOEXCEPT -# else - ResultValue( Result r, std::vector> && v ) -# endif - : result( r ) - , value( std::move( v ) ) + enum { - } + value = true + }; + }; - std::tuple>> asTuple() + template <> + struct StructExtends + { + enum { - return std::make_tuple( result, std::move( value ) ); - } - - Result result; - std::vector> value; + value = true + }; }; -#endif - template - struct ResultValueType + template <> + struct StructExtends { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - typedef ResultValue type; -#else - typedef T type; -#endif + enum + { + value = true + }; }; template <> - struct ResultValueType + struct StructExtends { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - typedef Result type; -#else - typedef void type; -#endif + enum + { + value = true + }; }; - VULKAN_HPP_INLINE typename ResultValueType::type createResultValueType( Result result ) + template <> + struct StructExtends { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - return result; -#else - ignore( result ); -#endif - } + enum + { + value = true + }; + }; - template - VULKAN_HPP_INLINE typename ResultValueType::type createResultValueType( Result result, T & data ) + template <> + struct StructExtends { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - return ResultValue( result, data ); -#else - ignore( result ); - return data; -#endif - } + enum + { + value = true + }; + }; - template - VULKAN_HPP_INLINE typename ResultValueType::type createResultValueType( Result result, T && data ) + //=== VK_EXT_debug_utils === + template <> + struct StructExtends { -#ifdef 
VULKAN_HPP_NO_EXCEPTIONS - return ResultValue( result, std::move( data ) ); -#else - ignore( result ); - return std::move( data ); -#endif - } + enum + { + value = true + }; + }; - VULKAN_HPP_INLINE void resultCheck( Result result, char const * message ) + template <> + struct StructExtends { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - ignore( result ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty - ignore( message ); - VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess ); -#else - if ( result != Result::eSuccess ) + enum { - throwResultException( result, message ); - } -#endif - } + value = true + }; + }; - VULKAN_HPP_INLINE void resultCheck( Result result, char const * message, std::initializer_list successCodes ) +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + template <> + struct StructExtends { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - ignore( result ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty - ignore( message ); - ignore( successCodes ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty - VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); -#else - if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) + enum { - throwResultException( result, message ); - } -#endif - } -} // namespace VULKAN_HPP_NAMESPACE + value = true + }; + }; -// clang-format off -#include -#include -#include -// clang-format on + template <> + struct StructExtends + { + enum + { + value = true + }; + }; -namespace VULKAN_HPP_NAMESPACE -{ -#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; - //======================= - //=== STRUCTS EXTENDS === - //======================= + template <> + struct StructExtends + { + enum + { + value = true + }; + }; - //=== VK_VERSION_1_0 === template <> - struct StructExtends + struct StructExtends { enum { @@ -6230,377 +12092,441 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_VERSION_1_1 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_AMD_mixed_attachment_samples === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_sample_locations === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; 
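For context, the error machinery touched by the hunk above (make_error_code, throwResultException, resultCheck, ResultValue) is what turns a failing VkResult into a typed exception in Vulkan-Hpp's enhanced mode. The following is a minimal sketch, not part of this patch, of how that machinery is typically exercised from application code, assuming the default exception-enabled configuration; the specific call (vk::createInstance) is only an illustrative choice.

#include <vulkan/vulkan.hpp>
#include <iostream>

int runInstanceDemo()
{
    try
    {
        vk::ApplicationInfo    appInfo( "demo", 1, nullptr, 0, VK_API_VERSION_1_3 );
        vk::InstanceCreateInfo createInfo( {}, &appInfo );
        // A failing VkResult inside createInstance() is routed through resultCheck()
        // and throwResultException(), producing e.g. vk::IncompatibleDriverError,
        // all of which derive from vk::SystemError.
        vk::Instance instance = vk::createInstance( createInfo );
        instance.destroy();
        return 0;
    }
    catch ( vk::SystemError const & err )
    {
        // err.code() carries the original vk::Result wrapped in a std::error_code.
        std::cerr << "Vulkan call failed: " << err.what() << '\n';
        return 1;
    }
}

With VULKAN_HPP_NO_EXCEPTIONS defined, the same enhanced-mode call instead returns a vk::ResultValue<vk::Instance>, and the caller checks .result against vk::Result::eSuccess before using .value.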
+ template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_blend_operation_advanced === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_fragment_coverage_to_color === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_KHR_acceleration_structure === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_KHR_ray_tracing_pipeline === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_KHR_ray_query === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_framebuffer_mixed_samples === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_shader_sm_builtins === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_image_drm_format_modifier === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_validation_cache === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_portability_subset === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_shading_rate_image === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -6608,465 +12534,548 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_VERSION_1_2 === template <> - struct StructExtends + struct StructExtends { 
enum { value = true }; }; + + //=== VK_NV_ray_tracing === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_representative_fragment_test === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_filter_cubic === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_external_memory_host === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_KHR_shader_clock === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_AMD_pipeline_compiler_control === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; +# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_AMD_shader_core_properties === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_KHR_video_decode_h265 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_AMD_memory_overallocation_behavior === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_vertex_attribute_divisor === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + +# if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_frame_token === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; +# endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_mesh_shader === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_shader_image_footprint === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_scissor_exclusive === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct 
StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_device_diagnostic_checkpoints === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_INTEL_shader_integer_functions2 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_INTEL_performance_query === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_pci_bus_info === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_AMD_display_native_hdr === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_fragment_density_map === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_KHR_fragment_shading_rate === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_AMD_shader_core_properties2 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_AMD_device_coherent_memory === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_shader_image_atomic_int64 === template <> - struct StructExtends + struct StructExtends { enum { @@ -7074,369 +13083,433 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_VERSION_1_3 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_KHR_shader_quad_control === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_memory_budget === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_memory_priority === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_KHR_surface_protected_capabilities === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== 
VK_NV_dedicated_allocation_image_aliasing === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_buffer_device_address === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_validation_features === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_KHR_present_wait === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_cooperative_matrix === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_coverage_reduction_mode === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_fragment_shader_interlock === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_ycbcr_image_arrays === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_provoking_vertex === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_shader_atomic_float === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_extended_dynamic_state === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_KHR_pipeline_executable_properties === template <> - struct StructExtends + 
struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_map_memory_placed === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -7444,33 +13517,37 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_swapchain === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_shader_atomic_float2 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_surface_maintenance1 === template <> - struct StructExtends + struct StructExtends { enum { @@ -7478,9 +13555,8 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_display_swapchain === template <> - struct StructExtends + struct StructExtends { enum { @@ -7488,9 +13564,8 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_debug_report === template <> - struct StructExtends + struct StructExtends { enum { @@ -7498,9 +13573,9 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_AMD_rasterization_order === + //=== VK_EXT_swapchain_maintenance1 === template <> - struct StructExtends + struct StructExtends { enum { @@ -7508,105 +13583,109 @@ namespace VULKAN_HPP_NAMESPACE }; }; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_queue === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_device_generated_commands === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_decode_queue === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - //=== VK_NV_dedicated_allocation === + //=== VK_NV_inherited_viewport_scissor === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -7614,33 +13693,37 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_transform_feedback === + //=== VK_EXT_texel_buffer_alignment === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_QCOM_render_pass_transform === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -7648,245 +13731,268 @@ namespace VULKAN_HPP_NAMESPACE }; }; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== 
VK_EXT_video_encode_h264 === + //=== VK_EXT_depth_bias_control === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_device_memory_report === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_robustness2 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_EXT_video_encode_h265 === + //=== VK_EXT_custom_border_color === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_KHR_pipeline_library === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_present_barrier === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_KHR_present_id === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_EXT_video_decode_h264 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_KHR_video_encode_queue === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - //=== VK_AMD_texture_gather_bias_lod === template <> - struct StructExtends + struct StructExtends { enum { @@ -7894,75 +14000,87 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_dynamic_rendering === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_device_diagnostics_config === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = 
true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - //=== VK_NV_corner_sampled_image === + //=== VK_NV_low_latency === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === template <> - struct StructExtends + struct StructExtends { enum { @@ -7970,17 +14088,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_NV_external_memory === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -7988,49 +14106,44 @@ namespace VULKAN_HPP_NAMESPACE }; }; -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_external_memory_win32 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_win32_keyed_mutex === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_EXT_validation_flags === template <> - struct StructExtends + struct StructExtends { enum { @@ -8038,25 +14151,26 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_astc_decode_mode === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -8064,87 +14178,91 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_pipeline_robustness === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_memory_win32 === + //=== VK_EXT_descriptor_buffer === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_external_memory_fd === template <> - struct StructExtends + struct StructExtends { enum { @@ -8152,49 +14270,44 @@ namespace VULKAN_HPP_NAMESPACE }; }; -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_win32_keyed_mutex === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + 
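The StructExtends specializations that make up this region are compile-time markers declaring which structures may legally appear in another structure's pNext chain; vk::StructureChain static_asserts on the trait. A minimal sketch of the trait in use follows; it is not part of this patch, and the concrete pair chosen (PhysicalDeviceVulkan12Features extending PhysicalDeviceFeatures2) is only an assumed, commonly valid combination rather than one verifiable from this hunk.

#include <vulkan/vulkan.hpp>

bool supportsTimelineSemaphores( vk::PhysicalDevice physicalDevice )
{
    // This StructureChain only compiles because
    // StructExtends<PhysicalDeviceVulkan12Features, PhysicalDeviceFeatures2>::value
    // is specialized to true; an invalid pairing would trigger a static_assert.
    auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                             vk::PhysicalDeviceVulkan12Features>();
    return chain.get<vk::PhysicalDeviceVulkan12Features>().timelineSemaphore == VK_TRUE;
}

Adding or removing one of these specializations therefore changes, at compile time, which extension structures a given StructureChain (or raw pNext chain validated through it) will accept.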
template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_semaphore_win32 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_push_descriptor === template <> - struct StructExtends + struct StructExtends { enum { @@ -8202,25 +14315,26 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_conditional_rendering === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -8228,9 +14342,9 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_incremental_present === + //=== VK_EXT_graphics_pipeline_library === template <> - struct StructExtends + struct StructExtends { enum { @@ -8238,9 +14352,8 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_NV_clip_space_w_scaling === template <> - struct StructExtends + struct StructExtends { enum { @@ -8248,9 +14361,8 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_display_control === template <> - struct StructExtends + struct StructExtends { enum { @@ -8258,9 +14370,8 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_GOOGLE_display_timing === template <> - struct StructExtends + struct StructExtends { enum { @@ -8268,9 +14379,9 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_NVX_multiview_per_view_attributes === + //=== VK_AMD_shader_early_and_late_fragment_tests === template <> - struct StructExtends + struct StructExtends { enum { @@ -8278,9 +14389,8 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_NV_viewport_swizzle === template <> - struct StructExtends + struct StructExtends { enum { @@ -8288,17 +14398,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_discard_rectangles === + //=== VK_KHR_fragment_shader_barycentric === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -8306,17 +14417,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_conservative_rasterization === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_KHR_shader_subgroup_uniform_control_flow === template <> - struct StructExtends + struct StructExtends { enum { @@ -8324,25 +14436,27 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_depth_clip_enable === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_fragment_shading_rate_enums === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -8350,9 +14464,8 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_shared_presentable_image === template <> - struct StructExtends + struct StructExtends { enum { @@ -8360,61 +14473,64 @@ namespace VULKAN_HPP_NAMESPACE }; }; -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_fence_win32 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_performance_query === + //=== VK_NV_ray_tracing_motion_blur === template <> - struct StructExtends + struct 
StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_mesh_shader === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -8422,17 +14538,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_debug_utils === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_ycbcr_2plane_444_formats === template <> - struct StructExtends + struct StructExtends { enum { @@ -8440,93 +14557,102 @@ namespace VULKAN_HPP_NAMESPACE }; }; -# if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_ANDROID_external_memory_android_hardware_buffer === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_fragment_density_map2 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_QCOM_rotated_copy_commands === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; -# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - //=== VK_EXT_sample_locations === + //=== VK_KHR_workgroup_memory_explicit_layout === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_image_compression_control === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -8534,33 +14660,35 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_blend_operation_advanced === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -8568,9 +14696,8 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_NV_fragment_coverage_to_color === template <> - struct StructExtends + struct StructExtends { enum { @@ -8578,33 +14705,37 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_acceleration_structure === + //=== VK_EXT_attachment_feedback_loop_layout === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_4444_formats === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -8612,9 +14743,9 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_NV_framebuffer_mixed_samples === + //=== VK_EXT_device_fault === template <> - struct StructExtends + struct StructExtends { enum { @@ -8622,25 +14753,27 @@ namespace VULKAN_HPP_NAMESPACE }; }; - 
//=== VK_NV_shader_sm_builtins === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_rgba10x6_formats === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -8648,41 +14781,47 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_image_drm_format_modifier === + //=== VK_EXT_vertex_input_dynamic_state === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_physical_device_drm === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_device_address_binding_report === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -8690,17 +14829,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_validation_cache === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_depth_clip_control === template <> - struct StructExtends + struct StructExtends { enum { @@ -8708,69 +14848,78 @@ namespace VULKAN_HPP_NAMESPACE }; }; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_portability_subset === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_primitive_topology_list_restart === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - //=== VK_NV_shading_rate_image === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_present_mode_fifo_latest_ready === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === template <> - struct StructExtends + struct StructExtends { enum { @@ -8778,43 +14927,46 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_NV_ray_tracing === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ - //=== VK_NV_representative_fragment_test === + //=== VK_HUAWEI_subpass_shading === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -8822,17 +14974,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_filter_cubic === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_HUAWEI_invocation_mask === template <> - struct StructExtends + struct StructExtends { enum { @@ -8840,17 +14993,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_external_memory_host === template <> - struct StructExtends + struct StructExtends { enum { 
value = true }; }; + + //=== VK_NV_external_memory_rdma === template <> - struct StructExtends + struct StructExtends { enum { @@ -8858,17 +15012,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_shader_clock === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_pipeline_properties === template <> - struct StructExtends + struct StructExtends { enum { @@ -8876,17 +15031,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_AMD_pipeline_compiler_control === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_frame_boundary === template <> - struct StructExtends + struct StructExtends { enum { @@ -8894,9 +15050,8 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_AMD_shader_core_properties === template <> - struct StructExtends + struct StructExtends { enum { @@ -8904,93 +15059,100 @@ namespace VULKAN_HPP_NAMESPACE }; }; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_EXT_video_decode_h265 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_multisampled_render_to_single_sampled === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - //=== VK_KHR_global_priority === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_extended_dynamic_state2 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -8998,9 +15160,9 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_AMD_memory_overallocation_behavior === + //=== VK_EXT_color_write_enable === template <> - struct StructExtends + struct StructExtends { enum { @@ -9008,33 +15170,36 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_vertex_attribute_divisor === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_primitives_generated_query === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -9042,29 +15207,28 @@ namespace VULKAN_HPP_NAMESPACE }; }; -# if defined( VK_USE_PLATFORM_GGP ) - //=== VK_GGP_frame_token === + //=== VK_KHR_ray_tracing_maintenance1 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; -# endif /*VK_USE_PLATFORM_GGP*/ - //=== VK_NV_compute_shader_derivatives === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_image_view_min_lod === template <> - struct StructExtends + struct StructExtends { enum { @@ -9072,25 +15236,27 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_NV_mesh_shader === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + 
template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_multi_draw === template <> - struct StructExtends + struct StructExtends { enum { @@ -9098,17 +15264,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_NV_shader_image_footprint === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -9116,25 +15282,28 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_NV_scissor_exclusive === + //=== VK_EXT_image_2d_view_of_3d === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_shader_tile_image === template <> - struct StructExtends + struct StructExtends { enum { @@ -9142,9 +15311,8 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_NV_device_diagnostic_checkpoints === template <> - struct StructExtends + struct StructExtends { enum { @@ -9152,17 +15320,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_INTEL_shader_integer_functions2 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_opacity_micromap === template <> - struct StructExtends + struct StructExtends { enum { @@ -9170,9 +15339,8 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_INTEL_performance_query === template <> - struct StructExtends + struct StructExtends { enum { @@ -9180,9 +15348,8 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_pci_bus_info === template <> - struct StructExtends + struct StructExtends { enum { @@ -9190,17 +15357,19 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_AMD_display_native_hdr === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_displacement_micromap === template <> - struct StructExtends + struct StructExtends { enum { @@ -9208,41 +15377,46 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_fragment_density_map === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_HUAWEI_cluster_culling_shader === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -9250,41 +15424,45 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_fragment_shading_rate === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_border_color_swizzle === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -9292,9 +15470,9 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_AMD_shader_core_properties2 === + //=== VK_EXT_pageable_device_local_memory === template <> - struct StructExtends + struct StructExtends { enum { @@ -9302,17 +15480,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_AMD_device_coherent_memory === template <> - struct StructExtends + struct StructExtends { enum { 
value = true }; }; + + //=== VK_ARM_shader_core_properties === template <> - struct StructExtends + struct StructExtends { enum { @@ -9320,17 +15499,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_shader_image_atomic_int64 === + //=== VK_ARM_scheduling_controls === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -9338,9 +15518,8 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_memory_budget === template <> - struct StructExtends + struct StructExtends { enum { @@ -9348,25 +15527,27 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_memory_priority === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_image_sliced_view_of_3d === template <> - struct StructExtends + struct StructExtends { enum { @@ -9374,9 +15555,8 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_surface_protected_capabilities === template <> - struct StructExtends + struct StructExtends { enum { @@ -9384,17 +15564,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_NV_dedicated_allocation_image_aliasing === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_VALVE_descriptor_set_host_mapping === template <> - struct StructExtends + struct StructExtends { enum { @@ -9402,25 +15583,27 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_buffer_device_address === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_non_seamless_cube_map === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -9428,9 +15611,9 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_validation_features === + //=== VK_ARM_render_pass_striped === template <> - struct StructExtends + struct StructExtends { enum { @@ -9438,17 +15621,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_present_wait === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -9456,25 +15639,26 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_NV_cooperative_matrix === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -9482,25 +15666,27 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_NV_coverage_reduction_mode === + //=== VK_QCOM_fragment_density_map_offset === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -9508,17 +15694,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_fragment_shader_interlock === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_copy_memory_indirect === template <> - struct StructExtends + struct StructExtends { enum { @@ -9526,17 +15713,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_ycbcr_image_arrays === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - 
struct StructExtends + struct StructExtends { enum { @@ -9544,33 +15731,37 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_provoking_vertex === + //=== VK_NV_memory_decompression === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_device_generated_commands_compute === template <> - struct StructExtends + struct StructExtends { enum { @@ -9578,77 +15769,84 @@ namespace VULKAN_HPP_NAMESPACE }; }; -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_EXT_full_screen_exclusive === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_ray_tracing_linear_swept_spheres === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_linear_color_attachment === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_EXT_line_rasterization === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_KHR_shader_maximal_reconvergence === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_image_compression_control_swapchain === template <> - struct StructExtends + struct StructExtends { enum { @@ -9656,17 +15854,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_shader_atomic_float === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_QCOM_image_processing === template <> - struct StructExtends + struct StructExtends { enum { @@ -9674,17 +15873,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_index_type_uint8 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -9692,17 +15891,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_extended_dynamic_state === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_nested_command_buffer === template <> - struct StructExtends + struct StructExtends { enum { @@ -9710,17 +15910,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_pipeline_executable_properties === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -9728,17 +15928,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_shader_atomic_float2 === + //=== VK_EXT_external_memory_acquire_unmodified === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -9746,33 +15947,36 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_NV_device_generated_commands === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_extended_dynamic_state3 === template <> - struct StructExtends + struct 
StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -9780,25 +15984,27 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_NV_inherited_viewport_scissor === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_subpass_merge_feedback === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -9806,17 +16012,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_texel_buffer_alignment === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -9824,17 +16030,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_QCOM_render_pass_transform === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -9842,25 +16048,28 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_device_memory_report === + //=== VK_LUNARG_direct_driver_loading === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_shader_module_identifier === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -9868,25 +16077,27 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_robustness2 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_rasterization_order_attachment_access === template <> - struct StructExtends + struct StructExtends { enum { @@ -9894,33 +16105,36 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_custom_border_color === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_optical_flow === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -9928,9 +16142,8 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_pipeline_library === template <> - struct StructExtends + struct StructExtends { enum { @@ -9938,25 +16151,27 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_present_id === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_legacy_dithering === template <> - struct StructExtends + struct StructExtends { enum { @@ -9964,61 +16179,66 @@ namespace VULKAN_HPP_NAMESPACE }; }; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_encode_queue === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_format_resolve === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - //=== VK_NV_device_diagnostics_config === template <> - struct StructExtends + 
struct StructExtends { enum { value = true }; }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_AMD_anti_lag === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10026,157 +16246,176 @@ namespace VULKAN_HPP_NAMESPACE }; }; -# if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_objects === + //=== VK_KHR_ray_tracing_position_fetch === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_shader_object === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_KHR_pipeline_binary === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_QCOM_tile_properties === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_SEC_amigo_profiling === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_QCOM_multiview_per_view_viewports === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; -# endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_KHR_synchronization2 === template <> - struct StructExtends + struct StructExtends { enum { @@ -10184,33 +16423,37 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_graphics_pipeline_library === + //=== VK_NV_ray_tracing_invocation_reorder === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_cooperative_vector === template <> - struct StructExtends + struct StructExtends { enum { @@ -10218,17 +16461,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_AMD_shader_early_and_late_fragment_tests === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10236,25 +16479,27 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_fragment_shader_barycentric === + //=== VK_NV_extended_sparse_address_space === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct 
StructExtends { enum { @@ -10262,17 +16507,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_shader_subgroup_uniform_control_flow === + //=== VK_EXT_mutable_descriptor_type === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10280,33 +16526,36 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_NV_fragment_shading_rate_enums === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_legacy_vertex_attributes === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10314,33 +16563,37 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_NV_ray_tracing_motion_blur === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_layer_settings === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_ARM_shader_core_builtins === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10348,25 +16601,27 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_mesh_shader === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_pipeline_library_group_handles === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10374,17 +16629,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_ycbcr_2plane_444_formats === + //=== VK_EXT_dynamic_rendering_unused_attachments === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10392,25 +16648,27 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_fragment_density_map2 === + //=== VK_NV_low_latency2 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10418,17 +16676,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_QCOM_rotated_copy_commands === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_KHR_cooperative_matrix === template <> - struct StructExtends + struct StructExtends { enum { @@ -10436,17 +16695,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_workgroup_memory_explicit_layout === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10454,65 +16713,74 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_image_compression_control === + //=== VK_QCOM_multiview_per_view_render_areas === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_KHR_compute_shader_derivatives === template <> 
- struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_KHR_video_decode_av1 === template <> - struct StructExtends + struct StructExtends { enum { @@ -10520,17 +16788,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_attachment_feedback_loop_layout === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10538,17 +16806,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_4444_formats === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10556,17 +16824,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_rgba10x6_formats === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_KHR_video_encode_av1 === template <> - struct StructExtends + struct StructExtends { enum { @@ -10574,25 +16843,26 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_ray_tracing_pipeline === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10600,17 +16870,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_ray_query === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10618,17 +16888,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_vertex_input_dynamic_state === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10636,9 +16906,8 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_physical_device_drm === template <> - struct StructExtends + struct StructExtends { enum { @@ -10646,25 +16915,26 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_depth_clip_control === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10672,17 +16942,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_primitive_topology_list_restart === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10690,73 +16960,74 @@ namespace VULKAN_HPP_NAMESPACE }; }; -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_memory === + //=== VK_KHR_video_maintenance1 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; -# endif /*VK_USE_PLATFORM_FUCHSIA*/ -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_buffer_collection === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; -# endif /*VK_USE_PLATFORM_FUCHSIA*/ - //=== VK_HUAWEI_subpass_shading === + //=== 
VK_NV_per_stage_descriptor_set === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_QCOM_image_processing2 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10764,17 +17035,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_HUAWEI_invocation_mask === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10782,17 +17053,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_NV_external_memory_rdma === + //=== VK_QCOM_filter_cubic_weights === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10800,17 +17072,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_pipeline_properties === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10818,41 +17090,46 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_multisampled_render_to_single_sampled === + //=== VK_QCOM_ycbcr_degamma === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_QCOM_filter_cubic_clamp === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10860,17 +17137,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_extended_dynamic_state2 === + //=== VK_EXT_attachment_feedback_loop_dynamic_state === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10878,25 +17156,28 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_color_write_enable === +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10904,17 +17185,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_primitives_generated_query === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10922,17 +17203,19 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_ray_tracing_maintenance1 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_MSFT_layered_driver === template <> - struct StructExtends + struct StructExtends { enum { @@ -10940,25 +17223,28 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_image_view_min_lod === + //=== VK_NV_descriptor_pool_overallocation === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_display_stereo === template <> 
- struct StructExtends + struct StructExtends { enum { @@ -10966,25 +17252,27 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_multi_draw === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_KHR_video_encode_quantization_map === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -10992,17 +17280,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_image_2d_view_of_3d === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -11010,25 +17298,26 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_border_color_swizzle === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -11036,17 +17325,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_pageable_device_local_memory === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -11054,17 +17343,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_VALVE_descriptor_set_host_mapping === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -11072,17 +17361,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_depth_clamp_zero_one === + //=== VK_NV_raw_access_chains === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -11090,17 +17380,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_non_seamless_cube_map === + //=== VK_KHR_shader_relaxed_extended_instruction === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -11108,33 +17399,37 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_QCOM_fragment_density_map_offset === + //=== VK_NV_command_buffer_inheritance === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_KHR_maintenance7 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -11142,17 +17437,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_NV_linear_color_attachment === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -11160,17 +17455,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_image_compression_control_swapchain === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_shader_atomic_float16_vector === template <> - struct StructExtends + struct StructExtends { enum { @@ -11178,33 +17474,37 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_QCOM_image_processing === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_EXT_shader_replicated_composites === template <> - struct StructExtends 
+ struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_ray_tracing_validation === template <> - struct StructExtends + struct StructExtends { enum { @@ -11212,49 +17512,55 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_subpass_merge_feedback === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_cluster_acceleration_structure === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_NV_partitioned_acceleration_structure === template <> - struct StructExtends + struct StructExtends { enum { @@ -11262,33 +17568,35 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_shader_module_identifier === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -11296,17 +17604,18 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_rasterization_order_attachment_access === + //=== VK_EXT_device_generated_commands === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -11314,17 +17623,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_legacy_dithering === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -11332,17 +17641,17 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_QCOM_tile_properties === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -11350,25 +17659,27 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_SEC_amigo_profiling === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_KHR_maintenance8 === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { @@ -11376,33 +17687,36 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_mutable_descriptor_type === template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + template <> - struct StructExtends + struct StructExtends { enum { value = true }; }; + + //=== VK_MESA_image_alignment_control === template <> - struct StructExtends + struct StructExtends { enum { @@ -11410,3123 +17724,4293 @@ namespace VULKAN_HPP_NAMESPACE }; }; -#endif // VULKAN_HPP_DISABLE_ENHANCED_MODE - -#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL - class DynamicLoader + template <> + struct StructExtends { - public: -# ifdef VULKAN_HPP_NO_EXCEPTIONS - DynamicLoader( std::string const & vulkanLibraryName = {} ) VULKAN_HPP_NOEXCEPT -# else - DynamicLoader( std::string const & vulkanLibraryName = {} ) -# endif - { - if ( !vulkanLibraryName.empty() ) - { -# if 
defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ ) - m_library = dlopen( vulkanLibraryName.c_str(), RTLD_NOW | RTLD_LOCAL ); -# elif defined( _WIN32 ) - m_library = ::LoadLibraryA( vulkanLibraryName.c_str() ); -# else -# error unsupported platform -# endif - } - else - { -# if defined( __unix__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ ) - m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL ); - if ( m_library == nullptr ) - { - m_library = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL ); - } -# elif defined( __APPLE__ ) - m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL ); -# elif defined( _WIN32 ) - m_library = ::LoadLibraryA( "vulkan-1.dll" ); -# else -# error unsupported platform -# endif - } - -# ifndef VULKAN_HPP_NO_EXCEPTIONS - if ( m_library == nullptr ) - { - // NOTE there should be an InitializationFailedError, but msvc insists on the symbol does not exist within the scope of this function. - throw std::runtime_error( "Failed to load vulkan library!" ); - } -# endif - } - - DynamicLoader( DynamicLoader const & ) = delete; - - DynamicLoader( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT : m_library( other.m_library ) - { - other.m_library = nullptr; - } - - DynamicLoader & operator=( DynamicLoader const & ) = delete; - - DynamicLoader & operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT - { - std::swap( m_library, other.m_library ); - return *this; - } - - ~DynamicLoader() VULKAN_HPP_NOEXCEPT - { - if ( m_library ) - { -# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ ) - dlclose( m_library ); -# elif defined( _WIN32 ) - ::FreeLibrary( m_library ); -# else -# error unsupported platform -# endif - } - } - - template - T getProcAddress( const char * function ) const VULKAN_HPP_NOEXCEPT - { -# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ ) - return (T)dlsym( m_library, function ); -# elif defined( _WIN32 ) - return ( T )::GetProcAddress( m_library, function ); -# else -# error unsupported platform -# endif - } - - bool success() const VULKAN_HPP_NOEXCEPT + enum { - return m_library != nullptr; - } - - private: -# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined( __Fuchsia__ ) - void * m_library; -# elif defined( _WIN32 ) - ::HINSTANCE m_library; -# else -# error unsupported platform -# endif + value = true + }; }; -#endif - using PFN_dummy = void ( * )(); - - class DispatchLoaderDynamic : public DispatchLoaderBase + template <> + struct StructExtends { - public: - //=== VK_VERSION_1_0 === - PFN_vkCreateInstance vkCreateInstance = 0; - PFN_vkDestroyInstance vkDestroyInstance = 0; - PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; - PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0; - PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; - PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; - PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0; - PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; - PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; - PFN_vkCreateDevice vkCreateDevice = 0; - PFN_vkDestroyDevice vkDestroyDevice = 0; - PFN_vkEnumerateInstanceExtensionProperties 
vkEnumerateInstanceExtensionProperties = 0; - PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0; - PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0; - PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; - PFN_vkGetDeviceQueue vkGetDeviceQueue = 0; - PFN_vkQueueSubmit vkQueueSubmit = 0; - PFN_vkQueueWaitIdle vkQueueWaitIdle = 0; - PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0; - PFN_vkAllocateMemory vkAllocateMemory = 0; - PFN_vkFreeMemory vkFreeMemory = 0; - PFN_vkMapMemory vkMapMemory = 0; - PFN_vkUnmapMemory vkUnmapMemory = 0; - PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0; - PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; - PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; - PFN_vkBindBufferMemory vkBindBufferMemory = 0; - PFN_vkBindImageMemory vkBindImageMemory = 0; - PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; - PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; - PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; - PFN_vkQueueBindSparse vkQueueBindSparse = 0; - PFN_vkCreateFence vkCreateFence = 0; - PFN_vkDestroyFence vkDestroyFence = 0; - PFN_vkResetFences vkResetFences = 0; - PFN_vkGetFenceStatus vkGetFenceStatus = 0; - PFN_vkWaitForFences vkWaitForFences = 0; - PFN_vkCreateSemaphore vkCreateSemaphore = 0; - PFN_vkDestroySemaphore vkDestroySemaphore = 0; - PFN_vkCreateEvent vkCreateEvent = 0; - PFN_vkDestroyEvent vkDestroyEvent = 0; - PFN_vkGetEventStatus vkGetEventStatus = 0; - PFN_vkSetEvent vkSetEvent = 0; - PFN_vkResetEvent vkResetEvent = 0; - PFN_vkCreateQueryPool vkCreateQueryPool = 0; - PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; - PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; - PFN_vkCreateBuffer vkCreateBuffer = 0; - PFN_vkDestroyBuffer vkDestroyBuffer = 0; - PFN_vkCreateBufferView vkCreateBufferView = 0; - PFN_vkDestroyBufferView vkDestroyBufferView = 0; - PFN_vkCreateImage vkCreateImage = 0; - PFN_vkDestroyImage vkDestroyImage = 0; - PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; - PFN_vkCreateImageView vkCreateImageView = 0; - PFN_vkDestroyImageView vkDestroyImageView = 0; - PFN_vkCreateShaderModule vkCreateShaderModule = 0; - PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; - PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; - PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; - PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; - PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; - PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; - PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; - PFN_vkDestroyPipeline vkDestroyPipeline = 0; - PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; - PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; - PFN_vkCreateSampler vkCreateSampler = 0; - PFN_vkDestroySampler vkDestroySampler = 0; - PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; - PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; - PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; - PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; - PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; - PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; - PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; - PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; - 
PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; - PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; - PFN_vkCreateRenderPass vkCreateRenderPass = 0; - PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; - PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; - PFN_vkCreateCommandPool vkCreateCommandPool = 0; - PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; - PFN_vkResetCommandPool vkResetCommandPool = 0; - PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; - PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; - PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; - PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; - PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; - PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; - PFN_vkCmdSetViewport vkCmdSetViewport = 0; - PFN_vkCmdSetScissor vkCmdSetScissor = 0; - PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; - PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; - PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; - PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; - PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; - PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; - PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; - PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; - PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; - PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; - PFN_vkCmdDraw vkCmdDraw = 0; - PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; - PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; - PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; - PFN_vkCmdDispatch vkCmdDispatch = 0; - PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; - PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; - PFN_vkCmdCopyImage vkCmdCopyImage = 0; - PFN_vkCmdBlitImage vkCmdBlitImage = 0; - PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; - PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; - PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; - PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; - PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; - PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; - PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; - PFN_vkCmdResolveImage vkCmdResolveImage = 0; - PFN_vkCmdSetEvent vkCmdSetEvent = 0; - PFN_vkCmdResetEvent vkCmdResetEvent = 0; - PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; - PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; - PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; - PFN_vkCmdEndQuery vkCmdEndQuery = 0; - PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; - PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; - PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; - PFN_vkCmdPushConstants vkCmdPushConstants = 0; - PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; - PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; - PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; - PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; - - //=== VK_VERSION_1_1 === - PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; - PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; - PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; - PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; - PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; - PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; - PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; - PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; - PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; - PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 
0; - PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; - PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; - PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; - PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; - PFN_vkTrimCommandPool vkTrimCommandPool = 0; - PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; - PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; - PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; - PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; - PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; - PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; - PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; - PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; - PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; - PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; - - //=== VK_VERSION_1_2 === - PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; - PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; - PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; - PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0; - PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; - PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; - PFN_vkResetQueryPool vkResetQueryPool = 0; - PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; - PFN_vkWaitSemaphores vkWaitSemaphores = 0; - PFN_vkSignalSemaphore vkSignalSemaphore = 0; - PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; - PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; - PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; - - //=== VK_VERSION_1_3 === - PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties = 0; - PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0; - PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0; - PFN_vkSetPrivateData vkSetPrivateData = 0; - PFN_vkGetPrivateData vkGetPrivateData = 0; - PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0; - PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0; - PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0; - PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0; - PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 0; - PFN_vkQueueSubmit2 vkQueueSubmit2 = 0; - PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0; - PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0; - PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0; - PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0; - PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0; - PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0; - PFN_vkCmdBeginRendering vkCmdBeginRendering = 0; - PFN_vkCmdEndRendering vkCmdEndRendering = 0; - PFN_vkCmdSetCullMode vkCmdSetCullMode = 0; - PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0; - PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0; - PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0; - PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0; - PFN_vkCmdBindVertexBuffers2 
vkCmdBindVertexBuffers2 = 0; - PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0; - PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0; - PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0; - PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0; - PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0; - PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0; - PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0; - PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0; - PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0; - PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0; - PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0; - PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0; - - //=== VK_KHR_surface === - PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; - PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; - - //=== VK_KHR_swapchain === - PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; - PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; - PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; - PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; - PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; - PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; - PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; - PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; - PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; - - //=== VK_KHR_display === - PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; - PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; - PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; - PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; - PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; - PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; - PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; - - //=== VK_KHR_display_swapchain === - PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; - -#if defined( VK_USE_PLATFORM_XLIB_KHR ) - //=== VK_KHR_xlib_surface === - PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; - PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; -#else - PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0; -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - -#if defined( VK_USE_PLATFORM_XCB_KHR ) - //=== VK_KHR_xcb_surface === - PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; - PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; -#else - PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0; -#endif /*VK_USE_PLATFORM_XCB_KHR*/ - -#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) - //=== VK_KHR_wayland_surface === - PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 
0; - PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; -#else - PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0; -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ - -#if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_KHR_android_surface === - PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; -#else - PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_win32_surface === - PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; - PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; -#else - PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_EXT_debug_report === - PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; - PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; - PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; - - //=== VK_EXT_debug_marker === - PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; - PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; - PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; - PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; - PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; - -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_queue === - PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0; - PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0; - PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0; - PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0; - PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0; - PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0; - PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0; - PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0; - PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0; - PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0; - PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0; - PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0; -#else - PFN_dummy vkGetPhysicalDeviceVideoCapabilitiesKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceVideoFormatPropertiesKHR_placeholder = 0; - PFN_dummy vkCreateVideoSessionKHR_placeholder = 0; - PFN_dummy vkDestroyVideoSessionKHR_placeholder = 0; - PFN_dummy vkGetVideoSessionMemoryRequirementsKHR_placeholder = 0; - PFN_dummy vkBindVideoSessionMemoryKHR_placeholder = 0; - PFN_dummy vkCreateVideoSessionParametersKHR_placeholder = 0; - PFN_dummy vkUpdateVideoSessionParametersKHR_placeholder = 0; - PFN_dummy vkDestroyVideoSessionParametersKHR_placeholder = 0; - PFN_dummy vkCmdBeginVideoCodingKHR_placeholder = 0; - PFN_dummy vkCmdEndVideoCodingKHR_placeholder = 0; - PFN_dummy vkCmdControlVideoCodingKHR_placeholder = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_decode_queue === - PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0; -#else - PFN_dummy vkCmdDecodeVideoKHR_placeholder = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - //=== VK_EXT_transform_feedback === - 
PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; - PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; - PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; - PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; - PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; - PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; - - //=== VK_NVX_binary_import === - PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0; - PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0; - PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0; - PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0; - PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; - - //=== VK_NVX_image_view_handle === - PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; - PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; - - //=== VK_AMD_draw_indirect_count === - PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; - PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; - - //=== VK_AMD_shader_info === - PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; - - //=== VK_KHR_dynamic_rendering === - PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0; - PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0; - -#if defined( VK_USE_PLATFORM_GGP ) - //=== VK_GGP_stream_descriptor_surface === - PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; -#else - PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0; -#endif /*VK_USE_PLATFORM_GGP*/ - - //=== VK_NV_external_memory_capabilities === - PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_external_memory_win32 === - PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; -#else - PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_get_physical_device_properties2 === - PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; - PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; - PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; - PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; - - //=== VK_KHR_device_group === - PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; - PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; - PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; - -#if defined( VK_USE_PLATFORM_VI_NN ) - //=== VK_NN_vi_surface === - PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; -#else - PFN_dummy vkCreateViSurfaceNN_placeholder = 0; -#endif /*VK_USE_PLATFORM_VI_NN*/ - - //=== VK_KHR_maintenance1 === - PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; - - //=== VK_KHR_device_group_creation === - PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; - - //=== VK_KHR_external_memory_capabilities === - PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== 
VK_KHR_external_memory_win32 === - PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; - PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; -#else - PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0; - PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_memory_fd === - PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; - PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; - - //=== VK_KHR_external_semaphore_capabilities === - PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_semaphore_win32 === - PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; - PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; -#else - PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0; - PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_semaphore_fd === - PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; - PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; - - //=== VK_KHR_push_descriptor === - PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; - PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; - - //=== VK_EXT_conditional_rendering === - PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; - PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; - - //=== VK_KHR_descriptor_update_template === - PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; - PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; - PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; - - //=== VK_NV_clip_space_w_scaling === - PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; - - //=== VK_EXT_direct_mode_display === - PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; - -#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) - //=== VK_EXT_acquire_xlib_display === - PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; - PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; -#else - PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0; - PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0; -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - - //=== VK_EXT_display_surface_counter === - PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; - - //=== VK_EXT_display_control === - PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; - PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; - PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; - PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; - - //=== VK_GOOGLE_display_timing === - PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; - PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; - - //=== VK_EXT_discard_rectangles === - PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; - - //=== VK_EXT_hdr_metadata === - PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; - - //=== VK_KHR_create_renderpass2 === - PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; - PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; - PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; - PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; - - //=== 
VK_KHR_shared_presentable_image === - PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; - - //=== VK_KHR_external_fence_capabilities === - PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_fence_win32 === - PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; - PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; -#else - PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0; - PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_fence_fd === - PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; - PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; - - //=== VK_KHR_performance_query === - PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; - PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; - PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; - PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; - - //=== VK_KHR_get_surface_capabilities2 === - PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; - PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; - - //=== VK_KHR_get_display_properties2 === - PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; - PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; - PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; - PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; - -#if defined( VK_USE_PLATFORM_IOS_MVK ) - //=== VK_MVK_ios_surface === - PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; -#else - PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0; -#endif /*VK_USE_PLATFORM_IOS_MVK*/ - -#if defined( VK_USE_PLATFORM_MACOS_MVK ) - //=== VK_MVK_macos_surface === - PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; -#else - PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0; -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ - - //=== VK_EXT_debug_utils === - PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; - PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; - PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; - PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; - PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; - PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; - PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; - PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; - PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; - PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; - PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; - -#if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_ANDROID_external_memory_android_hardware_buffer === - PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; - PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; -#else - PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0; - PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - - 
//=== VK_EXT_sample_locations === - PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; - PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; - - //=== VK_KHR_get_memory_requirements2 === - PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; - PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; - PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; - - //=== VK_KHR_acceleration_structure === - PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; - PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; - PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0; - PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0; - PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0; - PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; - PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0; - PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; - PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; - PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; - PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; - PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; - PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; - PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; - PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; - PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0; - - //=== VK_KHR_sampler_ycbcr_conversion === - PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; - PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; - - //=== VK_KHR_bind_memory2 === - PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; - PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; - - //=== VK_EXT_image_drm_format_modifier === - PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; - - //=== VK_EXT_validation_cache === - PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; - PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; - PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; - PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; - - //=== VK_NV_shading_rate_image === - PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; - PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; - PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; - - //=== VK_NV_ray_tracing === - PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; - PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; - PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; - PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; - PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; - PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0; - 
PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; - PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; - PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; - PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; - PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; - PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; - - //=== VK_KHR_maintenance3 === - PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; - - //=== VK_KHR_draw_indirect_count === - PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; - PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; - - //=== VK_EXT_external_memory_host === - PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; - - //=== VK_AMD_buffer_marker === - PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; - - //=== VK_EXT_calibrated_timestamps === - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; - PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; - - //=== VK_NV_mesh_shader === - PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; - PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; - PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; - - //=== VK_NV_scissor_exclusive === - PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; - - //=== VK_NV_device_diagnostic_checkpoints === - PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; - PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; - - //=== VK_KHR_timeline_semaphore === - PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; - PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; - PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; - - //=== VK_INTEL_performance_query === - PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; - PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; - PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0; - PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; - PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; - PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; - PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0; - PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0; - PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; - - //=== VK_AMD_display_native_hdr === - PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; - -#if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_imagepipe_surface === - PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; -#else - PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - -#if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_surface === - PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; -#else - PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - - //=== VK_KHR_fragment_shading_rate === - PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0; - PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0; - - //=== VK_EXT_buffer_device_address === - PFN_vkGetBufferDeviceAddressEXT 
vkGetBufferDeviceAddressEXT = 0; - - //=== VK_EXT_tooling_info === - PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0; - - //=== VK_KHR_present_wait === - PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0; - - //=== VK_NV_cooperative_matrix === - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0; - - //=== VK_NV_coverage_reduction_mode === - PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_EXT_full_screen_exclusive === - PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0; - PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; - PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; - PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0; -#else - PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0; - PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0; - PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0; - PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_EXT_headless_surface === - PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; - - //=== VK_KHR_buffer_device_address === - PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; - PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; - PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0; - - //=== VK_EXT_line_rasterization === - PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; - - //=== VK_EXT_host_query_reset === - PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; - - //=== VK_EXT_extended_dynamic_state === - PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0; - PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0; - PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0; - PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0; - PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0; - PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0; - PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0; - PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0; - PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0; - PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0; - PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0; - PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0; - - //=== VK_KHR_deferred_host_operations === - PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; - PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; - PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; - PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; - PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; - - //=== VK_KHR_pipeline_executable_properties === - PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; - PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; - PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; - - //=== VK_NV_device_generated_commands === - 
PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0; - PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; - PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0; - PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0; - PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; - PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; - - //=== VK_EXT_acquire_drm_display === - PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0; - PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0; - - //=== VK_EXT_private_data === - PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0; - PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0; - PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0; - PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0; - -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_encode_queue === - PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0; -#else - PFN_dummy vkCmdEncodeVideoKHR_placeholder = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_objects === - PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0; -#else - PFN_dummy vkExportMetalObjectsEXT_placeholder = 0; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ + enum + { + value = true + }; + }; - //=== VK_KHR_synchronization2 === - PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; - PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; - PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; - PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; - PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; - PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; - PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; - PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; - - //=== VK_NV_fragment_shading_rate_enums === - PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0; - - //=== VK_EXT_mesh_shader === - PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0; - PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0; - PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0; - - //=== VK_KHR_copy_commands2 === - PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0; - PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0; - PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0; - PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0; - PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0; - PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0; - - //=== VK_EXT_image_compression_control === - PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_acquire_winrt_display === - PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0; - PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0; -#else - PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0; - PFN_dummy vkGetWinrtDisplayNV_placeholder = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + //=== VK_EXT_depth_clamp_control === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; -#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) - //=== VK_EXT_directfb_surface === - PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0; - PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0; -#else - PFN_dummy 
vkCreateDirectFBSurfaceEXT_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0; -#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + template <> + struct StructExtends + { + enum + { + value = true + }; + }; - //=== VK_KHR_ray_tracing_pipeline === - PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; - PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; - PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0; - PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; - PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0; - PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0; - PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; - //=== VK_EXT_vertex_input_dynamic_state === - PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0; + //=== VK_KHR_video_maintenance2 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; -#if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_memory === - PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0; - PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0; -#else - PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0; - PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ + template <> + struct StructExtends + { + enum + { + value = true + }; + }; -#if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_semaphore === - PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0; - PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0; -#else - PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0; - PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ + template <> + struct StructExtends + { + enum + { + value = true + }; + }; -#if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_buffer_collection === - PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA = 0; - PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA = 0; - PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0; - PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0; - PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0; -#else - PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0; - PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0; - PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0; - PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0; - PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ + template <> + struct StructExtends + { + enum + { + value = true + }; + }; - //=== VK_HUAWEI_subpass_shading === - PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0; - PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; - //=== VK_HUAWEI_invocation_mask === - PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0; + //=== VK_HUAWEI_hdr_vivid === + template <> + 
struct StructExtends + { + enum + { + value = true + }; + }; - //=== VK_NV_external_memory_rdma === - PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; - //=== VK_EXT_pipeline_properties === - PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; - //=== VK_EXT_extended_dynamic_state2 === - PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0; - PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0; - PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0; - PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0; - PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0; + //=== VK_NV_cooperative_matrix2 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; -#if defined( VK_USE_PLATFORM_SCREEN_QNX ) - //=== VK_QNX_screen_surface === - PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0; - PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0; -#else - PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0; -#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + template <> + struct StructExtends + { + enum + { + value = true + }; + }; - //=== VK_EXT_color_write_enable === - PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; - //=== VK_KHR_ray_tracing_maintenance1 === - PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0; + //=== VK_ARM_pipeline_opacity_micromap === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; - //=== VK_EXT_multi_draw === - PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0; - PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; - //=== VK_EXT_pageable_device_local_memory === - PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0; +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_KHR_maintenance4 === - PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0; - PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0; - PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0; + //=== VK_KHR_depth_clamp_zero_one === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; - //=== VK_VALVE_descriptor_set_host_mapping === - PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0; - PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; - //=== VK_EXT_shader_module_identifier === - PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0; - PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0; + //=== VK_EXT_vertex_attribute_robustness === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; - //=== VK_QCOM_tile_properties === - PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0; - 
PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0;
+  template <>
+  struct StructExtends
+  {
+    enum
+    {
+      value = true
+    };
+  };
-  public:
-    DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default;
-    DispatchLoaderDynamic( DispatchLoaderDynamic const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif // VULKAN_HPP_DISABLE_ENHANCED_MODE
-#if !defined( VK_NO_PROTOTYPES )
-    // This interface is designed to be used for per-device function pointers in combination with a linked vulkan library.
-    template <typename DynamicLoader>
-    void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device, DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT
+  namespace detail
+  {
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+    class DynamicLoader
     {
-      PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
-      PFN_vkGetDeviceProcAddr getDeviceProcAddr = dl.template getProcAddress<PFN_vkGetDeviceProcAddr>( "vkGetDeviceProcAddr" );
-      init( static_cast<VkInstance>( instance ), getInstanceProcAddr, static_cast<VkDevice>( device ), device ? getDeviceProcAddr : nullptr );
-    }
+    public:
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+      DynamicLoader( std::string const & vulkanLibraryName = {} ) VULKAN_HPP_NOEXCEPT
+# else
+      DynamicLoader( std::string const & vulkanLibraryName = {} )
+# endif
+      {
+        if ( !vulkanLibraryName.empty() )
+        {
+# if defined( _WIN32 )
+          m_library = ::LoadLibraryA( vulkanLibraryName.c_str() );
+# elif defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ )
+          m_library = dlopen( vulkanLibraryName.c_str(), RTLD_NOW | RTLD_LOCAL );
+# else
+# error unsupported platform
+# endif
+        }
+        else
+        {
+# if defined( _WIN32 )
+          m_library = ::LoadLibraryA( "vulkan-1.dll" );
+# elif defined( __APPLE__ )
+          m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL );
+          if ( !m_library )
+          {
+            m_library = dlopen( "libvulkan.1.dylib", RTLD_NOW | RTLD_LOCAL );
+          }
+          if ( !m_library )
+          {
+            m_library = dlopen( "libMoltenVK.dylib", RTLD_NOW | RTLD_LOCAL );
+          }
+          // Add support for using Vulkan and MoltenVK in a Framework. App store rules for iOS
+          // strictly enforce no .dylib's. If they aren't found it just falls through
+          if ( !m_library )
+          {
+            m_library = dlopen( "vulkan.framework/vulkan", RTLD_NOW | RTLD_LOCAL );
+          }
+          if ( !m_library )
+          {
+            m_library = dlopen( "MoltenVK.framework/MoltenVK", RTLD_NOW | RTLD_LOCAL );
+          }
+          // modern versions of macOS don't search /usr/local/lib automatically contrary to what man dlopen says
+          // Vulkan SDK uses this as the system-wide installation location, so we're going to fallback to this if all else fails
+          if ( !m_library && ( getenv( "DYLD_FALLBACK_LIBRARY_PATH" ) == NULL ) )
+          {
+            m_library = dlopen( "/usr/local/lib/libvulkan.dylib", RTLD_NOW | RTLD_LOCAL );
+          }
+# elif defined( __unix__ ) || defined( __QNX__ ) || defined( __Fuchsia__ )
+          m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL );
+          if ( !m_library )
+          {
+            m_library = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL );
+          }
+# else
+# error unsupported platform
+# endif
+        }
-    // This interface is designed to be used for per-device function pointers in combination with a linked vulkan library.
-    template
-    void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device ) VULKAN_HPP_NOEXCEPT
-    {
-      static DynamicLoader dl;
-      init( instance, device, dl );
-    }
-#endif // !defined( VK_NO_PROTOTYPES )
+      }
-    DispatchLoaderDynamic( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT
-    {
-      init( getInstanceProcAddr );
-    }
+      DynamicLoader( DynamicLoader const & ) = delete;
-    void init( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT
-    {
-      VULKAN_HPP_ASSERT( getInstanceProcAddr );
+      DynamicLoader( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT : m_library( other.m_library )
+      {
+        other.m_library = nullptr;
+      }
-      vkGetInstanceProcAddr = getInstanceProcAddr;
+      DynamicLoader & operator=( DynamicLoader const & ) = delete;
-      //=== VK_VERSION_1_0 ===
-      vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( NULL, "vkCreateInstance" ) );
-      vkEnumerateInstanceExtensionProperties =
-        PFN_vkEnumerateInstanceExtensionProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) );
-      vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) );
+      DynamicLoader & operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT
+      {
+        std::swap( m_library, other.m_library );
+        return *this;
+      }
-      //=== VK_VERSION_1_1 ===
-      vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) );
-    }
+      ~DynamicLoader() VULKAN_HPP_NOEXCEPT
+      {
+        if ( m_library )
+        {
+# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ )
+          dlclose( m_library );
+# elif defined( _WIN32 )
+          ::FreeLibrary( m_library );
+# else
+# error unsupported platform
+# endif
+        }
+      }
-    // This interface does not require a linked vulkan library.
-    DispatchLoaderDynamic( VkInstance instance,
-                           PFN_vkGetInstanceProcAddr getInstanceProcAddr,
-                           VkDevice device = {},
-                           PFN_vkGetDeviceProcAddr getDeviceProcAddr = nullptr ) VULKAN_HPP_NOEXCEPT
-    {
-      init( instance, getInstanceProcAddr, device, getDeviceProcAddr );
-    }
+      template <typename T>
+      T getProcAddress( const char * function ) const VULKAN_HPP_NOEXCEPT
+      {
+# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ )
+        return (T)dlsym( m_library, function );
+# elif defined( _WIN32 )
+        return ( T )::GetProcAddress( m_library, function );
+# else
+# error unsupported platform
+# endif
+      }
-    // This interface does not require a linked vulkan library.
-    void init( VkInstance instance,
-               PFN_vkGetInstanceProcAddr getInstanceProcAddr,
-               VkDevice device = {},
-               PFN_vkGetDeviceProcAddr /*getDeviceProcAddr*/ = nullptr ) VULKAN_HPP_NOEXCEPT
-    {
-      VULKAN_HPP_ASSERT( instance && getInstanceProcAddr );
-      vkGetInstanceProcAddr = getInstanceProcAddr;
-      init( VULKAN_HPP_NAMESPACE::Instance( instance ) );
-      if ( device )
+      bool success() const VULKAN_HPP_NOEXCEPT
       {
-        init( VULKAN_HPP_NAMESPACE::Device( device ) );
+        return m_library != nullptr;
       }
-    }
-    void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT
-    {
-      VkInstance instance = static_cast<VkInstance>( instanceCpp );
+    private:
+# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ )
+      void * m_library;
+# elif defined( _WIN32 )
+      ::HINSTANCE m_library;
+# else
+# error unsupported platform
+# endif
+    };
+#endif
+    using PFN_dummy = void ( * )();
+
+    class DispatchLoaderDynamic : public DispatchLoaderBase
+    {
+    public:
       //=== VK_VERSION_1_0 ===
-      vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) );
-      vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) );
-      vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) );
-      vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) );
-      vkGetPhysicalDeviceImageFormatProperties =
-        PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) );
-      vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) );
-      vkGetPhysicalDeviceQueueFamilyProperties =
-        PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) );
-      vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) );
-      vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) );
-      vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) );
-      vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) );
-      vkEnumerateDeviceExtensionProperties =
-        PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) );
-      vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) );
-      vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) );
-      vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) );
-      vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) );
-      vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) );
-      vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) );
-      vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) );
-      vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) );
-      vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) );
-
vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) ); - vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) ); - vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) ); - vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) ); - vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) ); - vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) ); - vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) ); - vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties = - PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); - vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) ); - vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) ); - vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) ); - vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) ); - vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) ); - vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) ); - vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) ); - vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) ); - vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) ); - vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) ); - vkGetEventStatus = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) ); - vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) ); - vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) ); - vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) ); - vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) ); - vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) ); - vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) ); - vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) ); - vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) ); - vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) ); - vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) ); - vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) ); - vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) ); - vkCreateImageView = 
PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) ); - vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) ); - vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) ); - vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) ); - vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) ); - vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) ); - vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) ); - vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) ); - vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) ); - vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) ); - vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) ); - vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) ); - vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) ); - vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) ); - vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) ); - vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) ); - vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) ); - vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) ); - vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) ); - vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) ); - vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) ); - vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) ); - vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) ); - vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) ); - vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) ); - vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) ); - vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) ); - vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) ); - vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) ); - vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) ); - vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) ); - 
vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) ); - vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) ); - vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) ); - vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) ); - vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) ); - vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) ); - vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) ); - vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) ); - vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) ); - vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) ); - vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) ); - vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) ); - vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) ); - vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) ); - vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) ); - vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) ); - vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) ); - vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) ); - vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) ); - vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) ); - vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) ); - vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) ); - vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) ); - vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) ); - vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) ); - vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) ); - vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) ); - vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) ); - vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) ); - vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) ); - vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) ); - vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) ); - vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( 
vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) ); - vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) ); - vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) ); - vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) ); - vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) ); - vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) ); - vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) ); - vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) ); - vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) ); - vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) ); - vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) ); - vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) ); - vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) ); - vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) ); - vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) ); - vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) ); - vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) ); + PFN_vkCreateInstance vkCreateInstance = 0; + PFN_vkDestroyInstance vkDestroyInstance = 0; + PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; + PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0; + PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; + PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; + PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0; + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; + PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; + PFN_vkCreateDevice vkCreateDevice = 0; + PFN_vkDestroyDevice vkDestroyDevice = 0; + PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0; + PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0; + PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0; + PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; + PFN_vkGetDeviceQueue vkGetDeviceQueue = 0; + PFN_vkQueueSubmit vkQueueSubmit = 0; + PFN_vkQueueWaitIdle vkQueueWaitIdle = 0; + PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0; + PFN_vkAllocateMemory vkAllocateMemory = 0; + PFN_vkFreeMemory vkFreeMemory = 0; + PFN_vkMapMemory vkMapMemory = 0; + PFN_vkUnmapMemory vkUnmapMemory = 0; + PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0; + PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; + PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; + PFN_vkBindBufferMemory vkBindBufferMemory = 0; + PFN_vkBindImageMemory vkBindImageMemory = 0; + 
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; + PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; + PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; + PFN_vkQueueBindSparse vkQueueBindSparse = 0; + PFN_vkCreateFence vkCreateFence = 0; + PFN_vkDestroyFence vkDestroyFence = 0; + PFN_vkResetFences vkResetFences = 0; + PFN_vkGetFenceStatus vkGetFenceStatus = 0; + PFN_vkWaitForFences vkWaitForFences = 0; + PFN_vkCreateSemaphore vkCreateSemaphore = 0; + PFN_vkDestroySemaphore vkDestroySemaphore = 0; + PFN_vkCreateEvent vkCreateEvent = 0; + PFN_vkDestroyEvent vkDestroyEvent = 0; + PFN_vkGetEventStatus vkGetEventStatus = 0; + PFN_vkSetEvent vkSetEvent = 0; + PFN_vkResetEvent vkResetEvent = 0; + PFN_vkCreateQueryPool vkCreateQueryPool = 0; + PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; + PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; + PFN_vkCreateBuffer vkCreateBuffer = 0; + PFN_vkDestroyBuffer vkDestroyBuffer = 0; + PFN_vkCreateBufferView vkCreateBufferView = 0; + PFN_vkDestroyBufferView vkDestroyBufferView = 0; + PFN_vkCreateImage vkCreateImage = 0; + PFN_vkDestroyImage vkDestroyImage = 0; + PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; + PFN_vkCreateImageView vkCreateImageView = 0; + PFN_vkDestroyImageView vkDestroyImageView = 0; + PFN_vkCreateShaderModule vkCreateShaderModule = 0; + PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; + PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; + PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; + PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; + PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; + PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; + PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; + PFN_vkDestroyPipeline vkDestroyPipeline = 0; + PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; + PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; + PFN_vkCreateSampler vkCreateSampler = 0; + PFN_vkDestroySampler vkDestroySampler = 0; + PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; + PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; + PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; + PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; + PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; + PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; + PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; + PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; + PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; + PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; + PFN_vkCreateRenderPass vkCreateRenderPass = 0; + PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; + PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; + PFN_vkCreateCommandPool vkCreateCommandPool = 0; + PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; + PFN_vkResetCommandPool vkResetCommandPool = 0; + PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; + PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; + PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; + PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; + PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; + PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; + PFN_vkCmdSetViewport vkCmdSetViewport = 0; + PFN_vkCmdSetScissor vkCmdSetScissor = 0; + PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; + PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; + 
PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; + PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; + PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; + PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; + PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; + PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; + PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; + PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; + PFN_vkCmdDraw vkCmdDraw = 0; + PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; + PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; + PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; + PFN_vkCmdDispatch vkCmdDispatch = 0; + PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; + PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; + PFN_vkCmdCopyImage vkCmdCopyImage = 0; + PFN_vkCmdBlitImage vkCmdBlitImage = 0; + PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; + PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; + PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; + PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; + PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; + PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; + PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; + PFN_vkCmdResolveImage vkCmdResolveImage = 0; + PFN_vkCmdSetEvent vkCmdSetEvent = 0; + PFN_vkCmdResetEvent vkCmdResetEvent = 0; + PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; + PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; + PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; + PFN_vkCmdEndQuery vkCmdEndQuery = 0; + PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; + PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; + PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; + PFN_vkCmdPushConstants vkCmdPushConstants = 0; + PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; + PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; + PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; + PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; //=== VK_VERSION_1_1 === - vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) ); - vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) ); - vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) ); - vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) ); - vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) ); - vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); - vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) ); - vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) ); - vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) ); - vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); - vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); - vkGetPhysicalDeviceFormatProperties2 = - PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, 
"vkGetPhysicalDeviceFormatProperties2" ) ); - vkGetPhysicalDeviceImageFormatProperties2 = - PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); - vkGetPhysicalDeviceQueueFamilyProperties2 = - PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); - vkGetPhysicalDeviceMemoryProperties2 = - PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties2 = - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); - vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) ); - vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) ); - vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) ); - vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) ); - vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) ); - vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) ); - vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) ); - vkGetPhysicalDeviceExternalBufferProperties = - PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); - vkGetPhysicalDeviceExternalFenceProperties = - PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); - vkGetPhysicalDeviceExternalSemaphoreProperties = - PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); - vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) ); + PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; + PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; + PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; + PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; + PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; + PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; + PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; + PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; + PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; + PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; + PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; + PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; + PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; + 
PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; + PFN_vkTrimCommandPool vkTrimCommandPool = 0; + PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; + PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; + PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; + PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; + PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; + PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; + PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; + PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; + PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; //=== VK_VERSION_1_2 === - vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) ); - vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) ); - vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) ); - vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) ); - vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) ); - vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) ); - vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) ); - vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) ); - vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) ); - vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) ); - vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) ); - vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) ); - vkGetDeviceMemoryOpaqueCaptureAddress = - PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; + PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; + PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; + PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0; + PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; + PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; + PFN_vkResetQueryPool vkResetQueryPool = 0; + PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; + PFN_vkWaitSemaphores vkWaitSemaphores = 0; + PFN_vkSignalSemaphore vkSignalSemaphore = 0; + PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; + PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; //=== VK_VERSION_1_3 === - vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) ); - vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( 
vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlot" ) ); - vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlot" ) ); - vkSetPrivateData = PFN_vkSetPrivateData( vkGetInstanceProcAddr( instance, "vkSetPrivateData" ) ); - vkGetPrivateData = PFN_vkGetPrivateData( vkGetInstanceProcAddr( instance, "vkGetPrivateData" ) ); - vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2" ) ); - vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2" ) ); - vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2" ) ); - vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2" ) ); - vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2" ) ); - vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetInstanceProcAddr( instance, "vkQueueSubmit2" ) ); - vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2" ) ); - vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2" ) ); - vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2" ) ); - vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2" ) ); - vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2" ) ); - vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2" ) ); - vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetInstanceProcAddr( instance, "vkCmdBeginRendering" ) ); - vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetInstanceProcAddr( instance, "vkCmdEndRendering" ) ); - vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetInstanceProcAddr( instance, "vkCmdSetCullMode" ) ); - vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFace" ) ); - vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopology" ) ); - vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCount" ) ); - vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCount" ) ); - vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2" ) ); - vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnable" ) ); - vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnable" ) ); - vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOp" ) ); - vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnable" ) ); - vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnable" ) ); - vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOp" ) ); - vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnable" ) ); - vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnable" ) 
);
- vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnable" ) );
- vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirements" ) );
- vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirements" ) );
- vkGetDeviceImageSparseMemoryRequirements =
- PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirements" ) );
+ PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties = 0;
+ PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0;
+ PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0;
+ PFN_vkSetPrivateData vkSetPrivateData = 0;
+ PFN_vkGetPrivateData vkGetPrivateData = 0;
+ PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0;
+ PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0;
+ PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0;
+ PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0;
+ PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 0;
+ PFN_vkQueueSubmit2 vkQueueSubmit2 = 0;
+ PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0;
+ PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0;
+ PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0;
+ PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0;
+ PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0;
+ PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0;
+ PFN_vkCmdBeginRendering vkCmdBeginRendering = 0;
+ PFN_vkCmdEndRendering vkCmdEndRendering = 0;
+ PFN_vkCmdSetCullMode vkCmdSetCullMode = 0;
+ PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0;
+ PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0;
+ PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0;
+ PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0;
+ PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2 = 0;
+ PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0;
+ PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0;
+ PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0;
+ PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0;
+ PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0;
+ PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0;
+ PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0;
+ PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0;
+ PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0;
+ PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0;
+ PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0;
+ PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0;
+
+ //=== VK_VERSION_1_4 ===
+ PFN_vkCmdSetLineStipple vkCmdSetLineStipple = 0;
+ PFN_vkMapMemory2 vkMapMemory2 = 0;
+ PFN_vkUnmapMemory2 vkUnmapMemory2 = 0;
+ PFN_vkCmdBindIndexBuffer2 vkCmdBindIndexBuffer2 = 0;
+ PFN_vkGetRenderingAreaGranularity vkGetRenderingAreaGranularity = 0;
+ PFN_vkGetDeviceImageSubresourceLayout vkGetDeviceImageSubresourceLayout = 0;
+ PFN_vkGetImageSubresourceLayout2 vkGetImageSubresourceLayout2 = 0;
+ PFN_vkCmdPushDescriptorSet vkCmdPushDescriptorSet = 0;
+ PFN_vkCmdPushDescriptorSetWithTemplate vkCmdPushDescriptorSetWithTemplate = 0;
+ PFN_vkCmdSetRenderingAttachmentLocations vkCmdSetRenderingAttachmentLocations = 0;
+ PFN_vkCmdSetRenderingInputAttachmentIndices vkCmdSetRenderingInputAttachmentIndices = 0;
+ PFN_vkCmdBindDescriptorSets2 vkCmdBindDescriptorSets2 = 0;
+ PFN_vkCmdPushConstants2 vkCmdPushConstants2 = 0;
+ PFN_vkCmdPushDescriptorSet2 vkCmdPushDescriptorSet2 = 0;
+ PFN_vkCmdPushDescriptorSetWithTemplate2 vkCmdPushDescriptorSetWithTemplate2 = 0;
+ PFN_vkCopyMemoryToImage vkCopyMemoryToImage = 0;
+ PFN_vkCopyImageToMemory vkCopyImageToMemory = 0;
+ PFN_vkCopyImageToImage vkCopyImageToImage = 0;
+ PFN_vkTransitionImageLayout vkTransitionImageLayout = 0;
 //=== VK_KHR_surface ===
- vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) );
- vkGetPhysicalDeviceSurfaceSupportKHR =
- PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) );
- vkGetPhysicalDeviceSurfaceCapabilitiesKHR =
- PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) );
- vkGetPhysicalDeviceSurfaceFormatsKHR =
- PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) );
- vkGetPhysicalDeviceSurfacePresentModesKHR =
- PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) );
+ PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0;
+ PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0;
+ PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0;
+ PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0;
+ PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0;
 //=== VK_KHR_swapchain ===
- vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetInstanceProcAddr( instance, "vkCreateSwapchainKHR" ) );
- vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) );
- vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) );
- vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) );
- vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) );
- vkGetDeviceGroupPresentCapabilitiesKHR =
- PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
- vkGetDeviceGroupSurfacePresentModesKHR =
- PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
- vkGetPhysicalDevicePresentRectanglesKHR =
- PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) );
- vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) );
+ PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0;
+ PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0;
+ PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0;
+ PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0;
+ PFN_vkQueuePresentKHR vkQueuePresentKHR = 0;
+ PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0;
+ PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0;
+ PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0;
+ PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0;
 //=== VK_KHR_display ===
- vkGetPhysicalDeviceDisplayPropertiesKHR =
-
PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); - vkGetPhysicalDeviceDisplayPlanePropertiesKHR = - PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); - vkGetDisplayPlaneSupportedDisplaysKHR = - PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); - vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); - vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); - vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); - vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) ); + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; + PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; + PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; + PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; + PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; + PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; //=== VK_KHR_display_swapchain === - vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) ); + PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; #if defined( VK_USE_PLATFORM_XLIB_KHR ) //=== VK_KHR_xlib_surface === - vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); - vkGetPhysicalDeviceXlibPresentationSupportKHR = - PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); + PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; +#else + PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_XLIB_KHR*/ #if defined( VK_USE_PLATFORM_XCB_KHR ) //=== VK_KHR_xcb_surface === - vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) ); - vkGetPhysicalDeviceXcbPresentationSupportKHR = - PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); + PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; +#else + PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_XCB_KHR*/ #if defined( VK_USE_PLATFORM_WAYLAND_KHR ) //=== VK_KHR_wayland_surface === - vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); - vkGetPhysicalDeviceWaylandPresentationSupportKHR = - PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( 
instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); + PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; +#else + PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ #if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_KHR_android_surface === - vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) ); + PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; +#else + PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_win32_surface === - vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); - vkGetPhysicalDeviceWin32PresentationSupportKHR = - PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); + PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; +#else + PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_EXT_debug_report === - vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) ); - vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); - vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); + PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; + PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; + PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; //=== VK_EXT_debug_marker === - vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) ); - vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) ); - vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) ); - vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) ); - vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) ); + PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; + PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; + PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; + PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; + PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_queue === - vkGetPhysicalDeviceVideoCapabilitiesKHR = - PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) ); - vkGetPhysicalDeviceVideoFormatPropertiesKHR = - PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, 
"vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) ); - vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionKHR" ) ); - vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionKHR" ) ); - vkGetVideoSessionMemoryRequirementsKHR = - PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetVideoSessionMemoryRequirementsKHR" ) ); - vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetInstanceProcAddr( instance, "vkBindVideoSessionMemoryKHR" ) ); - vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionParametersKHR" ) ); - vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkUpdateVideoSessionParametersKHR" ) ); - vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionParametersKHR" ) ); - vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginVideoCodingKHR" ) ); - vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndVideoCodingKHR" ) ); - vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdControlVideoCodingKHR" ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0; + PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0; + PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0; + PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0; + PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0; + PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0; + PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0; + PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0; + PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0; + PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0; + PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_decode_queue === - vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdDecodeVideoKHR" ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0; //=== VK_EXT_transform_feedback === - vkCmdBindTransformFeedbackBuffersEXT = - PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) ); - vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) ); - vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) ); - vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) ); - vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) ); - vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) ); + PFN_vkCmdBindTransformFeedbackBuffersEXT 
vkCmdBindTransformFeedbackBuffersEXT = 0; + PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; + PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; + PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; + PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; + PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; //=== VK_NVX_binary_import === - vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetInstanceProcAddr( instance, "vkCreateCuModuleNVX" ) ); - vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkCreateCuFunctionNVX" ) ); - vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuModuleNVX" ) ); - vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuFunctionNVX" ) ); - vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetInstanceProcAddr( instance, "vkCmdCuLaunchKernelNVX" ) ); + PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0; + PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0; + PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0; + PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0; + PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; //=== VK_NVX_image_view_handle === - vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) ); - vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) ); + PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; + PFN_vkGetImageViewHandle64NVX vkGetImageViewHandle64NVX = 0; + PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; //=== VK_AMD_draw_indirect_count === - vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) ); - if ( !vkCmdDrawIndirectCount ) - vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; - vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) ); - if ( !vkCmdDrawIndexedIndirectCount ) - vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; + PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; //=== VK_AMD_shader_info === - vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetInstanceProcAddr( instance, "vkGetShaderInfoAMD" ) ); + PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; //=== VK_KHR_dynamic_rendering === - vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderingKHR" ) ); - if ( !vkCmdBeginRendering ) - vkCmdBeginRendering = vkCmdBeginRenderingKHR; - vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderingKHR" ) ); - if ( !vkCmdEndRendering ) - vkCmdEndRendering = vkCmdEndRenderingKHR; + PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0; + PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0; #if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_stream_descriptor_surface === - vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); + PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; +#else + PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0; #endif /*VK_USE_PLATFORM_GGP*/ //=== VK_NV_external_memory_capabilities === - 
vkGetPhysicalDeviceExternalImageFormatPropertiesNV = - PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_external_memory_win32 === - vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) ); + PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; +#else + PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_get_physical_device_properties2 === - vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); - if ( !vkGetPhysicalDeviceFeatures2 ) - vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR; - vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceProperties2 ) - vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR; - vkGetPhysicalDeviceFormatProperties2KHR = - PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceFormatProperties2 ) - vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR; - vkGetPhysicalDeviceImageFormatProperties2KHR = - PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceImageFormatProperties2 ) - vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR; - vkGetPhysicalDeviceQueueFamilyProperties2KHR = - PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) - vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR; - vkGetPhysicalDeviceMemoryProperties2KHR = - PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceMemoryProperties2 ) - vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR; - vkGetPhysicalDeviceSparseImageFormatProperties2KHR = - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) - vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR; + PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; + PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; + PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; //=== VK_KHR_device_group === - 
vkGetDeviceGroupPeerMemoryFeaturesKHR = - PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); - if ( !vkGetDeviceGroupPeerMemoryFeatures ) - vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; - vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) ); - if ( !vkCmdSetDeviceMask ) - vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; - vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) ); - if ( !vkCmdDispatchBase ) - vkCmdDispatchBase = vkCmdDispatchBaseKHR; + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; + PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; + PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; #if defined( VK_USE_PLATFORM_VI_NN ) //=== VK_NN_vi_surface === - vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) ); + PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; +#else + PFN_dummy vkCreateViSurfaceNN_placeholder = 0; #endif /*VK_USE_PLATFORM_VI_NN*/ //=== VK_KHR_maintenance1 === - vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) ); - if ( !vkTrimCommandPool ) - vkTrimCommandPool = vkTrimCommandPoolKHR; + PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; //=== VK_KHR_device_group_creation === - vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); - if ( !vkEnumeratePhysicalDeviceGroups ) - vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR; + PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; //=== VK_KHR_external_memory_capabilities === - vkGetPhysicalDeviceExternalBufferPropertiesKHR = - PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); - if ( !vkGetPhysicalDeviceExternalBufferProperties ) - vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR; + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_memory_win32 === - vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) ); - vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) ); + PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; + PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; +#else + PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_memory_fd === - vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) ); - vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) ); + PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; + PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; //=== VK_KHR_external_semaphore_capabilities === - vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = - PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( 
instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); - if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) - vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_semaphore_win32 === - vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) ); - vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) ); + PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; + PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; +#else + PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_semaphore_fd === - vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) ); - vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) ); + PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; + PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; //=== VK_KHR_push_descriptor === - vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) ); - vkCmdPushDescriptorSetWithTemplateKHR = - PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; //=== VK_EXT_conditional_rendering === - vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) ); - vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) ); + PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; + PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; //=== VK_KHR_descriptor_update_template === - vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) ); - if ( !vkCreateDescriptorUpdateTemplate ) - vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; - vkDestroyDescriptorUpdateTemplateKHR = - PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) ); - if ( !vkDestroyDescriptorUpdateTemplate ) - vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; - vkUpdateDescriptorSetWithTemplateKHR = - PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) ); - if ( !vkUpdateDescriptorSetWithTemplate ) - vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; + PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; + PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; //=== VK_NV_clip_space_w_scaling === - vkCmdSetViewportWScalingNV = 
PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) ); + PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; //=== VK_EXT_direct_mode_display === - vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); + PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; #if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) //=== VK_EXT_acquire_xlib_display === - vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); - vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); + PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; + PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; +#else + PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0; + PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0; #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ //=== VK_EXT_display_surface_counter === - vkGetPhysicalDeviceSurfaceCapabilities2EXT = - PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; //=== VK_EXT_display_control === - vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) ); - vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) ); - vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) ); - vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) ); + PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; + PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; + PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; + PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; //=== VK_GOOGLE_display_timing === - vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) ); - vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) ); + PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; + PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; //=== VK_EXT_discard_rectangles === - vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) ); + PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; + PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT = 0; + PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT = 0; //=== VK_EXT_hdr_metadata === - vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) ); + PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; //=== VK_KHR_create_renderpass2 === - vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) ); - if ( !vkCreateRenderPass2 ) - vkCreateRenderPass2 = vkCreateRenderPass2KHR; - vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) ); - if ( !vkCmdBeginRenderPass2 ) - vkCmdBeginRenderPass2 = 
vkCmdBeginRenderPass2KHR; - vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) ); - if ( !vkCmdNextSubpass2 ) - vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; - vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) ); - if ( !vkCmdEndRenderPass2 ) - vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; + PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; + PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; + PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; //=== VK_KHR_shared_presentable_image === - vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) ); + PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; //=== VK_KHR_external_fence_capabilities === - vkGetPhysicalDeviceExternalFencePropertiesKHR = - PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); - if ( !vkGetPhysicalDeviceExternalFenceProperties ) - vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR; + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_fence_win32 === - vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) ); - vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) ); + PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; + PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; +#else + PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_fence_fd === - vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetInstanceProcAddr( instance, "vkImportFenceFdKHR" ) ); - vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) ); + PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; + PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; //=== VK_KHR_performance_query === - vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( - vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); - vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( - vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); - vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) ); - vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) ); + PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; + PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; + PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; //=== VK_KHR_get_surface_capabilities2 === - vkGetPhysicalDeviceSurfaceCapabilities2KHR = - 
PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); - vkGetPhysicalDeviceSurfaceFormats2KHR = - PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; //=== VK_KHR_get_display_properties2 === - vkGetPhysicalDeviceDisplayProperties2KHR = - PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); - vkGetPhysicalDeviceDisplayPlaneProperties2KHR = - PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); - vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); - vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); + PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; + PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; + PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; #if defined( VK_USE_PLATFORM_IOS_MVK ) //=== VK_MVK_ios_surface === - vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) ); + PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; +#else + PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0; #endif /*VK_USE_PLATFORM_IOS_MVK*/ #if defined( VK_USE_PLATFORM_MACOS_MVK ) //=== VK_MVK_macos_surface === - vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); + PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; +#else + PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0; #endif /*VK_USE_PLATFORM_MACOS_MVK*/ //=== VK_EXT_debug_utils === - vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) ); - vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) ); - vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) ); - vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) ); - vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) ); - vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) ); - vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) ); - vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) ); - vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) ); - vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( 
vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); - vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); + PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; + PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; + PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; + PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; + PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; + PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; + PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; + PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; + PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; + PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; + PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; #if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_ANDROID_external_memory_android_hardware_buffer === - vkGetAndroidHardwareBufferPropertiesANDROID = - PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); - vkGetMemoryAndroidHardwareBufferANDROID = - PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); + PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; + PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; +#else + PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0; + PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX = 0; + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX = 0; + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX = 0; + PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX = 0; + PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0; + PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX = 0; + PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX = 0; +#else + PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0; + PFN_dummy vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0; + PFN_dummy vkGetExecutionGraphPipelineNodeIndexAMDX_placeholder = 0; + PFN_dummy vkCmdInitializeGraphScratchMemoryAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_sample_locations === - vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) ); - vkGetPhysicalDeviceMultisamplePropertiesEXT = - PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); + PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; //=== VK_KHR_get_memory_requirements2 === - vkGetImageMemoryRequirements2KHR 
= PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) ); - if ( !vkGetImageMemoryRequirements2 ) - vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; - vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) ); - if ( !vkGetBufferMemoryRequirements2 ) - vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; - vkGetImageSparseMemoryRequirements2KHR = - PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) ); - if ( !vkGetImageSparseMemoryRequirements2 ) - vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; + PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; + PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; //=== VK_KHR_acceleration_structure === - vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) ); - vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) ); - vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresKHR" ) ); - vkCmdBuildAccelerationStructuresIndirectKHR = - PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); - vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructuresKHR" ) ); - vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) ); - vkCopyAccelerationStructureToMemoryKHR = - PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) ); - vkCopyMemoryToAccelerationStructureKHR = - PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) ); - vkWriteAccelerationStructuresPropertiesKHR = - PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) ); - vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) ); - vkCmdCopyAccelerationStructureToMemoryKHR = - PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); - vkCmdCopyMemoryToAccelerationStructureKHR = - PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); - vkGetAccelerationStructureDeviceAddressKHR = - PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) ); - vkCmdWriteAccelerationStructuresPropertiesKHR = - PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); - vkGetDeviceAccelerationStructureCompatibilityKHR = - PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetInstanceProcAddr( instance, 
"vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); - vkGetAccelerationStructureBuildSizesKHR = - PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureBuildSizesKHR" ) ); + PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; + PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; + PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0; + PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0; + PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0; + PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; + PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; + PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; + PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0; + + //=== VK_KHR_ray_tracing_pipeline === + PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; + PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; + PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0; + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; + PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0; + PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0; + PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0; //=== VK_KHR_sampler_ycbcr_conversion === - vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) ); - if ( !vkCreateSamplerYcbcrConversion ) - vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; - vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) ); - if ( !vkDestroySamplerYcbcrConversion ) - vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; + PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; //=== VK_KHR_bind_memory2 === - vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) ); - if ( !vkBindBufferMemory2 ) - vkBindBufferMemory2 = vkBindBufferMemory2KHR; - vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) ); - if ( !vkBindImageMemory2 ) - vkBindImageMemory2 = vkBindImageMemory2KHR; + PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; + PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; //=== 
VK_EXT_image_drm_format_modifier === - vkGetImageDrmFormatModifierPropertiesEXT = - PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; //=== VK_EXT_validation_cache === - vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) ); - vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) ); - vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) ); - vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) ); + PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; + PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; + PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; + PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; //=== VK_NV_shading_rate_image === - vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) ); - vkCmdSetViewportShadingRatePaletteNV = - PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) ); - vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) ); + PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; + PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; + PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; //=== VK_NV_ray_tracing === - vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) ); - vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) ); - vkGetAccelerationStructureMemoryRequirementsNV = - PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); - vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) ); - vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) ); - vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) ); - vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) ); - vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) ); - vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) ); - if ( !vkGetRayTracingShaderGroupHandlesKHR ) - vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; - vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) ); - vkCmdWriteAccelerationStructuresPropertiesNV = - PFN_vkCmdWriteAccelerationStructuresPropertiesNV( 
vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); - vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) ); + PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; + PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; + PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; + PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; + PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; + PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0; + PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; + PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; + PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; + PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; + PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; //=== VK_KHR_maintenance3 === - vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) ); - if ( !vkGetDescriptorSetLayoutSupport ) - vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; //=== VK_KHR_draw_indirect_count === - vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) ); - if ( !vkCmdDrawIndirectCount ) - vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; - vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) ); - if ( !vkCmdDrawIndexedIndirectCount ) - vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; + PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; //=== VK_EXT_external_memory_host === - vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) ); + PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; //=== VK_AMD_buffer_marker === - vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) ); + PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; + PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; //=== VK_EXT_calibrated_timestamps === - vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); - vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) ); + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; + PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; //=== VK_NV_mesh_shader === - vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) ); - vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) ); - 
vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; + PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; + PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; //=== VK_NV_scissor_exclusive === - vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) ); + PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV = 0; + PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; //=== VK_NV_device_diagnostic_checkpoints === - vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) ); - vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) ); + PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; + PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; + PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; //=== VK_KHR_timeline_semaphore === - vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) ); - if ( !vkGetSemaphoreCounterValue ) - vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; - vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) ); - if ( !vkWaitSemaphores ) - vkWaitSemaphores = vkWaitSemaphoresKHR; - vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) ); - if ( !vkSignalSemaphore ) - vkSignalSemaphore = vkSignalSemaphoreKHR; + PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; + PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; + PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; //=== VK_INTEL_performance_query === - vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) ); - vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) ); - vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) ); - vkCmdSetPerformanceStreamMarkerINTEL = - PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); - vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) ); - vkAcquirePerformanceConfigurationINTEL = - PFN_vkAcquirePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) ); - vkReleasePerformanceConfigurationINTEL = - PFN_vkReleasePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) ); - vkQueueSetPerformanceConfigurationINTEL = - PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) ); - vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) ); + PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; + PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; + 
PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0; + PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; + PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; + PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; + PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0; + PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0; + PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; //=== VK_AMD_display_native_hdr === - vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) ); + PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_imagepipe_surface === - vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) ); + PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; +#else + PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_surface === - vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) ); + PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; +#else + PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0; #endif /*VK_USE_PLATFORM_METAL_EXT*/ //=== VK_KHR_fragment_shading_rate === - vkGetPhysicalDeviceFragmentShadingRatesKHR = - PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) ); - vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateKHR" ) ); + PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0; + PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0; + + //=== VK_KHR_dynamic_rendering_local_read === + PFN_vkCmdSetRenderingAttachmentLocationsKHR vkCmdSetRenderingAttachmentLocationsKHR = 0; + PFN_vkCmdSetRenderingInputAttachmentIndicesKHR vkCmdSetRenderingInputAttachmentIndicesKHR = 0; //=== VK_EXT_buffer_device_address === - vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) ); - if ( !vkGetBufferDeviceAddress ) - vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0; //=== VK_EXT_tooling_info === - vkGetPhysicalDeviceToolPropertiesEXT = - PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) ); - if ( !vkGetPhysicalDeviceToolProperties ) - vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT; + PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0; //=== VK_KHR_present_wait === - vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetInstanceProcAddr( instance, "vkWaitForPresentKHR" ) ); + PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0; //=== VK_NV_cooperative_matrix === - vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 
0; //=== VK_NV_coverage_reduction_mode === - vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( - vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); + PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_EXT_full_screen_exclusive === - vkGetPhysicalDeviceSurfacePresentModes2EXT = - PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); - vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) ); - vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) ); - vkGetDeviceGroupSurfacePresentModes2EXT = - PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); + PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0; + PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; + PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; + PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0; +#else + PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0; + PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0; + PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0; + PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_EXT_headless_surface === - vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) ); + PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; //=== VK_KHR_buffer_device_address === - vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) ); - if ( !vkGetBufferDeviceAddress ) - vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; - vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) ); - if ( !vkGetBufferOpaqueCaptureAddress ) - vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; - vkGetDeviceMemoryOpaqueCaptureAddressKHR = - PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); - if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) - vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; + PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0; //=== VK_EXT_line_rasterization === - vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) ); + PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; //=== VK_EXT_host_query_reset === - vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) ); - if ( !vkResetQueryPool ) - 
vkResetQueryPool = vkResetQueryPoolEXT; + PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; //=== VK_EXT_extended_dynamic_state === - vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetCullModeEXT" ) ); - if ( !vkCmdSetCullMode ) - vkCmdSetCullMode = vkCmdSetCullModeEXT; - vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFaceEXT" ) ); - if ( !vkCmdSetFrontFace ) - vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; - vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopologyEXT" ) ); - if ( !vkCmdSetPrimitiveTopology ) - vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; - vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCountEXT" ) ); - if ( !vkCmdSetViewportWithCount ) - vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; - vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCountEXT" ) ); - if ( !vkCmdSetScissorWithCount ) - vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; - vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2EXT" ) ); - if ( !vkCmdBindVertexBuffers2 ) - vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; - vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnableEXT" ) ); - if ( !vkCmdSetDepthTestEnable ) - vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; - vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnableEXT" ) ); - if ( !vkCmdSetDepthWriteEnable ) - vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; - vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOpEXT" ) ); - if ( !vkCmdSetDepthCompareOp ) - vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; - vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnableEXT" ) ); - if ( !vkCmdSetDepthBoundsTestEnable ) - vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; - vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnableEXT" ) ); - if ( !vkCmdSetStencilTestEnable ) - vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; - vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOpEXT" ) ); - if ( !vkCmdSetStencilOp ) - vkCmdSetStencilOp = vkCmdSetStencilOpEXT; + PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0; + PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0; + PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0; + PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0; + PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0; + PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0; + PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0; + PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0; + PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0; + PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0; + PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0; + PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0; //=== VK_KHR_deferred_host_operations === - 
vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) ); - vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) ); - vkGetDeferredOperationMaxConcurrencyKHR = - PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); - vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) ); - vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) ); + PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; + PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; + PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; + PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; + PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; //=== VK_KHR_pipeline_executable_properties === - vkGetPipelineExecutablePropertiesKHR = - PFN_vkGetPipelineExecutablePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) ); - vkGetPipelineExecutableStatisticsKHR = - PFN_vkGetPipelineExecutableStatisticsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) ); - vkGetPipelineExecutableInternalRepresentationsKHR = - PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; + PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; + PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; + + //=== VK_EXT_host_image_copy === + PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT = 0; + PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT = 0; + PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT = 0; + PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT = 0; + PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0; + + //=== VK_KHR_map_memory2 === + PFN_vkMapMemory2KHR vkMapMemory2KHR = 0; + PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR = 0; + + //=== VK_EXT_swapchain_maintenance1 === + PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT = 0; //=== VK_NV_device_generated_commands === - vkGetGeneratedCommandsMemoryRequirementsNV = - PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); - vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) ); - vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) ); - vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) ); - vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) ); - vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) ); + PFN_vkGetGeneratedCommandsMemoryRequirementsNV 
vkGetGeneratedCommandsMemoryRequirementsNV = 0; + PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; + PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0; + PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0; + PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; + PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; + + //=== VK_EXT_depth_bias_control === + PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT = 0; //=== VK_EXT_acquire_drm_display === - vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) ); - vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) ); + PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0; + PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0; //=== VK_EXT_private_data === - vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlotEXT" ) ); - if ( !vkCreatePrivateDataSlot ) - vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; - vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlotEXT" ) ); - if ( !vkDestroyPrivateDataSlot ) - vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; - vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkSetPrivateDataEXT" ) ); - if ( !vkSetPrivateData ) - vkSetPrivateData = vkSetPrivateDataEXT; - vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkGetPrivateDataEXT" ) ); - if ( !vkGetPrivateData ) - vkGetPrivateData = vkGetPrivateDataEXT; + PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0; + PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0; + PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0; + PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_encode_queue === - vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdEncodeVideoKHR" ) ); + PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = 0; + PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR = 0; + PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV = 0; + PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV = 0; + PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV = 0; + PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV = 0; + PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV = 0; + PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV = 0; +#else + PFN_dummy vkCreateCudaModuleNV_placeholder = 0; + PFN_dummy vkGetCudaModuleCacheNV_placeholder = 0; + PFN_dummy vkCreateCudaFunctionNV_placeholder = 0; + PFN_dummy vkDestroyCudaModuleNV_placeholder = 0; + PFN_dummy vkDestroyCudaFunctionNV_placeholder = 0; + PFN_dummy vkCmdCudaLaunchKernelNV_placeholder = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_objects === - vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetInstanceProcAddr( instance, "vkExportMetalObjectsEXT" ) ); + PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0; +#else + PFN_dummy vkExportMetalObjectsEXT_placeholder = 0; #endif /*VK_USE_PLATFORM_METAL_EXT*/ //=== 
VK_KHR_synchronization2 === - vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2KHR" ) ); - if ( !vkCmdSetEvent2 ) - vkCmdSetEvent2 = vkCmdSetEvent2KHR; - vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2KHR" ) ); - if ( !vkCmdResetEvent2 ) - vkCmdResetEvent2 = vkCmdResetEvent2KHR; - vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2KHR" ) ); - if ( !vkCmdWaitEvents2 ) - vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; - vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2KHR" ) ); - if ( !vkCmdPipelineBarrier2 ) - vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; - vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2KHR" ) ); - if ( !vkCmdWriteTimestamp2 ) - vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; - vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetInstanceProcAddr( instance, "vkQueueSubmit2KHR" ) ); - if ( !vkQueueSubmit2 ) - vkQueueSubmit2 = vkQueueSubmit2KHR; - vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarker2AMD" ) ); - vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointData2NV" ) ); + PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; + PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; + PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; + PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; + PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; + PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; + + //=== VK_EXT_descriptor_buffer === + PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT = 0; + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT = 0; + PFN_vkGetDescriptorEXT vkGetDescriptorEXT = 0; + PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT = 0; + PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT = 0; + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT = 0; + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = 0; //=== VK_NV_fragment_shading_rate_enums === - vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateEnumNV" ) ); + PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0; //=== VK_EXT_mesh_shader === - vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksEXT" ) ); - vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectEXT" ) ); - vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); + PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0; + PFN_vkCmdDrawMeshTasksIndirectEXT 
vkCmdDrawMeshTasksIndirectEXT = 0; + PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0; //=== VK_KHR_copy_commands2 === - vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2KHR" ) ); - if ( !vkCmdCopyBuffer2 ) - vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; - vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2KHR" ) ); - if ( !vkCmdCopyImage2 ) - vkCmdCopyImage2 = vkCmdCopyImage2KHR; - vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2KHR" ) ); - if ( !vkCmdCopyBufferToImage2 ) - vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; - vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2KHR" ) ); - if ( !vkCmdCopyImageToBuffer2 ) - vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; - vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2KHR" ) ); - if ( !vkCmdBlitImage2 ) - vkCmdBlitImage2 = vkCmdBlitImage2KHR; - vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2KHR" ) ); - if ( !vkCmdResolveImage2 ) - vkCmdResolveImage2 = vkCmdResolveImage2KHR; - - //=== VK_EXT_image_compression_control === - vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2EXT" ) ); + PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0; + PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0; + PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0; + PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0; + PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0; + PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0; + + //=== VK_EXT_device_fault === + PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_acquire_winrt_display === - vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) ); - vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) ); + PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0; + PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0; +#else + PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0; + PFN_dummy vkGetWinrtDisplayNV_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) //=== VK_EXT_directfb_surface === - vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) ); - vkGetPhysicalDeviceDirectFBPresentationSupportEXT = - PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) ); + PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0; + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0; +#else + PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0; #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ - //=== VK_KHR_ray_tracing_pipeline === - vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) ); - vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) ); - 
vkGetRayTracingShaderGroupHandlesKHR = - PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) ); - vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = - PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); - vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) ); - vkGetRayTracingShaderGroupStackSizeKHR = - PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); - vkCmdSetRayTracingPipelineStackSizeKHR = - PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); - //=== VK_EXT_vertex_input_dynamic_state === - vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetInstanceProcAddr( instance, "vkCmdSetVertexInputEXT" ) ); + PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0; #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_memory === - vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandleFUCHSIA" ) ); - vkGetMemoryZirconHandlePropertiesFUCHSIA = - PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); + PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0; + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0; +#else + PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0; + PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_semaphore === - vkImportSemaphoreZirconHandleFUCHSIA = - PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); - vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); + PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0; + PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0; +#else + PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0; + PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_buffer_collection === - vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateBufferCollectionFUCHSIA" ) ); - vkSetBufferCollectionImageConstraintsFUCHSIA = - PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); - vkSetBufferCollectionBufferConstraintsFUCHSIA = - PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); - vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkDestroyBufferCollectionFUCHSIA" ) ); - vkGetBufferCollectionPropertiesFUCHSIA = - PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); + PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA = 0; + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA 
vkSetBufferCollectionImageConstraintsFUCHSIA = 0; + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0; + PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0; + PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0; +#else + PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0; + PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0; + PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0; + PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0; + PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0; #endif /*VK_USE_PLATFORM_FUCHSIA*/ //=== VK_HUAWEI_subpass_shading === - vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = - PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetInstanceProcAddr( instance, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); - vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdSubpassShadingHUAWEI" ) ); + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0; + PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0; //=== VK_HUAWEI_invocation_mask === - vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdBindInvocationMaskHUAWEI" ) ); + PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0; //=== VK_NV_external_memory_rdma === - vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetInstanceProcAddr( instance, "vkGetMemoryRemoteAddressNV" ) ); + PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0; //=== VK_EXT_pipeline_properties === - vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPipelinePropertiesEXT" ) ); + PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0; //=== VK_EXT_extended_dynamic_state2 === - vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPatchControlPointsEXT" ) ); - vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnableEXT" ) ); - if ( !vkCmdSetRasterizerDiscardEnable ) - vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; - vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnableEXT" ) ); - if ( !vkCmdSetDepthBiasEnable ) - vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; - vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEXT" ) ); - vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnableEXT" ) ); - if ( !vkCmdSetPrimitiveRestartEnable ) - vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; + PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0; + PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0; + PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0; + PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0; + PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0; #if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_screen_surface === - vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) ); - 
vkGetPhysicalDeviceScreenPresentationSupportQNX = - PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) ); + PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0; + PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0; +#else + PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0; #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ //=== VK_EXT_color_write_enable === - vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteEnableEXT" ) ); + PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0; //=== VK_KHR_ray_tracing_maintenance1 === - vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirect2KHR" ) ); + PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0; //=== VK_EXT_multi_draw === - vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiEXT" ) ); - vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiIndexedEXT" ) ); + PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0; + PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0; + + //=== VK_EXT_opacity_micromap === + PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0; + PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0; + PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0; + PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0; + PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0; + PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0; + PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0; + PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0; + PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0; + PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0; + PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0; + PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0; + PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0; + PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0; + + //=== VK_HUAWEI_cluster_culling_shader === + PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI = 0; + PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI = 0; //=== VK_EXT_pageable_device_local_memory === - vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetInstanceProcAddr( instance, "vkSetDeviceMemoryPriorityEXT" ) ); + PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0; //=== VK_KHR_maintenance4 === - vkGetDeviceBufferMemoryRequirementsKHR = - PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceBufferMemoryRequirements ) - vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR; - vkGetDeviceImageMemoryRequirementsKHR = - PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceImageMemoryRequirements ) - vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; - vkGetDeviceImageSparseMemoryRequirementsKHR = - PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceImageSparseMemoryRequirements 
) - vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; + PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0; + PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0; + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0; //=== VK_VALVE_descriptor_set_host_mapping === - vkGetDescriptorSetLayoutHostMappingInfoVALVE = - PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); - vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetHostMappingVALVE" ) ); + PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0; + PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0; + + //=== VK_NV_copy_memory_indirect === + PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV = 0; + PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV = 0; + + //=== VK_NV_memory_decompression === + PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0; + PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0; + + //=== VK_NV_device_generated_commands_compute === + PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0; + PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV = 0; + PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0; + + //=== VK_EXT_extended_dynamic_state3 === + PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0; + PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0; + PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0; + PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0; + PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0; + PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0; + PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT = 0; + PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0; + PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0; + PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0; + PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0; + PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0; + PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0; + PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0; + PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0; + PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0; + PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0; + PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0; + PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0; + PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0; + PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0; + PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0; + PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0; + PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0; + PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0; + PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV 
= 0; + PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0; + PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0; + PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0; + PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0; + PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0; //=== VK_EXT_shader_module_identifier === - vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleIdentifierEXT" ) ); - vkGetShaderModuleCreateInfoIdentifierEXT = - PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleCreateInfoIdentifierEXT" ) ); + PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0; + PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0; + + //=== VK_NV_optical_flow === + PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0; + PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0; + PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV = 0; + PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0; + PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0; + + //=== VK_KHR_maintenance5 === + PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR = 0; + PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR = 0; + PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0; + PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0; + + //=== VK_AMD_anti_lag === + PFN_vkAntiLagUpdateAMD vkAntiLagUpdateAMD = 0; + + //=== VK_EXT_shader_object === + PFN_vkCreateShadersEXT vkCreateShadersEXT = 0; + PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0; + PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0; + PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0; + PFN_vkCmdSetDepthClampRangeEXT vkCmdSetDepthClampRangeEXT = 0; + + //=== VK_KHR_pipeline_binary === + PFN_vkCreatePipelineBinariesKHR vkCreatePipelineBinariesKHR = 0; + PFN_vkDestroyPipelineBinaryKHR vkDestroyPipelineBinaryKHR = 0; + PFN_vkGetPipelineKeyKHR vkGetPipelineKeyKHR = 0; + PFN_vkGetPipelineBinaryDataKHR vkGetPipelineBinaryDataKHR = 0; + PFN_vkReleaseCapturedPipelineDataKHR vkReleaseCapturedPipelineDataKHR = 0; //=== VK_QCOM_tile_properties === - vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetFramebufferTilePropertiesQCOM" ) ); - vkGetDynamicRenderingTilePropertiesQCOM = - PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetDynamicRenderingTilePropertiesQCOM" ) ); - } + PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0; + PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0; - void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT - { - VkDevice device = static_cast<VkDevice>( deviceCpp ); + //=== VK_NV_cooperative_vector === + PFN_vkGetPhysicalDeviceCooperativeVectorPropertiesNV vkGetPhysicalDeviceCooperativeVectorPropertiesNV = 0; + PFN_vkConvertCooperativeVectorMatrixNV vkConvertCooperativeVectorMatrixNV = 0; + PFN_vkCmdConvertCooperativeVectorMatrixNV vkCmdConvertCooperativeVectorMatrixNV = 0; - //=== VK_VERSION_1_0 === - vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) ); -
vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) ); - vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) ); - vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) ); - vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) ); - vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) ); - vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); - vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) ); - vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) ); - vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); - vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) ); - vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) ); - vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) ); - vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); - vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) ); - vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) ); - vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) ); - vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) ); - vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) ); - vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) ); - vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) ); - vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) ); - vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) ); - vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) ); - vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) ); - vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) ); - vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) ); - vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) ); - vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) ); - vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) ); - vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) ); - vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) ); - vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) ); - vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) ); - vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); - vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) ); - vkCreateBufferView = PFN_vkCreateBufferView( 
vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); - vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) ); - vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) ); - vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) ); - vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) ); - vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) ); - vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) ); - vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) ); - vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) ); - vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) ); - vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); - vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) ); - vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) ); - vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); - vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); - vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); - vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) ); - vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) ); - vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) ); - vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) ); - vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); - vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); - vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) ); - vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) ); - vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) ); - vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); - vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) ); - vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); - vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) ); - vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) ); - vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); - vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) ); - vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, 
"vkGetRenderAreaGranularity" ) ); - vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) ); - vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) ); - vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) ); - vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) ); - vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) ); - vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); - vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); - vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) ); - vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); - vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); - vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); - vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); - vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); - vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); - vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); - vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); - vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); - vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); - vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); - vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); - vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); - vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); - vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); - vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); - vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); - vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); - vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); - vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); - vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); - vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); - vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); - vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); - vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); - vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( 
device, "vkCmdFillBuffer" ) ); - vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); - vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); - vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); - vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); - vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); - vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); - vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); - vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); - vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); - vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); - vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); - vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); - vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); - vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); - vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); - vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); - vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); - vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); + //=== VK_NV_low_latency2 === + PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV = 0; + PFN_vkLatencySleepNV vkLatencySleepNV = 0; + PFN_vkSetLatencyMarkerNV vkSetLatencyMarkerNV = 0; + PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV = 0; + PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV = 0; - //=== VK_VERSION_1_1 === - vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); - vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); - vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); - vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); - vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); - vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); - vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) ); - vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) ); - vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); - vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) ); - vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) ); - 
vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) ); - vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); - vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); - vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); - vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); + //=== VK_KHR_cooperative_matrix === + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = 0; - //=== VK_VERSION_1_2 === - vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); - vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); - vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); - vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); - vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); - vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); - vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) ); - vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) ); - vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) ); - vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); - vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) ); - vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); - vkGetDeviceMemoryOpaqueCaptureAddress = - PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT = 0; - //=== VK_VERSION_1_3 === - vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) ); - vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) ); - vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) ); - vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) ); - vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) ); - vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) ); - vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) ); - vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) ); - vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) ); - vkQueueSubmit2 = PFN_vkQueueSubmit2( 
vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) ); - vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) ); - vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) ); - vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) ); - vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) ); - vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) ); - vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) ); - vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) ); - vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) ); - vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) ); - vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) ); - vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) ); - vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) ); - vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) ); - vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) ); - vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) ); - vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) ); - vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) ); - vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) ); - vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) ); - vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) ); - vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) ); - vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) ); - vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) ); - vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) ); - vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) ); - vkGetDeviceImageSparseMemoryRequirements = - PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) ); +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX = 0; +#else + PFN_dummy vkGetScreenBufferPropertiesQNX_placeholder = 0; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - //=== VK_KHR_swapchain === - vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) ); - 
vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); - vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) ); - vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); - vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) ); - vkGetDeviceGroupPresentCapabilitiesKHR = - PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); - vkGetDeviceGroupSurfacePresentModesKHR = - PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); - vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) ); + //=== VK_KHR_line_rasterization === + PFN_vkCmdSetLineStippleKHR vkCmdSetLineStippleKHR = 0; + + //=== VK_KHR_calibrated_timestamps === + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = 0; + PFN_vkGetCalibratedTimestampsKHR vkGetCalibratedTimestampsKHR = 0; + + //=== VK_KHR_maintenance6 === + PFN_vkCmdBindDescriptorSets2KHR vkCmdBindDescriptorSets2KHR = 0; + PFN_vkCmdPushConstants2KHR vkCmdPushConstants2KHR = 0; + PFN_vkCmdPushDescriptorSet2KHR vkCmdPushDescriptorSet2KHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplate2KHR vkCmdPushDescriptorSetWithTemplate2KHR = 0; + PFN_vkCmdSetDescriptorBufferOffsets2EXT vkCmdSetDescriptorBufferOffsets2EXT = 0; + PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = 0; + + //=== VK_NV_cluster_acceleration_structure === + PFN_vkGetClusterAccelerationStructureBuildSizesNV vkGetClusterAccelerationStructureBuildSizesNV = 0; + PFN_vkCmdBuildClusterAccelerationStructureIndirectNV vkCmdBuildClusterAccelerationStructureIndirectNV = 0; + + //=== VK_NV_partitioned_acceleration_structure === + PFN_vkGetPartitionedAccelerationStructuresBuildSizesNV vkGetPartitionedAccelerationStructuresBuildSizesNV = 0; + PFN_vkCmdBuildPartitionedAccelerationStructuresNV vkCmdBuildPartitionedAccelerationStructuresNV = 0; + + //=== VK_EXT_device_generated_commands === + PFN_vkGetGeneratedCommandsMemoryRequirementsEXT vkGetGeneratedCommandsMemoryRequirementsEXT = 0; + PFN_vkCmdPreprocessGeneratedCommandsEXT vkCmdPreprocessGeneratedCommandsEXT = 0; + PFN_vkCmdExecuteGeneratedCommandsEXT vkCmdExecuteGeneratedCommandsEXT = 0; + PFN_vkCreateIndirectCommandsLayoutEXT vkCreateIndirectCommandsLayoutEXT = 0; + PFN_vkDestroyIndirectCommandsLayoutEXT vkDestroyIndirectCommandsLayoutEXT = 0; + PFN_vkCreateIndirectExecutionSetEXT vkCreateIndirectExecutionSetEXT = 0; + PFN_vkDestroyIndirectExecutionSetEXT vkDestroyIndirectExecutionSetEXT = 0; + PFN_vkUpdateIndirectExecutionSetPipelineEXT vkUpdateIndirectExecutionSetPipelineEXT = 0; + PFN_vkUpdateIndirectExecutionSetShaderEXT vkUpdateIndirectExecutionSetShaderEXT = 0; + + //=== VK_NV_cooperative_matrix2 === + PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = 0; - //=== VK_KHR_display_swapchain === - vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) ); +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + PFN_vkGetMemoryMetalHandleEXT vkGetMemoryMetalHandleEXT = 0; + 
PFN_vkGetMemoryMetalHandlePropertiesEXT vkGetMemoryMetalHandlePropertiesEXT = 0; +#else + PFN_dummy vkGetMemoryMetalHandleEXT_placeholder = 0; + PFN_dummy vkGetMemoryMetalHandlePropertiesEXT_placeholder = 0; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_EXT_debug_marker === - vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); - vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); - vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); - vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); - vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); + public: + DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default; + DispatchLoaderDynamic( DispatchLoaderDynamic const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_queue === - vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) ); - vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) ); - vkGetVideoSessionMemoryRequirementsKHR = - PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) ); - vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) ); - vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) ); - vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) ); - vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) ); - vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) ); - vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) ); - vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + DispatchLoaderDynamic( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT + { + init( getInstanceProcAddr ); + } -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_decode_queue === - vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + // This interface does not require a linked vulkan library. 
+    DispatchLoaderDynamic( VkInstance instance,
+                           PFN_vkGetInstanceProcAddr getInstanceProcAddr,
+                           VkDevice device = {},
+                           PFN_vkGetDeviceProcAddr getDeviceProcAddr = nullptr ) VULKAN_HPP_NOEXCEPT
+    {
+      init( instance, getInstanceProcAddr, device, getDeviceProcAddr );
+    }
-      //=== VK_EXT_transform_feedback ===
-      vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) );
-      vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) );
-      vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) );
-      vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) );
-      vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) );
-      vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) );
+    template <typename DynamicLoader>
+    void init()
+    {
+      static DynamicLoader dl;
+      init( dl );
+    }
-      //=== VK_NVX_binary_import ===
-      vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) );
-      vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) );
-      vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) );
-      vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) );
-      vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) );
+    template <typename DynamicLoader>
+    void init( DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT
+    {
+      PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
+      init( getInstanceProcAddr );
+    }
-      //=== VK_NVX_image_view_handle ===
-      vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) );
-      vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) );
+    void init( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getInstanceProcAddr );
-      //=== VK_AMD_draw_indirect_count ===
-      vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) );
-      if ( !vkCmdDrawIndirectCount )
-        vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD;
-      vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) );
-      if ( !vkCmdDrawIndexedIndirectCount )
-        vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD;
+      vkGetInstanceProcAddr = getInstanceProcAddr;
-      //=== VK_AMD_shader_info ===
-      vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) );
+      //=== VK_VERSION_1_0 ===
+      vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( NULL, "vkCreateInstance" ) );
+      vkEnumerateInstanceExtensionProperties =
+        PFN_vkEnumerateInstanceExtensionProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) );
+      vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) );
-      //=== VK_KHR_dynamic_rendering ===
-      vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) );
-      if ( !vkCmdBeginRendering )
-        vkCmdBeginRendering = vkCmdBeginRenderingKHR;
-      vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) );
-      if ( !vkCmdEndRendering )
-        vkCmdEndRendering = vkCmdEndRenderingKHR;
+      //=== VK_VERSION_1_1 ===
+      vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) );
+    }
-#if defined( VK_USE_PLATFORM_WIN32_KHR )
-      //=== VK_NV_external_memory_win32 ===
-      vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) );
-#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    // This interface does not require a linked vulkan library.
+    void init( VkInstance instance,
+               PFN_vkGetInstanceProcAddr getInstanceProcAddr,
+               VkDevice device = {},
+               PFN_vkGetDeviceProcAddr /*getDeviceProcAddr*/ = nullptr ) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( instance && getInstanceProcAddr );
+      vkGetInstanceProcAddr = getInstanceProcAddr;
+      init( VULKAN_HPP_NAMESPACE::Instance( instance ) );
+      if ( device )
+      {
+        init( VULKAN_HPP_NAMESPACE::Device( device ) );
+      }
+    }
-      //=== VK_KHR_device_group ===
-      vkGetDeviceGroupPeerMemoryFeaturesKHR =
-        PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
-      if ( !vkGetDeviceGroupPeerMemoryFeatures )
-        vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR;
-      vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) );
-      if ( !vkCmdSetDeviceMask )
-        vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR;
-      vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) );
-      if ( !vkCmdDispatchBase )
-        vkCmdDispatchBase = vkCmdDispatchBaseKHR;
+    void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT
+    {
+      VkInstance instance = static_cast<VkInstance>( instanceCpp );
+
+      //=== VK_VERSION_1_0 ===
+      vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) );
+      vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) );
+      vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) );
+      vkGetPhysicalDeviceFormatProperties =
+        PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) );
+      vkGetPhysicalDeviceImageFormatProperties =
+        PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) );
+      vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) );
+      vkGetPhysicalDeviceQueueFamilyProperties =
+        PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) );
+      vkGetPhysicalDeviceMemoryProperties =
+        PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) );
+      vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) );
+      vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) );
+      vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance,
"vkDestroyDevice" ) ); + vkEnumerateDeviceExtensionProperties = + PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) ); + vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) ); + vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) ); + vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) ); + vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) ); + vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) ); + vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) ); + vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) ); + vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) ); + vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) ); + vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) ); + vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) ); + vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) ); + vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) ); + vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) ); + vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) ); + vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) ); + vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); + vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) ); + vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) ); + vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) ); + vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) ); + vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) ); + vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) ); + vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) ); + vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) ); + vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) ); + vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) ); + vkGetEventStatus = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) ); + vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) 
); + vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) ); + vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) ); + vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) ); + vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) ); + vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) ); + vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) ); + vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) ); + vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) ); + vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) ); + vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) ); + vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) ); + vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) ); + vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) ); + vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) ); + vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) ); + vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) ); + vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) ); + vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) ); + vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) ); + vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) ); + vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) ); + vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) ); + vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) ); + vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) ); + vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) ); + vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) ); + vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) ); + vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) ); + vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) ); + vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) ); + vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( 
vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) ); + vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) ); + vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) ); + vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) ); + vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) ); + vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) ); + vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) ); + vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) ); + vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) ); + vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) ); + vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) ); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) ); + vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) ); + vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) ); + vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) ); + vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) ); + vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) ); + vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) ); + vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) ); + vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDispatch = PFN_vkCmdDispatch( 
vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) ); + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) ); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) ); + vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) ); + vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) ); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) ); + vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) ); + vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) ); + vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) ); + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) ); + vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) ); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) ); + vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) ); + vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) ); + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) ); + + //=== VK_VERSION_1_1 === + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) ); + vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) ); + vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, 
"vkEnumeratePhysicalDeviceGroups" ) ); + vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) ); + vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) ); + vkGetImageSparseMemoryRequirements2 = + PFN_vkGetImageSparseMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) ); + vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); + vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); + vkGetPhysicalDeviceFormatProperties2 = + PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); + vkGetPhysicalDeviceImageFormatProperties2 = + PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); + vkGetPhysicalDeviceQueueFamilyProperties2 = + PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); + vkGetPhysicalDeviceMemoryProperties2 = + PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties2 = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) ); + vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) ); + vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) ); + vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) ); + vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) ); + vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) ); + vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) ); + vkGetPhysicalDeviceExternalBufferProperties = + PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); + vkGetPhysicalDeviceExternalFenceProperties = + PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); + vkGetPhysicalDeviceExternalSemaphoreProperties = + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); + vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) ); + + //=== VK_VERSION_1_2 === + vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) ); + vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" 
) ); + vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) ); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) ); + vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) ); + vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) ); + vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) ); + vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) ); + vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = + PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + + //=== VK_VERSION_1_3 === + vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) ); + vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlot" ) ); + vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlot" ) ); + vkSetPrivateData = PFN_vkSetPrivateData( vkGetInstanceProcAddr( instance, "vkSetPrivateData" ) ); + vkGetPrivateData = PFN_vkGetPrivateData( vkGetInstanceProcAddr( instance, "vkGetPrivateData" ) ); + vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2" ) ); + vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2" ) ); + vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2" ) ); + vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2" ) ); + vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2" ) ); + vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetInstanceProcAddr( instance, "vkQueueSubmit2" ) ); + vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2" ) ); + vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2" ) ); + vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2" ) ); + vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2" ) ); + vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2" ) ); + vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2" ) ); + vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetInstanceProcAddr( instance, "vkCmdBeginRendering" ) ); + vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetInstanceProcAddr( instance, "vkCmdEndRendering" ) ); + vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetInstanceProcAddr( instance, "vkCmdSetCullMode" ) ); + vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( 
vkGetInstanceProcAddr( instance, "vkCmdSetFrontFace" ) ); + vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopology" ) ); + vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCount" ) ); + vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCount" ) ); + vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2" ) ); + vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnable" ) ); + vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnable" ) ); + vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOp" ) ); + vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnable" ) ); + vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnable" ) ); + vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOp" ) ); + vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnable" ) ); + vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnable" ) ); + vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnable" ) ); + vkGetDeviceBufferMemoryRequirements = + PFN_vkGetDeviceBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirements" ) ); + vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirements" ) ); + vkGetDeviceImageSparseMemoryRequirements = + PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirements" ) ); + + //=== VK_VERSION_1_4 === + vkCmdSetLineStipple = PFN_vkCmdSetLineStipple( vkGetInstanceProcAddr( instance, "vkCmdSetLineStipple" ) ); + vkMapMemory2 = PFN_vkMapMemory2( vkGetInstanceProcAddr( instance, "vkMapMemory2" ) ); + vkUnmapMemory2 = PFN_vkUnmapMemory2( vkGetInstanceProcAddr( instance, "vkUnmapMemory2" ) ); + vkCmdBindIndexBuffer2 = PFN_vkCmdBindIndexBuffer2( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer2" ) ); + vkGetRenderingAreaGranularity = PFN_vkGetRenderingAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderingAreaGranularity" ) ); + vkGetDeviceImageSubresourceLayout = PFN_vkGetDeviceImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSubresourceLayout" ) ); + vkGetImageSubresourceLayout2 = PFN_vkGetImageSubresourceLayout2( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2" ) ); + vkCmdPushDescriptorSet = PFN_vkCmdPushDescriptorSet( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSet" ) ); + vkCmdPushDescriptorSetWithTemplate = PFN_vkCmdPushDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplate" ) ); + vkCmdSetRenderingAttachmentLocations = + PFN_vkCmdSetRenderingAttachmentLocations( vkGetInstanceProcAddr( instance, "vkCmdSetRenderingAttachmentLocations" ) ); + vkCmdSetRenderingInputAttachmentIndices = + 
PFN_vkCmdSetRenderingInputAttachmentIndices( vkGetInstanceProcAddr( instance, "vkCmdSetRenderingInputAttachmentIndices" ) ); + vkCmdBindDescriptorSets2 = PFN_vkCmdBindDescriptorSets2( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets2" ) ); + vkCmdPushConstants2 = PFN_vkCmdPushConstants2( vkGetInstanceProcAddr( instance, "vkCmdPushConstants2" ) ); + vkCmdPushDescriptorSet2 = PFN_vkCmdPushDescriptorSet2( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSet2" ) ); + vkCmdPushDescriptorSetWithTemplate2 = + PFN_vkCmdPushDescriptorSetWithTemplate2( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplate2" ) ); + vkCopyMemoryToImage = PFN_vkCopyMemoryToImage( vkGetInstanceProcAddr( instance, "vkCopyMemoryToImage" ) ); + vkCopyImageToMemory = PFN_vkCopyImageToMemory( vkGetInstanceProcAddr( instance, "vkCopyImageToMemory" ) ); + vkCopyImageToImage = PFN_vkCopyImageToImage( vkGetInstanceProcAddr( instance, "vkCopyImageToImage" ) ); + vkTransitionImageLayout = PFN_vkTransitionImageLayout( vkGetInstanceProcAddr( instance, "vkTransitionImageLayout" ) ); + + //=== VK_KHR_surface === + vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) ); + vkGetPhysicalDeviceSurfaceSupportKHR = + PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); + vkGetPhysicalDeviceSurfaceCapabilitiesKHR = + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) ); + vkGetPhysicalDeviceSurfaceFormatsKHR = + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) ); + vkGetPhysicalDeviceSurfacePresentModesKHR = + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) ); + + //=== VK_KHR_swapchain === + vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetInstanceProcAddr( instance, "vkCreateSwapchainKHR" ) ); + vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) ); + vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) ); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) ); + vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) ); + vkGetDeviceGroupPresentCapabilitiesKHR = + PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); + vkGetDeviceGroupSurfacePresentModesKHR = + PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); + vkGetPhysicalDevicePresentRectanglesKHR = + PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); + vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) ); + + //=== VK_KHR_display === + vkGetPhysicalDeviceDisplayPropertiesKHR = + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); + vkGetPhysicalDeviceDisplayPlanePropertiesKHR = + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); + vkGetDisplayPlaneSupportedDisplaysKHR = + 
PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); + vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); + vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); + vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); + vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) ); + + //=== VK_KHR_display_swapchain === + vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) ); - //=== VK_KHR_maintenance1 === - vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); - if ( !vkTrimCommandPool ) - vkTrimCommandPool = vkTrimCommandPoolKHR; +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); + vkGetPhysicalDeviceXlibPresentationSupportKHR = + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) ); + vkGetPhysicalDeviceXcbPresentationSupportKHR = + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); + vkGetPhysicalDeviceWaylandPresentationSupportKHR = + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_memory_win32 === - vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); - vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) ); + //=== VK_KHR_win32_surface === + vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); + vkGetPhysicalDeviceWin32PresentationSupportKHR = + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_external_memory_fd === - vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) ); - vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, 
"vkGetMemoryFdPropertiesKHR" ) ); + //=== VK_EXT_debug_report === + vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) ); + vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); + vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); + + //=== VK_EXT_debug_marker === + vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) ); + vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) ); + vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) ); + + //=== VK_KHR_video_queue === + vkGetPhysicalDeviceVideoCapabilitiesKHR = + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) ); + vkGetPhysicalDeviceVideoFormatPropertiesKHR = + PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) ); + vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionKHR" ) ); + vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionKHR" ) ); + vkGetVideoSessionMemoryRequirementsKHR = + PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetVideoSessionMemoryRequirementsKHR" ) ); + vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetInstanceProcAddr( instance, "vkBindVideoSessionMemoryKHR" ) ); + vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionParametersKHR" ) ); + vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkUpdateVideoSessionParametersKHR" ) ); + vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionParametersKHR" ) ); + vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginVideoCodingKHR" ) ); + vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndVideoCodingKHR" ) ); + vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdControlVideoCodingKHR" ) ); + + //=== VK_KHR_video_decode_queue === + vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdDecodeVideoKHR" ) ); + + //=== VK_EXT_transform_feedback === + vkCmdBindTransformFeedbackBuffersEXT = + PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) ); + vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) ); + 
vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) ); + vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) ); + + //=== VK_NVX_binary_import === + vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetInstanceProcAddr( instance, "vkCreateCuModuleNVX" ) ); + vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkCreateCuFunctionNVX" ) ); + vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuModuleNVX" ) ); + vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuFunctionNVX" ) ); + vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetInstanceProcAddr( instance, "vkCmdCuLaunchKernelNVX" ) ); + + //=== VK_NVX_image_view_handle === + vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) ); + vkGetImageViewHandle64NVX = PFN_vkGetImageViewHandle64NVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandle64NVX" ) ); + vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) ); + + //=== VK_AMD_draw_indirect_count === + vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; + vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + + //=== VK_AMD_shader_info === + vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetInstanceProcAddr( instance, "vkGetShaderInfoAMD" ) ); + + //=== VK_KHR_dynamic_rendering === + vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderingKHR" ) ); + if ( !vkCmdBeginRendering ) + vkCmdBeginRendering = vkCmdBeginRenderingKHR; + vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderingKHR" ) ); + if ( !vkCmdEndRendering ) + vkCmdEndRendering = vkCmdEndRenderingKHR; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + vkGetPhysicalDeviceExternalImageFormatPropertiesNV = + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); #if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_semaphore_win32 === - vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); - vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); + //=== VK_NV_external_memory_win32 === + vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - 
//=== VK_KHR_external_semaphore_fd === - vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) ); - vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); + //=== VK_KHR_get_physical_device_properties2 === + vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); + if ( !vkGetPhysicalDeviceFeatures2 ) + vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR; + vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceProperties2 ) + vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR; + vkGetPhysicalDeviceFormatProperties2KHR = + PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceFormatProperties2 ) + vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR; + vkGetPhysicalDeviceImageFormatProperties2KHR = + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceImageFormatProperties2 ) + vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR; + vkGetPhysicalDeviceQueueFamilyProperties2KHR = + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) + vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR; + vkGetPhysicalDeviceMemoryProperties2KHR = + PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceMemoryProperties2 ) + vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR; + vkGetPhysicalDeviceSparseImageFormatProperties2KHR = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) + vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR; + + //=== VK_KHR_device_group === + vkGetDeviceGroupPeerMemoryFeaturesKHR = + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + if ( !vkGetDeviceGroupPeerMemoryFeatures ) + vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) ); + if ( !vkCmdSetDeviceMask ) + vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) ); + if ( !vkCmdDispatchBase ) + vkCmdDispatchBase = vkCmdDispatchBaseKHR; - //=== VK_KHR_push_descriptor === - vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); - vkCmdPushDescriptorSetWithTemplateKHR = - PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + vkCreateViSurfaceNN = 
PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) ); +#endif /*VK_USE_PLATFORM_VI_NN*/ - //=== VK_EXT_conditional_rendering === - vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); - vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); + //=== VK_KHR_maintenance1 === + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) ); + if ( !vkTrimCommandPool ) + vkTrimCommandPool = vkTrimCommandPoolKHR; - //=== VK_KHR_descriptor_update_template === - vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); - if ( !vkCreateDescriptorUpdateTemplate ) - vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; - vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) ); - if ( !vkDestroyDescriptorUpdateTemplate ) - vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; - vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); - if ( !vkUpdateDescriptorSetWithTemplate ) - vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + //=== VK_KHR_device_group_creation === + vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); + if ( !vkEnumeratePhysicalDeviceGroups ) + vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR; - //=== VK_NV_clip_space_w_scaling === - vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); + //=== VK_KHR_external_memory_capabilities === + vkGetPhysicalDeviceExternalBufferPropertiesKHR = + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalBufferProperties ) + vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR; - //=== VK_EXT_display_control === - vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); - vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); - vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); - vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) ); + vkGetMemoryWin32HandlePropertiesKHR = + PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_GOOGLE_display_timing === - vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); - vkGetPastPresentationTimingGOOGLE = 
PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); + //=== VK_KHR_external_memory_fd === + vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) ); + vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) ); - //=== VK_EXT_discard_rectangles === - vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); + //=== VK_KHR_external_semaphore_capabilities === + vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) + vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; - //=== VK_EXT_hdr_metadata === - vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) ); + vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_create_renderpass2 === - vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); - if ( !vkCreateRenderPass2 ) - vkCreateRenderPass2 = vkCreateRenderPass2KHR; - vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); - if ( !vkCmdBeginRenderPass2 ) - vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; - vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); - if ( !vkCmdNextSubpass2 ) - vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; - vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); - if ( !vkCmdEndRenderPass2 ) - vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + //=== VK_KHR_external_semaphore_fd === + vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) ); + vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) ); + + //=== VK_KHR_push_descriptor === + vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) ); + if ( !vkCmdPushDescriptorSet ) + vkCmdPushDescriptorSet = vkCmdPushDescriptorSetKHR; + vkCmdPushDescriptorSetWithTemplateKHR = + PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + if ( !vkCmdPushDescriptorSetWithTemplate ) + vkCmdPushDescriptorSetWithTemplate = vkCmdPushDescriptorSetWithTemplateKHR; + + //=== VK_EXT_conditional_rendering === + vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) ); + + //=== VK_KHR_descriptor_update_template === + vkCreateDescriptorUpdateTemplateKHR = + 
PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) ); + if ( !vkCreateDescriptorUpdateTemplate ) + vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; + vkDestroyDescriptorUpdateTemplateKHR = + PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + if ( !vkDestroyDescriptorUpdateTemplate ) + vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; + vkUpdateDescriptorSetWithTemplateKHR = + PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + if ( !vkUpdateDescriptorSetWithTemplate ) + vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + + //=== VK_NV_clip_space_w_scaling === + vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) ); + + //=== VK_EXT_direct_mode_display === + vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); - //=== VK_KHR_shared_presentable_image === - vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) ); +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); + vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + vkGetPhysicalDeviceSurfaceCapabilities2EXT = + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); + + //=== VK_EXT_display_control === + vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) ); + vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) ); + vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) ); + + //=== VK_GOOGLE_display_timing === + vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) ); + vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) ); + + //=== VK_EXT_discard_rectangles === + vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) ); + vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEnableEXT" ) ); + vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleModeEXT" ) ); + + //=== VK_EXT_hdr_metadata === + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) ); + + //=== VK_KHR_create_renderpass2 === + vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) ); + if ( 
!vkCreateRenderPass2 ) + vkCreateRenderPass2 = vkCreateRenderPass2KHR; + vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) ); + if ( !vkCmdBeginRenderPass2 ) + vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) ); + if ( !vkCmdNextSubpass2 ) + vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) ); + if ( !vkCmdEndRenderPass2 ) + vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + + //=== VK_KHR_shared_presentable_image === + vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) ); + + //=== VK_KHR_external_fence_capabilities === + vkGetPhysicalDeviceExternalFencePropertiesKHR = + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalFenceProperties ) + vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR; #if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_fence_win32 === - vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); - vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); + //=== VK_KHR_external_fence_win32 === + vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) ); + vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_external_fence_fd === - vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) ); - vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); + //=== VK_KHR_external_fence_fd === + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetInstanceProcAddr( instance, "vkImportFenceFdKHR" ) ); + vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) ); + + //=== VK_KHR_performance_query === + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); + vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); + vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) ); + vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) ); + + //=== VK_KHR_get_surface_capabilities2 === + vkGetPhysicalDeviceSurfaceCapabilities2KHR = + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); + vkGetPhysicalDeviceSurfaceFormats2KHR = + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); + + //=== VK_KHR_get_display_properties2 === + 
vkGetPhysicalDeviceDisplayProperties2KHR = + PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); + vkGetPhysicalDeviceDisplayPlaneProperties2KHR = + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); + vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); + vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); - //=== VK_KHR_performance_query === - vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); - vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) ); +#endif /*VK_USE_PLATFORM_IOS_MVK*/ - //=== VK_EXT_debug_utils === - vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); - vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); - vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) ); - vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) ); - vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) ); - vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); - vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); - vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) ); + vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( 
vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) ); + vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) ); + vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); + vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); #if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_ANDROID_external_memory_android_hardware_buffer === - vkGetAndroidHardwareBufferPropertiesANDROID = - PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); - vkGetMemoryAndroidHardwareBufferANDROID = - PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); + //=== VK_ANDROID_external_memory_android_hardware_buffer === + vkGetAndroidHardwareBufferPropertiesANDROID = + PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); + vkGetMemoryAndroidHardwareBufferANDROID = + PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - //=== VK_EXT_sample_locations === - vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + vkCreateExecutionGraphPipelinesAMDX = + PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetInstanceProcAddr( instance, "vkCreateExecutionGraphPipelinesAMDX" ) ); + vkGetExecutionGraphPipelineScratchSizeAMDX = + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetInstanceProcAddr( instance, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); + vkGetExecutionGraphPipelineNodeIndexAMDX = + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetInstanceProcAddr( instance, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); + vkCmdInitializeGraphScratchMemoryAMDX = + PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetInstanceProcAddr( instance, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); + vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphAMDX" ) ); + vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectAMDX" ) ); + vkCmdDispatchGraphIndirectCountAMDX = + PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectCountAMDX" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - //=== VK_KHR_get_memory_requirements2 === - vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); - if ( !vkGetImageMemoryRequirements2 ) - vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; - vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); - if ( !vkGetBufferMemoryRequirements2 ) - vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; - vkGetImageSparseMemoryRequirements2KHR = - PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); - if ( !vkGetImageSparseMemoryRequirements2 ) - 
vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + //=== VK_EXT_sample_locations === + vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) ); + vkGetPhysicalDeviceMultisamplePropertiesEXT = + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); + + //=== VK_KHR_get_memory_requirements2 === + vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) ); + if ( !vkGetImageMemoryRequirements2 ) + vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; + vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) ); + if ( !vkGetBufferMemoryRequirements2 ) + vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; + vkGetImageSparseMemoryRequirements2KHR = + PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) ); + if ( !vkGetImageSparseMemoryRequirements2 ) + vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + + //=== VK_KHR_acceleration_structure === + vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) ); + vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) ); + vkCmdBuildAccelerationStructuresKHR = + PFN_vkCmdBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresKHR" ) ); + vkCmdBuildAccelerationStructuresIndirectKHR = + PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); + vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructuresKHR" ) ); + vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) ); + vkCopyAccelerationStructureToMemoryKHR = + PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) ); + vkCopyMemoryToAccelerationStructureKHR = + PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) ); + vkWriteAccelerationStructuresPropertiesKHR = + PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) ); + vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureToMemoryKHR = + PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdCopyMemoryToAccelerationStructureKHR = + PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); + vkGetAccelerationStructureDeviceAddressKHR = + PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = + 
PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + vkGetDeviceAccelerationStructureCompatibilityKHR = + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); + vkGetAccelerationStructureBuildSizesKHR = + PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureBuildSizesKHR" ) ); + + //=== VK_KHR_ray_tracing_pipeline === + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) ); + vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) ); + vkGetRayTracingShaderGroupHandlesKHR = + PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); + vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) ); + vkGetRayTracingShaderGroupStackSizeKHR = + PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); + vkCmdSetRayTracingPipelineStackSizeKHR = + PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + + //=== VK_KHR_sampler_ycbcr_conversion === + vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) ); + if ( !vkCreateSamplerYcbcrConversion ) + vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; + vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) ); + if ( !vkDestroySamplerYcbcrConversion ) + vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + + //=== VK_KHR_bind_memory2 === + vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) ); + if ( !vkBindBufferMemory2 ) + vkBindBufferMemory2 = vkBindBufferMemory2KHR; + vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) ); + if ( !vkBindImageMemory2 ) + vkBindImageMemory2 = vkBindImageMemory2KHR; + + //=== VK_EXT_image_drm_format_modifier === + vkGetImageDrmFormatModifierPropertiesEXT = + PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + + //=== VK_EXT_validation_cache === + vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) ); + vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) ); + vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) ); + vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) ); + + //=== VK_NV_shading_rate_image === + vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, 
"vkCmdBindShadingRateImageNV" ) ); + vkCmdSetViewportShadingRatePaletteNV = + PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) ); + + //=== VK_NV_ray_tracing === + vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) ); + vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) ); + vkGetAccelerationStructureMemoryRequirementsNV = + PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); + vkBindAccelerationStructureMemoryNV = + PFN_vkBindAccelerationStructureMemoryNV( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) ); + vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) ); + vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) ); + vkGetRayTracingShaderGroupHandlesNV = + PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) ); + if ( !vkGetRayTracingShaderGroupHandlesKHR ) + vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; + vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = + PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); + vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) ); + + //=== VK_KHR_maintenance3 === + vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) ); + if ( !vkGetDescriptorSetLayoutSupport ) + vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + + //=== VK_KHR_draw_indirect_count === + vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; + vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + + //=== VK_EXT_external_memory_host === + vkGetMemoryHostPointerPropertiesEXT = + PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) ); + + //=== VK_AMD_buffer_marker === + vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetInstanceProcAddr( instance, 
"vkCmdWriteBufferMarker2AMD" ) ); + + //=== VK_EXT_calibrated_timestamps === + vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); + if ( !vkGetPhysicalDeviceCalibrateableTimeDomainsKHR ) + vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = vkGetPhysicalDeviceCalibrateableTimeDomainsEXT; + vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) ); + if ( !vkGetCalibratedTimestampsKHR ) + vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT; + + //=== VK_NV_mesh_shader === + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) ); + vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + + //=== VK_NV_scissor_exclusive === + vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorEnableNV" ) ); + vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) ); + + //=== VK_NV_device_diagnostic_checkpoints === + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) ); + vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) ); + vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointData2NV" ) ); + + //=== VK_KHR_timeline_semaphore === + vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) ); + if ( !vkGetSemaphoreCounterValue ) + vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; + vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) ); + if ( !vkWaitSemaphores ) + vkWaitSemaphores = vkWaitSemaphoresKHR; + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) ); + if ( !vkSignalSemaphore ) + vkSignalSemaphore = vkSignalSemaphoreKHR; + + //=== VK_INTEL_performance_query === + vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) ); + vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) ); + vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = + PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkAcquirePerformanceConfigurationINTEL = + PFN_vkAcquirePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkReleasePerformanceConfigurationINTEL = + PFN_vkReleasePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, 
"vkReleasePerformanceConfigurationINTEL" ) ); + vkQueueSetPerformanceConfigurationINTEL = + PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) ); + vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) ); + + //=== VK_AMD_display_native_hdr === + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) ); - //=== VK_KHR_acceleration_structure === - vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); - vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); - vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) ); - vkCmdBuildAccelerationStructuresIndirectKHR = - PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); - vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) ); - vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); - vkCopyAccelerationStructureToMemoryKHR = - PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); - vkCopyMemoryToAccelerationStructureKHR = - PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); - vkWriteAccelerationStructuresPropertiesKHR = - PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) ); - vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); - vkCmdCopyAccelerationStructureToMemoryKHR = - PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); - vkCmdCopyMemoryToAccelerationStructureKHR = - PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); - vkGetAccelerationStructureDeviceAddressKHR = - PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); - vkCmdWriteAccelerationStructuresPropertiesKHR = - PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); - vkGetDeviceAccelerationStructureCompatibilityKHR = - PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); - vkGetAccelerationStructureBuildSizesKHR = - PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) ); +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - //=== VK_KHR_sampler_ycbcr_conversion === - vkCreateSamplerYcbcrConversionKHR = 
PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); - if ( !vkCreateSamplerYcbcrConversion ) - vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; - vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); - if ( !vkDestroySamplerYcbcrConversion ) - vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) ); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_KHR_bind_memory2 === - vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); - if ( !vkBindBufferMemory2 ) - vkBindBufferMemory2 = vkBindBufferMemory2KHR; - vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); - if ( !vkBindImageMemory2 ) - vkBindImageMemory2 = vkBindImageMemory2KHR; + //=== VK_KHR_fragment_shading_rate === + vkGetPhysicalDeviceFragmentShadingRatesKHR = + PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) ); + vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateKHR" ) ); + + //=== VK_KHR_dynamic_rendering_local_read === + vkCmdSetRenderingAttachmentLocationsKHR = + PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRenderingAttachmentLocationsKHR" ) ); + if ( !vkCmdSetRenderingAttachmentLocations ) + vkCmdSetRenderingAttachmentLocations = vkCmdSetRenderingAttachmentLocationsKHR; + vkCmdSetRenderingInputAttachmentIndicesKHR = + PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) ); + if ( !vkCmdSetRenderingInputAttachmentIndices ) + vkCmdSetRenderingInputAttachmentIndices = vkCmdSetRenderingInputAttachmentIndicesKHR; + + //=== VK_EXT_buffer_device_address === + vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + + //=== VK_EXT_tooling_info === + vkGetPhysicalDeviceToolPropertiesEXT = + PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) ); + if ( !vkGetPhysicalDeviceToolProperties ) + vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT; + + //=== VK_KHR_present_wait === + vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetInstanceProcAddr( instance, "vkWaitForPresentKHR" ) ); + + //=== VK_NV_cooperative_matrix === + vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); + + //=== VK_NV_coverage_reduction_mode === + vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); - //=== VK_EXT_image_drm_format_modifier === - vkGetImageDrmFormatModifierPropertiesEXT = - PFN_vkGetImageDrmFormatModifierPropertiesEXT( 
vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + vkGetPhysicalDeviceSurfacePresentModes2EXT = + PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); + vkAcquireFullScreenExclusiveModeEXT = + PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) ); + vkReleaseFullScreenExclusiveModeEXT = + PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) ); + vkGetDeviceGroupSurfacePresentModes2EXT = + PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_EXT_validation_cache === - vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); - vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); - vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); - vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); + //=== VK_EXT_headless_surface === + vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) ); + + //=== VK_KHR_buffer_device_address === + vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; + vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + if ( !vkGetBufferOpaqueCaptureAddress ) + vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; + vkGetDeviceMemoryOpaqueCaptureAddressKHR = + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) + vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + + //=== VK_EXT_line_rasterization === + vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) ); + if ( !vkCmdSetLineStipple ) + vkCmdSetLineStipple = vkCmdSetLineStippleEXT; + + //=== VK_EXT_host_query_reset === + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) ); + if ( !vkResetQueryPool ) + vkResetQueryPool = vkResetQueryPoolEXT; + + //=== VK_EXT_extended_dynamic_state === + vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetCullModeEXT" ) ); + if ( !vkCmdSetCullMode ) + vkCmdSetCullMode = vkCmdSetCullModeEXT; + vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFaceEXT" ) ); + if ( !vkCmdSetFrontFace ) + vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; + vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopologyEXT" ) ); + if ( !vkCmdSetPrimitiveTopology ) + vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; + vkCmdSetViewportWithCountEXT = 
PFN_vkCmdSetViewportWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCountEXT" ) ); + if ( !vkCmdSetViewportWithCount ) + vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; + vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCountEXT" ) ); + if ( !vkCmdSetScissorWithCount ) + vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; + vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2EXT" ) ); + if ( !vkCmdBindVertexBuffers2 ) + vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; + vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnableEXT" ) ); + if ( !vkCmdSetDepthTestEnable ) + vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; + vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnableEXT" ) ); + if ( !vkCmdSetDepthWriteEnable ) + vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; + vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOpEXT" ) ); + if ( !vkCmdSetDepthCompareOp ) + vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; + vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnableEXT" ) ); + if ( !vkCmdSetDepthBoundsTestEnable ) + vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; + vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnableEXT" ) ); + if ( !vkCmdSetStencilTestEnable ) + vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; + vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOpEXT" ) ); + if ( !vkCmdSetStencilOp ) + vkCmdSetStencilOp = vkCmdSetStencilOpEXT; + + //=== VK_KHR_deferred_host_operations === + vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) ); + vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) ); + vkGetDeferredOperationMaxConcurrencyKHR = + PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); + vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) ); + vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) ); + + //=== VK_KHR_pipeline_executable_properties === + vkGetPipelineExecutablePropertiesKHR = + PFN_vkGetPipelineExecutablePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) ); + vkGetPipelineExecutableStatisticsKHR = + PFN_vkGetPipelineExecutableStatisticsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) ); + vkGetPipelineExecutableInternalRepresentationsKHR = + PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + + //=== VK_EXT_host_image_copy === + vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToImageEXT" ) ); + if ( !vkCopyMemoryToImage ) + vkCopyMemoryToImage = vkCopyMemoryToImageEXT; 
+ vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToMemoryEXT" ) ); + if ( !vkCopyImageToMemory ) + vkCopyImageToMemory = vkCopyImageToMemoryEXT; + vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToImageEXT" ) ); + if ( !vkCopyImageToImage ) + vkCopyImageToImage = vkCopyImageToImageEXT; + vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetInstanceProcAddr( instance, "vkTransitionImageLayoutEXT" ) ); + if ( !vkTransitionImageLayout ) + vkTransitionImageLayout = vkTransitionImageLayoutEXT; + vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2EXT" ) ); + if ( !vkGetImageSubresourceLayout2 ) + vkGetImageSubresourceLayout2 = vkGetImageSubresourceLayout2EXT; + + //=== VK_KHR_map_memory2 === + vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetInstanceProcAddr( instance, "vkMapMemory2KHR" ) ); + if ( !vkMapMemory2 ) + vkMapMemory2 = vkMapMemory2KHR; + vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetInstanceProcAddr( instance, "vkUnmapMemory2KHR" ) ); + if ( !vkUnmapMemory2 ) + vkUnmapMemory2 = vkUnmapMemory2KHR; + + //=== VK_EXT_swapchain_maintenance1 === + vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetInstanceProcAddr( instance, "vkReleaseSwapchainImagesEXT" ) ); + + //=== VK_NV_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsNV = + PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); + vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) ); + vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) ); + + //=== VK_EXT_depth_bias_control === + vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias2EXT" ) ); + + //=== VK_EXT_acquire_drm_display === + vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) ); + vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) ); + + //=== VK_EXT_private_data === + vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlotEXT" ) ); + if ( !vkCreatePrivateDataSlot ) + vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; + vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlotEXT" ) ); + if ( !vkDestroyPrivateDataSlot ) + vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkSetPrivateDataEXT" ) ); + if ( !vkSetPrivateData ) + vkSetPrivateData = vkSetPrivateDataEXT; + vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkGetPrivateDataEXT" ) ); + if ( 
!vkGetPrivateData ) + vkGetPrivateData = vkGetPrivateDataEXT; + + //=== VK_KHR_video_encode_queue === + vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR" ) ); + vkGetEncodedVideoSessionParametersKHR = + PFN_vkGetEncodedVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkGetEncodedVideoSessionParametersKHR" ) ); + vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdEncodeVideoKHR" ) ); - //=== VK_NV_shading_rate_image === - vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); - vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); - vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetInstanceProcAddr( instance, "vkCreateCudaModuleNV" ) ); + vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetInstanceProcAddr( instance, "vkGetCudaModuleCacheNV" ) ); + vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetInstanceProcAddr( instance, "vkCreateCudaFunctionNV" ) ); + vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetInstanceProcAddr( instance, "vkDestroyCudaModuleNV" ) ); + vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetInstanceProcAddr( instance, "vkDestroyCudaFunctionNV" ) ); + vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetInstanceProcAddr( instance, "vkCmdCudaLaunchKernelNV" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - //=== VK_NV_ray_tracing === - vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); - vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); - vkGetAccelerationStructureMemoryRequirementsNV = - PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); - vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); - vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); - vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); - vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); - vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); - vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); - if ( !vkGetRayTracingShaderGroupHandlesKHR ) - vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; - vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); - vkCmdWriteAccelerationStructuresPropertiesNV = - 
PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); - vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetInstanceProcAddr( instance, "vkExportMetalObjectsEXT" ) ); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_KHR_maintenance3 === - vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); - if ( !vkGetDescriptorSetLayoutSupport ) - vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + //=== VK_KHR_synchronization2 === + vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2KHR" ) ); + if ( !vkCmdSetEvent2 ) + vkCmdSetEvent2 = vkCmdSetEvent2KHR; + vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2KHR" ) ); + if ( !vkCmdResetEvent2 ) + vkCmdResetEvent2 = vkCmdResetEvent2KHR; + vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2KHR" ) ); + if ( !vkCmdWaitEvents2 ) + vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; + vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2KHR" ) ); + if ( !vkCmdPipelineBarrier2 ) + vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; + vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2KHR" ) ); + if ( !vkCmdWriteTimestamp2 ) + vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; + vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetInstanceProcAddr( instance, "vkQueueSubmit2KHR" ) ); + if ( !vkQueueSubmit2 ) + vkQueueSubmit2 = vkQueueSubmit2KHR; + + //=== VK_EXT_descriptor_buffer === + vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSizeEXT" ) ); + vkGetDescriptorSetLayoutBindingOffsetEXT = + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) ); + vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorEXT" ) ); + vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBuffersEXT" ) ); + vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplersEXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); + vkGetBufferOpaqueCaptureDescriptorDataEXT = + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageViewOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) ); + vkGetSamplerOpaqueCaptureDescriptorDataEXT = + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" 
) ); + vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) ); + + //=== VK_NV_fragment_shading_rate_enums === + vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateEnumNV" ) ); + + //=== VK_EXT_mesh_shader === + vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksEXT" ) ); + vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectEXT" ) ); + vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); + + //=== VK_KHR_copy_commands2 === + vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2KHR" ) ); + if ( !vkCmdCopyBuffer2 ) + vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; + vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2KHR" ) ); + if ( !vkCmdCopyImage2 ) + vkCmdCopyImage2 = vkCmdCopyImage2KHR; + vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2KHR" ) ); + if ( !vkCmdCopyBufferToImage2 ) + vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; + vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2KHR" ) ); + if ( !vkCmdCopyImageToBuffer2 ) + vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; + vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2KHR" ) ); + if ( !vkCmdBlitImage2 ) + vkCmdBlitImage2 = vkCmdBlitImage2KHR; + vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2KHR" ) ); + if ( !vkCmdResolveImage2 ) + vkCmdResolveImage2 = vkCmdResolveImage2KHR; + + //=== VK_EXT_device_fault === + vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceFaultInfoEXT" ) ); - //=== VK_KHR_draw_indirect_count === - vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); - if ( !vkCmdDrawIndirectCount ) - vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; - vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); - if ( !vkCmdDrawIndexedIndirectCount ) - vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) ); + vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_EXT_external_memory_host === - vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) ); +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) ); + vkGetPhysicalDeviceDirectFBPresentationSupportEXT = + 
PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) ); +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ - //=== VK_AMD_buffer_marker === - vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); + //=== VK_EXT_vertex_input_dynamic_state === + vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetInstanceProcAddr( instance, "vkCmdSetVertexInputEXT" ) ); - //=== VK_EXT_calibrated_timestamps === - vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandleFUCHSIA" ) ); + vkGetMemoryZirconHandlePropertiesFUCHSIA = + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - //=== VK_NV_mesh_shader === - vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); - vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); - vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + vkImportSemaphoreZirconHandleFUCHSIA = + PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); + vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - //=== VK_NV_scissor_exclusive === - vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateBufferCollectionFUCHSIA" ) ); + vkSetBufferCollectionImageConstraintsFUCHSIA = + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); + vkSetBufferCollectionBufferConstraintsFUCHSIA = + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); + vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkDestroyBufferCollectionFUCHSIA" ) ); + vkGetBufferCollectionPropertiesFUCHSIA = + PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - //=== VK_NV_device_diagnostic_checkpoints === - vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); - vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); + //=== VK_HUAWEI_subpass_shading === + vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetInstanceProcAddr( instance, 
"vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); + vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdSubpassShadingHUAWEI" ) ); + + //=== VK_HUAWEI_invocation_mask === + vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdBindInvocationMaskHUAWEI" ) ); + + //=== VK_NV_external_memory_rdma === + vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetInstanceProcAddr( instance, "vkGetMemoryRemoteAddressNV" ) ); + + //=== VK_EXT_pipeline_properties === + vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPipelinePropertiesEXT" ) ); + + //=== VK_EXT_extended_dynamic_state2 === + vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPatchControlPointsEXT" ) ); + vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnableEXT" ) ); + if ( !vkCmdSetRasterizerDiscardEnable ) + vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; + vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnableEXT" ) ); + if ( !vkCmdSetDepthBiasEnable ) + vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; + vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEXT" ) ); + vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnableEXT" ) ); + if ( !vkCmdSetPrimitiveRestartEnable ) + vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; - //=== VK_KHR_timeline_semaphore === - vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); - if ( !vkGetSemaphoreCounterValue ) - vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; - vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) ); - if ( !vkWaitSemaphores ) - vkWaitSemaphores = vkWaitSemaphoresKHR; - vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); - if ( !vkSignalSemaphore ) - vkSignalSemaphore = vkSignalSemaphoreKHR; +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) ); + vkGetPhysicalDeviceScreenPresentationSupportQNX = + PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) ); +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - //=== VK_INTEL_performance_query === - vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) ); - vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); - vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); - vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); - vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, 
"vkCmdSetPerformanceOverrideINTEL" ) ); - vkAcquirePerformanceConfigurationINTEL = - PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); - vkReleasePerformanceConfigurationINTEL = - PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); - vkQueueSetPerformanceConfigurationINTEL = - PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) ); - vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); + //=== VK_EXT_color_write_enable === + vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteEnableEXT" ) ); + + //=== VK_KHR_ray_tracing_maintenance1 === + vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirect2KHR" ) ); + + //=== VK_EXT_multi_draw === + vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiEXT" ) ); + vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiIndexedEXT" ) ); + + //=== VK_EXT_opacity_micromap === + vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetInstanceProcAddr( instance, "vkCreateMicromapEXT" ) ); + vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetInstanceProcAddr( instance, "vkDestroyMicromapEXT" ) ); + vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkCmdBuildMicromapsEXT" ) ); + vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkBuildMicromapsEXT" ) ); + vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapEXT" ) ); + vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapToMemoryEXT" ) ); + vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToMicromapEXT" ) ); + vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkWriteMicromapsPropertiesEXT" ) ); + vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapEXT" ) ); + vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapToMemoryEXT" ) ); + vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToMicromapEXT" ) ); + vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkCmdWriteMicromapsPropertiesEXT" ) ); + vkGetDeviceMicromapCompatibilityEXT = + PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceMicromapCompatibilityEXT" ) ); + vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetInstanceProcAddr( instance, "vkGetMicromapBuildSizesEXT" ) ); + + //=== VK_HUAWEI_cluster_culling_shader === + vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdDrawClusterHUAWEI" ) ); + vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdDrawClusterIndirectHUAWEI" ) ); + + //=== VK_EXT_pageable_device_local_memory === + vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetInstanceProcAddr( instance, 
"vkSetDeviceMemoryPriorityEXT" ) ); + + //=== VK_KHR_maintenance4 === + vkGetDeviceBufferMemoryRequirementsKHR = + PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceBufferMemoryRequirements ) + vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR; + vkGetDeviceImageMemoryRequirementsKHR = + PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageMemoryRequirements ) + vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; + vkGetDeviceImageSparseMemoryRequirementsKHR = + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageSparseMemoryRequirements ) + vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; + + //=== VK_VALVE_descriptor_set_host_mapping === + vkGetDescriptorSetLayoutHostMappingInfoVALVE = + PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); + vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetHostMappingVALVE" ) ); + + //=== VK_NV_copy_memory_indirect === + vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryIndirectNV" ) ); + vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToImageIndirectNV" ) ); + + //=== VK_NV_memory_decompression === + vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetInstanceProcAddr( instance, "vkCmdDecompressMemoryNV" ) ); + vkCmdDecompressMemoryIndirectCountNV = + PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDecompressMemoryIndirectCountNV" ) ); + + //=== VK_NV_device_generated_commands_compute === + vkGetPipelineIndirectMemoryRequirementsNV = + PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); + vkCmdUpdatePipelineIndirectBufferNV = + PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetInstanceProcAddr( instance, "vkCmdUpdatePipelineIndirectBufferNV" ) ); + vkGetPipelineIndirectDeviceAddressNV = + PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetInstanceProcAddr( instance, "vkGetPipelineIndirectDeviceAddressNV" ) ); + + //=== VK_EXT_extended_dynamic_state3 === + vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClampEnableEXT" ) ); + vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPolygonModeEXT" ) ); + vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizationSamplesEXT" ) ); + vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleMaskEXT" ) ); + vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAlphaToCoverageEnableEXT" ) ); + vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAlphaToOneEnableEXT" ) ); + vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEnableEXT" ) ); + vkCmdSetColorBlendEnableEXT = 
PFN_vkCmdSetColorBlendEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendEnableEXT" ) ); + vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendEquationEXT" ) ); + vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteMaskEXT" ) ); + vkCmdSetTessellationDomainOriginEXT = + PFN_vkCmdSetTessellationDomainOriginEXT( vkGetInstanceProcAddr( instance, "vkCmdSetTessellationDomainOriginEXT" ) ); + vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizationStreamEXT" ) ); + vkCmdSetConservativeRasterizationModeEXT = + PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetConservativeRasterizationModeEXT" ) ); + vkCmdSetExtraPrimitiveOverestimationSizeEXT = + PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) ); + vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClipEnableEXT" ) ); + vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEnableEXT" ) ); + vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendAdvancedEXT" ) ); + vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetProvokingVertexModeEXT" ) ); + vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineRasterizationModeEXT" ) ); + vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEnableEXT" ) ); + vkCmdSetDepthClipNegativeOneToOneEXT = + PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClipNegativeOneToOneEXT" ) ); + vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingEnableNV" ) ); + vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportSwizzleNV" ) ); + vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageToColorEnableNV" ) ); + vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageToColorLocationNV" ) ); + vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationModeNV" ) ); + vkCmdSetCoverageModulationTableEnableNV = + PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationTableEnableNV" ) ); + vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationTableNV" ) ); + vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetShadingRateImageEnableNV" ) ); + vkCmdSetRepresentativeFragmentTestEnableNV = + PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetRepresentativeFragmentTestEnableNV" ) ); + vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageReductionModeNV" ) ); + + //=== 
VK_EXT_shader_module_identifier === + vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleIdentifierEXT" ) ); + vkGetShaderModuleCreateInfoIdentifierEXT = + PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleCreateInfoIdentifierEXT" ) ); + + //=== VK_NV_optical_flow === + vkGetPhysicalDeviceOpticalFlowImageFormatsNV = + PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV" ) ); + vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetInstanceProcAddr( instance, "vkCreateOpticalFlowSessionNV" ) ); + vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetInstanceProcAddr( instance, "vkDestroyOpticalFlowSessionNV" ) ); + vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetInstanceProcAddr( instance, "vkBindOpticalFlowSessionImageNV" ) ); + vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetInstanceProcAddr( instance, "vkCmdOpticalFlowExecuteNV" ) ); + + //=== VK_KHR_maintenance5 === + vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer2KHR" ) ); + if ( !vkCmdBindIndexBuffer2 ) + vkCmdBindIndexBuffer2 = vkCmdBindIndexBuffer2KHR; + vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetInstanceProcAddr( instance, "vkGetRenderingAreaGranularityKHR" ) ); + if ( !vkGetRenderingAreaGranularity ) + vkGetRenderingAreaGranularity = vkGetRenderingAreaGranularityKHR; + vkGetDeviceImageSubresourceLayoutKHR = + PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSubresourceLayoutKHR" ) ); + if ( !vkGetDeviceImageSubresourceLayout ) + vkGetDeviceImageSubresourceLayout = vkGetDeviceImageSubresourceLayoutKHR; + vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2KHR" ) ); + if ( !vkGetImageSubresourceLayout2 ) + vkGetImageSubresourceLayout2 = vkGetImageSubresourceLayout2KHR; + + //=== VK_AMD_anti_lag === + vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetInstanceProcAddr( instance, "vkAntiLagUpdateAMD" ) ); + + //=== VK_EXT_shader_object === + vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetInstanceProcAddr( instance, "vkCreateShadersEXT" ) ); + vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetInstanceProcAddr( instance, "vkDestroyShaderEXT" ) ); + vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetInstanceProcAddr( instance, "vkGetShaderBinaryDataEXT" ) ); + vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindShadersEXT" ) ); + vkCmdSetDepthClampRangeEXT = PFN_vkCmdSetDepthClampRangeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClampRangeEXT" ) ); + + //=== VK_KHR_pipeline_binary === + vkCreatePipelineBinariesKHR = PFN_vkCreatePipelineBinariesKHR( vkGetInstanceProcAddr( instance, "vkCreatePipelineBinariesKHR" ) ); + vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetInstanceProcAddr( instance, "vkDestroyPipelineBinaryKHR" ) ); + vkGetPipelineKeyKHR = PFN_vkGetPipelineKeyKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineKeyKHR" ) ); + vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineBinaryDataKHR" ) ); + vkReleaseCapturedPipelineDataKHR = PFN_vkReleaseCapturedPipelineDataKHR( vkGetInstanceProcAddr( instance, 
"vkReleaseCapturedPipelineDataKHR" ) ); + + //=== VK_QCOM_tile_properties === + vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetFramebufferTilePropertiesQCOM" ) ); + vkGetDynamicRenderingTilePropertiesQCOM = + PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetDynamicRenderingTilePropertiesQCOM" ) ); + + //=== VK_NV_cooperative_vector === + vkGetPhysicalDeviceCooperativeVectorPropertiesNV = + PFN_vkGetPhysicalDeviceCooperativeVectorPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeVectorPropertiesNV" ) ); + vkConvertCooperativeVectorMatrixNV = PFN_vkConvertCooperativeVectorMatrixNV( vkGetInstanceProcAddr( instance, "vkConvertCooperativeVectorMatrixNV" ) ); + vkCmdConvertCooperativeVectorMatrixNV = + PFN_vkCmdConvertCooperativeVectorMatrixNV( vkGetInstanceProcAddr( instance, "vkCmdConvertCooperativeVectorMatrixNV" ) ); + + //=== VK_NV_low_latency2 === + vkSetLatencySleepModeNV = PFN_vkSetLatencySleepModeNV( vkGetInstanceProcAddr( instance, "vkSetLatencySleepModeNV" ) ); + vkLatencySleepNV = PFN_vkLatencySleepNV( vkGetInstanceProcAddr( instance, "vkLatencySleepNV" ) ); + vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetInstanceProcAddr( instance, "vkSetLatencyMarkerNV" ) ); + vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetInstanceProcAddr( instance, "vkGetLatencyTimingsNV" ) ); + vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetInstanceProcAddr( instance, "vkQueueNotifyOutOfBandNV" ) ); + + //=== VK_KHR_cooperative_matrix === + vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR" ) ); + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + vkCmdSetAttachmentFeedbackLoopEnableEXT = + PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) ); - //=== VK_AMD_display_native_hdr === - vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetInstanceProcAddr( instance, "vkGetScreenBufferPropertiesQNX" ) ); +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - //=== VK_KHR_fragment_shading_rate === - vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) ); + //=== VK_KHR_line_rasterization === + vkCmdSetLineStippleKHR = PFN_vkCmdSetLineStippleKHR( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleKHR" ) ); + if ( !vkCmdSetLineStipple ) + vkCmdSetLineStipple = vkCmdSetLineStippleKHR; + + //=== VK_KHR_calibrated_timestamps === + vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsKHR" ) ); + vkGetCalibratedTimestampsKHR = PFN_vkGetCalibratedTimestampsKHR( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsKHR" ) ); + + //=== VK_KHR_maintenance6 === + vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets2KHR" ) ); + if ( !vkCmdBindDescriptorSets2 ) + vkCmdBindDescriptorSets2 = vkCmdBindDescriptorSets2KHR; + vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( 
vkGetInstanceProcAddr( instance, "vkCmdPushConstants2KHR" ) ); + if ( !vkCmdPushConstants2 ) + vkCmdPushConstants2 = vkCmdPushConstants2KHR; + vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSet2KHR" ) ); + if ( !vkCmdPushDescriptorSet2 ) + vkCmdPushDescriptorSet2 = vkCmdPushDescriptorSet2KHR; + vkCmdPushDescriptorSetWithTemplate2KHR = + PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplate2KHR" ) ); + if ( !vkCmdPushDescriptorSetWithTemplate2 ) + vkCmdPushDescriptorSetWithTemplate2 = vkCmdPushDescriptorSetWithTemplate2KHR; + vkCmdSetDescriptorBufferOffsets2EXT = + PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetInstanceProcAddr( instance, "vkCmdSetDescriptorBufferOffsets2EXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) ); + + //=== VK_NV_cluster_acceleration_structure === + vkGetClusterAccelerationStructureBuildSizesNV = + PFN_vkGetClusterAccelerationStructureBuildSizesNV( vkGetInstanceProcAddr( instance, "vkGetClusterAccelerationStructureBuildSizesNV" ) ); + vkCmdBuildClusterAccelerationStructureIndirectNV = + PFN_vkCmdBuildClusterAccelerationStructureIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdBuildClusterAccelerationStructureIndirectNV" ) ); + + //=== VK_NV_partitioned_acceleration_structure === + vkGetPartitionedAccelerationStructuresBuildSizesNV = + PFN_vkGetPartitionedAccelerationStructuresBuildSizesNV( vkGetInstanceProcAddr( instance, "vkGetPartitionedAccelerationStructuresBuildSizesNV" ) ); + vkCmdBuildPartitionedAccelerationStructuresNV = + PFN_vkCmdBuildPartitionedAccelerationStructuresNV( vkGetInstanceProcAddr( instance, "vkCmdBuildPartitionedAccelerationStructuresNV" ) ); + + //=== VK_EXT_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsEXT = + PFN_vkGetGeneratedCommandsMemoryRequirementsEXT( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsEXT" ) ); + vkCmdPreprocessGeneratedCommandsEXT = + PFN_vkCmdPreprocessGeneratedCommandsEXT( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsEXT" ) ); + vkCmdExecuteGeneratedCommandsEXT = PFN_vkCmdExecuteGeneratedCommandsEXT( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsEXT" ) ); + vkCreateIndirectCommandsLayoutEXT = PFN_vkCreateIndirectCommandsLayoutEXT( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutEXT" ) ); + vkDestroyIndirectCommandsLayoutEXT = PFN_vkDestroyIndirectCommandsLayoutEXT( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutEXT" ) ); + vkCreateIndirectExecutionSetEXT = PFN_vkCreateIndirectExecutionSetEXT( vkGetInstanceProcAddr( instance, "vkCreateIndirectExecutionSetEXT" ) ); + vkDestroyIndirectExecutionSetEXT = PFN_vkDestroyIndirectExecutionSetEXT( vkGetInstanceProcAddr( instance, "vkDestroyIndirectExecutionSetEXT" ) ); + vkUpdateIndirectExecutionSetPipelineEXT = + PFN_vkUpdateIndirectExecutionSetPipelineEXT( vkGetInstanceProcAddr( instance, "vkUpdateIndirectExecutionSetPipelineEXT" ) ); + vkUpdateIndirectExecutionSetShaderEXT = + PFN_vkUpdateIndirectExecutionSetShaderEXT( vkGetInstanceProcAddr( instance, "vkUpdateIndirectExecutionSetShaderEXT" ) ); + + //=== VK_NV_cooperative_matrix2 === + vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + 
vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV" ) ); - //=== VK_EXT_buffer_device_address === - vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); - if ( !vkGetBufferDeviceAddress ) - vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + vkGetMemoryMetalHandleEXT = PFN_vkGetMemoryMetalHandleEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryMetalHandleEXT" ) ); + vkGetMemoryMetalHandlePropertiesEXT = + PFN_vkGetMemoryMetalHandlePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryMetalHandlePropertiesEXT" ) ); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + } - //=== VK_KHR_present_wait === - vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) ); + void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT + { + VkDevice device = static_cast( deviceCpp ); + + //=== VK_VERSION_1_0 === + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) ); + vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) ); + vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) ); + vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) ); + vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) ); + vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) ); + vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); + vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) ); + vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) ); + vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); + vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) ); + vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) ); + vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) ); + vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); + vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) ); + vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) ); + vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) ); + vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) ); + vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) ); + vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) ); + vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) ); + vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) ); + vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) ); + vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) ); + 
vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) ); + vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) ); + vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) ); + vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) ); + vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) ); + vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) ); + vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) ); + vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) ); + vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) ); + vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); + vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) ); + vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); + vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) ); + vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) ); + vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) ); + vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) ); + vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) ); + vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) ); + vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) ); + vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) ); + vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) ); + vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); + vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) ); + vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) ); + vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); + vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); + vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); + vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) ); + vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) ); + vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) ); + vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) ); + vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); + vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); + 
vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) ); + vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) ); + vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); + vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) ); + vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); + vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) ); + vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) ); + vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); + vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) ); + vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) ); + vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) ); + vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) ); + vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) ); + vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) ); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); + vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); + vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); + vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); + vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); + vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); + vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); + vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( 
device, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); + vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); + vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); + vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); + vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); + vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); + vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); + vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); + vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); + vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); + + //=== VK_VERSION_1_1 === + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); + vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, 
"vkCmdSetDeviceMask" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); + vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); + vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) ); + vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) ); + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); + vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) ); + vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) ); + vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) ); + vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); + vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); + vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); + vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); + + //=== VK_VERSION_1_2 === + vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); + vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); + vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); + vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) ); + vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) ); + vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); + vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) ); + vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = + PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + + //=== VK_VERSION_1_3 === + vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) ); + vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) ); + vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) ); + vkGetPrivateData = PFN_vkGetPrivateData( 
vkGetDeviceProcAddr( device, "vkGetPrivateData" ) ); + vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) ); + vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) ); + vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) ); + vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) ); + vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) ); + vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) ); + vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) ); + vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) ); + vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) ); + vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) ); + vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) ); + vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) ); + vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) ); + vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) ); + vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) ); + vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) ); + vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) ); + vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) ); + vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) ); + vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) ); + vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) ); + vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) ); + vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) ); + vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) ); + vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) ); + vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) ); + vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) ); + vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) ); + vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) ); + vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) ); + vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) ); + 
vkGetDeviceImageSparseMemoryRequirements = + PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) ); + + //=== VK_VERSION_1_4 === + vkCmdSetLineStipple = PFN_vkCmdSetLineStipple( vkGetDeviceProcAddr( device, "vkCmdSetLineStipple" ) ); + vkMapMemory2 = PFN_vkMapMemory2( vkGetDeviceProcAddr( device, "vkMapMemory2" ) ); + vkUnmapMemory2 = PFN_vkUnmapMemory2( vkGetDeviceProcAddr( device, "vkUnmapMemory2" ) ); + vkCmdBindIndexBuffer2 = PFN_vkCmdBindIndexBuffer2( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2" ) ); + vkGetRenderingAreaGranularity = PFN_vkGetRenderingAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularity" ) ); + vkGetDeviceImageSubresourceLayout = PFN_vkGetDeviceImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayout" ) ); + vkGetImageSubresourceLayout2 = PFN_vkGetImageSubresourceLayout2( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2" ) ); + vkCmdPushDescriptorSet = PFN_vkCmdPushDescriptorSet( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet" ) ); + vkCmdPushDescriptorSetWithTemplate = PFN_vkCmdPushDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate" ) ); + vkCmdSetRenderingAttachmentLocations = + PFN_vkCmdSetRenderingAttachmentLocations( vkGetDeviceProcAddr( device, "vkCmdSetRenderingAttachmentLocations" ) ); + vkCmdSetRenderingInputAttachmentIndices = + PFN_vkCmdSetRenderingInputAttachmentIndices( vkGetDeviceProcAddr( device, "vkCmdSetRenderingInputAttachmentIndices" ) ); + vkCmdBindDescriptorSets2 = PFN_vkCmdBindDescriptorSets2( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2" ) ); + vkCmdPushConstants2 = PFN_vkCmdPushConstants2( vkGetDeviceProcAddr( device, "vkCmdPushConstants2" ) ); + vkCmdPushDescriptorSet2 = PFN_vkCmdPushDescriptorSet2( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2" ) ); + vkCmdPushDescriptorSetWithTemplate2 = PFN_vkCmdPushDescriptorSetWithTemplate2( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2" ) ); + vkCopyMemoryToImage = PFN_vkCopyMemoryToImage( vkGetDeviceProcAddr( device, "vkCopyMemoryToImage" ) ); + vkCopyImageToMemory = PFN_vkCopyImageToMemory( vkGetDeviceProcAddr( device, "vkCopyImageToMemory" ) ); + vkCopyImageToImage = PFN_vkCopyImageToImage( vkGetDeviceProcAddr( device, "vkCopyImageToImage" ) ); + vkTransitionImageLayout = PFN_vkTransitionImageLayout( vkGetDeviceProcAddr( device, "vkTransitionImageLayout" ) ); + + //=== VK_KHR_swapchain === + vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) ); + vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); + vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) ); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); + vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) ); + vkGetDeviceGroupPresentCapabilitiesKHR = + PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); + vkGetDeviceGroupSurfacePresentModesKHR = + PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); + vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) ); + + //=== 
VK_KHR_display_swapchain === + vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) ); + + //=== VK_EXT_debug_marker === + vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); + vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); + vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); + + //=== VK_KHR_video_queue === + vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) ); + vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) ); + vkGetVideoSessionMemoryRequirementsKHR = + PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) ); + vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) ); + vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) ); + vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) ); + vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) ); + vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) ); + vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) ); + vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) ); + + //=== VK_KHR_video_decode_queue === + vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) ); + + //=== VK_EXT_transform_feedback === + vkCmdBindTransformFeedbackBuffersEXT = + PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); + vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); + vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) ); + vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); + + //=== VK_NVX_binary_import === + vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) ); + vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) ); + vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) ); + vkDestroyCuFunctionNVX 
= PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) ); + vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) ); + + //=== VK_NVX_image_view_handle === + vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); + vkGetImageViewHandle64NVX = PFN_vkGetImageViewHandle64NVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandle64NVX" ) ); + vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); + + //=== VK_AMD_draw_indirect_count === + vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; + vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + + //=== VK_AMD_shader_info === + vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) ); + + //=== VK_KHR_dynamic_rendering === + vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) ); + if ( !vkCmdBeginRendering ) + vkCmdBeginRendering = vkCmdBeginRenderingKHR; + vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) ); + if ( !vkCmdEndRendering ) + vkCmdEndRendering = vkCmdEndRenderingKHR; #if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_EXT_full_screen_exclusive === - vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); - vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); - vkGetDeviceGroupSurfacePresentModes2EXT = - PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); + //=== VK_NV_external_memory_win32 === + vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_buffer_device_address === - vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) ); - if ( !vkGetBufferDeviceAddress ) - vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; - vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); - if ( !vkGetBufferOpaqueCaptureAddress ) - vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; - vkGetDeviceMemoryOpaqueCaptureAddressKHR = - PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); - if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) - vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + //=== VK_KHR_device_group === + vkGetDeviceGroupPeerMemoryFeaturesKHR = + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + if ( !vkGetDeviceGroupPeerMemoryFeatures ) + vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; + 
vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); + if ( !vkCmdSetDeviceMask ) + vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); + if ( !vkCmdDispatchBase ) + vkCmdDispatchBase = vkCmdDispatchBaseKHR; + + //=== VK_KHR_maintenance1 === + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); + if ( !vkTrimCommandPool ) + vkTrimCommandPool = vkTrimCommandPoolKHR; - //=== VK_EXT_line_rasterization === - vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); + vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_EXT_host_query_reset === - vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); - if ( !vkResetQueryPool ) - vkResetQueryPool = vkResetQueryPoolEXT; + //=== VK_KHR_external_memory_fd === + vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) ); + vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) ); - //=== VK_EXT_extended_dynamic_state === - vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) ); - if ( !vkCmdSetCullMode ) - vkCmdSetCullMode = vkCmdSetCullModeEXT; - vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) ); - if ( !vkCmdSetFrontFace ) - vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; - vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) ); - if ( !vkCmdSetPrimitiveTopology ) - vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; - vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) ); - if ( !vkCmdSetViewportWithCount ) - vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; - vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) ); - if ( !vkCmdSetScissorWithCount ) - vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; - vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) ); - if ( !vkCmdBindVertexBuffers2 ) - vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; - vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) ); - if ( !vkCmdSetDepthTestEnable ) - vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; - vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) ); - if ( !vkCmdSetDepthWriteEnable ) - vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; - vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) ); - if ( !vkCmdSetDepthCompareOp ) - vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; - 
vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) ); - if ( !vkCmdSetDepthBoundsTestEnable ) - vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; - vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) ); - if ( !vkCmdSetStencilTestEnable ) - vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; - vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) ); - if ( !vkCmdSetStencilOp ) - vkCmdSetStencilOp = vkCmdSetStencilOpEXT; +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); + vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_deferred_host_operations === - vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) ); - vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) ); - vkGetDeferredOperationMaxConcurrencyKHR = - PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); - vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); - vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); + //=== VK_KHR_external_semaphore_fd === + vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) ); + vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); + + //=== VK_KHR_push_descriptor === + vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); + if ( !vkCmdPushDescriptorSet ) + vkCmdPushDescriptorSet = vkCmdPushDescriptorSetKHR; + vkCmdPushDescriptorSetWithTemplateKHR = + PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + if ( !vkCmdPushDescriptorSetWithTemplate ) + vkCmdPushDescriptorSetWithTemplate = vkCmdPushDescriptorSetWithTemplateKHR; + + //=== VK_EXT_conditional_rendering === + vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); + + //=== VK_KHR_descriptor_update_template === + vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); + if ( !vkCreateDescriptorUpdateTemplate ) + vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; + vkDestroyDescriptorUpdateTemplateKHR = + PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + if ( !vkDestroyDescriptorUpdateTemplate ) + vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; + 
vkUpdateDescriptorSetWithTemplateKHR = + PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + if ( !vkUpdateDescriptorSetWithTemplate ) + vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + + //=== VK_NV_clip_space_w_scaling === + vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); + + //=== VK_EXT_display_control === + vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); + vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); + vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); + + //=== VK_GOOGLE_display_timing === + vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); + vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); + + //=== VK_EXT_discard_rectangles === + vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); + vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEnableEXT" ) ); + vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleModeEXT" ) ); + + //=== VK_EXT_hdr_metadata === + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); + + //=== VK_KHR_create_renderpass2 === + vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); + if ( !vkCreateRenderPass2 ) + vkCreateRenderPass2 = vkCreateRenderPass2KHR; + vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); + if ( !vkCmdBeginRenderPass2 ) + vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); + if ( !vkCmdNextSubpass2 ) + vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); + if ( !vkCmdEndRenderPass2 ) + vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + + //=== VK_KHR_shared_presentable_image === + vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) ); - //=== VK_KHR_pipeline_executable_properties === - vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); - vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); - vkGetPipelineExecutableInternalRepresentationsKHR = - PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + vkImportFenceWin32HandleKHR = 
PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); + vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_NV_device_generated_commands === - vkGetGeneratedCommandsMemoryRequirementsNV = - PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); - vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); - vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); - vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); - vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); - vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); + //=== VK_KHR_external_fence_fd === + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) ); + vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); - //=== VK_EXT_private_data === - vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) ); - if ( !vkCreatePrivateDataSlot ) - vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; - vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) ); - if ( !vkDestroyPrivateDataSlot ) - vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; - vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) ); - if ( !vkSetPrivateData ) - vkSetPrivateData = vkSetPrivateDataEXT; - vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) ); - if ( !vkGetPrivateData ) - vkGetPrivateData = vkGetPrivateDataEXT; + //=== VK_KHR_performance_query === + vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); + vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); + + //=== VK_EXT_debug_utils === + vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); + vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdInsertDebugUtilsLabelEXT = 
PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + vkGetAndroidHardwareBufferPropertiesANDROID = + PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); + vkGetMemoryAndroidHardwareBufferANDROID = + PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_encode_queue === - vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) ); + //=== VK_AMDX_shader_enqueue === + vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetDeviceProcAddr( device, "vkCreateExecutionGraphPipelinesAMDX" ) ); + vkGetExecutionGraphPipelineScratchSizeAMDX = + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); + vkGetExecutionGraphPipelineNodeIndexAMDX = + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); + vkCmdInitializeGraphScratchMemoryAMDX = + PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetDeviceProcAddr( device, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); + vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) ); + vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) ); + vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_objects === - vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) ); -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - - //=== VK_KHR_synchronization2 === - vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) ); - if ( !vkCmdSetEvent2 ) - vkCmdSetEvent2 = vkCmdSetEvent2KHR; - vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) ); - if ( !vkCmdResetEvent2 ) - vkCmdResetEvent2 = vkCmdResetEvent2KHR; - vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) ); - if ( !vkCmdWaitEvents2 ) - vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; - vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) ); - if ( !vkCmdPipelineBarrier2 ) - vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; - vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) ); - if ( !vkCmdWriteTimestamp2 ) - vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; - vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) ); - if ( !vkQueueSubmit2 ) - vkQueueSubmit2 = vkQueueSubmit2KHR; - vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); - vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); + //=== VK_EXT_sample_locations === + 
vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); + + //=== VK_KHR_get_memory_requirements2 === + vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); + if ( !vkGetImageMemoryRequirements2 ) + vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; + vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); + if ( !vkGetBufferMemoryRequirements2 ) + vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; + vkGetImageSparseMemoryRequirements2KHR = + PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); + if ( !vkGetImageSparseMemoryRequirements2 ) + vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + + //=== VK_KHR_acceleration_structure === + vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); + vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); + vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) ); + vkCmdBuildAccelerationStructuresIndirectKHR = + PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); + vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) ); + vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); + vkCopyAccelerationStructureToMemoryKHR = + PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); + vkCopyMemoryToAccelerationStructureKHR = + PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); + vkWriteAccelerationStructuresPropertiesKHR = + PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) ); + vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureToMemoryKHR = + PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdCopyMemoryToAccelerationStructureKHR = + PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); + vkGetAccelerationStructureDeviceAddressKHR = + PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + vkGetDeviceAccelerationStructureCompatibilityKHR = + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); + vkGetAccelerationStructureBuildSizesKHR = + 
PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) ); + + //=== VK_KHR_ray_tracing_pipeline === + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) ); + vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); + vkGetRayTracingShaderGroupHandlesKHR = + PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); + vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); + vkGetRayTracingShaderGroupStackSizeKHR = + PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); + vkCmdSetRayTracingPipelineStackSizeKHR = + PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + + //=== VK_KHR_sampler_ycbcr_conversion === + vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); + if ( !vkCreateSamplerYcbcrConversion ) + vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; + vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); + if ( !vkDestroySamplerYcbcrConversion ) + vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + + //=== VK_KHR_bind_memory2 === + vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); + if ( !vkBindBufferMemory2 ) + vkBindBufferMemory2 = vkBindBufferMemory2KHR; + vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); + if ( !vkBindImageMemory2 ) + vkBindImageMemory2 = vkBindImageMemory2KHR; + + //=== VK_EXT_image_drm_format_modifier === + vkGetImageDrmFormatModifierPropertiesEXT = + PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + + //=== VK_EXT_validation_cache === + vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); + vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); + vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); + vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); + + //=== VK_NV_shading_rate_image === + vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); + vkCmdSetViewportShadingRatePaletteNV = + PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); + + //=== VK_NV_ray_tracing === + vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, 
"vkCreateAccelerationStructureNV" ) ); + vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); + vkGetAccelerationStructureMemoryRequirementsNV = + PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); + vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); + vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); + vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); + vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); + if ( !vkGetRayTracingShaderGroupHandlesKHR ) + vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; + vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = + PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); + vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); + + //=== VK_KHR_maintenance3 === + vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); + if ( !vkGetDescriptorSetLayoutSupport ) + vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + + //=== VK_KHR_draw_indirect_count === + vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; + vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + + //=== VK_EXT_external_memory_host === + vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) ); + + //=== VK_AMD_buffer_marker === + vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); + + //=== VK_EXT_calibrated_timestamps === + vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); + if ( !vkGetCalibratedTimestampsKHR ) + vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT; + + //=== VK_NV_mesh_shader === + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); + vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, 
"vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + + //=== VK_NV_scissor_exclusive === + vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorEnableNV" ) ); + vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); + + //=== VK_NV_device_diagnostic_checkpoints === + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); + vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); + vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); + + //=== VK_KHR_timeline_semaphore === + vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); + if ( !vkGetSemaphoreCounterValue ) + vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; + vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) ); + if ( !vkWaitSemaphores ) + vkWaitSemaphores = vkWaitSemaphoresKHR; + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); + if ( !vkSignalSemaphore ) + vkSignalSemaphore = vkSignalSemaphoreKHR; + + //=== VK_INTEL_performance_query === + vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) ); + vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); + vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = + PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkAcquirePerformanceConfigurationINTEL = + PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkReleasePerformanceConfigurationINTEL = + PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); + vkQueueSetPerformanceConfigurationINTEL = + PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) ); + vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); + + //=== VK_AMD_display_native_hdr === + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); + + //=== VK_KHR_fragment_shading_rate === + vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) ); + + //=== VK_KHR_dynamic_rendering_local_read === + vkCmdSetRenderingAttachmentLocationsKHR = + PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingAttachmentLocationsKHR" ) ); + if ( !vkCmdSetRenderingAttachmentLocations ) + 
vkCmdSetRenderingAttachmentLocations = vkCmdSetRenderingAttachmentLocationsKHR; + vkCmdSetRenderingInputAttachmentIndicesKHR = + PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) ); + if ( !vkCmdSetRenderingInputAttachmentIndices ) + vkCmdSetRenderingInputAttachmentIndices = vkCmdSetRenderingInputAttachmentIndicesKHR; + + //=== VK_EXT_buffer_device_address === + vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + + //=== VK_KHR_present_wait === + vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) ); - //=== VK_NV_fragment_shading_rate_enums === - vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); + vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); + vkGetDeviceGroupSurfacePresentModes2EXT = + PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_EXT_mesh_shader === - vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) ); - vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) ); - vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); + //=== VK_KHR_buffer_device_address === + vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; + vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + if ( !vkGetBufferOpaqueCaptureAddress ) + vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; + vkGetDeviceMemoryOpaqueCaptureAddressKHR = + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) + vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + + //=== VK_EXT_line_rasterization === + vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); + if ( !vkCmdSetLineStipple ) + vkCmdSetLineStipple = vkCmdSetLineStippleEXT; + + //=== VK_EXT_host_query_reset === + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); + if ( !vkResetQueryPool ) + vkResetQueryPool = vkResetQueryPoolEXT; + + //=== VK_EXT_extended_dynamic_state === + vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) ); + if ( !vkCmdSetCullMode ) + vkCmdSetCullMode = vkCmdSetCullModeEXT; + vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( 
vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) ); + if ( !vkCmdSetFrontFace ) + vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; + vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) ); + if ( !vkCmdSetPrimitiveTopology ) + vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; + vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) ); + if ( !vkCmdSetViewportWithCount ) + vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; + vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) ); + if ( !vkCmdSetScissorWithCount ) + vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; + vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) ); + if ( !vkCmdBindVertexBuffers2 ) + vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; + vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) ); + if ( !vkCmdSetDepthTestEnable ) + vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; + vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) ); + if ( !vkCmdSetDepthWriteEnable ) + vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; + vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) ); + if ( !vkCmdSetDepthCompareOp ) + vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; + vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) ); + if ( !vkCmdSetDepthBoundsTestEnable ) + vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; + vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) ); + if ( !vkCmdSetStencilTestEnable ) + vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; + vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) ); + if ( !vkCmdSetStencilOp ) + vkCmdSetStencilOp = vkCmdSetStencilOpEXT; + + //=== VK_KHR_deferred_host_operations === + vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) ); + vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) ); + vkGetDeferredOperationMaxConcurrencyKHR = + PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); + vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); + vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); + + //=== VK_KHR_pipeline_executable_properties === + vkGetPipelineExecutablePropertiesKHR = + PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); + vkGetPipelineExecutableStatisticsKHR = + PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); + vkGetPipelineExecutableInternalRepresentationsKHR = + 
PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + + //=== VK_EXT_host_image_copy === + vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) ); + if ( !vkCopyMemoryToImage ) + vkCopyMemoryToImage = vkCopyMemoryToImageEXT; + vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) ); + if ( !vkCopyImageToMemory ) + vkCopyImageToMemory = vkCopyImageToMemoryEXT; + vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) ); + if ( !vkCopyImageToImage ) + vkCopyImageToImage = vkCopyImageToImageEXT; + vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) ); + if ( !vkTransitionImageLayout ) + vkTransitionImageLayout = vkTransitionImageLayoutEXT; + vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); + if ( !vkGetImageSubresourceLayout2 ) + vkGetImageSubresourceLayout2 = vkGetImageSubresourceLayout2EXT; + + //=== VK_KHR_map_memory2 === + vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) ); + if ( !vkMapMemory2 ) + vkMapMemory2 = vkMapMemory2KHR; + vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetDeviceProcAddr( device, "vkUnmapMemory2KHR" ) ); + if ( !vkUnmapMemory2 ) + vkUnmapMemory2 = vkUnmapMemory2KHR; + + //=== VK_EXT_swapchain_maintenance1 === + vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetDeviceProcAddr( device, "vkReleaseSwapchainImagesEXT" ) ); + + //=== VK_NV_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsNV = + PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); + vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); + vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); + + //=== VK_EXT_depth_bias_control === + vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias2EXT" ) ); + + //=== VK_EXT_private_data === + vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) ); + if ( !vkCreatePrivateDataSlot ) + vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; + vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) ); + if ( !vkDestroyPrivateDataSlot ) + vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) ); + if ( !vkSetPrivateData ) + vkSetPrivateData = vkSetPrivateDataEXT; + vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, 
"vkGetPrivateDataEXT" ) ); + if ( !vkGetPrivateData ) + vkGetPrivateData = vkGetPrivateDataEXT; + + //=== VK_KHR_video_encode_queue === + vkGetEncodedVideoSessionParametersKHR = + PFN_vkGetEncodedVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkGetEncodedVideoSessionParametersKHR" ) ); + vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) ); - //=== VK_KHR_copy_commands2 === - vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) ); - if ( !vkCmdCopyBuffer2 ) - vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; - vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) ); - if ( !vkCmdCopyImage2 ) - vkCmdCopyImage2 = vkCmdCopyImage2KHR; - vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) ); - if ( !vkCmdCopyBufferToImage2 ) - vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; - vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) ); - if ( !vkCmdCopyImageToBuffer2 ) - vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; - vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) ); - if ( !vkCmdBlitImage2 ) - vkCmdBlitImage2 = vkCmdBlitImage2KHR; - vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) ); - if ( !vkCmdResolveImage2 ) - vkCmdResolveImage2 = vkCmdResolveImage2KHR; - - //=== VK_EXT_image_compression_control === - vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetDeviceProcAddr( device, "vkCreateCudaModuleNV" ) ); + vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetDeviceProcAddr( device, "vkGetCudaModuleCacheNV" ) ); + vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetDeviceProcAddr( device, "vkCreateCudaFunctionNV" ) ); + vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetDeviceProcAddr( device, "vkDestroyCudaModuleNV" ) ); + vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetDeviceProcAddr( device, "vkDestroyCudaFunctionNV" ) ); + vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetDeviceProcAddr( device, "vkCmdCudaLaunchKernelNV" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - //=== VK_KHR_ray_tracing_pipeline === - vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) ); - vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); - vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); - vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = - PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); - vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); - vkGetRayTracingShaderGroupStackSizeKHR = - PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); - vkCmdSetRayTracingPipelineStackSizeKHR = - 
PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) ); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_EXT_vertex_input_dynamic_state === - vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) ); + //=== VK_KHR_synchronization2 === + vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) ); + if ( !vkCmdSetEvent2 ) + vkCmdSetEvent2 = vkCmdSetEvent2KHR; + vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) ); + if ( !vkCmdResetEvent2 ) + vkCmdResetEvent2 = vkCmdResetEvent2KHR; + vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) ); + if ( !vkCmdWaitEvents2 ) + vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; + vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) ); + if ( !vkCmdPipelineBarrier2 ) + vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; + vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) ); + if ( !vkCmdWriteTimestamp2 ) + vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; + vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) ); + if ( !vkQueueSubmit2 ) + vkQueueSubmit2 = vkQueueSubmit2KHR; + + //=== VK_EXT_descriptor_buffer === + vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSizeEXT" ) ); + vkGetDescriptorSetLayoutBindingOffsetEXT = + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) ); + vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorEXT" ) ); + vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBuffersEXT" ) ); + vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplersEXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); + vkGetBufferOpaqueCaptureDescriptorDataEXT = + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageViewOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) ); + vkGetSamplerOpaqueCaptureDescriptorDataEXT = + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) ); + vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + vkGetDeviceProcAddr( device, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) ); + + //=== VK_NV_fragment_shading_rate_enums === + vkCmdSetFragmentShadingRateEnumNV = 
PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) ); + + //=== VK_EXT_mesh_shader === + vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) ); + vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) ); + vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); + + //=== VK_KHR_copy_commands2 === + vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) ); + if ( !vkCmdCopyBuffer2 ) + vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; + vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) ); + if ( !vkCmdCopyImage2 ) + vkCmdCopyImage2 = vkCmdCopyImage2KHR; + vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) ); + if ( !vkCmdCopyBufferToImage2 ) + vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; + vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) ); + if ( !vkCmdCopyImageToBuffer2 ) + vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; + vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) ); + if ( !vkCmdBlitImage2 ) + vkCmdBlitImage2 = vkCmdBlitImage2KHR; + vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) ); + if ( !vkCmdResolveImage2 ) + vkCmdResolveImage2 = vkCmdResolveImage2KHR; + + //=== VK_EXT_device_fault === + vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) ); + + //=== VK_EXT_vertex_input_dynamic_state === + vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) ); #if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_memory === - vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) ); - vkGetMemoryZirconHandlePropertiesFUCHSIA = - PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); + //=== VK_FUCHSIA_external_memory === + vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) ); + vkGetMemoryZirconHandlePropertiesFUCHSIA = + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_semaphore === - vkImportSemaphoreZirconHandleFUCHSIA = PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); - vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); + //=== VK_FUCHSIA_external_semaphore === + vkImportSemaphoreZirconHandleFUCHSIA = + PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); + vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if 
defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_buffer_collection === - vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) ); - vkSetBufferCollectionImageConstraintsFUCHSIA = - PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); - vkSetBufferCollectionBufferConstraintsFUCHSIA = - PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); - vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) ); - vkGetBufferCollectionPropertiesFUCHSIA = - PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); + //=== VK_FUCHSIA_buffer_collection === + vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) ); + vkSetBufferCollectionImageConstraintsFUCHSIA = + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); + vkSetBufferCollectionBufferConstraintsFUCHSIA = + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); + vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) ); + vkGetBufferCollectionPropertiesFUCHSIA = + PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); #endif /*VK_USE_PLATFORM_FUCHSIA*/ - //=== VK_HUAWEI_subpass_shading === - vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = - PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); - vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) ); - - //=== VK_HUAWEI_invocation_mask === - vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) ); - - //=== VK_NV_external_memory_rdma === - vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) ); - - //=== VK_EXT_pipeline_properties === - vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) ); - - //=== VK_EXT_extended_dynamic_state2 === - vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) ); - vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) ); - if ( !vkCmdSetRasterizerDiscardEnable ) - vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; - vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) ); - if ( !vkCmdSetDepthBiasEnable ) - vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; - vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) ); - vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) ); 
- if ( !vkCmdSetPrimitiveRestartEnable ) - vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; - - //=== VK_EXT_color_write_enable === - vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) ); - - //=== VK_KHR_ray_tracing_maintenance1 === - vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) ); - - //=== VK_EXT_multi_draw === - vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) ); - vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) ); + //=== VK_HUAWEI_subpass_shading === + vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); + vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) ); + + //=== VK_HUAWEI_invocation_mask === + vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) ); + + //=== VK_NV_external_memory_rdma === + vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) ); + + //=== VK_EXT_pipeline_properties === + vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) ); + + //=== VK_EXT_extended_dynamic_state2 === + vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) ); + vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) ); + if ( !vkCmdSetRasterizerDiscardEnable ) + vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; + vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) ); + if ( !vkCmdSetDepthBiasEnable ) + vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; + vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) ); + vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) ); + if ( !vkCmdSetPrimitiveRestartEnable ) + vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; + + //=== VK_EXT_color_write_enable === + vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) ); + + //=== VK_KHR_ray_tracing_maintenance1 === + vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) ); + + //=== VK_EXT_multi_draw === + vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) ); + vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) ); + + //=== VK_EXT_opacity_micromap === + vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) ); + vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) ); + vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) ); + 
vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) ); + vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapEXT" ) ); + vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) ); + vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) ); + vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) ); + vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) ); + vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapToMemoryEXT" ) ); + vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) ); + vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) ); + vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) ); + vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) ); + + //=== VK_HUAWEI_cluster_culling_shader === + vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterHUAWEI" ) ); + vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterIndirectHUAWEI" ) ); + + //=== VK_EXT_pageable_device_local_memory === + vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) ); + + //=== VK_KHR_maintenance4 === + vkGetDeviceBufferMemoryRequirementsKHR = + PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceBufferMemoryRequirements ) + vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR; + vkGetDeviceImageMemoryRequirementsKHR = + PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageMemoryRequirements ) + vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; + vkGetDeviceImageSparseMemoryRequirementsKHR = + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageSparseMemoryRequirements ) + vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; + + //=== VK_VALVE_descriptor_set_host_mapping === + vkGetDescriptorSetLayoutHostMappingInfoVALVE = + PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); + vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) ); + + //=== VK_NV_copy_memory_indirect === + vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryIndirectNV" ) ); + vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToImageIndirectNV" ) ); + + //=== VK_NV_memory_decompression === + vkCmdDecompressMemoryNV = 
PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) ); + vkCmdDecompressMemoryIndirectCountNV = + PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) ); + + //=== VK_NV_device_generated_commands_compute === + vkGetPipelineIndirectMemoryRequirementsNV = + PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); + vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBufferNV" ) ); + vkGetPipelineIndirectDeviceAddressNV = + PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) ); + + //=== VK_EXT_extended_dynamic_state3 === + vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) ); + vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) ); + vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) ); + vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleMaskEXT" ) ); + vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) ); + vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) ); + vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) ); + vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEnableEXT" ) ); + vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) ); + vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) ); + vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) ); + vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) ); + vkCmdSetConservativeRasterizationModeEXT = + PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) ); + vkCmdSetExtraPrimitiveOverestimationSizeEXT = + PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) ); + vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) ); + vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) ); + vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) ); + vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) ); + vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) ); + vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, 
"vkCmdSetLineStippleEnableEXT" ) ); + vkCmdSetDepthClipNegativeOneToOneEXT = + PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) ); + vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) ); + vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportSwizzleNV" ) ); + vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) ); + vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) ); + vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) ); + vkCmdSetCoverageModulationTableEnableNV = + PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) ); + vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) ); + vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetShadingRateImageEnableNV" ) ); + vkCmdSetRepresentativeFragmentTestEnableNV = + PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) ); + vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) ); + + //=== VK_EXT_shader_module_identifier === + vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) ); + vkGetShaderModuleCreateInfoIdentifierEXT = + PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) ); + + //=== VK_NV_optical_flow === + vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) ); + vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) ); + vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) ); + vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) ); + + //=== VK_KHR_maintenance5 === + vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) ); + if ( !vkCmdBindIndexBuffer2 ) + vkCmdBindIndexBuffer2 = vkCmdBindIndexBuffer2KHR; + vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) ); + if ( !vkGetRenderingAreaGranularity ) + vkGetRenderingAreaGranularity = vkGetRenderingAreaGranularityKHR; + vkGetDeviceImageSubresourceLayoutKHR = + PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) ); + if ( !vkGetDeviceImageSubresourceLayout ) + vkGetDeviceImageSubresourceLayout = vkGetDeviceImageSubresourceLayoutKHR; + vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) ); + if ( !vkGetImageSubresourceLayout2 ) + 
vkGetImageSubresourceLayout2 = vkGetImageSubresourceLayout2KHR; + + //=== VK_AMD_anti_lag === + vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetDeviceProcAddr( device, "vkAntiLagUpdateAMD" ) ); + + //=== VK_EXT_shader_object === + vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) ); + vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) ); + vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) ); + vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) ); + vkCmdSetDepthClampRangeEXT = PFN_vkCmdSetDepthClampRangeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampRangeEXT" ) ); + + //=== VK_KHR_pipeline_binary === + vkCreatePipelineBinariesKHR = PFN_vkCreatePipelineBinariesKHR( vkGetDeviceProcAddr( device, "vkCreatePipelineBinariesKHR" ) ); + vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetDeviceProcAddr( device, "vkDestroyPipelineBinaryKHR" ) ); + vkGetPipelineKeyKHR = PFN_vkGetPipelineKeyKHR( vkGetDeviceProcAddr( device, "vkGetPipelineKeyKHR" ) ); + vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetDeviceProcAddr( device, "vkGetPipelineBinaryDataKHR" ) ); + vkReleaseCapturedPipelineDataKHR = PFN_vkReleaseCapturedPipelineDataKHR( vkGetDeviceProcAddr( device, "vkReleaseCapturedPipelineDataKHR" ) ); + + //=== VK_QCOM_tile_properties === + vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) ); + vkGetDynamicRenderingTilePropertiesQCOM = + PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetDynamicRenderingTilePropertiesQCOM" ) ); + + //=== VK_NV_cooperative_vector === + vkConvertCooperativeVectorMatrixNV = PFN_vkConvertCooperativeVectorMatrixNV( vkGetDeviceProcAddr( device, "vkConvertCooperativeVectorMatrixNV" ) ); + vkCmdConvertCooperativeVectorMatrixNV = + PFN_vkCmdConvertCooperativeVectorMatrixNV( vkGetDeviceProcAddr( device, "vkCmdConvertCooperativeVectorMatrixNV" ) ); + + //=== VK_NV_low_latency2 === + vkSetLatencySleepModeNV = PFN_vkSetLatencySleepModeNV( vkGetDeviceProcAddr( device, "vkSetLatencySleepModeNV" ) ); + vkLatencySleepNV = PFN_vkLatencySleepNV( vkGetDeviceProcAddr( device, "vkLatencySleepNV" ) ); + vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetDeviceProcAddr( device, "vkSetLatencyMarkerNV" ) ); + vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetDeviceProcAddr( device, "vkGetLatencyTimingsNV" ) ); + vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetDeviceProcAddr( device, "vkQueueNotifyOutOfBandNV" ) ); + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + vkCmdSetAttachmentFeedbackLoopEnableEXT = + PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) ); - //=== VK_EXT_pageable_device_local_memory === - vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) ); +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetDeviceProcAddr( device, "vkGetScreenBufferPropertiesQNX" ) ); +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - //=== VK_KHR_maintenance4 === - vkGetDeviceBufferMemoryRequirementsKHR = - PFN_vkGetDeviceBufferMemoryRequirementsKHR( 
vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceBufferMemoryRequirements ) - vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR; - vkGetDeviceImageMemoryRequirementsKHR = - PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceImageMemoryRequirements ) - vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; - vkGetDeviceImageSparseMemoryRequirementsKHR = - PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceImageSparseMemoryRequirements ) - vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; + //=== VK_KHR_line_rasterization === + vkCmdSetLineStippleKHR = PFN_vkCmdSetLineStippleKHR( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleKHR" ) ); + if ( !vkCmdSetLineStipple ) + vkCmdSetLineStipple = vkCmdSetLineStippleKHR; + + //=== VK_KHR_calibrated_timestamps === + vkGetCalibratedTimestampsKHR = PFN_vkGetCalibratedTimestampsKHR( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsKHR" ) ); + + //=== VK_KHR_maintenance6 === + vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2KHR" ) ); + if ( !vkCmdBindDescriptorSets2 ) + vkCmdBindDescriptorSets2 = vkCmdBindDescriptorSets2KHR; + vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetDeviceProcAddr( device, "vkCmdPushConstants2KHR" ) ); + if ( !vkCmdPushConstants2 ) + vkCmdPushConstants2 = vkCmdPushConstants2KHR; + vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2KHR" ) ); + if ( !vkCmdPushDescriptorSet2 ) + vkCmdPushDescriptorSet2 = vkCmdPushDescriptorSet2KHR; + vkCmdPushDescriptorSetWithTemplate2KHR = + PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2KHR" ) ); + if ( !vkCmdPushDescriptorSetWithTemplate2 ) + vkCmdPushDescriptorSetWithTemplate2 = vkCmdPushDescriptorSetWithTemplate2KHR; + vkCmdSetDescriptorBufferOffsets2EXT = PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsets2EXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) ); + + //=== VK_NV_cluster_acceleration_structure === + vkGetClusterAccelerationStructureBuildSizesNV = + PFN_vkGetClusterAccelerationStructureBuildSizesNV( vkGetDeviceProcAddr( device, "vkGetClusterAccelerationStructureBuildSizesNV" ) ); + vkCmdBuildClusterAccelerationStructureIndirectNV = + PFN_vkCmdBuildClusterAccelerationStructureIndirectNV( vkGetDeviceProcAddr( device, "vkCmdBuildClusterAccelerationStructureIndirectNV" ) ); + + //=== VK_NV_partitioned_acceleration_structure === + vkGetPartitionedAccelerationStructuresBuildSizesNV = + PFN_vkGetPartitionedAccelerationStructuresBuildSizesNV( vkGetDeviceProcAddr( device, "vkGetPartitionedAccelerationStructuresBuildSizesNV" ) ); + vkCmdBuildPartitionedAccelerationStructuresNV = + PFN_vkCmdBuildPartitionedAccelerationStructuresNV( vkGetDeviceProcAddr( device, "vkCmdBuildPartitionedAccelerationStructuresNV" ) ); + + //=== VK_EXT_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsEXT = + PFN_vkGetGeneratedCommandsMemoryRequirementsEXT( vkGetDeviceProcAddr( device, 
"vkGetGeneratedCommandsMemoryRequirementsEXT" ) ); + vkCmdPreprocessGeneratedCommandsEXT = PFN_vkCmdPreprocessGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsEXT" ) ); + vkCmdExecuteGeneratedCommandsEXT = PFN_vkCmdExecuteGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsEXT" ) ); + vkCreateIndirectCommandsLayoutEXT = PFN_vkCreateIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutEXT" ) ); + vkDestroyIndirectCommandsLayoutEXT = PFN_vkDestroyIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutEXT" ) ); + vkCreateIndirectExecutionSetEXT = PFN_vkCreateIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectExecutionSetEXT" ) ); + vkDestroyIndirectExecutionSetEXT = PFN_vkDestroyIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectExecutionSetEXT" ) ); + vkUpdateIndirectExecutionSetPipelineEXT = + PFN_vkUpdateIndirectExecutionSetPipelineEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetPipelineEXT" ) ); + vkUpdateIndirectExecutionSetShaderEXT = + PFN_vkUpdateIndirectExecutionSetShaderEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetShaderEXT" ) ); - //=== VK_VALVE_descriptor_set_host_mapping === - vkGetDescriptorSetLayoutHostMappingInfoVALVE = - PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); - vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) ); +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + vkGetMemoryMetalHandleEXT = PFN_vkGetMemoryMetalHandleEXT( vkGetDeviceProcAddr( device, "vkGetMemoryMetalHandleEXT" ) ); + vkGetMemoryMetalHandlePropertiesEXT = PFN_vkGetMemoryMetalHandlePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryMetalHandlePropertiesEXT" ) ); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + } - //=== VK_EXT_shader_module_identifier === - vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) ); - vkGetShaderModuleCreateInfoIdentifierEXT = - PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) ); + template + void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device, DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT + { + PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress( "vkGetInstanceProcAddr" ); + PFN_vkGetDeviceProcAddr getDeviceProcAddr = dl.template getProcAddress( "vkGetDeviceProcAddr" ); + init( static_cast( instance ), getInstanceProcAddr, static_cast( device ), device ? 
getDeviceProcAddr : nullptr );
+ }
- //=== VK_QCOM_tile_properties ===
- vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) );
- vkGetDynamicRenderingTilePropertiesQCOM =
- PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetDynamicRenderingTilePropertiesQCOM" ) );
- }
- };
+ template <typename DynamicLoader>
+ void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device ) VULKAN_HPP_NOEXCEPT
+ {
+ static DynamicLoader dl;
+ init( instance, device, dl );
+ }
+ };
+ } // namespace detail
} // namespace VULKAN_HPP_NAMESPACE
#endif
diff --git a/src/libraries/vulkanheaders/vulkan_android.h b/src/libraries/vulkanheaders/vulkan_android.h
index 11f539796..a2ba3add9 100644
--- a/src/libraries/vulkanheaders/vulkan_android.h
+++ b/src/libraries/vulkanheaders/vulkan_android.h
@@ -2,7 +2,7 @@
#define VULKAN_ANDROID_H_ 1
/*
-** Copyright 2015-2022 The Khronos Group Inc.
+** Copyright 2015-2025 The Khronos Group Inc.
**
** SPDX-License-Identifier: Apache-2.0
*/
@@ -19,6 +19,7 @@ extern "C" {
+// VK_KHR_android_surface is a preprocessor guard. Do not pass it to API calls.
#define VK_KHR_android_surface 1
struct ANativeWindow;
#define VK_KHR_ANDROID_SURFACE_SPEC_VERSION 6
@@ -42,6 +43,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR(
#endif
+// VK_ANDROID_external_memory_android_hardware_buffer is a preprocessor guard. Do not pass it to API calls.
#define VK_ANDROID_external_memory_android_hardware_buffer 1
struct AHardwareBuffer;
#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION 5
@@ -118,6 +120,32 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryAndroidHardwareBufferANDROID(
struct AHardwareBuffer** pBuffer);
#endif
+
+// VK_ANDROID_external_format_resolve is a preprocessor guard. Do not pass it to API calls.
+#define VK_ANDROID_external_format_resolve 1
+#define VK_ANDROID_EXTERNAL_FORMAT_RESOLVE_SPEC_VERSION 1
+#define VK_ANDROID_EXTERNAL_FORMAT_RESOLVE_EXTENSION_NAME "VK_ANDROID_external_format_resolve"
+typedef struct VkPhysicalDeviceExternalFormatResolveFeaturesANDROID {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 externalFormatResolve;
+} VkPhysicalDeviceExternalFormatResolveFeaturesANDROID;
+
+typedef struct VkPhysicalDeviceExternalFormatResolvePropertiesANDROID {
+ VkStructureType sType;
+ void* pNext;
+ VkBool32 nullColorAttachmentWithExternalFormatResolve;
+ VkChromaLocation externalFormatResolveChromaOffsetX;
+ VkChromaLocation externalFormatResolveChromaOffsetY;
+} VkPhysicalDeviceExternalFormatResolvePropertiesANDROID;
+
+typedef struct VkAndroidHardwareBufferFormatResolvePropertiesANDROID {
+ VkStructureType sType;
+ void* pNext;
+ VkFormat colorAttachmentFormat;
+} VkAndroidHardwareBufferFormatResolvePropertiesANDROID;
+
+
#ifdef __cplusplus
}
#endif
diff --git a/src/libraries/vulkanheaders/vulkan_beta.h b/src/libraries/vulkanheaders/vulkan_beta.h
index db03af41b..867483d08 100644
--- a/src/libraries/vulkanheaders/vulkan_beta.h
+++ b/src/libraries/vulkanheaders/vulkan_beta.h
@@ -2,7 +2,7 @@
#define VULKAN_BETA_H_ 1
/*
-** Copyright 2015-2022 The Khronos Group Inc.
+** Copyright 2015-2025 The Khronos Group Inc.
** ** SPDX-License-Identifier: Apache-2.0 */ @@ -19,356 +19,7 @@ extern "C" { -#define VK_KHR_video_queue 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionKHR) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionParametersKHR) -#define VK_KHR_VIDEO_QUEUE_SPEC_VERSION 5 -#define VK_KHR_VIDEO_QUEUE_EXTENSION_NAME "VK_KHR_video_queue" - -typedef enum VkQueryResultStatusKHR { - VK_QUERY_RESULT_STATUS_ERROR_KHR = -1, - VK_QUERY_RESULT_STATUS_NOT_READY_KHR = 0, - VK_QUERY_RESULT_STATUS_COMPLETE_KHR = 1, - VK_QUERY_RESULT_STATUS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkQueryResultStatusKHR; - -typedef enum VkVideoCodecOperationFlagBitsKHR { - VK_VIDEO_CODEC_OPERATION_NONE_KHR = 0, -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_EXT = 0x00010000, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_EXT = 0x00020000, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_EXT = 0x00000001, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_EXT = 0x00000002, -#endif - VK_VIDEO_CODEC_OPERATION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkVideoCodecOperationFlagBitsKHR; -typedef VkFlags VkVideoCodecOperationFlagsKHR; - -typedef enum VkVideoChromaSubsamplingFlagBitsKHR { - VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_KHR = 0, - VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR = 0x00000001, - VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR = 0x00000002, - VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR = 0x00000004, - VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR = 0x00000008, - VK_VIDEO_CHROMA_SUBSAMPLING_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkVideoChromaSubsamplingFlagBitsKHR; -typedef VkFlags VkVideoChromaSubsamplingFlagsKHR; - -typedef enum VkVideoComponentBitDepthFlagBitsKHR { - VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR = 0, - VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR = 0x00000001, - VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR = 0x00000004, - VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR = 0x00000010, - VK_VIDEO_COMPONENT_BIT_DEPTH_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkVideoComponentBitDepthFlagBitsKHR; -typedef VkFlags VkVideoComponentBitDepthFlagsKHR; - -typedef enum VkVideoCapabilityFlagBitsKHR { - VK_VIDEO_CAPABILITY_PROTECTED_CONTENT_BIT_KHR = 0x00000001, - VK_VIDEO_CAPABILITY_SEPARATE_REFERENCE_IMAGES_BIT_KHR = 0x00000002, - VK_VIDEO_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkVideoCapabilityFlagBitsKHR; -typedef VkFlags VkVideoCapabilityFlagsKHR; - -typedef enum VkVideoSessionCreateFlagBitsKHR { - VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR = 0x00000001, - VK_VIDEO_SESSION_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkVideoSessionCreateFlagBitsKHR; -typedef VkFlags VkVideoSessionCreateFlagsKHR; -typedef VkFlags VkVideoSessionParametersCreateFlagsKHR; -typedef VkFlags VkVideoBeginCodingFlagsKHR; -typedef VkFlags VkVideoEndCodingFlagsKHR; - -typedef enum VkVideoCodingControlFlagBitsKHR { - VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR = 0x00000001, -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_BIT_KHR = 0x00000002, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_LAYER_BIT_KHR = 0x00000004, -#endif - VK_VIDEO_CODING_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkVideoCodingControlFlagBitsKHR; -typedef VkFlags VkVideoCodingControlFlagsKHR; -typedef struct VkQueueFamilyQueryResultStatusPropertiesKHR { - VkStructureType sType; - void* pNext; - VkBool32 queryResultStatusSupport; -} VkQueueFamilyQueryResultStatusPropertiesKHR; - -typedef struct 
VkQueueFamilyVideoPropertiesKHR { - VkStructureType sType; - void* pNext; - VkVideoCodecOperationFlagsKHR videoCodecOperations; -} VkQueueFamilyVideoPropertiesKHR; - -typedef struct VkVideoProfileInfoKHR { - VkStructureType sType; - const void* pNext; - VkVideoCodecOperationFlagBitsKHR videoCodecOperation; - VkVideoChromaSubsamplingFlagsKHR chromaSubsampling; - VkVideoComponentBitDepthFlagsKHR lumaBitDepth; - VkVideoComponentBitDepthFlagsKHR chromaBitDepth; -} VkVideoProfileInfoKHR; - -typedef struct VkVideoProfileListInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t profileCount; - const VkVideoProfileInfoKHR* pProfiles; -} VkVideoProfileListInfoKHR; - -typedef struct VkVideoCapabilitiesKHR { - VkStructureType sType; - void* pNext; - VkVideoCapabilityFlagsKHR capabilityFlags; - VkDeviceSize minBitstreamBufferOffsetAlignment; - VkDeviceSize minBitstreamBufferSizeAlignment; - VkExtent2D videoPictureExtentGranularity; - VkExtent2D minExtent; - VkExtent2D maxExtent; - uint32_t maxReferencePicturesSlotsCount; - uint32_t maxReferencePicturesActiveCount; - VkExtensionProperties stdHeaderVersion; -} VkVideoCapabilitiesKHR; - -typedef struct VkPhysicalDeviceVideoFormatInfoKHR { - VkStructureType sType; - const void* pNext; - VkImageUsageFlags imageUsage; -} VkPhysicalDeviceVideoFormatInfoKHR; - -typedef struct VkVideoFormatPropertiesKHR { - VkStructureType sType; - void* pNext; - VkFormat format; - VkComponentMapping componentMapping; - VkImageCreateFlags imageCreateFlags; - VkImageType imageType; - VkImageTiling imageTiling; - VkImageUsageFlags imageUsageFlags; -} VkVideoFormatPropertiesKHR; - -typedef struct VkVideoPictureResourceInfoKHR { - VkStructureType sType; - const void* pNext; - VkOffset2D codedOffset; - VkExtent2D codedExtent; - uint32_t baseArrayLayer; - VkImageView imageViewBinding; -} VkVideoPictureResourceInfoKHR; - -typedef struct VkVideoReferenceSlotInfoKHR { - VkStructureType sType; - const void* pNext; - int8_t slotIndex; - const VkVideoPictureResourceInfoKHR* pPictureResource; -} VkVideoReferenceSlotInfoKHR; - -typedef struct VkVideoSessionMemoryRequirementsKHR { - VkStructureType sType; - void* pNext; - uint32_t memoryBindIndex; - VkMemoryRequirements memoryRequirements; -} VkVideoSessionMemoryRequirementsKHR; - -typedef struct VkBindVideoSessionMemoryInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t memoryBindIndex; - VkDeviceMemory memory; - VkDeviceSize memoryOffset; - VkDeviceSize memorySize; -} VkBindVideoSessionMemoryInfoKHR; - -typedef struct VkVideoSessionCreateInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t queueFamilyIndex; - VkVideoSessionCreateFlagsKHR flags; - const VkVideoProfileInfoKHR* pVideoProfile; - VkFormat pictureFormat; - VkExtent2D maxCodedExtent; - VkFormat referencePicturesFormat; - uint32_t maxReferencePicturesSlotsCount; - uint32_t maxReferencePicturesActiveCount; - const VkExtensionProperties* pStdHeaderVersion; -} VkVideoSessionCreateInfoKHR; - -typedef struct VkVideoSessionParametersCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkVideoSessionParametersCreateFlagsKHR flags; - VkVideoSessionParametersKHR videoSessionParametersTemplate; - VkVideoSessionKHR videoSession; -} VkVideoSessionParametersCreateInfoKHR; - -typedef struct VkVideoSessionParametersUpdateInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t updateSequenceCount; -} VkVideoSessionParametersUpdateInfoKHR; - -typedef struct VkVideoBeginCodingInfoKHR { - VkStructureType sType; - const void* pNext; - 
VkVideoBeginCodingFlagsKHR flags; - VkVideoSessionKHR videoSession; - VkVideoSessionParametersKHR videoSessionParameters; - uint32_t referenceSlotCount; - const VkVideoReferenceSlotInfoKHR* pReferenceSlots; -} VkVideoBeginCodingInfoKHR; - -typedef struct VkVideoEndCodingInfoKHR { - VkStructureType sType; - const void* pNext; - VkVideoEndCodingFlagsKHR flags; -} VkVideoEndCodingInfoKHR; - -typedef struct VkVideoCodingControlInfoKHR { - VkStructureType sType; - const void* pNext; - VkVideoCodingControlFlagsKHR flags; -} VkVideoCodingControlInfoKHR; - -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR)(VkPhysicalDevice physicalDevice, const VkVideoProfileInfoKHR* pVideoProfile, VkVideoCapabilitiesKHR* pCapabilities); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoFormatInfoKHR* pVideoFormatInfo, uint32_t* pVideoFormatPropertyCount, VkVideoFormatPropertiesKHR* pVideoFormatProperties); -typedef VkResult (VKAPI_PTR *PFN_vkCreateVideoSessionKHR)(VkDevice device, const VkVideoSessionCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionKHR* pVideoSession); -typedef void (VKAPI_PTR *PFN_vkDestroyVideoSessionKHR)(VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkGetVideoSessionMemoryRequirementsKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t* pMemoryRequirementsCount, VkVideoSessionMemoryRequirementsKHR* pMemoryRequirements); -typedef VkResult (VKAPI_PTR *PFN_vkBindVideoSessionMemoryKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t bindSessionMemoryInfoCount, const VkBindVideoSessionMemoryInfoKHR* pBindSessionMemoryInfos); -typedef VkResult (VKAPI_PTR *PFN_vkCreateVideoSessionParametersKHR)(VkDevice device, const VkVideoSessionParametersCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionParametersKHR* pVideoSessionParameters); -typedef VkResult (VKAPI_PTR *PFN_vkUpdateVideoSessionParametersKHR)(VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo); -typedef void (VKAPI_PTR *PFN_vkDestroyVideoSessionParametersKHR)(VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkAllocationCallbacks* pAllocator); -typedef void (VKAPI_PTR *PFN_vkCmdBeginVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR* pBeginInfo); -typedef void (VKAPI_PTR *PFN_vkCmdEndVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR* pEndCodingInfo); -typedef void (VKAPI_PTR *PFN_vkCmdControlVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR* pCodingControlInfo); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoCapabilitiesKHR( - VkPhysicalDevice physicalDevice, - const VkVideoProfileInfoKHR* pVideoProfile, - VkVideoCapabilitiesKHR* pCapabilities); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoFormatPropertiesKHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceVideoFormatInfoKHR* pVideoFormatInfo, - uint32_t* pVideoFormatPropertyCount, - VkVideoFormatPropertiesKHR* pVideoFormatProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateVideoSessionKHR( - VkDevice device, - const VkVideoSessionCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkVideoSessionKHR* pVideoSession); - -VKAPI_ATTR void VKAPI_CALL 
vkDestroyVideoSessionKHR( - VkDevice device, - VkVideoSessionKHR videoSession, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetVideoSessionMemoryRequirementsKHR( - VkDevice device, - VkVideoSessionKHR videoSession, - uint32_t* pMemoryRequirementsCount, - VkVideoSessionMemoryRequirementsKHR* pMemoryRequirements); - -VKAPI_ATTR VkResult VKAPI_CALL vkBindVideoSessionMemoryKHR( - VkDevice device, - VkVideoSessionKHR videoSession, - uint32_t bindSessionMemoryInfoCount, - const VkBindVideoSessionMemoryInfoKHR* pBindSessionMemoryInfos); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateVideoSessionParametersKHR( - VkDevice device, - const VkVideoSessionParametersCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkVideoSessionParametersKHR* pVideoSessionParameters); - -VKAPI_ATTR VkResult VKAPI_CALL vkUpdateVideoSessionParametersKHR( - VkDevice device, - VkVideoSessionParametersKHR videoSessionParameters, - const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo); - -VKAPI_ATTR void VKAPI_CALL vkDestroyVideoSessionParametersKHR( - VkDevice device, - VkVideoSessionParametersKHR videoSessionParameters, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR void VKAPI_CALL vkCmdBeginVideoCodingKHR( - VkCommandBuffer commandBuffer, - const VkVideoBeginCodingInfoKHR* pBeginInfo); - -VKAPI_ATTR void VKAPI_CALL vkCmdEndVideoCodingKHR( - VkCommandBuffer commandBuffer, - const VkVideoEndCodingInfoKHR* pEndCodingInfo); - -VKAPI_ATTR void VKAPI_CALL vkCmdControlVideoCodingKHR( - VkCommandBuffer commandBuffer, - const VkVideoCodingControlInfoKHR* pCodingControlInfo); -#endif - - -#define VK_KHR_video_decode_queue 1 -#define VK_KHR_VIDEO_DECODE_QUEUE_SPEC_VERSION 6 -#define VK_KHR_VIDEO_DECODE_QUEUE_EXTENSION_NAME "VK_KHR_video_decode_queue" - -typedef enum VkVideoDecodeCapabilityFlagBitsKHR { - VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_COINCIDE_BIT_KHR = 0x00000001, - VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_DISTINCT_BIT_KHR = 0x00000002, - VK_VIDEO_DECODE_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkVideoDecodeCapabilityFlagBitsKHR; -typedef VkFlags VkVideoDecodeCapabilityFlagsKHR; - -typedef enum VkVideoDecodeUsageFlagBitsKHR { - VK_VIDEO_DECODE_USAGE_DEFAULT_KHR = 0, - VK_VIDEO_DECODE_USAGE_TRANSCODING_BIT_KHR = 0x00000001, - VK_VIDEO_DECODE_USAGE_OFFLINE_BIT_KHR = 0x00000002, - VK_VIDEO_DECODE_USAGE_STREAMING_BIT_KHR = 0x00000004, - VK_VIDEO_DECODE_USAGE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkVideoDecodeUsageFlagBitsKHR; -typedef VkFlags VkVideoDecodeUsageFlagsKHR; -typedef VkFlags VkVideoDecodeFlagsKHR; -typedef struct VkVideoDecodeCapabilitiesKHR { - VkStructureType sType; - void* pNext; - VkVideoDecodeCapabilityFlagsKHR flags; -} VkVideoDecodeCapabilitiesKHR; - -typedef struct VkVideoDecodeUsageInfoKHR { - VkStructureType sType; - const void* pNext; - VkVideoDecodeUsageFlagsKHR videoUsageHints; -} VkVideoDecodeUsageInfoKHR; - -typedef struct VkVideoDecodeInfoKHR { - VkStructureType sType; - const void* pNext; - VkVideoDecodeFlagsKHR flags; - VkBuffer srcBuffer; - VkDeviceSize srcBufferOffset; - VkDeviceSize srcBufferRange; - VkVideoPictureResourceInfoKHR dstPictureResource; - const VkVideoReferenceSlotInfoKHR* pSetupReferenceSlot; - uint32_t referenceSlotCount; - const VkVideoReferenceSlotInfoKHR* pReferenceSlots; -} VkVideoDecodeInfoKHR; - -typedef void (VKAPI_PTR *PFN_vkCmdDecodeVideoKHR)(VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR* pFrameInfo); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdDecodeVideoKHR( - 
VkCommandBuffer commandBuffer, - const VkVideoDecodeInfoKHR* pFrameInfo); -#endif - - +// VK_KHR_portability_subset is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_portability_subset 1 #define VK_KHR_PORTABILITY_SUBSET_SPEC_VERSION 1 #define VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME "VK_KHR_portability_subset" @@ -400,617 +51,172 @@ typedef struct VkPhysicalDevicePortabilitySubsetPropertiesKHR { -#define VK_KHR_video_encode_queue 1 -#define VK_KHR_VIDEO_ENCODE_QUEUE_SPEC_VERSION 7 -#define VK_KHR_VIDEO_ENCODE_QUEUE_EXTENSION_NAME "VK_KHR_video_encode_queue" - -typedef enum VkVideoEncodeTuningModeKHR { - VK_VIDEO_ENCODE_TUNING_MODE_DEFAULT_KHR = 0, - VK_VIDEO_ENCODE_TUNING_MODE_HIGH_QUALITY_KHR = 1, - VK_VIDEO_ENCODE_TUNING_MODE_LOW_LATENCY_KHR = 2, - VK_VIDEO_ENCODE_TUNING_MODE_ULTRA_LOW_LATENCY_KHR = 3, - VK_VIDEO_ENCODE_TUNING_MODE_LOSSLESS_KHR = 4, - VK_VIDEO_ENCODE_TUNING_MODE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkVideoEncodeTuningModeKHR; -typedef VkFlags VkVideoEncodeFlagsKHR; - -typedef enum VkVideoEncodeCapabilityFlagBitsKHR { - VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR = 0x00000001, - VK_VIDEO_ENCODE_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkVideoEncodeCapabilityFlagBitsKHR; -typedef VkFlags VkVideoEncodeCapabilityFlagsKHR; - -typedef enum VkVideoEncodeRateControlModeFlagBitsKHR { - VK_VIDEO_ENCODE_RATE_CONTROL_MODE_NONE_BIT_KHR = 0, - VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR = 1, - VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR = 2, - VK_VIDEO_ENCODE_RATE_CONTROL_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkVideoEncodeRateControlModeFlagBitsKHR; -typedef VkFlags VkVideoEncodeRateControlModeFlagsKHR; - -typedef enum VkVideoEncodeUsageFlagBitsKHR { - VK_VIDEO_ENCODE_USAGE_DEFAULT_KHR = 0, - VK_VIDEO_ENCODE_USAGE_TRANSCODING_BIT_KHR = 0x00000001, - VK_VIDEO_ENCODE_USAGE_STREAMING_BIT_KHR = 0x00000002, - VK_VIDEO_ENCODE_USAGE_RECORDING_BIT_KHR = 0x00000004, - VK_VIDEO_ENCODE_USAGE_CONFERENCING_BIT_KHR = 0x00000008, - VK_VIDEO_ENCODE_USAGE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkVideoEncodeUsageFlagBitsKHR; -typedef VkFlags VkVideoEncodeUsageFlagsKHR; - -typedef enum VkVideoEncodeContentFlagBitsKHR { - VK_VIDEO_ENCODE_CONTENT_DEFAULT_KHR = 0, - VK_VIDEO_ENCODE_CONTENT_CAMERA_BIT_KHR = 0x00000001, - VK_VIDEO_ENCODE_CONTENT_DESKTOP_BIT_KHR = 0x00000002, - VK_VIDEO_ENCODE_CONTENT_RENDERED_BIT_KHR = 0x00000004, - VK_VIDEO_ENCODE_CONTENT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkVideoEncodeContentFlagBitsKHR; -typedef VkFlags VkVideoEncodeContentFlagsKHR; -typedef VkFlags VkVideoEncodeRateControlFlagsKHR; -typedef struct VkVideoEncodeInfoKHR { - VkStructureType sType; - const void* pNext; - VkVideoEncodeFlagsKHR flags; - uint32_t qualityLevel; - VkBuffer dstBitstreamBuffer; - VkDeviceSize dstBitstreamBufferOffset; - VkDeviceSize dstBitstreamBufferMaxRange; - VkVideoPictureResourceInfoKHR srcPictureResource; - const VkVideoReferenceSlotInfoKHR* pSetupReferenceSlot; - uint32_t referenceSlotCount; - const VkVideoReferenceSlotInfoKHR* pReferenceSlots; - uint32_t precedingExternallyEncodedBytes; -} VkVideoEncodeInfoKHR; - -typedef struct VkVideoEncodeCapabilitiesKHR { - VkStructureType sType; - void* pNext; - VkVideoEncodeCapabilityFlagsKHR flags; - VkVideoEncodeRateControlModeFlagsKHR rateControlModes; - uint8_t rateControlLayerCount; - uint8_t qualityLevelCount; - VkExtent2D inputImageDataFillAlignment; -} VkVideoEncodeCapabilitiesKHR; - -typedef struct VkVideoEncodeUsageInfoKHR { - VkStructureType sType; - const void* pNext; - 
VkVideoEncodeUsageFlagsKHR videoUsageHints; - VkVideoEncodeContentFlagsKHR videoContentHints; - VkVideoEncodeTuningModeKHR tuningMode; -} VkVideoEncodeUsageInfoKHR; - -typedef struct VkVideoEncodeRateControlLayerInfoKHR { +// VK_AMDX_shader_enqueue is a preprocessor guard. Do not pass it to API calls. +#define VK_AMDX_shader_enqueue 1 +#define VK_AMDX_SHADER_ENQUEUE_SPEC_VERSION 2 +#define VK_AMDX_SHADER_ENQUEUE_EXTENSION_NAME "VK_AMDX_shader_enqueue" +#define VK_SHADER_INDEX_UNUSED_AMDX (~0U) +typedef struct VkPhysicalDeviceShaderEnqueueFeaturesAMDX { VkStructureType sType; - const void* pNext; - uint32_t averageBitrate; - uint32_t maxBitrate; - uint32_t frameRateNumerator; - uint32_t frameRateDenominator; - uint32_t virtualBufferSizeInMs; - uint32_t initialVirtualBufferSizeInMs; -} VkVideoEncodeRateControlLayerInfoKHR; - -typedef struct VkVideoEncodeRateControlInfoKHR { - VkStructureType sType; - const void* pNext; - VkVideoEncodeRateControlFlagsKHR flags; - VkVideoEncodeRateControlModeFlagBitsKHR rateControlMode; - uint8_t layerCount; - const VkVideoEncodeRateControlLayerInfoKHR* pLayerConfigs; -} VkVideoEncodeRateControlInfoKHR; - -typedef void (VKAPI_PTR *PFN_vkCmdEncodeVideoKHR)(VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR* pEncodeInfo); + void* pNext; + VkBool32 shaderEnqueue; + VkBool32 shaderMeshEnqueue; +} VkPhysicalDeviceShaderEnqueueFeaturesAMDX; + +typedef struct VkPhysicalDeviceShaderEnqueuePropertiesAMDX { + VkStructureType sType; + void* pNext; + uint32_t maxExecutionGraphDepth; + uint32_t maxExecutionGraphShaderOutputNodes; + uint32_t maxExecutionGraphShaderPayloadSize; + uint32_t maxExecutionGraphShaderPayloadCount; + uint32_t executionGraphDispatchAddressAlignment; + uint32_t maxExecutionGraphWorkgroupCount[3]; + uint32_t maxExecutionGraphWorkgroups; +} VkPhysicalDeviceShaderEnqueuePropertiesAMDX; + +typedef struct VkExecutionGraphPipelineScratchSizeAMDX { + VkStructureType sType; + void* pNext; + VkDeviceSize minSize; + VkDeviceSize maxSize; + VkDeviceSize sizeGranularity; +} VkExecutionGraphPipelineScratchSizeAMDX; + +typedef struct VkExecutionGraphPipelineCreateInfoAMDX { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + const VkPipelineLibraryCreateInfoKHR* pLibraryInfo; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkExecutionGraphPipelineCreateInfoAMDX; + +typedef union VkDeviceOrHostAddressConstAMDX { + VkDeviceAddress deviceAddress; + const void* hostAddress; +} VkDeviceOrHostAddressConstAMDX; + +typedef struct VkDispatchGraphInfoAMDX { + uint32_t nodeIndex; + uint32_t payloadCount; + VkDeviceOrHostAddressConstAMDX payloads; + uint64_t payloadStride; +} VkDispatchGraphInfoAMDX; + +typedef struct VkDispatchGraphCountInfoAMDX { + uint32_t count; + VkDeviceOrHostAddressConstAMDX infos; + uint64_t stride; +} VkDispatchGraphCountInfoAMDX; + +typedef struct VkPipelineShaderStageNodeCreateInfoAMDX { + VkStructureType sType; + const void* pNext; + const char* pName; + uint32_t index; +} VkPipelineShaderStageNodeCreateInfoAMDX; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateExecutionGraphPipelinesAMDX)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkExecutionGraphPipelineCreateInfoAMDX* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkGetExecutionGraphPipelineScratchSizeAMDX)(VkDevice device, VkPipeline 
executionGraph, VkExecutionGraphPipelineScratchSizeAMDX* pSizeInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetExecutionGraphPipelineNodeIndexAMDX)(VkDevice device, VkPipeline executionGraph, const VkPipelineShaderStageNodeCreateInfoAMDX* pNodeInfo, uint32_t* pNodeIndex); +typedef void (VKAPI_PTR *PFN_vkCmdInitializeGraphScratchMemoryAMDX)(VkCommandBuffer commandBuffer, VkPipeline executionGraph, VkDeviceAddress scratch, VkDeviceSize scratchSize); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceSize scratchSize, const VkDispatchGraphCountInfoAMDX* pCountInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphIndirectAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceSize scratchSize, const VkDispatchGraphCountInfoAMDX* pCountInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphIndirectCountAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceSize scratchSize, VkDeviceAddress countInfo); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdEncodeVideoKHR( +VKAPI_ATTR VkResult VKAPI_CALL vkCreateExecutionGraphPipelinesAMDX( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkExecutionGraphPipelineCreateInfoAMDX* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetExecutionGraphPipelineScratchSizeAMDX( + VkDevice device, + VkPipeline executionGraph, + VkExecutionGraphPipelineScratchSizeAMDX* pSizeInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetExecutionGraphPipelineNodeIndexAMDX( + VkDevice device, + VkPipeline executionGraph, + const VkPipelineShaderStageNodeCreateInfoAMDX* pNodeInfo, + uint32_t* pNodeIndex); + +VKAPI_ATTR void VKAPI_CALL vkCmdInitializeGraphScratchMemoryAMDX( VkCommandBuffer commandBuffer, - const VkVideoEncodeInfoKHR* pEncodeInfo); + VkPipeline executionGraph, + VkDeviceAddress scratch, + VkDeviceSize scratchSize); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphAMDX( + VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + VkDeviceSize scratchSize, + const VkDispatchGraphCountInfoAMDX* pCountInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphIndirectAMDX( + VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + VkDeviceSize scratchSize, + const VkDispatchGraphCountInfoAMDX* pCountInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphIndirectCountAMDX( + VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + VkDeviceSize scratchSize, + VkDeviceAddress countInfo); #endif -#define VK_EXT_video_encode_h264 1 -#include "vk_video/vulkan_video_codec_h264std.h" -#include "vk_video/vulkan_video_codec_h264std_encode.h" -#define VK_EXT_VIDEO_ENCODE_H264_SPEC_VERSION 8 -#define VK_EXT_VIDEO_ENCODE_H264_EXTENSION_NAME "VK_EXT_video_encode_h264" - -typedef enum VkVideoEncodeH264RateControlStructureEXT { - VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT = 0, - VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_FLAT_EXT = 1, - VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_DYADIC_EXT = 2, - VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkVideoEncodeH264RateControlStructureEXT; - -typedef enum VkVideoEncodeH264CapabilityFlagBitsEXT { - VK_VIDEO_ENCODE_H264_CAPABILITY_DIRECT_8X8_INFERENCE_ENABLED_BIT_EXT = 0x00000001, - VK_VIDEO_ENCODE_H264_CAPABILITY_DIRECT_8X8_INFERENCE_DISABLED_BIT_EXT = 0x00000002, - VK_VIDEO_ENCODE_H264_CAPABILITY_SEPARATE_COLOUR_PLANE_BIT_EXT = 0x00000004, - 
VK_VIDEO_ENCODE_H264_CAPABILITY_QPPRIME_Y_ZERO_TRANSFORM_BYPASS_BIT_EXT = 0x00000008, - VK_VIDEO_ENCODE_H264_CAPABILITY_SCALING_LISTS_BIT_EXT = 0x00000010, - VK_VIDEO_ENCODE_H264_CAPABILITY_HRD_COMPLIANCE_BIT_EXT = 0x00000020, - VK_VIDEO_ENCODE_H264_CAPABILITY_CHROMA_QP_OFFSET_BIT_EXT = 0x00000040, - VK_VIDEO_ENCODE_H264_CAPABILITY_SECOND_CHROMA_QP_OFFSET_BIT_EXT = 0x00000080, - VK_VIDEO_ENCODE_H264_CAPABILITY_PIC_INIT_QP_MINUS26_BIT_EXT = 0x00000100, - VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_PRED_BIT_EXT = 0x00000200, - VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BIPRED_EXPLICIT_BIT_EXT = 0x00000400, - VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BIPRED_IMPLICIT_BIT_EXT = 0x00000800, - VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_PRED_NO_TABLE_BIT_EXT = 0x00001000, - VK_VIDEO_ENCODE_H264_CAPABILITY_TRANSFORM_8X8_BIT_EXT = 0x00002000, - VK_VIDEO_ENCODE_H264_CAPABILITY_CABAC_BIT_EXT = 0x00004000, - VK_VIDEO_ENCODE_H264_CAPABILITY_CAVLC_BIT_EXT = 0x00008000, - VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_DISABLED_BIT_EXT = 0x00010000, - VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_ENABLED_BIT_EXT = 0x00020000, - VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_PARTIAL_BIT_EXT = 0x00040000, - VK_VIDEO_ENCODE_H264_CAPABILITY_DISABLE_DIRECT_SPATIAL_MV_PRED_BIT_EXT = 0x00080000, - VK_VIDEO_ENCODE_H264_CAPABILITY_MULTIPLE_SLICE_PER_FRAME_BIT_EXT = 0x00100000, - VK_VIDEO_ENCODE_H264_CAPABILITY_SLICE_MB_COUNT_BIT_EXT = 0x00200000, - VK_VIDEO_ENCODE_H264_CAPABILITY_ROW_UNALIGNED_SLICE_BIT_EXT = 0x00400000, - VK_VIDEO_ENCODE_H264_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_EXT = 0x00800000, - VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_EXT = 0x01000000, - VK_VIDEO_ENCODE_H264_CAPABILITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkVideoEncodeH264CapabilityFlagBitsEXT; -typedef VkFlags VkVideoEncodeH264CapabilityFlagsEXT; - -typedef enum VkVideoEncodeH264InputModeFlagBitsEXT { - VK_VIDEO_ENCODE_H264_INPUT_MODE_FRAME_BIT_EXT = 0x00000001, - VK_VIDEO_ENCODE_H264_INPUT_MODE_SLICE_BIT_EXT = 0x00000002, - VK_VIDEO_ENCODE_H264_INPUT_MODE_NON_VCL_BIT_EXT = 0x00000004, - VK_VIDEO_ENCODE_H264_INPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkVideoEncodeH264InputModeFlagBitsEXT; -typedef VkFlags VkVideoEncodeH264InputModeFlagsEXT; - -typedef enum VkVideoEncodeH264OutputModeFlagBitsEXT { - VK_VIDEO_ENCODE_H264_OUTPUT_MODE_FRAME_BIT_EXT = 0x00000001, - VK_VIDEO_ENCODE_H264_OUTPUT_MODE_SLICE_BIT_EXT = 0x00000002, - VK_VIDEO_ENCODE_H264_OUTPUT_MODE_NON_VCL_BIT_EXT = 0x00000004, - VK_VIDEO_ENCODE_H264_OUTPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkVideoEncodeH264OutputModeFlagBitsEXT; -typedef VkFlags VkVideoEncodeH264OutputModeFlagsEXT; -typedef struct VkVideoEncodeH264CapabilitiesEXT { - VkStructureType sType; - void* pNext; - VkVideoEncodeH264CapabilityFlagsEXT flags; - VkVideoEncodeH264InputModeFlagsEXT inputModeFlags; - VkVideoEncodeH264OutputModeFlagsEXT outputModeFlags; - uint8_t maxPPictureL0ReferenceCount; - uint8_t maxBPictureL0ReferenceCount; - uint8_t maxL1ReferenceCount; - VkBool32 motionVectorsOverPicBoundariesFlag; - uint32_t maxBytesPerPicDenom; - uint32_t maxBitsPerMbDenom; - uint32_t log2MaxMvLengthHorizontal; - uint32_t log2MaxMvLengthVertical; -} VkVideoEncodeH264CapabilitiesEXT; - -typedef struct VkVideoEncodeH264SessionParametersAddInfoEXT { - VkStructureType sType; - const void* pNext; - uint32_t spsStdCount; - const StdVideoH264SequenceParameterSet* pSpsStd; - uint32_t ppsStdCount; - const StdVideoH264PictureParameterSet* pPpsStd; -} VkVideoEncodeH264SessionParametersAddInfoEXT; - 
-typedef struct VkVideoEncodeH264SessionParametersCreateInfoEXT { - VkStructureType sType; - const void* pNext; - uint32_t maxSpsStdCount; - uint32_t maxPpsStdCount; - const VkVideoEncodeH264SessionParametersAddInfoEXT* pParametersAddInfo; -} VkVideoEncodeH264SessionParametersCreateInfoEXT; - -typedef struct VkVideoEncodeH264DpbSlotInfoEXT { - VkStructureType sType; - const void* pNext; - int8_t slotIndex; - const StdVideoEncodeH264ReferenceInfo* pStdReferenceInfo; -} VkVideoEncodeH264DpbSlotInfoEXT; - -typedef struct VkVideoEncodeH264ReferenceListsInfoEXT { - VkStructureType sType; - const void* pNext; - uint8_t referenceList0EntryCount; - const VkVideoEncodeH264DpbSlotInfoEXT* pReferenceList0Entries; - uint8_t referenceList1EntryCount; - const VkVideoEncodeH264DpbSlotInfoEXT* pReferenceList1Entries; - const StdVideoEncodeH264RefMemMgmtCtrlOperations* pMemMgmtCtrlOperations; -} VkVideoEncodeH264ReferenceListsInfoEXT; - -typedef struct VkVideoEncodeH264NaluSliceInfoEXT { - VkStructureType sType; - const void* pNext; - uint32_t mbCount; - const VkVideoEncodeH264ReferenceListsInfoEXT* pReferenceFinalLists; - const StdVideoEncodeH264SliceHeader* pSliceHeaderStd; -} VkVideoEncodeH264NaluSliceInfoEXT; - -typedef struct VkVideoEncodeH264VclFrameInfoEXT { - VkStructureType sType; - const void* pNext; - const VkVideoEncodeH264ReferenceListsInfoEXT* pReferenceFinalLists; - uint32_t naluSliceEntryCount; - const VkVideoEncodeH264NaluSliceInfoEXT* pNaluSliceEntries; - const StdVideoEncodeH264PictureInfo* pCurrentPictureInfo; -} VkVideoEncodeH264VclFrameInfoEXT; - -typedef struct VkVideoEncodeH264EmitPictureParametersInfoEXT { +// VK_NV_displacement_micromap is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_displacement_micromap 1 +#define VK_NV_DISPLACEMENT_MICROMAP_SPEC_VERSION 2 +#define VK_NV_DISPLACEMENT_MICROMAP_EXTENSION_NAME "VK_NV_displacement_micromap" + +typedef enum VkDisplacementMicromapFormatNV { + VK_DISPLACEMENT_MICROMAP_FORMAT_64_TRIANGLES_64_BYTES_NV = 1, + VK_DISPLACEMENT_MICROMAP_FORMAT_256_TRIANGLES_128_BYTES_NV = 2, + VK_DISPLACEMENT_MICROMAP_FORMAT_1024_TRIANGLES_128_BYTES_NV = 3, + VK_DISPLACEMENT_MICROMAP_FORMAT_MAX_ENUM_NV = 0x7FFFFFFF +} VkDisplacementMicromapFormatNV; +typedef struct VkPhysicalDeviceDisplacementMicromapFeaturesNV { VkStructureType sType; - const void* pNext; - uint8_t spsId; - VkBool32 emitSpsEnable; - uint32_t ppsIdEntryCount; - const uint8_t* ppsIdEntries; -} VkVideoEncodeH264EmitPictureParametersInfoEXT; - -typedef struct VkVideoEncodeH264ProfileInfoEXT { - VkStructureType sType; - const void* pNext; - StdVideoH264ProfileIdc stdProfileIdc; -} VkVideoEncodeH264ProfileInfoEXT; - -typedef struct VkVideoEncodeH264RateControlInfoEXT { - VkStructureType sType; - const void* pNext; - uint32_t gopFrameCount; - uint32_t idrPeriod; - uint32_t consecutiveBFrameCount; - VkVideoEncodeH264RateControlStructureEXT rateControlStructure; - uint8_t temporalLayerCount; -} VkVideoEncodeH264RateControlInfoEXT; - -typedef struct VkVideoEncodeH264QpEXT { - int32_t qpI; - int32_t qpP; - int32_t qpB; -} VkVideoEncodeH264QpEXT; - -typedef struct VkVideoEncodeH264FrameSizeEXT { - uint32_t frameISize; - uint32_t framePSize; - uint32_t frameBSize; -} VkVideoEncodeH264FrameSizeEXT; - -typedef struct VkVideoEncodeH264RateControlLayerInfoEXT { - VkStructureType sType; - const void* pNext; - uint8_t temporalLayerId; - VkBool32 useInitialRcQp; - VkVideoEncodeH264QpEXT initialRcQp; - VkBool32 useMinQp; - VkVideoEncodeH264QpEXT minQp; - VkBool32 useMaxQp; - 
VkVideoEncodeH264QpEXT maxQp; - VkBool32 useMaxFrameSize; - VkVideoEncodeH264FrameSizeEXT maxFrameSize; -} VkVideoEncodeH264RateControlLayerInfoEXT; - - - -#define VK_EXT_video_encode_h265 1 -#include "vk_video/vulkan_video_codec_h265std.h" -#include "vk_video/vulkan_video_codec_h265std_encode.h" -#define VK_EXT_VIDEO_ENCODE_H265_SPEC_VERSION 8 -#define VK_EXT_VIDEO_ENCODE_H265_EXTENSION_NAME "VK_EXT_video_encode_h265" - -typedef enum VkVideoEncodeH265RateControlStructureEXT { - VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT = 0, - VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_FLAT_EXT = 1, - VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_DYADIC_EXT = 2, - VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkVideoEncodeH265RateControlStructureEXT; - -typedef enum VkVideoEncodeH265CapabilityFlagBitsEXT { - VK_VIDEO_ENCODE_H265_CAPABILITY_SEPARATE_COLOUR_PLANE_BIT_EXT = 0x00000001, - VK_VIDEO_ENCODE_H265_CAPABILITY_SCALING_LISTS_BIT_EXT = 0x00000002, - VK_VIDEO_ENCODE_H265_CAPABILITY_SAMPLE_ADAPTIVE_OFFSET_ENABLED_BIT_EXT = 0x00000004, - VK_VIDEO_ENCODE_H265_CAPABILITY_PCM_ENABLE_BIT_EXT = 0x00000008, - VK_VIDEO_ENCODE_H265_CAPABILITY_SPS_TEMPORAL_MVP_ENABLED_BIT_EXT = 0x00000010, - VK_VIDEO_ENCODE_H265_CAPABILITY_HRD_COMPLIANCE_BIT_EXT = 0x00000020, - VK_VIDEO_ENCODE_H265_CAPABILITY_INIT_QP_MINUS26_BIT_EXT = 0x00000040, - VK_VIDEO_ENCODE_H265_CAPABILITY_LOG2_PARALLEL_MERGE_LEVEL_MINUS2_BIT_EXT = 0x00000080, - VK_VIDEO_ENCODE_H265_CAPABILITY_SIGN_DATA_HIDING_ENABLED_BIT_EXT = 0x00000100, - VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSFORM_SKIP_ENABLED_BIT_EXT = 0x00000200, - VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSFORM_SKIP_DISABLED_BIT_EXT = 0x00000400, - VK_VIDEO_ENCODE_H265_CAPABILITY_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_BIT_EXT = 0x00000800, - VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_PRED_BIT_EXT = 0x00001000, - VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_BIPRED_BIT_EXT = 0x00002000, - VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_PRED_NO_TABLE_BIT_EXT = 0x00004000, - VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSQUANT_BYPASS_ENABLED_BIT_EXT = 0x00008000, - VK_VIDEO_ENCODE_H265_CAPABILITY_ENTROPY_CODING_SYNC_ENABLED_BIT_EXT = 0x00010000, - VK_VIDEO_ENCODE_H265_CAPABILITY_DEBLOCKING_FILTER_OVERRIDE_ENABLED_BIT_EXT = 0x00020000, - VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILE_PER_FRAME_BIT_EXT = 0x00040000, - VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_PER_TILE_BIT_EXT = 0x00080000, - VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILE_PER_SLICE_BIT_EXT = 0x00100000, - VK_VIDEO_ENCODE_H265_CAPABILITY_SLICE_SEGMENT_CTB_COUNT_BIT_EXT = 0x00200000, - VK_VIDEO_ENCODE_H265_CAPABILITY_ROW_UNALIGNED_SLICE_SEGMENT_BIT_EXT = 0x00400000, - VK_VIDEO_ENCODE_H265_CAPABILITY_DEPENDENT_SLICE_SEGMENT_BIT_EXT = 0x00800000, - VK_VIDEO_ENCODE_H265_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_EXT = 0x01000000, - VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_EXT = 0x02000000, - VK_VIDEO_ENCODE_H265_CAPABILITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkVideoEncodeH265CapabilityFlagBitsEXT; -typedef VkFlags VkVideoEncodeH265CapabilityFlagsEXT; - -typedef enum VkVideoEncodeH265InputModeFlagBitsEXT { - VK_VIDEO_ENCODE_H265_INPUT_MODE_FRAME_BIT_EXT = 0x00000001, - VK_VIDEO_ENCODE_H265_INPUT_MODE_SLICE_SEGMENT_BIT_EXT = 0x00000002, - VK_VIDEO_ENCODE_H265_INPUT_MODE_NON_VCL_BIT_EXT = 0x00000004, - VK_VIDEO_ENCODE_H265_INPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkVideoEncodeH265InputModeFlagBitsEXT; -typedef VkFlags VkVideoEncodeH265InputModeFlagsEXT; - -typedef enum VkVideoEncodeH265OutputModeFlagBitsEXT { - 
VK_VIDEO_ENCODE_H265_OUTPUT_MODE_FRAME_BIT_EXT = 0x00000001, - VK_VIDEO_ENCODE_H265_OUTPUT_MODE_SLICE_SEGMENT_BIT_EXT = 0x00000002, - VK_VIDEO_ENCODE_H265_OUTPUT_MODE_NON_VCL_BIT_EXT = 0x00000004, - VK_VIDEO_ENCODE_H265_OUTPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkVideoEncodeH265OutputModeFlagBitsEXT; -typedef VkFlags VkVideoEncodeH265OutputModeFlagsEXT; - -typedef enum VkVideoEncodeH265CtbSizeFlagBitsEXT { - VK_VIDEO_ENCODE_H265_CTB_SIZE_16_BIT_EXT = 0x00000001, - VK_VIDEO_ENCODE_H265_CTB_SIZE_32_BIT_EXT = 0x00000002, - VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_EXT = 0x00000004, - VK_VIDEO_ENCODE_H265_CTB_SIZE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkVideoEncodeH265CtbSizeFlagBitsEXT; -typedef VkFlags VkVideoEncodeH265CtbSizeFlagsEXT; - -typedef enum VkVideoEncodeH265TransformBlockSizeFlagBitsEXT { - VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_4_BIT_EXT = 0x00000001, - VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_8_BIT_EXT = 0x00000002, - VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_16_BIT_EXT = 0x00000004, - VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_32_BIT_EXT = 0x00000008, - VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkVideoEncodeH265TransformBlockSizeFlagBitsEXT; -typedef VkFlags VkVideoEncodeH265TransformBlockSizeFlagsEXT; -typedef struct VkVideoEncodeH265CapabilitiesEXT { - VkStructureType sType; - void* pNext; - VkVideoEncodeH265CapabilityFlagsEXT flags; - VkVideoEncodeH265InputModeFlagsEXT inputModeFlags; - VkVideoEncodeH265OutputModeFlagsEXT outputModeFlags; - VkVideoEncodeH265CtbSizeFlagsEXT ctbSizes; - VkVideoEncodeH265TransformBlockSizeFlagsEXT transformBlockSizes; - uint8_t maxPPictureL0ReferenceCount; - uint8_t maxBPictureL0ReferenceCount; - uint8_t maxL1ReferenceCount; - uint8_t maxSubLayersCount; - uint8_t minLog2MinLumaCodingBlockSizeMinus3; - uint8_t maxLog2MinLumaCodingBlockSizeMinus3; - uint8_t minLog2MinLumaTransformBlockSizeMinus2; - uint8_t maxLog2MinLumaTransformBlockSizeMinus2; - uint8_t minMaxTransformHierarchyDepthInter; - uint8_t maxMaxTransformHierarchyDepthInter; - uint8_t minMaxTransformHierarchyDepthIntra; - uint8_t maxMaxTransformHierarchyDepthIntra; - uint8_t maxDiffCuQpDeltaDepth; - uint8_t minMaxNumMergeCand; - uint8_t maxMaxNumMergeCand; -} VkVideoEncodeH265CapabilitiesEXT; - -typedef struct VkVideoEncodeH265SessionParametersAddInfoEXT { - VkStructureType sType; - const void* pNext; - uint32_t vpsStdCount; - const StdVideoH265VideoParameterSet* pVpsStd; - uint32_t spsStdCount; - const StdVideoH265SequenceParameterSet* pSpsStd; - uint32_t ppsStdCount; - const StdVideoH265PictureParameterSet* pPpsStd; -} VkVideoEncodeH265SessionParametersAddInfoEXT; - -typedef struct VkVideoEncodeH265SessionParametersCreateInfoEXT { - VkStructureType sType; - const void* pNext; - uint32_t maxVpsStdCount; - uint32_t maxSpsStdCount; - uint32_t maxPpsStdCount; - const VkVideoEncodeH265SessionParametersAddInfoEXT* pParametersAddInfo; -} VkVideoEncodeH265SessionParametersCreateInfoEXT; - -typedef struct VkVideoEncodeH265DpbSlotInfoEXT { - VkStructureType sType; - const void* pNext; - int8_t slotIndex; - const StdVideoEncodeH265ReferenceInfo* pStdReferenceInfo; -} VkVideoEncodeH265DpbSlotInfoEXT; - -typedef struct VkVideoEncodeH265ReferenceListsInfoEXT { - VkStructureType sType; - const void* pNext; - uint8_t referenceList0EntryCount; - const VkVideoEncodeH265DpbSlotInfoEXT* pReferenceList0Entries; - uint8_t referenceList1EntryCount; - const VkVideoEncodeH265DpbSlotInfoEXT* pReferenceList1Entries; - const 
StdVideoEncodeH265ReferenceModifications* pReferenceModifications; -} VkVideoEncodeH265ReferenceListsInfoEXT; - -typedef struct VkVideoEncodeH265NaluSliceSegmentInfoEXT { - VkStructureType sType; - const void* pNext; - uint32_t ctbCount; - const VkVideoEncodeH265ReferenceListsInfoEXT* pReferenceFinalLists; - const StdVideoEncodeH265SliceSegmentHeader* pSliceSegmentHeaderStd; -} VkVideoEncodeH265NaluSliceSegmentInfoEXT; - -typedef struct VkVideoEncodeH265VclFrameInfoEXT { - VkStructureType sType; - const void* pNext; - const VkVideoEncodeH265ReferenceListsInfoEXT* pReferenceFinalLists; - uint32_t naluSliceSegmentEntryCount; - const VkVideoEncodeH265NaluSliceSegmentInfoEXT* pNaluSliceSegmentEntries; - const StdVideoEncodeH265PictureInfo* pCurrentPictureInfo; -} VkVideoEncodeH265VclFrameInfoEXT; - -typedef struct VkVideoEncodeH265EmitPictureParametersInfoEXT { + void* pNext; + VkBool32 displacementMicromap; +} VkPhysicalDeviceDisplacementMicromapFeaturesNV; + +typedef struct VkPhysicalDeviceDisplacementMicromapPropertiesNV { VkStructureType sType; - const void* pNext; - uint8_t vpsId; - uint8_t spsId; - VkBool32 emitVpsEnable; - VkBool32 emitSpsEnable; - uint32_t ppsIdEntryCount; - const uint8_t* ppsIdEntries; -} VkVideoEncodeH265EmitPictureParametersInfoEXT; - -typedef struct VkVideoEncodeH265ProfileInfoEXT { - VkStructureType sType; - const void* pNext; - StdVideoH265ProfileIdc stdProfileIdc; -} VkVideoEncodeH265ProfileInfoEXT; - -typedef struct VkVideoEncodeH265RateControlInfoEXT { - VkStructureType sType; - const void* pNext; - uint32_t gopFrameCount; - uint32_t idrPeriod; - uint32_t consecutiveBFrameCount; - VkVideoEncodeH265RateControlStructureEXT rateControlStructure; - uint8_t subLayerCount; -} VkVideoEncodeH265RateControlInfoEXT; - -typedef struct VkVideoEncodeH265QpEXT { - int32_t qpI; - int32_t qpP; - int32_t qpB; -} VkVideoEncodeH265QpEXT; - -typedef struct VkVideoEncodeH265FrameSizeEXT { - uint32_t frameISize; - uint32_t framePSize; - uint32_t frameBSize; -} VkVideoEncodeH265FrameSizeEXT; - -typedef struct VkVideoEncodeH265RateControlLayerInfoEXT { - VkStructureType sType; - const void* pNext; - uint8_t temporalId; - VkBool32 useInitialRcQp; - VkVideoEncodeH265QpEXT initialRcQp; - VkBool32 useMinQp; - VkVideoEncodeH265QpEXT minQp; - VkBool32 useMaxQp; - VkVideoEncodeH265QpEXT maxQp; - VkBool32 useMaxFrameSize; - VkVideoEncodeH265FrameSizeEXT maxFrameSize; -} VkVideoEncodeH265RateControlLayerInfoEXT; - - - -#define VK_EXT_video_decode_h264 1 -#include "vk_video/vulkan_video_codec_h264std_decode.h" -#define VK_EXT_VIDEO_DECODE_H264_SPEC_VERSION 6 -#define VK_EXT_VIDEO_DECODE_H264_EXTENSION_NAME "VK_EXT_video_decode_h264" - -typedef enum VkVideoDecodeH264PictureLayoutFlagBitsEXT { - VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_PROGRESSIVE_EXT = 0, - VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_INTERLEAVED_LINES_BIT_EXT = 0x00000001, - VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_SEPARATE_PLANES_BIT_EXT = 0x00000002, - VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkVideoDecodeH264PictureLayoutFlagBitsEXT; -typedef VkFlags VkVideoDecodeH264PictureLayoutFlagsEXT; -typedef struct VkVideoDecodeH264ProfileInfoEXT { - VkStructureType sType; - const void* pNext; - StdVideoH264ProfileIdc stdProfileIdc; - VkVideoDecodeH264PictureLayoutFlagsEXT pictureLayout; -} VkVideoDecodeH264ProfileInfoEXT; - -typedef struct VkVideoDecodeH264CapabilitiesEXT { - VkStructureType sType; - void* pNext; - StdVideoH264Level maxLevel; - VkOffset2D fieldOffsetGranularity; -} 
VkVideoDecodeH264CapabilitiesEXT; - -typedef struct VkVideoDecodeH264SessionParametersAddInfoEXT { - VkStructureType sType; - const void* pNext; - uint32_t spsStdCount; - const StdVideoH264SequenceParameterSet* pSpsStd; - uint32_t ppsStdCount; - const StdVideoH264PictureParameterSet* pPpsStd; -} VkVideoDecodeH264SessionParametersAddInfoEXT; - -typedef struct VkVideoDecodeH264SessionParametersCreateInfoEXT { - VkStructureType sType; - const void* pNext; - uint32_t maxSpsStdCount; - uint32_t maxPpsStdCount; - const VkVideoDecodeH264SessionParametersAddInfoEXT* pParametersAddInfo; -} VkVideoDecodeH264SessionParametersCreateInfoEXT; - -typedef struct VkVideoDecodeH264PictureInfoEXT { - VkStructureType sType; - const void* pNext; - const StdVideoDecodeH264PictureInfo* pStdPictureInfo; - uint32_t slicesCount; - const uint32_t* pSlicesDataOffsets; -} VkVideoDecodeH264PictureInfoEXT; - -typedef struct VkVideoDecodeH264MvcInfoEXT { - VkStructureType sType; - const void* pNext; - const StdVideoDecodeH264Mvc* pStdMvc; -} VkVideoDecodeH264MvcInfoEXT; - -typedef struct VkVideoDecodeH264DpbSlotInfoEXT { - VkStructureType sType; - const void* pNext; - const StdVideoDecodeH264ReferenceInfo* pStdReferenceInfo; -} VkVideoDecodeH264DpbSlotInfoEXT; - - - -#define VK_EXT_video_decode_h265 1 -#include "vk_video/vulkan_video_codec_h265std_decode.h" -#define VK_EXT_VIDEO_DECODE_H265_SPEC_VERSION 4 -#define VK_EXT_VIDEO_DECODE_H265_EXTENSION_NAME "VK_EXT_video_decode_h265" -typedef struct VkVideoDecodeH265ProfileInfoEXT { - VkStructureType sType; - const void* pNext; - StdVideoH265ProfileIdc stdProfileIdc; -} VkVideoDecodeH265ProfileInfoEXT; - -typedef struct VkVideoDecodeH265CapabilitiesEXT { - VkStructureType sType; - void* pNext; - StdVideoH265Level maxLevel; -} VkVideoDecodeH265CapabilitiesEXT; - -typedef struct VkVideoDecodeH265SessionParametersAddInfoEXT { - VkStructureType sType; - const void* pNext; - uint32_t vpsStdCount; - const StdVideoH265VideoParameterSet* pVpsStd; - uint32_t spsStdCount; - const StdVideoH265SequenceParameterSet* pSpsStd; - uint32_t ppsStdCount; - const StdVideoH265PictureParameterSet* pPpsStd; -} VkVideoDecodeH265SessionParametersAddInfoEXT; - -typedef struct VkVideoDecodeH265SessionParametersCreateInfoEXT { - VkStructureType sType; - const void* pNext; - uint32_t maxVpsStdCount; - uint32_t maxSpsStdCount; - uint32_t maxPpsStdCount; - const VkVideoDecodeH265SessionParametersAddInfoEXT* pParametersAddInfo; -} VkVideoDecodeH265SessionParametersCreateInfoEXT; - -typedef struct VkVideoDecodeH265PictureInfoEXT { - VkStructureType sType; - const void* pNext; - StdVideoDecodeH265PictureInfo* pStdPictureInfo; - uint32_t slicesCount; - const uint32_t* pSlicesDataOffsets; -} VkVideoDecodeH265PictureInfoEXT; - -typedef struct VkVideoDecodeH265DpbSlotInfoEXT { - VkStructureType sType; - const void* pNext; - const StdVideoDecodeH265ReferenceInfo* pStdReferenceInfo; -} VkVideoDecodeH265DpbSlotInfoEXT; + void* pNext; + uint32_t maxDisplacementMicromapSubdivisionLevel; +} VkPhysicalDeviceDisplacementMicromapPropertiesNV; + +typedef struct VkAccelerationStructureTrianglesDisplacementMicromapNV { + VkStructureType sType; + void* pNext; + VkFormat displacementBiasAndScaleFormat; + VkFormat displacementVectorFormat; + VkDeviceOrHostAddressConstKHR displacementBiasAndScaleBuffer; + VkDeviceSize displacementBiasAndScaleStride; + VkDeviceOrHostAddressConstKHR displacementVectorBuffer; + VkDeviceSize displacementVectorStride; + VkDeviceOrHostAddressConstKHR displacedMicromapPrimitiveFlags; + VkDeviceSize 
displacedMicromapPrimitiveFlagsStride; + VkIndexType indexType; + VkDeviceOrHostAddressConstKHR indexBuffer; + VkDeviceSize indexStride; + uint32_t baseTriangle; + uint32_t usageCountsCount; + const VkMicromapUsageEXT* pUsageCounts; + const VkMicromapUsageEXT* const* ppUsageCounts; + VkMicromapEXT micromap; +} VkAccelerationStructureTrianglesDisplacementMicromapNV; #ifdef __cplusplus diff --git a/src/libraries/vulkanheaders/vulkan_core.h b/src/libraries/vulkanheaders/vulkan_core.h index 6cc788e71..4327f1134 100644 --- a/src/libraries/vulkanheaders/vulkan_core.h +++ b/src/libraries/vulkanheaders/vulkan_core.h @@ -2,7 +2,7 @@ #define VULKAN_CORE_H_ 1 /* -** Copyright 2015-2022 The Khronos Group Inc. +** Copyright 2015-2025 The Khronos Group Inc. ** ** SPDX-License-Identifier: Apache-2.0 */ @@ -19,6 +19,7 @@ extern "C" { +// VK_VERSION_1_0 is a preprocessor guard. Do not pass it to API calls. #define VK_VERSION_1_0 1 #include "vk_platform.h" @@ -26,7 +27,7 @@ extern "C" { #ifndef VK_USE_64_BIT_PTR_DEFINES - #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) + #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) || (defined(__riscv) && __riscv_xlen == 64) #define VK_USE_64_BIT_PTR_DEFINES 1 #else #define VK_USE_64_BIT_PTR_DEFINES 0 @@ -58,37 +59,41 @@ extern "C" { #endif #endif -// DEPRECATED: This define is deprecated. VK_MAKE_API_VERSION should be used instead. -#define VK_MAKE_VERSION(major, minor, patch) \ - ((((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch))) +#define VK_MAKE_API_VERSION(variant, major, minor, patch) \ + ((((uint32_t)(variant)) << 29U) | (((uint32_t)(major)) << 22U) | (((uint32_t)(minor)) << 12U) | ((uint32_t)(patch))) // DEPRECATED: This define has been removed. Specific version defines (e.g. VK_API_VERSION_1_0), or the VK_MAKE_VERSION macro, should be used instead. -//#define VK_API_VERSION VK_MAKE_VERSION(1, 0, 0) // Patch version should always be set to 0 - -#define VK_MAKE_API_VERSION(variant, major, minor, patch) \ - ((((uint32_t)(variant)) << 29) | (((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch))) +//#define VK_API_VERSION VK_MAKE_API_VERSION(0, 1, 0, 0) // Patch version should always be set to 0 // Vulkan 1.0 version number #define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0 // Version of this file -#define VK_HEADER_VERSION 228 +#define VK_HEADER_VERSION 307 // Complete version of this file -#define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION) +#define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 4, VK_HEADER_VERSION) + +// VK_MAKE_VERSION is deprecated, but no reason was given in the API XML +// DEPRECATED: This define is deprecated. VK_MAKE_API_VERSION should be used instead. +#define VK_MAKE_VERSION(major, minor, patch) \ + ((((uint32_t)(major)) << 22U) | (((uint32_t)(minor)) << 12U) | ((uint32_t)(patch))) +// VK_VERSION_MAJOR is deprecated, but no reason was given in the API XML // DEPRECATED: This define is deprecated. VK_API_VERSION_MAJOR should be used instead. 
-#define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22) +#define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22U) +// VK_VERSION_MINOR is deprecated, but no reason was given in the API XML // DEPRECATED: This define is deprecated. VK_API_VERSION_MINOR should be used instead. -#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3FFU) +#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12U) & 0x3FFU) +// VK_VERSION_PATCH is deprecated, but no reason was given in the API XML // DEPRECATED: This define is deprecated. VK_API_VERSION_PATCH should be used instead. #define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU) -#define VK_API_VERSION_VARIANT(version) ((uint32_t)(version) >> 29) -#define VK_API_VERSION_MAJOR(version) (((uint32_t)(version) >> 22) & 0x7FU) -#define VK_API_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3FFU) +#define VK_API_VERSION_VARIANT(version) ((uint32_t)(version) >> 29U) +#define VK_API_VERSION_MAJOR(version) (((uint32_t)(version) >> 22U) & 0x7FU) +#define VK_API_VERSION_MINOR(version) (((uint32_t)(version) >> 12U) & 0x3FFU) #define VK_API_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU) typedef uint32_t VkBool32; typedef uint64_t VkDeviceAddress; @@ -161,6 +166,7 @@ typedef enum VkResult { VK_ERROR_FRAGMENTATION = -1000161000, VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS = -1000257000, VK_PIPELINE_COMPILE_REQUIRED = 1000297000, + VK_ERROR_NOT_PERMITTED = -1000174001, VK_ERROR_SURFACE_LOST_KHR = -1000000000, VK_ERROR_NATIVE_WINDOW_IN_USE_KHR = -1000000001, VK_SUBOPTIMAL_KHR = 1000001003, @@ -168,40 +174,34 @@ typedef enum VkResult { VK_ERROR_INCOMPATIBLE_DISPLAY_KHR = -1000003001, VK_ERROR_VALIDATION_FAILED_EXT = -1000011001, VK_ERROR_INVALID_SHADER_NV = -1000012000, -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR = -1000023000, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR = -1000023001, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR = -1000023002, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR = -1000023003, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR = -1000023004, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR = -1000023005, -#endif VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT = -1000158000, - VK_ERROR_NOT_PERMITTED_KHR = -1000174001, VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT = -1000255000, VK_THREAD_IDLE_KHR = 1000268000, VK_THREAD_DONE_KHR = 1000268001, VK_OPERATION_DEFERRED_KHR = 1000268002, VK_OPERATION_NOT_DEFERRED_KHR = 1000268003, + VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR = -1000299000, VK_ERROR_COMPRESSION_EXHAUSTED_EXT = -1000338000, + VK_INCOMPATIBLE_SHADER_BINARY_EXT = 1000482000, + VK_PIPELINE_BINARY_MISSING_KHR = 1000483000, + VK_ERROR_NOT_ENOUGH_SPACE_KHR = -1000483000, VK_ERROR_OUT_OF_POOL_MEMORY_KHR = VK_ERROR_OUT_OF_POOL_MEMORY, VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = VK_ERROR_INVALID_EXTERNAL_HANDLE, VK_ERROR_FRAGMENTATION_EXT = VK_ERROR_FRAGMENTATION, - VK_ERROR_NOT_PERMITTED_EXT = VK_ERROR_NOT_PERMITTED_KHR, + VK_ERROR_NOT_PERMITTED_EXT = VK_ERROR_NOT_PERMITTED, + VK_ERROR_NOT_PERMITTED_KHR = VK_ERROR_NOT_PERMITTED, VK_ERROR_INVALID_DEVICE_ADDRESS_EXT = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, VK_PIPELINE_COMPILE_REQUIRED_EXT = 
VK_PIPELINE_COMPILE_REQUIRED, VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT = VK_PIPELINE_COMPILE_REQUIRED, + // VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT is a deprecated alias + VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT = VK_INCOMPATIBLE_SHADER_BINARY_EXT, VK_RESULT_MAX_ENUM = 0x7FFFFFFF } VkResult; @@ -422,6 +422,56 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES = 1000413001, VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS = 1000413002, VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS = 1000413003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_4_FEATURES = 55, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_4_PROPERTIES = 56, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO = 1000174000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES = 1000388000, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES = 1000388001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES = 1000416000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES = 1000528000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES = 1000544000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES = 1000259000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO = 1000259001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES = 1000259002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES = 1000525000, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO = 1000190001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES = 1000190002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES = 1000265000, + VK_STRUCTURE_TYPE_MEMORY_MAP_INFO = 1000271000, + VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO = 1000271001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES = 1000470000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES = 1000470001, + VK_STRUCTURE_TYPE_RENDERING_AREA_INFO = 1000470003, + VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO = 1000470004, + VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2 = 1000338002, + VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2 = 1000338003, + VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO = 1000470005, + VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO = 1000470006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES = 1000080000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES = 1000232000, + VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_LOCATION_INFO = 1000232001, + VK_STRUCTURE_TYPE_RENDERING_INPUT_ATTACHMENT_INDEX_INFO = 1000232002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES = 1000545000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES = 1000545001, + VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS = 1000545002, + VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO = 1000545003, + VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO = 1000545004, + VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO = 1000545005, + VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO = 1000545006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES = 1000466000, + VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO = 1000068000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES = 1000068001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES = 1000068002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES = 1000270000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES = 1000270001, + 
VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY = 1000270002, + VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY = 1000270003, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO = 1000270004, + VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO = 1000270005, + VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO = 1000270006, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO = 1000270007, + VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE = 1000270008, + VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY = 1000270009, VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR = 1000001000, VK_STRUCTURE_TYPE_PRESENT_INFO_KHR = 1000001001, VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR = 1000060007, @@ -443,66 +493,26 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT = 1000022000, VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT = 1000022001, VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT = 1000022002, -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR = 1000023000, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR = 1000023001, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_INFO_KHR = 1000023002, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR = 1000023003, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_BIND_VIDEO_SESSION_MEMORY_INFO_KHR = 1000023004, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR = 1000023005, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000023006, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR = 1000023007, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR = 1000023008, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR = 1000023009, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR = 1000023010, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_INFO_KHR = 1000023011, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR = 1000023012, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_VIDEO_PROFILE_LIST_INFO_KHR = 1000023013, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR = 1000023014, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR = 1000023015, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR = 1000023016, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR = 1000024000, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_VIDEO_DECODE_CAPABILITIES_KHR = 1000024001, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_VIDEO_DECODE_USAGE_INFO_KHR = 1000024002, -#endif VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV = 1000026000, VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV = 1000026001, VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV = 1000026002, @@ -512,100 +522,44 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX = 1000029000, VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX = 1000029001, VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX = 1000029002, + VK_STRUCTURE_TYPE_CU_MODULE_TEXTURING_MODE_CREATE_INFO_NVX = 1000029004, 
VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX = 1000030000, VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX = 1000030001, -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_EXT = 1000038000, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000038001, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT = 1000038002, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_VCL_FRAME_INFO_EXT = 1000038003, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_EXT = 1000038004, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_INFO_EXT = 1000038005, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_INFO_EXT = 1000038006, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_INFO_EXT = 1000038007, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_EXT = 1000038008, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_EXT = 1000038009, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_REFERENCE_LISTS_INFO_EXT = 1000038010, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_CAPABILITIES_EXT = 1000039000, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000039001, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT = 1000039002, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_VCL_FRAME_INFO_EXT = 1000039003, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_EXT = 1000039004, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_EXT = 1000039005, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_EMIT_PICTURE_PARAMETERS_INFO_EXT = 1000039006, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_INFO_EXT = 1000039007, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_REFERENCE_LISTS_INFO_EXT = 1000039008, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_EXT = 1000039009, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_EXT = 1000039010, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_EXT = 1000040000, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_EXT = 1000040001, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_MVC_INFO_EXT = 1000040002, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_INFO_EXT = 1000040003, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000040004, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT = 1000040005, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_EXT = 1000040006, -#endif + 
VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_KHR = 1000038000, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000038001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR = 1000038002, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PICTURE_INFO_KHR = 1000038003, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_KHR = 1000038004, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_INFO_KHR = 1000038005, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_GOP_REMAINING_FRAME_INFO_KHR = 1000038006, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_INFO_KHR = 1000038007, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_KHR = 1000038008, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_KHR = 1000038009, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_CREATE_INFO_KHR = 1000038010, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_QUALITY_LEVEL_PROPERTIES_KHR = 1000038011, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_GET_INFO_KHR = 1000038012, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_FEEDBACK_INFO_KHR = 1000038013, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_CAPABILITIES_KHR = 1000039000, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000039001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR = 1000039002, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PICTURE_INFO_KHR = 1000039003, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_KHR = 1000039004, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_KHR = 1000039005, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_GOP_REMAINING_FRAME_INFO_KHR = 1000039006, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_INFO_KHR = 1000039007, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_KHR = 1000039009, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_KHR = 1000039010, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_CREATE_INFO_KHR = 1000039011, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_QUALITY_LEVEL_PROPERTIES_KHR = 1000039012, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_GET_INFO_KHR = 1000039013, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_FEEDBACK_INFO_KHR = 1000039014, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_KHR = 1000040000, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_KHR = 1000040001, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_INFO_KHR = 1000040003, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000040004, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR = 1000040005, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR = 1000040006, VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD = 1000041000, - VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000044006, - VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT = 1000044007, - VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD = 1000044008, - VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX = 1000044009, VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP = 1000049000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV = 1000050000, VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV = 1000056000, @@ -617,9 +571,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN = 1000062000, VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT = 1000067000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT = 1000067001, - VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO_EXT = 1000068000, - 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT = 1000068001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT = 1000068002, VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073000, VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073001, VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR = 1000073002, @@ -634,7 +585,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR = 1000078003, VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR = 1000079000, VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR = 1000079001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR = 1000080000, VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT = 1000081000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT = 1000081001, VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT = 1000081002, @@ -647,6 +597,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT = 1000091003, VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE = 1000092000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX = 1000097000, + VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX = 1000044009, VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV = 1000098000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT = 1000099000, VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT = 1000099001, @@ -655,6 +606,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT = 1000102000, VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT = 1000102001, VK_STRUCTURE_TYPE_HDR_METADATA_EXT = 1000105000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RELAXED_LINE_RASTERIZATION_FEATURES_IMG = 1000110000, VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR = 1000111000, VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114000, VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114001, @@ -690,6 +642,22 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129004, VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID = 1000129005, VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID = 1000129006, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_FEATURES_AMDX = 1000134000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_PROPERTIES_AMDX = 1000134001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX = 1000134002, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX = 1000134003, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX = 1000134004, +#endif + VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD = 1000044008, VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT = 1000143000, VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT = 1000143001, VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT = 1000143002, @@ -761,35 +729,16 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT = 1000178002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR = 1000181000, VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD = 1000183000, - 
VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT = 1000184000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD = 1000185000, -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_EXT = 1000187000, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000187001, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT = 1000187002, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_EXT = 1000187003, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_EXT = 1000187004, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_EXT = 1000187005, -#endif - VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR = 1000174000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR = 1000388000, - VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR = 1000388001, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_KHR = 1000187000, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000187001, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR = 1000187002, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_KHR = 1000187003, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_KHR = 1000187004, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR = 1000187005, VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD = 1000189000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT = 1000190000, - VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT = 1000190001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT = 1000190002, VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP = 1000191000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV = 1000201000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV = 1000202000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV = 1000202001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV = 1000204000, @@ -797,6 +746,8 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV = 1000205002, VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV = 1000206000, VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV = 1000206001, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV = 1000314008, + VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV = 1000314009, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL = 1000209000, VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL = 1000210000, VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL = 1000210001, @@ -812,14 +763,17 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT = 1000218000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT = 1000218001, VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT = 1000218002, + VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT = 1000044007, VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000226000, VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR = 1000226001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR = 1000226002, 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR = 1000226003, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR = 1000226004, + VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000044006, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD = 1000227000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD = 1000229000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT = 1000234000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_QUAD_CONTROL_FEATURES_KHR = 1000235000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT = 1000237000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT = 1000238000, VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT = 1000238001, @@ -844,11 +798,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT = 1000255002, VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT = 1000255001, VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT = 1000256000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT = 1000259000, - VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT = 1000259001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT = 1000259002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT = 1000260000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT = 1000265000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT = 1000267000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR = 1000269000, VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR = 1000269001, @@ -856,7 +806,19 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR = 1000269003, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR = 1000269004, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR = 1000269005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_FEATURES_EXT = 1000272000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_PROPERTIES_EXT = 1000272001, + VK_STRUCTURE_TYPE_MEMORY_MAP_PLACED_INFO_EXT = 1000272002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT = 1000273000, + VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT = 1000274000, + VK_STRUCTURE_TYPE_SURFACE_PRESENT_SCALING_CAPABILITIES_EXT = 1000274001, + VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_COMPATIBILITY_EXT = 1000274002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT = 1000275000, + VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_FENCE_INFO_EXT = 1000275001, + VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT = 1000275002, + VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODE_INFO_EXT = 1000275003, + VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT = 1000275004, + VK_STRUCTURE_TYPE_RELEASE_SWAPCHAIN_IMAGES_INFO_EXT = 1000275005, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV = 1000277000, VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV = 1000277001, VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV = 1000277002, @@ -870,6 +832,9 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT = 1000281000, VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM = 1000282000, VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM = 1000282001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_BIAS_CONTROL_FEATURES_EXT = 1000283000, + 
VK_STRUCTURE_TYPE_DEPTH_BIAS_INFO_EXT = 1000283001, + VK_STRUCTURE_TYPE_DEPTH_BIAS_REPRESENTATION_INFO_EXT = 1000283002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT = 1000284000, VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT = 1000284001, VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT = 1000284002, @@ -879,25 +844,30 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT = 1000287001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT = 1000287002, VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR = 1000290000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV = 1000292000, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_BARRIER_NV = 1000292001, + VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV = 1000292002, VK_STRUCTURE_TYPE_PRESENT_ID_KHR = 1000294000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR = 1000294001, -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR = 1000299000, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR = 1000299001, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR = 1000299002, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_VIDEO_ENCODE_CAPABILITIES_KHR = 1000299003, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_VIDEO_ENCODE_USAGE_INFO_KHR = 1000299004, -#endif + VK_STRUCTURE_TYPE_QUERY_POOL_VIDEO_ENCODE_FEEDBACK_CREATE_INFO_KHR = 1000299005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR = 1000299006, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUALITY_LEVEL_PROPERTIES_KHR = 1000299007, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR = 1000299008, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_GET_INFO_KHR = 1000299009, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_FEEDBACK_INFO_KHR = 1000299010, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV = 1000300000, VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV = 1000300001, + VK_STRUCTURE_TYPE_CUDA_MODULE_CREATE_INFO_NV = 1000307000, + VK_STRUCTURE_TYPE_CUDA_FUNCTION_CREATE_INFO_NV = 1000307001, + VK_STRUCTURE_TYPE_CUDA_LAUNCH_INFO_NV = 1000307002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_FEATURES_NV = 1000307003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_PROPERTIES_NV = 1000307004, + VK_STRUCTURE_TYPE_QUERY_LOW_LATENCY_SUPPORT_NV = 1000310000, VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT = 1000311000, VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECTS_INFO_EXT = 1000311001, VK_STRUCTURE_TYPE_EXPORT_METAL_DEVICE_INFO_EXT = 1000311002, @@ -910,8 +880,19 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_IMPORT_METAL_IO_SURFACE_INFO_EXT = 1000311009, VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT = 1000311010, VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT = 1000311011, - VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV = 1000314008, - VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV = 1000314009, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT = 1000316000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT = 1000316001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT = 1000316002, + VK_STRUCTURE_TYPE_DESCRIPTOR_ADDRESS_INFO_EXT = 1000316003, + VK_STRUCTURE_TYPE_DESCRIPTOR_GET_INFO_EXT = 1000316004, + VK_STRUCTURE_TYPE_BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 
1000316005, + VK_STRUCTURE_TYPE_IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316006, + VK_STRUCTURE_TYPE_IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316007, + VK_STRUCTURE_TYPE_SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316008, + VK_STRUCTURE_TYPE_OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT = 1000316010, + VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_INFO_EXT = 1000316011, + VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT = 1000316012, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316009, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT = 1000320000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT = 1000320001, VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT = 1000320002, @@ -934,20 +915,24 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR = 1000336000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT = 1000338000, VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT = 1000338001, - VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT = 1000338002, - VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT = 1000338003, VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT = 1000338004, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT = 1000339000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT = 1000340000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT = 1000341000, + VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT = 1000341001, + VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT = 1000341002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT = 1000344000, VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT = 1000346000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT = 1000352000, VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT = 1000352001, VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT = 1000352002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT = 1000353000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT = 1000354000, + VK_STRUCTURE_TYPE_DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT = 1000354001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT = 1000355000, VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT = 1000355001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT = 1000356000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_MODE_FIFO_LATEST_READY_FEATURES_EXT = 1000361000, VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA = 1000364000, VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA = 1000364001, VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA = 1000364002, @@ -971,6 +956,8 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV = 1000371001, VK_STRUCTURE_TYPE_PIPELINE_PROPERTIES_IDENTIFIER_EXT = 1000372000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT = 1000372001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAME_BOUNDARY_FEATURES_EXT = 1000375000, + VK_STRUCTURE_TYPE_FRAME_BOUNDARY_EXT = 1000375001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT = 1000376000, VK_STRUCTURE_TYPE_SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT = 1000376001, VK_STRUCTURE_TYPE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT = 1000376002, @@ -985,47 +972,270 @@ typedef enum VkStructureType { 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT = 1000392000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT = 1000392001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT = 1000393000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_FEATURES_EXT = 1000395000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_PROPERTIES_EXT = 1000395001, + VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT = 1000396000, + VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT = 1000396001, + VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT = 1000396002, + VK_STRUCTURE_TYPE_COPY_MICROMAP_TO_MEMORY_INFO_EXT = 1000396003, + VK_STRUCTURE_TYPE_COPY_MEMORY_TO_MICROMAP_INFO_EXT = 1000396004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT = 1000396005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT = 1000396006, + VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT = 1000396007, + VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT = 1000396008, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT = 1000396009, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_FEATURES_NV = 1000397000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_PROPERTIES_NV = 1000397001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_DISPLACEMENT_MICROMAP_NV = 1000397002, +#endif + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_FEATURES_HUAWEI = 1000404000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_PROPERTIES_HUAWEI = 1000404001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_VRS_FEATURES_HUAWEI = 1000404002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT = 1000411000, VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT = 1000411001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT = 1000412000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM = 1000415000, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM = 1000417000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM = 1000417001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM = 1000417002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_SLICED_VIEW_OF_3D_FEATURES_EXT = 1000418000, + VK_STRUCTURE_TYPE_IMAGE_VIEW_SLICED_CREATE_INFO_EXT = 1000418001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE = 1000420000, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE = 1000420001, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE = 1000420002, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT = 1000421000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT = 1000422000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_FEATURES_ARM = 1000424000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_PROPERTIES_ARM = 1000424001, + VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_BEGIN_INFO_ARM = 1000424002, + VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_INFO_ARM = 1000424003, + VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_SUBMIT_INFO_ARM = 1000424004, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM = 1000425000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM = 1000425001, VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM = 1000425002, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV = 1000426000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV = 1000426001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV = 1000427000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV = 1000427001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_COMPUTE_FEATURES_NV = 1000428000, + VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_INDIRECT_BUFFER_INFO_NV = 1000428001, + VK_STRUCTURE_TYPE_PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV = 1000428002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_LINEAR_SWEPT_SPHERES_FEATURES_NV = 1000429008, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_LINEAR_SWEPT_SPHERES_DATA_NV = 1000429009, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_SPHERES_DATA_NV = 1000429010, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV = 1000430000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MAXIMAL_RECONVERGENCE_FEATURES_KHR = 1000434000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT = 1000437000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM = 1000440000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM = 1000440001, VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM = 1000440002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_FEATURES_EXT = 1000451000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_PROPERTIES_EXT = 1000451001, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXT = 1000453000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT = 1000455000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT = 1000455001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT = 1000458000, VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT = 1000458001, VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT = 1000458002, VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT = 1000458003, + VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_INFO_LUNARG = 1000459000, + VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG = 1000459001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT = 1000462000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT = 1000462001, VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT = 1000462002, VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT = 1000462003, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT = 1000342000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV = 1000464000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV = 1000464001, + VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV = 1000464002, + VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV = 1000464003, + VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV = 1000464004, + VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV = 1000464005, + VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV = 1000464010, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT = 1000465000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_FEATURES_ANDROID = 1000468000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_PROPERTIES_ANDROID = 1000468001, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_RESOLVE_PROPERTIES_ANDROID = 1000468002, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ANTI_LAG_FEATURES_AMD = 1000476000, + VK_STRUCTURE_TYPE_ANTI_LAG_DATA_AMD = 1000476001, + VK_STRUCTURE_TYPE_ANTI_LAG_PRESENTATION_INFO_AMD = 1000476002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR = 1000481000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT = 1000482000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT = 1000482001, + VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT = 1000482002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_FEATURES_KHR = 1000483000, + VK_STRUCTURE_TYPE_PIPELINE_BINARY_CREATE_INFO_KHR = 1000483001, + VK_STRUCTURE_TYPE_PIPELINE_BINARY_INFO_KHR = 1000483002, + VK_STRUCTURE_TYPE_PIPELINE_BINARY_KEY_KHR = 1000483003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_PROPERTIES_KHR = 1000483004, + VK_STRUCTURE_TYPE_RELEASE_CAPTURED_PIPELINE_DATA_INFO_KHR = 1000483005, + VK_STRUCTURE_TYPE_PIPELINE_BINARY_DATA_INFO_KHR = 1000483006, + VK_STRUCTURE_TYPE_PIPELINE_CREATE_INFO_KHR = 1000483007, + VK_STRUCTURE_TYPE_DEVICE_PIPELINE_BINARY_INTERNAL_CACHE_CONTROL_KHR = 1000483008, + VK_STRUCTURE_TYPE_PIPELINE_BINARY_HANDLES_INFO_KHR = 1000483009, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM = 1000484000, VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM = 1000484001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC = 1000485000, VK_STRUCTURE_TYPE_AMIGO_PROFILING_SUBMIT_INFO_SEC = 1000485001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_VIEWPORTS_FEATURES_QCOM = 1000488000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV = 1000490000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV = 1000490001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_VECTOR_FEATURES_NV = 1000491000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_VECTOR_PROPERTIES_NV = 1000491001, + VK_STRUCTURE_TYPE_COOPERATIVE_VECTOR_PROPERTIES_NV = 1000491002, + VK_STRUCTURE_TYPE_CONVERT_COOPERATIVE_VECTOR_MATRIX_INFO_NV = 1000491004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_FEATURES_NV = 1000492000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_PROPERTIES_NV = 1000492001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT = 1000351000, VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT = 1000351002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_FEATURES_EXT = 1000495000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_PROPERTIES_EXT = 1000495001, + VK_STRUCTURE_TYPE_LAYER_SETTINGS_CREATE_INFO_EXT = 1000496000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM = 1000497000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM = 1000497001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_LIBRARY_GROUP_HANDLES_FEATURES_EXT = 1000498000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_FEATURES_EXT = 1000499000, + VK_STRUCTURE_TYPE_LATENCY_SLEEP_MODE_INFO_NV = 1000505000, + VK_STRUCTURE_TYPE_LATENCY_SLEEP_INFO_NV = 1000505001, + VK_STRUCTURE_TYPE_SET_LATENCY_MARKER_INFO_NV = 1000505002, + VK_STRUCTURE_TYPE_GET_LATENCY_MARKER_INFO_NV = 1000505003, + VK_STRUCTURE_TYPE_LATENCY_TIMINGS_FRAME_REPORT_NV = 1000505004, + VK_STRUCTURE_TYPE_LATENCY_SUBMISSION_PRESENT_ID_NV = 1000505005, + VK_STRUCTURE_TYPE_OUT_OF_BAND_QUEUE_TYPE_INFO_NV = 1000505006, + VK_STRUCTURE_TYPE_SWAPCHAIN_LATENCY_CREATE_INFO_NV = 1000505007, + VK_STRUCTURE_TYPE_LATENCY_SURFACE_CAPABILITIES_NV = 
1000505008, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_KHR = 1000506000, + VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_KHR = 1000506001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_KHR = 1000506002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_RENDER_AREAS_FEATURES_QCOM = 1000510000, + VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_RENDER_AREAS_RENDER_PASS_BEGIN_INFO_QCOM = 1000510001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_KHR = 1000201000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_PROPERTIES_KHR = 1000511000, + VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_CAPABILITIES_KHR = 1000512000, + VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PICTURE_INFO_KHR = 1000512001, + VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PROFILE_INFO_KHR = 1000512003, + VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000512004, + VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_DPB_SLOT_INFO_KHR = 1000512005, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_CAPABILITIES_KHR = 1000513000, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000513001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_PICTURE_INFO_KHR = 1000513002, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_DPB_SLOT_INFO_KHR = 1000513003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_AV1_FEATURES_KHR = 1000513004, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_PROFILE_INFO_KHR = 1000513005, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_RATE_CONTROL_INFO_KHR = 1000513006, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_RATE_CONTROL_LAYER_INFO_KHR = 1000513007, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_QUALITY_LEVEL_PROPERTIES_KHR = 1000513008, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_SESSION_CREATE_INFO_KHR = 1000513009, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_GOP_REMAINING_FRAME_INFO_KHR = 1000513010, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_1_FEATURES_KHR = 1000515000, + VK_STRUCTURE_TYPE_VIDEO_INLINE_QUERY_INFO_KHR = 1000515001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PER_STAGE_DESCRIPTOR_SET_FEATURES_NV = 1000516000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_2_FEATURES_QCOM = 1000518000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_2_PROPERTIES_QCOM = 1000518001, + VK_STRUCTURE_TYPE_SAMPLER_BLOCK_MATCH_WINDOW_CREATE_INFO_QCOM = 1000518002, + VK_STRUCTURE_TYPE_SAMPLER_CUBIC_WEIGHTS_CREATE_INFO_QCOM = 1000519000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_WEIGHTS_FEATURES_QCOM = 1000519001, + VK_STRUCTURE_TYPE_BLIT_IMAGE_CUBIC_WEIGHTS_INFO_QCOM = 1000519002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_DEGAMMA_FEATURES_QCOM = 1000520000, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_YCBCR_DEGAMMA_CREATE_INFO_QCOM = 1000520001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_CLAMP_FEATURES_QCOM = 1000521000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_FEATURES_EXT = 1000524000, + VK_STRUCTURE_TYPE_SCREEN_BUFFER_PROPERTIES_QNX = 1000529000, + VK_STRUCTURE_TYPE_SCREEN_BUFFER_FORMAT_PROPERTIES_QNX = 1000529001, + VK_STRUCTURE_TYPE_IMPORT_SCREEN_BUFFER_INFO_QNX = 1000529002, + VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_QNX = 1000529003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_SCREEN_BUFFER_FEATURES_QNX = 1000529004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_DRIVER_PROPERTIES_MSFT = 1000530000, + VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR = 1000184000, + VK_STRUCTURE_TYPE_SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT = 1000545007, + VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT = 1000545008, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV = 1000546000, + VK_STRUCTURE_TYPE_DISPLAY_SURFACE_STEREO_CREATE_INFO_NV = 1000551000, + VK_STRUCTURE_TYPE_DISPLAY_MODE_STEREO_PROPERTIES_NV = 1000551001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_CAPABILITIES_KHR = 1000553000, + VK_STRUCTURE_TYPE_VIDEO_FORMAT_QUANTIZATION_MAP_PROPERTIES_KHR = 1000553001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_INFO_KHR = 1000553002, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000553005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_QUANTIZATION_MAP_FEATURES_KHR = 1000553009, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_QUANTIZATION_MAP_CAPABILITIES_KHR = 1000553003, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_QUANTIZATION_MAP_CAPABILITIES_KHR = 1000553004, + VK_STRUCTURE_TYPE_VIDEO_FORMAT_H265_QUANTIZATION_MAP_PROPERTIES_KHR = 1000553006, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_QUANTIZATION_MAP_CAPABILITIES_KHR = 1000553007, + VK_STRUCTURE_TYPE_VIDEO_FORMAT_AV1_QUANTIZATION_MAP_PROPERTIES_KHR = 1000553008, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAW_ACCESS_CHAINS_FEATURES_NV = 1000555000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_RELAXED_EXTENDED_INSTRUCTION_FEATURES_KHR = 1000558000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMMAND_BUFFER_INHERITANCE_FEATURES_NV = 1000559000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_FEATURES_KHR = 1000562000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_PROPERTIES_KHR = 1000562001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_LIST_KHR = 1000562002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_KHR = 1000562003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_VULKAN_PROPERTIES_KHR = 1000562004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV = 1000563000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_REPLICATED_COMPOSITES_FEATURES_EXT = 1000564000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_VALIDATION_FEATURES_NV = 1000568000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_ACCELERATION_STRUCTURE_FEATURES_NV = 1000569000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_ACCELERATION_STRUCTURE_PROPERTIES_NV = 1000569001, + VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_CLUSTERS_BOTTOM_LEVEL_INPUT_NV = 1000569002, + VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_TRIANGLE_CLUSTER_INPUT_NV = 1000569003, + VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_MOVE_OBJECTS_INPUT_NV = 1000569004, + VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_INPUT_INFO_NV = 1000569005, + VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_COMMANDS_INFO_NV = 1000569006, + VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CLUSTER_ACCELERATION_STRUCTURE_CREATE_INFO_NV = 1000569007, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PARTITIONED_ACCELERATION_STRUCTURE_FEATURES_NV = 1000570000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PARTITIONED_ACCELERATION_STRUCTURE_PROPERTIES_NV = 1000570001, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_PARTITIONED_ACCELERATION_STRUCTURE_NV = 1000570002, + VK_STRUCTURE_TYPE_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCES_INPUT_NV = 1000570003, + VK_STRUCTURE_TYPE_BUILD_PARTITIONED_ACCELERATION_STRUCTURE_INFO_NV = 1000570004, + VK_STRUCTURE_TYPE_PARTITIONED_ACCELERATION_STRUCTURE_FLAGS_NV = 1000570005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_EXT = 1000572000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_EXT = 1000572001, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_EXT = 1000572002, + 
VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_CREATE_INFO_EXT = 1000572003, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_EXT = 1000572004, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_EXT = 1000572006, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_EXT = 1000572007, + VK_STRUCTURE_TYPE_WRITE_INDIRECT_EXECUTION_SET_PIPELINE_EXT = 1000572008, + VK_STRUCTURE_TYPE_WRITE_INDIRECT_EXECUTION_SET_SHADER_EXT = 1000572009, + VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_PIPELINE_INFO_EXT = 1000572010, + VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_SHADER_INFO_EXT = 1000572011, + VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_SHADER_LAYOUT_INFO_EXT = 1000572012, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_PIPELINE_INFO_EXT = 1000572013, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_SHADER_INFO_EXT = 1000572014, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_8_FEATURES_KHR = 1000574000, + VK_STRUCTURE_TYPE_MEMORY_BARRIER_ACCESS_FLAGS_3_KHR = 1000574002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_FEATURES_MESA = 1000575000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_PROPERTIES_MESA = 1000575001, + VK_STRUCTURE_TYPE_IMAGE_ALIGNMENT_CONTROL_CREATE_INFO_MESA = 1000575002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_CONTROL_FEATURES_EXT = 1000582000, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLAMP_CONTROL_CREATE_INFO_EXT = 1000582001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_2_FEATURES_KHR = 1000586000, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_INLINE_SESSION_PARAMETERS_INFO_KHR = 1000586001, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_INLINE_SESSION_PARAMETERS_INFO_KHR = 1000586002, + VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_INLINE_SESSION_PARAMETERS_INFO_KHR = 1000586003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HDR_VIVID_FEATURES_HUAWEI = 1000590000, + VK_STRUCTURE_TYPE_HDR_VIVID_DYNAMIC_METADATA_HUAWEI = 1000590001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_FEATURES_NV = 1000593000, + VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_FLEXIBLE_DIMENSIONS_PROPERTIES_NV = 1000593001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_PROPERTIES_NV = 1000593002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_OPACITY_MICROMAP_FEATURES_ARM = 1000596000, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_METAL_HANDLE_INFO_EXT = 1000602000, + VK_STRUCTURE_TYPE_MEMORY_METAL_HANDLE_PROPERTIES_EXT = 1000602001, + VK_STRUCTURE_TYPE_MEMORY_GET_METAL_HANDLE_INFO_EXT = 1000602002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_KHR = 1000421000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_ROBUSTNESS_FEATURES_EXT = 1000608000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, + // VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT is a deprecated alias VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_RENDERING_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_INFO, VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO, VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES, VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO_KHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO, - 
VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, @@ -1046,6 +1256,9 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES, + VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, @@ -1059,10 +1272,12 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, + // VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT is a deprecated alias VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES, VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO, @@ -1098,6 +1313,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO, + VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, 
VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, @@ -1113,15 +1329,22 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, - VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES, + VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES, VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES, VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES, @@ -1129,6 +1352,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR = 
VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO, VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO, VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO, + // VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL is a deprecated alias VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES, @@ -1136,6 +1360,9 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES, VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES, + VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_LOCATION_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_LOCATION_INFO, + VK_STRUCTURE_TYPE_RENDERING_INPUT_ATTACHMENT_INDEX_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_INPUT_ATTACHMENT_INDEX_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES, VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT, VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT, @@ -1149,7 +1376,23 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO, VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO, VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES, + VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT = VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY, + VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY_EXT = VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO_EXT = 
VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO, + VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO, + VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT = VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO_EXT = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO, + VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE_EXT = VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE, + VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT = VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY, + VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_MAP_INFO, + VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES, @@ -1179,17 +1422,47 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2, VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2, VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2, + VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2, + VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT, VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3, VK_STRUCTURE_TYPE_PIPELINE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR, - VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES, VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS, VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_KHR, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES, + VK_STRUCTURE_TYPE_RENDERING_AREA_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_AREA_INFO, + VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO, + VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2, + VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2, + VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO, + VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO, + VK_STRUCTURE_TYPE_SHADER_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES, + VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS_KHR = VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS, + VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO_KHR = VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO, + VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO_KHR = VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO, + VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO_KHR = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO, + VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO_KHR = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO, VK_STRUCTURE_TYPE_MAX_ENUM = 0x7FFFFFFF } VkStructureType; @@ -1216,32 +1489,23 @@ typedef enum VkImageLayout { VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL = 1000241003, VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL = 1000314000, VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL = 1000314001, + VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ = 1000232000, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002, -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR = 
1000024000, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR = 1000024001, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR = 1000024002, -#endif VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000, VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT = 1000218000, VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR = 1000164003, -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR = 1000299000, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR = 1000299001, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR = 1000299002, -#endif VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT = 1000339000, + VK_IMAGE_LAYOUT_VIDEO_ENCODE_QUANTIZATION_MAP_KHR = 1000553000, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR, + VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ_KHR = VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, @@ -1286,12 +1550,8 @@ typedef enum VkObjectType { VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000, VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001, VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT = 1000011000, -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_OBJECT_TYPE_VIDEO_SESSION_KHR = 1000023000, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_OBJECT_TYPE_VIDEO_SESSION_PARAMETERS_KHR = 1000023001, -#endif VK_OBJECT_TYPE_CU_MODULE_NVX = 1000029000, VK_OBJECT_TYPE_CU_FUNCTION_NVX = 1000029001, VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT = 1000128000, @@ -1301,7 +1561,15 @@ typedef enum VkObjectType { VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL = 1000210000, VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR = 1000268000, VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV = 1000277000, + VK_OBJECT_TYPE_CUDA_MODULE_NV = 1000307000, + VK_OBJECT_TYPE_CUDA_FUNCTION_NV = 1000307001, VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA = 1000366000, + VK_OBJECT_TYPE_MICROMAP_EXT = 1000396000, + VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV = 1000464000, + VK_OBJECT_TYPE_SHADER_EXT = 1000482000, + VK_OBJECT_TYPE_PIPELINE_BINARY_KHR = 1000483000, + VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_EXT = 1000572000, + VK_OBJECT_TYPE_INDIRECT_EXECUTION_SET_EXT = 1000572001, VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT, @@ -1309,12 +1577,14 @@ typedef enum VkObjectType { } VkObjectType; typedef enum VkVendorId { + VK_VENDOR_ID_KHRONOS = 0x10000, VK_VENDOR_ID_VIV = 0x10001, VK_VENDOR_ID_VSI = 0x10002, VK_VENDOR_ID_KAZAN = 0x10003, VK_VENDOR_ID_CODEPLAY = 0x10004, VK_VENDOR_ID_MESA = 0x10005, VK_VENDOR_ID_POCL = 0x10006, + VK_VENDOR_ID_MOBILEYE = 0x10007, VK_VENDOR_ID_MAX_ENUM = 0x7FFFFFFF } VkVendorId; @@ -1572,6 +1842,8 @@ typedef enum VkFormat { VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK = 1000066011, VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK = 1000066012, VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK = 1000066013, + 
VK_FORMAT_A1B5G5R5_UNORM_PACK16 = 1000470000, + VK_FORMAT_A8_UNORM = 1000470001, VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG = 1000054000, VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG = 1000054001, VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG = 1000054002, @@ -1580,6 +1852,7 @@ typedef enum VkFormat { VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG = 1000054005, VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006, VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007, + VK_FORMAT_R16G16_SFIXED5_NV = 1000464000, VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK, VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK, VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK, @@ -1634,6 +1907,10 @@ typedef enum VkFormat { VK_FORMAT_G16_B16R16_2PLANE_444_UNORM_EXT = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM, VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16, VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16, + // VK_FORMAT_R16G16_S10_5_NV is a deprecated alias + VK_FORMAT_R16G16_S10_5_NV = VK_FORMAT_R16G16_SFIXED5_NV, + VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR = VK_FORMAT_A1B5G5R5_UNORM_PACK16, + VK_FORMAT_A8_UNORM_KHR = VK_FORMAT_A8_UNORM, VK_FORMAT_MAX_ENUM = 0x7FFFFFFF } VkFormat; @@ -1664,22 +1941,20 @@ typedef enum VkQueryType { VK_QUERY_TYPE_OCCLUSION = 0, VK_QUERY_TYPE_PIPELINE_STATISTICS = 1, VK_QUERY_TYPE_TIMESTAMP = 2, -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR = 1000023000, -#endif VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT = 1000028004, VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR = 1000116000, VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR = 1000150000, VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR = 1000150001, VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV = 1000165000, VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL = 1000210000, -#ifdef VK_ENABLE_BETA_EXTENSIONS - VK_QUERY_TYPE_VIDEO_ENCODE_BITSTREAM_BUFFER_RANGE_KHR = 1000299000, -#endif + VK_QUERY_TYPE_VIDEO_ENCODE_FEEDBACK_KHR = 1000299000, VK_QUERY_TYPE_MESH_PRIMITIVES_GENERATED_EXT = 1000328000, VK_QUERY_TYPE_PRIMITIVES_GENERATED_EXT = 1000382000, VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR = 1000386000, VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SIZE_KHR = 1000386001, + VK_QUERY_TYPE_MICROMAP_SERIALIZATION_SIZE_EXT = 1000396000, + VK_QUERY_TYPE_MICROMAP_COMPACTED_SIZE_EXT = 1000396001, VK_QUERY_TYPE_MAX_ENUM = 0x7FFFFFFF } VkQueryType; @@ -1826,19 +2101,56 @@ typedef enum VkDynamicState { VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE = 1000377001, VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE = 1000377002, VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE = 1000377004, + VK_DYNAMIC_STATE_LINE_STIPPLE = 1000259000, VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV = 1000087000, VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT = 1000099000, + VK_DYNAMIC_STATE_DISCARD_RECTANGLE_ENABLE_EXT = 1000099001, + VK_DYNAMIC_STATE_DISCARD_RECTANGLE_MODE_EXT = 1000099002, VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT = 1000143000, VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR = 1000347000, VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV = 1000164004, VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV = 1000164006, + VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_ENABLE_NV = 1000205000, VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV = 1000205001, VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR = 1000226000, - VK_DYNAMIC_STATE_LINE_STIPPLE_EXT = 1000259000, VK_DYNAMIC_STATE_VERTEX_INPUT_EXT = 1000352000, VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT = 1000377000, VK_DYNAMIC_STATE_LOGIC_OP_EXT = 
1000377003, VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT = 1000381000, + VK_DYNAMIC_STATE_DEPTH_CLAMP_ENABLE_EXT = 1000455003, + VK_DYNAMIC_STATE_POLYGON_MODE_EXT = 1000455004, + VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT = 1000455005, + VK_DYNAMIC_STATE_SAMPLE_MASK_EXT = 1000455006, + VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT = 1000455007, + VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT = 1000455008, + VK_DYNAMIC_STATE_LOGIC_OP_ENABLE_EXT = 1000455009, + VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT = 1000455010, + VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT = 1000455011, + VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT = 1000455012, + VK_DYNAMIC_STATE_TESSELLATION_DOMAIN_ORIGIN_EXT = 1000455002, + VK_DYNAMIC_STATE_RASTERIZATION_STREAM_EXT = 1000455013, + VK_DYNAMIC_STATE_CONSERVATIVE_RASTERIZATION_MODE_EXT = 1000455014, + VK_DYNAMIC_STATE_EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT = 1000455015, + VK_DYNAMIC_STATE_DEPTH_CLIP_ENABLE_EXT = 1000455016, + VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_ENABLE_EXT = 1000455017, + VK_DYNAMIC_STATE_COLOR_BLEND_ADVANCED_EXT = 1000455018, + VK_DYNAMIC_STATE_PROVOKING_VERTEX_MODE_EXT = 1000455019, + VK_DYNAMIC_STATE_LINE_RASTERIZATION_MODE_EXT = 1000455020, + VK_DYNAMIC_STATE_LINE_STIPPLE_ENABLE_EXT = 1000455021, + VK_DYNAMIC_STATE_DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT = 1000455022, + VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_ENABLE_NV = 1000455023, + VK_DYNAMIC_STATE_VIEWPORT_SWIZZLE_NV = 1000455024, + VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_ENABLE_NV = 1000455025, + VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_LOCATION_NV = 1000455026, + VK_DYNAMIC_STATE_COVERAGE_MODULATION_MODE_NV = 1000455027, + VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_ENABLE_NV = 1000455028, + VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_NV = 1000455029, + VK_DYNAMIC_STATE_SHADING_RATE_IMAGE_ENABLE_NV = 1000455030, + VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV = 1000455031, + VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV = 1000455032, + VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT = 1000524000, + VK_DYNAMIC_STATE_DEPTH_CLAMP_RANGE_EXT = 1000582000, + VK_DYNAMIC_STATE_LINE_STIPPLE_EXT = VK_DYNAMIC_STATE_LINE_STIPPLE, VK_DYNAMIC_STATE_CULL_MODE_EXT = VK_DYNAMIC_STATE_CULL_MODE, VK_DYNAMIC_STATE_FRONT_FACE_EXT = VK_DYNAMIC_STATE_FRONT_FACE, VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY, @@ -1854,6 +2166,7 @@ typedef enum VkDynamicState { VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE, VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE, VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE, + VK_DYNAMIC_STATE_LINE_STIPPLE_KHR = VK_DYNAMIC_STATE_LINE_STIPPLE, VK_DYNAMIC_STATE_MAX_ENUM = 0x7FFFFFFF } VkDynamicState; @@ -1950,6 +2263,7 @@ typedef enum VkSamplerAddressMode { VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE = 2, VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER = 3, VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE = 4, + // VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR is a deprecated alias VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE, VK_SAMPLER_ADDRESS_MODE_MAX_ENUM = 0x7FFFFFFF } VkSamplerAddressMode; @@ -1978,6 +2292,7 @@ typedef enum VkDescriptorType { VK_DESCRIPTOR_TYPE_SAMPLE_WEIGHT_IMAGE_QCOM = 1000440000, VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM = 1000440001, VK_DESCRIPTOR_TYPE_MUTABLE_EXT = 1000351000, + VK_DESCRIPTOR_TYPE_PARTITIONED_ACCELERATION_STRUCTURE_NV = 1000570000, VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT = 
VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK, VK_DESCRIPTOR_TYPE_MUTABLE_VALVE = VK_DESCRIPTOR_TYPE_MUTABLE_EXT, VK_DESCRIPTOR_TYPE_MAX_ENUM = 0x7FFFFFFF @@ -1987,7 +2302,9 @@ typedef enum VkAttachmentLoadOp { VK_ATTACHMENT_LOAD_OP_LOAD = 0, VK_ATTACHMENT_LOAD_OP_CLEAR = 1, VK_ATTACHMENT_LOAD_OP_DONT_CARE = 2, - VK_ATTACHMENT_LOAD_OP_NONE_EXT = 1000400000, + VK_ATTACHMENT_LOAD_OP_NONE = 1000400000, + VK_ATTACHMENT_LOAD_OP_NONE_EXT = VK_ATTACHMENT_LOAD_OP_NONE, + VK_ATTACHMENT_LOAD_OP_NONE_KHR = VK_ATTACHMENT_LOAD_OP_NONE, VK_ATTACHMENT_LOAD_OP_MAX_ENUM = 0x7FFFFFFF } VkAttachmentLoadOp; @@ -2004,6 +2321,9 @@ typedef enum VkAttachmentStoreOp { typedef enum VkPipelineBindPoint { VK_PIPELINE_BIND_POINT_GRAPHICS = 0, VK_PIPELINE_BIND_POINT_COMPUTE = 1, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_PIPELINE_BIND_POINT_EXECUTION_GRAPH_AMDX = 1000134000, +#endif VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR = 1000165000, VK_PIPELINE_BIND_POINT_SUBPASS_SHADING_HUAWEI = 1000369003, VK_PIPELINE_BIND_POINT_RAY_TRACING_NV = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, @@ -2019,15 +2339,19 @@ typedef enum VkCommandBufferLevel { typedef enum VkIndexType { VK_INDEX_TYPE_UINT16 = 0, VK_INDEX_TYPE_UINT32 = 1, + VK_INDEX_TYPE_UINT8 = 1000265000, VK_INDEX_TYPE_NONE_KHR = 1000165000, - VK_INDEX_TYPE_UINT8_EXT = 1000265000, VK_INDEX_TYPE_NONE_NV = VK_INDEX_TYPE_NONE_KHR, + VK_INDEX_TYPE_UINT8_EXT = VK_INDEX_TYPE_UINT8, + VK_INDEX_TYPE_UINT8_KHR = VK_INDEX_TYPE_UINT8, VK_INDEX_TYPE_MAX_ENUM = 0x7FFFFFFF } VkIndexType; typedef enum VkSubpassContents { VK_SUBPASS_CONTENTS_INLINE = 0, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS = 1, + VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_KHR = 1000451000, + VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_EXT = VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_KHR, VK_SUBPASS_CONTENTS_MAX_ENUM = 0x7FFFFFFF } VkSubpassContents; @@ -2065,6 +2389,8 @@ typedef enum VkAccessFlagBits { VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR, VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, VK_ACCESS_NONE_KHR = VK_ACCESS_NONE, + VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_EXT = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV, + VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_EXT = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV, VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkAccessFlagBits; typedef VkFlags VkAccessFlags; @@ -2114,22 +2440,14 @@ typedef enum VkFormatFeatureFlagBits { VK_FORMAT_FEATURE_DISJOINT_BIT = 0x00400000, VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT = 0x00800000, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT = 0x00010000, -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR = 0x02000000, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR = 0x04000000, -#endif VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR = 0x20000000, VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT = 0x00002000, VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000, VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x40000000, -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR = 0x08000000, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR = 0x10000000, -#endif VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT, VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, 
VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, @@ -2161,9 +2479,11 @@ typedef enum VkImageCreateFlagBits { VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV = 0x00002000, VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT = 0x00001000, VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT = 0x00004000, + VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00010000, VK_IMAGE_CREATE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_BIT_EXT = 0x00040000, VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT = 0x00020000, VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_QCOM = 0x00008000, + VK_IMAGE_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR = 0x00100000, VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT, VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, @@ -2195,31 +2515,23 @@ typedef enum VkImageUsageFlagBits { VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020, VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080, -#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_IMAGE_USAGE_HOST_TRANSFER_BIT = 0x00400000, VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR = 0x00000400, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR = 0x00000800, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR = 0x00001000, -#endif VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x00000200, VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00000100, -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR = 0x00002000, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR = 0x00004000, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR = 0x00008000, -#endif VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x00080000, VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI = 0x00040000, VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM = 0x00100000, VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM = 0x00200000, + VK_IMAGE_USAGE_VIDEO_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR = 0x02000000, + VK_IMAGE_USAGE_VIDEO_ENCODE_EMPHASIS_MAP_BIT_KHR = 0x04000000, VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT = VK_IMAGE_USAGE_HOST_TRANSFER_BIT, VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkImageUsageFlagBits; typedef VkFlags VkImageUsageFlags; @@ -2258,12 +2570,9 @@ typedef enum VkQueueFlagBits { VK_QUEUE_TRANSFER_BIT = 0x00000004, VK_QUEUE_SPARSE_BINDING_BIT = 0x00000008, VK_QUEUE_PROTECTED_BIT = 0x00000010, -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_QUEUE_VIDEO_DECODE_BIT_KHR = 0x00000020, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_QUEUE_VIDEO_ENCODE_BIT_KHR = 0x00000040, -#endif + VK_QUEUE_OPTICAL_FLOW_BIT_NV = 0x00000100, VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkQueueFlagBits; typedef VkFlags VkQueueFlags; @@ -2309,9 +2618,15 @@ typedef enum VkPipelineStageFlagBits { VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT, VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT, VK_PIPELINE_STAGE_NONE_KHR = VK_PIPELINE_STAGE_NONE, + VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_EXT = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV, VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkPipelineStageFlagBits; typedef VkFlags VkPipelineStageFlags; + +typedef enum 
VkMemoryMapFlagBits { + VK_MEMORY_MAP_PLACED_BIT_EXT = 0x00000001, + VK_MEMORY_MAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryMapFlagBits; typedef VkFlags VkMemoryMapFlags; typedef enum VkSparseMemoryBindFlagBits { @@ -2356,6 +2671,7 @@ typedef enum VkQueryPipelineStatisticFlagBits { VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT = 0x00000400, VK_QUERY_PIPELINE_STATISTIC_TASK_SHADER_INVOCATIONS_BIT_EXT = 0x00000800, VK_QUERY_PIPELINE_STATISTIC_MESH_SHADER_INVOCATIONS_BIT_EXT = 0x00001000, + VK_QUERY_PIPELINE_STATISTIC_CLUSTER_CULLING_SHADER_INVOCATIONS_BIT_HUAWEI = 0x00002000, VK_QUERY_PIPELINE_STATISTIC_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkQueryPipelineStatisticFlagBits; typedef VkFlags VkQueryPipelineStatisticFlags; @@ -2366,9 +2682,7 @@ typedef enum VkQueryResultFlagBits { VK_QUERY_RESULT_WAIT_BIT = 0x00000002, VK_QUERY_RESULT_WITH_AVAILABILITY_BIT = 0x00000004, VK_QUERY_RESULT_PARTIAL_BIT = 0x00000008, -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_QUERY_RESULT_WITH_STATUS_BIT_KHR = 0x00000010, -#endif VK_QUERY_RESULT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkQueryResultFlagBits; typedef VkFlags VkQueryResultFlags; @@ -2379,6 +2693,8 @@ typedef enum VkBufferCreateFlagBits { VK_BUFFER_CREATE_SPARSE_ALIASED_BIT = 0x00000004, VK_BUFFER_CREATE_PROTECTED_BIT = 0x00000008, VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT = 0x00000010, + VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000020, + VK_BUFFER_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR = 0x00000040, VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, VK_BUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF @@ -2396,24 +2712,24 @@ typedef enum VkBufferUsageFlagBits { VK_BUFFER_USAGE_VERTEX_BUFFER_BIT = 0x00000080, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT = 0x00000100, VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT = 0x00020000, -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR = 0x00002000, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR = 0x00004000, -#endif VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800, VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000, VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX = 0x02000000, +#endif VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00080000, VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR = 0x00100000, VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR = 0x00000400, -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR = 0x00008000, -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR = 0x00010000, -#endif + VK_BUFFER_USAGE_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT = 0x00200000, + VK_BUFFER_USAGE_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT = 0x00400000, + VK_BUFFER_USAGE_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT = 0x04000000, + VK_BUFFER_USAGE_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT = 0x00800000, + VK_BUFFER_USAGE_MICROMAP_STORAGE_BIT_EXT = 0x01000000, VK_BUFFER_USAGE_RAY_TRACING_BIT_NV = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR, VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, @@ -2424,6 +2740,7 @@ typedef VkFlags VkBufferViewCreateFlags; typedef enum 
VkImageViewCreateFlagBits { VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT = 0x00000001, + VK_IMAGE_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000004, VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT = 0x00000002, VK_IMAGE_VIEW_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkImageViewCreateFlagBits; @@ -2432,6 +2749,7 @@ typedef VkFlags VkShaderModuleCreateFlags; typedef enum VkPipelineCacheCreateFlagBits { VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT = 0x00000001, + VK_PIPELINE_CACHE_CREATE_INTERNALLY_SYNCHRONIZED_MERGE_BIT_KHR = 0x00000008, VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT, VK_PIPELINE_CACHE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkPipelineCacheCreateFlagBits; @@ -2454,8 +2772,8 @@ typedef enum VkPipelineCreateFlagBits { VK_PIPELINE_CREATE_DISPATCH_BASE_BIT = 0x00000010, VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT = 0x00000100, VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT = 0x00000200, - VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00200000, - VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00400000, + VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT = 0x08000000, + VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT = 0x40000000, VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR = 0x00004000, VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR = 0x00008000, VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR = 0x00010000, @@ -2464,22 +2782,33 @@ typedef enum VkPipelineCreateFlagBits { VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR = 0x00002000, VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR = 0x00080000, VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV = 0x00000020, + VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00400000, + VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00200000, VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR = 0x00000040, VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080, VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV = 0x00040000, VK_PIPELINE_CREATE_LIBRARY_BIT_KHR = 0x00000800, + VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT = 0x20000000, VK_PIPELINE_CREATE_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT = 0x00800000, VK_PIPELINE_CREATE_LINK_TIME_OPTIMIZATION_BIT_EXT = 0x00000400, VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV = 0x00100000, VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x02000000, VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x04000000, + VK_PIPELINE_CREATE_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT = 0x01000000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_PIPELINE_CREATE_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV = 0x10000000, +#endif VK_PIPELINE_CREATE_DISPATCH_BASE = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT, - VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, - VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, VK_PIPELINE_CREATE_DISPATCH_BASE_KHR = VK_PIPELINE_CREATE_DISPATCH_BASE, + // VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT is a deprecated alias + 
VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + // VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR is a deprecated alias + VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT, VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT, + VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT = VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT, + VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT = VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT, VK_PIPELINE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkPipelineCreateFlagBits; typedef VkFlags VkPipelineCreateFlags; @@ -2511,6 +2840,7 @@ typedef enum VkShaderStageFlagBits { VK_SHADER_STAGE_TASK_BIT_EXT = 0x00000040, VK_SHADER_STAGE_MESH_BIT_EXT = 0x00000080, VK_SHADER_STAGE_SUBPASS_SHADING_BIT_HUAWEI = 0x00004000, + VK_SHADER_STAGE_CLUSTER_CULLING_BIT_HUAWEI = 0x00080000, VK_SHADER_STAGE_RAYGEN_BIT_NV = VK_SHADER_STAGE_RAYGEN_BIT_KHR, VK_SHADER_STAGE_ANY_HIT_BIT_NV = VK_SHADER_STAGE_ANY_HIT_BIT_KHR, VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, @@ -2564,6 +2894,7 @@ typedef VkFlags VkShaderStageFlags; typedef enum VkSamplerCreateFlagBits { VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT = 0x00000001, VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT = 0x00000002, + VK_SAMPLER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000008, VK_SAMPLER_CREATE_NON_SEAMLESS_CUBE_MAP_BIT_EXT = 0x00000004, VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM = 0x00000010, VK_SAMPLER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF @@ -2574,6 +2905,8 @@ typedef enum VkDescriptorPoolCreateFlagBits { VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT = 0x00000001, VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT = 0x00000002, VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT = 0x00000004, + VK_DESCRIPTOR_POOL_CREATE_ALLOW_OVERALLOCATION_SETS_BIT_NV = 0x00000008, + VK_DESCRIPTOR_POOL_CREATE_ALLOW_OVERALLOCATION_POOLS_BIT_NV = 0x00000010, VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT, VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT, VK_DESCRIPTOR_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF @@ -2583,8 +2916,13 @@ typedef VkFlags VkDescriptorPoolResetFlags; typedef enum VkDescriptorSetLayoutCreateFlagBits { VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT = 0x00000002, - VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT = 0x00000001, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT = 0x00000010, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT = 0x00000020, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_INDIRECT_BINDABLE_BIT_NV = 0x00000080, VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT = 0x00000004, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_PER_STAGE_BIT_NV = 0x00000040, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT, VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE = 
VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT, VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF @@ -2602,6 +2940,7 @@ typedef enum VkDependencyFlagBits { VK_DEPENDENCY_DEVICE_GROUP_BIT = 0x00000004, VK_DEPENDENCY_VIEW_LOCAL_BIT = 0x00000002, VK_DEPENDENCY_FEEDBACK_LOOP_BIT_EXT = 0x00000008, + VK_DEPENDENCY_QUEUE_FAMILY_OWNERSHIP_TRANSFER_USE_ALL_STAGES_BIT_KHR = 0x00000020, VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR = VK_DEPENDENCY_VIEW_LOCAL_BIT, VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR = VK_DEPENDENCY_DEVICE_GROUP_BIT, VK_DEPENDENCY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF @@ -2675,6 +3014,7 @@ typedef enum VkStencilFaceFlagBits { VK_STENCIL_FACE_FRONT_BIT = 0x00000001, VK_STENCIL_FACE_BACK_BIT = 0x00000002, VK_STENCIL_FACE_FRONT_AND_BACK = 0x00000003, + // VK_STENCIL_FRONT_AND_BACK is a deprecated alias VK_STENCIL_FRONT_AND_BACK = VK_STENCIL_FACE_FRONT_AND_BACK, VK_STENCIL_FACE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkStencilFaceFlagBits; @@ -3085,7 +3425,9 @@ typedef struct VkDeviceCreateInfo { VkDeviceCreateFlags flags; uint32_t queueCreateInfoCount; const VkDeviceQueueCreateInfo* pQueueCreateInfos; + // enabledLayerCount is deprecated and should not be used uint32_t enabledLayerCount; + // ppEnabledLayerNames is deprecated and should not be used const char* const* ppEnabledLayerNames; uint32_t enabledExtensionCount; const char* const* ppEnabledExtensionNames; @@ -4740,6 +5082,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands( #endif +// VK_VERSION_1_1 is a preprocessor guard. Do not pass it to API calls. #define VK_VERSION_1_1 1 // Vulkan 1.1 version number #define VK_API_VERSION_1_1 VK_MAKE_API_VERSION(0, 1, 1, 0)// Patch version should always be set to 0 @@ -4798,7 +5141,8 @@ typedef enum VkChromaLocation { typedef enum VkDescriptorUpdateTemplateType { VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET = 0, - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR = 1, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS = 1, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS, VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_MAX_ENUM = 0x7FFFFFFF } VkDescriptorUpdateTemplateType; @@ -4812,7 +5156,11 @@ typedef enum VkSubgroupFeatureFlagBits { VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT = 0x00000020, VK_SUBGROUP_FEATURE_CLUSTERED_BIT = 0x00000040, VK_SUBGROUP_FEATURE_QUAD_BIT = 0x00000080, + VK_SUBGROUP_FEATURE_ROTATE_BIT = 0x00000200, + VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT = 0x00000400, VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV = 0x00000100, + VK_SUBGROUP_FEATURE_ROTATE_BIT_KHR = VK_SUBGROUP_FEATURE_ROTATE_BIT, + VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT_KHR = VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT, VK_SUBGROUP_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkSubgroupFeatureFlagBits; typedef VkFlags VkSubgroupFeatureFlags; @@ -4857,6 +5205,10 @@ typedef enum VkExternalMemoryHandleTypeFlagBits { VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT = 0x00000100, VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA = 0x00000800, VK_EXTERNAL_MEMORY_HANDLE_TYPE_RDMA_ADDRESS_BIT_NV = 0x00001000, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_SCREEN_BUFFER_BIT_QNX = 0x00004000, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLBUFFER_BIT_EXT = 0x00010000, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLTEXTURE_BIT_EXT = 0x00020000, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLHEAP_BIT_EXT = 0x00040000, VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = 
VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, @@ -5605,6 +5957,7 @@ VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport( #endif +// VK_VERSION_1_2 is a preprocessor guard. Do not pass it to API calls. #define VK_VERSION_1_2 1 // Vulkan 1.2 version number #define VK_API_VERSION_1_2 VK_MAKE_API_VERSION(0, 1, 2, 0)// Patch version should always be set to 0 @@ -5636,6 +5989,10 @@ typedef enum VkDriverId { VK_DRIVER_ID_SAMSUNG_PROPRIETARY = 21, VK_DRIVER_ID_MESA_VENUS = 22, VK_DRIVER_ID_MESA_DOZEN = 23, + VK_DRIVER_ID_MESA_NVK = 24, + VK_DRIVER_ID_IMAGINATION_OPEN_SOURCE_MESA = 25, + VK_DRIVER_ID_MESA_HONEYKRISP = 26, + VK_DRIVER_ID_VULKAN_SC_EMULATION_ON_VULKAN = 27, VK_DRIVER_ID_AMD_PROPRIETARY_KHR = VK_DRIVER_ID_AMD_PROPRIETARY, VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR = VK_DRIVER_ID_AMD_OPEN_SOURCE, VK_DRIVER_ID_MESA_RADV_KHR = VK_DRIVER_ID_MESA_RADV, @@ -5665,6 +6022,7 @@ typedef enum VkSamplerReductionMode { VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE = 0, VK_SAMPLER_REDUCTION_MODE_MIN = 1, VK_SAMPLER_REDUCTION_MODE_MAX = 2, + VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_RANGECLAMP_QCOM = 1000521000, VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE, VK_SAMPLER_REDUCTION_MODE_MIN_EXT = VK_SAMPLER_REDUCTION_MODE_MIN, VK_SAMPLER_REDUCTION_MODE_MAX_EXT = VK_SAMPLER_REDUCTION_MODE_MAX, @@ -5685,6 +6043,7 @@ typedef enum VkResolveModeFlagBits { VK_RESOLVE_MODE_AVERAGE_BIT = 0x00000002, VK_RESOLVE_MODE_MIN_BIT = 0x00000004, VK_RESOLVE_MODE_MAX_BIT = 0x00000008, + VK_RESOLVE_MODE_EXTERNAL_FORMAT_DOWNSAMPLE_ANDROID = 0x00000010, VK_RESOLVE_MODE_NONE_KHR = VK_RESOLVE_MODE_NONE, VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT, VK_RESOLVE_MODE_AVERAGE_BIT_KHR = VK_RESOLVE_MODE_AVERAGE_BIT, @@ -6357,6 +6716,7 @@ VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddress( #endif +// VK_VERSION_1_3 is a preprocessor guard. Do not pass it to API calls. 
#define VK_VERSION_1_3 1 // Vulkan 1.3 version number #define VK_API_VERSION_1_3 VK_MAKE_API_VERSION(0, 1, 3, 0)// Patch version should always be set to 0 @@ -6397,66 +6757,63 @@ typedef VkFlags64 VkPipelineStageFlags2; // Flag bits for VkPipelineStageFlagBits2 typedef VkFlags64 VkPipelineStageFlagBits2; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_NONE = 0ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_NONE_KHR = 0ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT = 0x00000001ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR = 0x00000001ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT = 0x00000002ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR = 0x00000002ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT = 0x00000004ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR = 0x00000004ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT = 0x00000008ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR = 0x00000008ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT = 0x00000010ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR = 0x00000010ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT = 0x00000020ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR = 0x00000020ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT = 0x00000040ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR = 0x00000040ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT = 0x00000080ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR = 0x00000080ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT = 0x00000100ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR = 0x00000100ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT = 0x00000200ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR = 0x00000200ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR = 0x00000400ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT = 0x00000800ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR = 0x00000800ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT = 0x00001000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR = 0x00001000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFER_BIT = 0x00001000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR = 0x00001000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT = 0x00002000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR = 0x00002000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_HOST_BIT = 0x00004000ULL; -static const VkPipelineStageFlagBits2 
VK_PIPELINE_STAGE_2_HOST_BIT_KHR = 0x00004000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT = 0x00008000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR = 0x00008000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT = 0x00010000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR = 0x00010000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COPY_BIT = 0x100000000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COPY_BIT_KHR = 0x100000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RESOLVE_BIT = 0x200000000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR = 0x200000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BLIT_BIT = 0x400000000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BLIT_BIT_KHR = 0x400000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CLEAR_BIT = 0x800000000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR = 0x800000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT = 0x1000000000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR = 0x1000000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT = 0x2000000000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR = 0x2000000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT = 0x4000000000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT_KHR = 0x4000000000ULL; -#ifdef VK_ENABLE_BETA_EXTENSIONS static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR = 0x04000000ULL; -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR = 0x08000000ULL; -#endif +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_NONE_KHR = 0ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR = 0x00000001ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR = 0x00000002ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR = 0x00000004ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR = 0x00000008ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR = 0x00000010ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR = 0x00000020ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR = 0x00000040ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR = 0x00000080ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR = 0x00000100ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR = 0x00000200ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR = 0x00000400ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR = 0x00000800ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR = 0x00001000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR = 0x00001000ULL; +static const 
VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR = 0x00002000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_HOST_BIT_KHR = 0x00004000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR = 0x00008000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR = 0x00010000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COPY_BIT_KHR = 0x100000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR = 0x200000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BLIT_BIT_KHR = 0x400000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR = 0x800000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR = 0x1000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR = 0x2000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT_KHR = 0x4000000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV = 0x00020000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_EXT = 0x00020000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00400000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV = 0x00400000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000ULL; @@ -6468,74 +6825,74 @@ static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV = 0 static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV = 0x00100000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT = 0x00080000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT = 0x00100000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI = 0x8000000000ULL; +// VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI is a deprecated alias static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI = 0x8000000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI = 0x10000000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR = 0x10000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MICROMAP_BUILD_BIT_EXT = 0x40000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CLUSTER_CULLING_SHADER_BIT_HUAWEI = 0x20000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_OPTICAL_FLOW_BIT_NV = 0x20000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CONVERT_COOPERATIVE_VECTOR_MATRIX_BIT_NV = 0x100000000000ULL; typedef VkFlags64 VkAccessFlags2; // Flag bits for VkAccessFlagBits2 typedef VkFlags64 VkAccessFlagBits2; static const VkAccessFlagBits2 VK_ACCESS_2_NONE = 0ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_NONE_KHR = 0ULL; static const VkAccessFlagBits2 VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT = 0x00000001ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR = 0x00000001ULL; static const VkAccessFlagBits2 
VK_ACCESS_2_INDEX_READ_BIT = 0x00000002ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_INDEX_READ_BIT_KHR = 0x00000002ULL; static const VkAccessFlagBits2 VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR = 0x00000004ULL; static const VkAccessFlagBits2 VK_ACCESS_2_UNIFORM_READ_BIT = 0x00000008ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_UNIFORM_READ_BIT_KHR = 0x00000008ULL; static const VkAccessFlagBits2 VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT = 0x00000010ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR = 0x00000010ULL; static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_READ_BIT = 0x00000020ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_READ_BIT_KHR = 0x00000020ULL; static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_WRITE_BIT = 0x00000040ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_WRITE_BIT_KHR = 0x00000040ULL; static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT = 0x00000080ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR = 0x00000080ULL; static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR = 0x00000100ULL; static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR = 0x00000200ULL; static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR = 0x00000400ULL; static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_READ_BIT = 0x00000800ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_READ_BIT_KHR = 0x00000800ULL; static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_WRITE_BIT = 0x00001000ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR = 0x00001000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_HOST_READ_BIT = 0x00002000ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_HOST_READ_BIT_KHR = 0x00002000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_HOST_WRITE_BIT = 0x00004000ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_HOST_WRITE_BIT_KHR = 0x00004000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_READ_BIT = 0x00008000ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_READ_BIT_KHR = 0x00008000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_WRITE_BIT = 0x00010000ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_WRITE_BIT_KHR = 0x00010000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_SAMPLED_READ_BIT = 0x100000000ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR = 0x100000000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_READ_BIT = 0x200000000ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR = 0x200000000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT = 0x400000000ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR = 0x400000000ULL; -#ifdef VK_ENABLE_BETA_EXTENSIONS static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR = 0x800000000ULL; -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR = 0x1000000000ULL; -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR = 0x2000000000ULL; 
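+// Illustrative usage sketch (hand-written, not registry-generated): the 64-bit stage
+// and access constants in this section plug directly into the synchronization2
+// barrier structures. commandBuffer is a placeholder.
+//
+//     VkMemoryBarrier2 barrier = {
+//         .sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2,
+//         .srcStageMask  = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT,
+//         .srcAccessMask = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT,
+//         .dstStageMask  = VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT,
+//         .dstAccessMask = VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT,
+//     };
+//     VkDependencyInfo dep = {
+//         .sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO,
+//         .memoryBarrierCount = 1,
+//         .pMemoryBarriers = &barrier,
+//     };
+//     vkCmdPipelineBarrier2(commandBuffer, &dep);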
-#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR = 0x4000000000ULL; -#endif +static const VkAccessFlagBits2 VK_ACCESS_2_NONE_KHR = 0ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR = 0x00000001ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INDEX_READ_BIT_KHR = 0x00000002ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR = 0x00000004ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_UNIFORM_READ_BIT_KHR = 0x00000008ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR = 0x00000010ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_READ_BIT_KHR = 0x00000020ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_WRITE_BIT_KHR = 0x00000040ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR = 0x00000080ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR = 0x00000100ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR = 0x00000200ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR = 0x00000400ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_READ_BIT_KHR = 0x00000800ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR = 0x00001000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_HOST_READ_BIT_KHR = 0x00002000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_HOST_WRITE_BIT_KHR = 0x00004000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_READ_BIT_KHR = 0x00008000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_WRITE_BIT_KHR = 0x00010000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR = 0x100000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR = 0x200000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR = 0x400000000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_EXT = 0x00020000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_EXT = 0x00040000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR = 0x00800000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV = 0x00800000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000ULL; @@ -6544,8 +6901,13 @@ static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV = static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV = 0x00400000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DESCRIPTOR_BUFFER_READ_BIT_EXT = 0x20000000000ULL; static const VkAccessFlagBits2 
VK_ACCESS_2_INVOCATION_MASK_READ_BIT_HUAWEI = 0x8000000000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_BINDING_TABLE_READ_BIT_KHR = 0x10000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MICROMAP_READ_BIT_EXT = 0x100000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MICROMAP_WRITE_BIT_EXT = 0x200000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_OPTICAL_FLOW_READ_BIT_NV = 0x40000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_OPTICAL_FLOW_WRITE_BIT_NV = 0x80000000000ULL; typedef enum VkSubmitFlagBits { @@ -6560,9 +6922,11 @@ typedef enum VkRenderingFlagBits { VK_RENDERING_SUSPENDING_BIT = 0x00000002, VK_RENDERING_RESUMING_BIT = 0x00000004, VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x00000008, + VK_RENDERING_CONTENTS_INLINE_BIT_KHR = 0x00000010, VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT_KHR = VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT, VK_RENDERING_SUSPENDING_BIT_KHR = VK_RENDERING_SUSPENDING_BIT, VK_RENDERING_RESUMING_BIT_KHR = VK_RENDERING_RESUMING_BIT, + VK_RENDERING_CONTENTS_INLINE_BIT_EXT = VK_RENDERING_CONTENTS_INLINE_BIT_KHR, VK_RENDERING_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkRenderingFlagBits; typedef VkFlags VkRenderingFlags; @@ -6571,79 +6935,79 @@ typedef VkFlags64 VkFormatFeatureFlags2; // Flag bits for VkFormatFeatureFlagBits2 typedef VkFlags64 VkFormatFeatureFlagBits2; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT = 0x00000001ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT_KHR = 0x00000001ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT = 0x00000002ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT_KHR = 0x00000002ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT_KHR = 0x00000004ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR = 0x00000008ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT = 0x00000010ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT_KHR = 0x00000010ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT_KHR = 0x00000020ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT = 0x00000040ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT_KHR = 0x00000040ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT = 0x00000080ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT_KHR = 0x00000080ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT_KHR = 0x00000100ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT_KHR = 0x00000200ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_SRC_BIT = 0x00000400ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_SRC_BIT_KHR = 
0x00000400ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_DST_BIT = 0x00000800ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_DST_BIT_KHR = 0x00000800ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT_KHR = 0x00001000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT = 0x00002000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT = 0x00002000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT = 0x00004000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT_KHR = 0x00004000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT = 0x00008000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT_KHR = 0x00008000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT = 0x00010000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT_KHR = 0x00010000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT = 0x00020000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT_KHR = 0x00020000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT = 0x00040000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = 0x00040000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT = 0x00080000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = 0x00080000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT = 0x00100000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR = 0x00100000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT = 0x00200000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR = 0x00200000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DISJOINT_BIT = 0x00400000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DISJOINT_BIT_KHR = 0x00400000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT = 0x00800000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT_KHR = 0x00800000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT = 0x80000000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT_KHR = 0x80000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT = 0x100000000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT_KHR = 0x100000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT = 0x200000000ULL; -static const 
VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT_KHR = 0x200000000ULL; -#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT = 0x00002000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT = 0x400000000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_DECODE_OUTPUT_BIT_KHR = 0x02000000ULL; -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_DECODE_DPB_BIT_KHR = 0x04000000ULL; -#endif static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR = 0x20000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x40000000ULL; -#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT_EXT = 0x400000000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR = 0x08000000ULL; -#endif -#ifdef VK_ENABLE_BETA_EXTENSIONS static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR = 0x10000000ULL; -#endif +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT_KHR = 0x00000001ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT_KHR = 0x00000002ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT_KHR = 0x00000004ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR = 0x00000008ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT_KHR = 0x00000010ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT_KHR = 0x00000020ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT_KHR = 0x00000040ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT_KHR = 0x00000080ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT_KHR = 0x00000100ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT_KHR = 0x00000200ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_SRC_BIT_KHR = 0x00000400ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_DST_BIT_KHR = 0x00000800ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT_KHR = 0x00001000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT_KHR = 0x00004000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT_KHR = 0x00008000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT_KHR = 0x00020000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = 0x00040000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = 0x00080000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR = 0x00100000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR = 0x00200000ULL; 
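+// Illustrative usage sketch (hand-written, not registry-generated): VkFormatFeatureFlags2
+// values are reported through VkFormatProperties3 chained to the standard query.
+// physicalDevice is a placeholder.
+//
+//     VkFormatProperties3 props3 = { .sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3 };
+//     VkFormatProperties2 props2 = { .sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
+//                                    .pNext = &props3 };
+//     vkGetPhysicalDeviceFormatProperties2(physicalDevice, VK_FORMAT_R8G8B8A8_UNORM, &props2);
+//     if (props3.optimalTilingFeatures & VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT) {
+//         /* storage writes without an explicit format qualifier are supported */
+//     }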
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DISJOINT_BIT_KHR = 0x00400000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT_KHR = 0x00800000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT_KHR = 0x80000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT_KHR = 0x100000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT_KHR = 0x200000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT_KHR = 0x00010000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT = 0x00002000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_RADIUS_BUFFER_BIT_NV = 0x8000000000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV = 0x4000000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM = 0x400000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM = 0x800000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM = 0x1000000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM = 0x2000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV = 0x10000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV = 0x20000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV = 0x40000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR = 0x2000000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_EMPHASIS_MAP_BIT_KHR = 0x4000000000000ULL; typedef struct VkPhysicalDeviceVulkan13Features { VkStructureType sType; @@ -7397,7219 +7761,13594 @@ VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirements( #endif -#define VK_KHR_surface 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR) -#define VK_KHR_SURFACE_SPEC_VERSION 25 -#define VK_KHR_SURFACE_EXTENSION_NAME "VK_KHR_surface" +// VK_VERSION_1_4 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_VERSION_1_4 1 +// Vulkan 1.4 version number +#define VK_API_VERSION_1_4 VK_MAKE_API_VERSION(0, 1, 4, 0)// Patch version should always be set to 0 + +#define VK_MAX_GLOBAL_PRIORITY_SIZE 16U + +typedef enum VkPipelineRobustnessBufferBehavior { + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT = 0, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED = 1, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS = 2, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2 = 3, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT_EXT = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED_EXT = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_MAX_ENUM = 0x7FFFFFFF +} VkPipelineRobustnessBufferBehavior; + +typedef enum VkPipelineRobustnessImageBehavior { + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT = 0, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED = 1, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS = 2, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2 = 3, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT_EXT = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED_EXT = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_EXT = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2_EXT = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_MAX_ENUM = 0x7FFFFFFF +} VkPipelineRobustnessImageBehavior; + +typedef enum VkQueueGlobalPriority { + VK_QUEUE_GLOBAL_PRIORITY_LOW = 128, + VK_QUEUE_GLOBAL_PRIORITY_MEDIUM = 256, + VK_QUEUE_GLOBAL_PRIORITY_HIGH = 512, + VK_QUEUE_GLOBAL_PRIORITY_REALTIME = 1024, + VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT = VK_QUEUE_GLOBAL_PRIORITY_LOW, + VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM, + VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT = VK_QUEUE_GLOBAL_PRIORITY_HIGH, + VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT = VK_QUEUE_GLOBAL_PRIORITY_REALTIME, + VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR = VK_QUEUE_GLOBAL_PRIORITY_LOW, + VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM, + VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR = VK_QUEUE_GLOBAL_PRIORITY_HIGH, + VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR = VK_QUEUE_GLOBAL_PRIORITY_REALTIME, + VK_QUEUE_GLOBAL_PRIORITY_MAX_ENUM = 0x7FFFFFFF +} VkQueueGlobalPriority; + +typedef enum VkLineRasterizationMode { + VK_LINE_RASTERIZATION_MODE_DEFAULT = 0, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR = 1, + VK_LINE_RASTERIZATION_MODE_BRESENHAM = 2, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH = 3, + VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT = VK_LINE_RASTERIZATION_MODE_DEFAULT, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT = VK_LINE_RASTERIZATION_MODE_RECTANGULAR, + VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT = VK_LINE_RASTERIZATION_MODE_BRESENHAM, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH, + VK_LINE_RASTERIZATION_MODE_DEFAULT_KHR = VK_LINE_RASTERIZATION_MODE_DEFAULT, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_KHR = VK_LINE_RASTERIZATION_MODE_RECTANGULAR, 
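+// Illustrative usage sketch (hand-written, not registry-generated), assuming the Vulkan 1.4
+// core VkPipelineRasterizationLineStateCreateInfo promoted from VK_EXT_line_rasterization:
+// these modes pick the line algorithm when chained into the pipeline rasterization state.
+//
+//     VkPipelineRasterizationLineStateCreateInfo lineState = {
+//         .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO,
+//         .lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_BRESENHAM,
+//         .stippledLineEnable = VK_TRUE,
+//         .lineStippleFactor = 1,
+//         .lineStipplePattern = 0x00FF,
+//     };
+//     /* chain via VkPipelineRasterizationStateCreateInfo::pNext = &lineState */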
+ VK_LINE_RASTERIZATION_MODE_BRESENHAM_KHR = VK_LINE_RASTERIZATION_MODE_BRESENHAM, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_KHR = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH, + VK_LINE_RASTERIZATION_MODE_MAX_ENUM = 0x7FFFFFFF +} VkLineRasterizationMode; + +typedef enum VkMemoryUnmapFlagBits { + VK_MEMORY_UNMAP_RESERVE_BIT_EXT = 0x00000001, + VK_MEMORY_UNMAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryUnmapFlagBits; +typedef VkFlags VkMemoryUnmapFlags; +typedef VkFlags64 VkPipelineCreateFlags2; + +// Flag bits for VkPipelineCreateFlagBits2 +typedef VkFlags64 VkPipelineCreateFlagBits2; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT = 0x00000001ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT = 0x00000002ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DERIVATIVE_BIT = 0x00000004ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT = 0x00000008ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT = 0x00000010ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT = 0x00000100ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT = 0x00000200ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT = 0x08000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT = 0x40000000ULL; +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX = 0x100000000ULL; +#endif +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_BUILT_IN_PRIMITIVES_BIT_KHR = 0x00001000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_SPHERES_AND_LINEAR_SWEPT_SPHERES_BIT_NV = 0x200000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x400000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR = 0x00000001ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR = 0x00000002ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR = 0x00000004ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = 0x00000008ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR = 0x00000010ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_NV = 0x00000020ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_CAPTURE_STATISTICS_BIT_KHR = 0x00000040ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_KHR = 0x00000100ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT_KHR = 0x00000200ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_EXT = 0x00000400ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT = 0x00800000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR = 0x00000800ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR = 
0x00001000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_AABBS_BIT_KHR = 0x00002000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR = 0x00004000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR = 0x00008000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR = 0x00010000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR = 0x00020000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR = 0x00080000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_NV = 0x00040000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_NV = 0x00100000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00200000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00400000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT = 0x01000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x02000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x04000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT_EXT = 0x08000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_EXT = 0x40000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV = 0x10000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT = 0x20000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DISALLOW_OPACITY_MICROMAP_BIT_ARM = 0x2000000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_CAPTURE_DATA_BIT_KHR = 0x80000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_EXT = 0x4000000000ULL; + +typedef VkFlags64 VkBufferUsageFlags2; + +// Flag bits for VkBufferUsageFlagBits2 +typedef VkFlags64 VkBufferUsageFlagBits2; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT = 0x00000001ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_TRANSFER_DST_BIT = 0x00000002ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT = 0x00000004ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT = 0x00000008ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT = 0x00000010ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT = 0x00000020ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT = 0x00000040ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT = 0x00000080ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT = 0x00000100ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT = 0x00020000ULL; +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX = 0x02000000ULL; +#endif +static const 
VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT_KHR = 0x00000001ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_TRANSFER_DST_BIT_KHR = 0x00000002ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR = 0x00000004ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT_KHR = 0x00000008ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT_KHR = 0x00000010ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT_KHR = 0x00000020ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT_KHR = 0x00000040ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT_KHR = 0x00000080ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT_KHR = 0x00000100ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR = 0x00000400ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_RAY_TRACING_BIT_NV = 0x00000400ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_VIDEO_DECODE_SRC_BIT_KHR = 0x00002000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_VIDEO_DECODE_DST_BIT_KHR = 0x00004000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_VIDEO_ENCODE_DST_BIT_KHR = 0x00008000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_VIDEO_ENCODE_SRC_BIT_KHR = 0x00010000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT_KHR = 0x00020000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00080000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR = 0x00100000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT = 0x00200000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT = 0x00400000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT = 0x04000000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT = 0x00800000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_EXT = 0x01000000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_PREPROCESS_BUFFER_BIT_EXT = 0x80000000ULL; + + +typedef enum VkHostImageCopyFlagBits { + VK_HOST_IMAGE_COPY_MEMCPY = 0x00000001, + VK_HOST_IMAGE_COPY_MEMCPY_EXT = VK_HOST_IMAGE_COPY_MEMCPY, + VK_HOST_IMAGE_COPY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkHostImageCopyFlagBits; +typedef VkFlags VkHostImageCopyFlags; +typedef struct VkPhysicalDeviceVulkan14Features { + VkStructureType sType; + void* pNext; + VkBool32 globalPriorityQuery; + VkBool32 shaderSubgroupRotate; + VkBool32 shaderSubgroupRotateClustered; + VkBool32 shaderFloatControls2; + VkBool32 shaderExpectAssume; + VkBool32 rectangularLines; + VkBool32 bresenhamLines; + VkBool32 smoothLines; + VkBool32 stippledRectangularLines; + VkBool32 stippledBresenhamLines; + VkBool32 stippledSmoothLines; + VkBool32 vertexAttributeInstanceRateDivisor; + VkBool32 vertexAttributeInstanceRateZeroDivisor; + VkBool32 
indexTypeUint8; + VkBool32 dynamicRenderingLocalRead; + VkBool32 maintenance5; + VkBool32 maintenance6; + VkBool32 pipelineProtectedAccess; + VkBool32 pipelineRobustness; + VkBool32 hostImageCopy; + VkBool32 pushDescriptor; +} VkPhysicalDeviceVulkan14Features; -typedef enum VkPresentModeKHR { - VK_PRESENT_MODE_IMMEDIATE_KHR = 0, - VK_PRESENT_MODE_MAILBOX_KHR = 1, - VK_PRESENT_MODE_FIFO_KHR = 2, - VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3, - VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000, - VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001, - VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkPresentModeKHR; +typedef struct VkPhysicalDeviceVulkan14Properties { + VkStructureType sType; + void* pNext; + uint32_t lineSubPixelPrecisionBits; + uint32_t maxVertexAttribDivisor; + VkBool32 supportsNonZeroFirstInstance; + uint32_t maxPushDescriptors; + VkBool32 dynamicRenderingLocalReadDepthStencilAttachments; + VkBool32 dynamicRenderingLocalReadMultisampledAttachments; + VkBool32 earlyFragmentMultisampleCoverageAfterSampleCounting; + VkBool32 earlyFragmentSampleMaskTestBeforeSampleCounting; + VkBool32 depthStencilSwizzleOneSupport; + VkBool32 polygonModePointSize; + VkBool32 nonStrictSinglePixelWideLinesUseParallelogram; + VkBool32 nonStrictWideLinesUseParallelogram; + VkBool32 blockTexelViewCompatibleMultipleLayers; + uint32_t maxCombinedImageSamplerDescriptorCount; + VkBool32 fragmentShadingRateClampCombinerInputs; + VkPipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers; + VkPipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers; + VkPipelineRobustnessBufferBehavior defaultRobustnessVertexInputs; + VkPipelineRobustnessImageBehavior defaultRobustnessImages; + uint32_t copySrcLayoutCount; + VkImageLayout* pCopySrcLayouts; + uint32_t copyDstLayoutCount; + VkImageLayout* pCopyDstLayouts; + uint8_t optimalTilingLayoutUUID[VK_UUID_SIZE]; + VkBool32 identicalMemoryTypeRequirements; +} VkPhysicalDeviceVulkan14Properties; + +typedef struct VkDeviceQueueGlobalPriorityCreateInfo { + VkStructureType sType; + const void* pNext; + VkQueueGlobalPriority globalPriority; +} VkDeviceQueueGlobalPriorityCreateInfo; -typedef enum VkColorSpaceKHR { - VK_COLOR_SPACE_SRGB_NONLINEAR_KHR = 0, - VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT = 1000104001, - VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT = 1000104002, - VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT = 1000104003, - VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT = 1000104004, - VK_COLOR_SPACE_BT709_LINEAR_EXT = 1000104005, - VK_COLOR_SPACE_BT709_NONLINEAR_EXT = 1000104006, - VK_COLOR_SPACE_BT2020_LINEAR_EXT = 1000104007, - VK_COLOR_SPACE_HDR10_ST2084_EXT = 1000104008, - VK_COLOR_SPACE_DOLBYVISION_EXT = 1000104009, - VK_COLOR_SPACE_HDR10_HLG_EXT = 1000104010, - VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT = 1000104011, - VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT = 1000104012, - VK_COLOR_SPACE_PASS_THROUGH_EXT = 1000104013, - VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT = 1000104014, - VK_COLOR_SPACE_DISPLAY_NATIVE_AMD = 1000213000, - VK_COLORSPACE_SRGB_NONLINEAR_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, - VK_COLOR_SPACE_DCI_P3_LINEAR_EXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT, - VK_COLOR_SPACE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkColorSpaceKHR; +typedef struct VkPhysicalDeviceGlobalPriorityQueryFeatures { + VkStructureType sType; + void* pNext; + VkBool32 globalPriorityQuery; +} VkPhysicalDeviceGlobalPriorityQueryFeatures; -typedef enum VkSurfaceTransformFlagBitsKHR { - VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR = 0x00000001, - VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR = 0x00000002, - 
VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR = 0x00000004, - VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR = 0x00000008, - VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR = 0x00000010, - VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR = 0x00000020, - VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR = 0x00000040, - VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR = 0x00000080, - VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR = 0x00000100, - VK_SURFACE_TRANSFORM_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkSurfaceTransformFlagBitsKHR; +typedef struct VkQueueFamilyGlobalPriorityProperties { + VkStructureType sType; + void* pNext; + uint32_t priorityCount; + VkQueueGlobalPriority priorities[VK_MAX_GLOBAL_PRIORITY_SIZE]; +} VkQueueFamilyGlobalPriorityProperties; -typedef enum VkCompositeAlphaFlagBitsKHR { - VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, - VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR = 0x00000002, - VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR = 0x00000004, - VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR = 0x00000008, - VK_COMPOSITE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkCompositeAlphaFlagBitsKHR; -typedef VkFlags VkCompositeAlphaFlagsKHR; -typedef VkFlags VkSurfaceTransformFlagsKHR; -typedef struct VkSurfaceCapabilitiesKHR { - uint32_t minImageCount; - uint32_t maxImageCount; - VkExtent2D currentExtent; - VkExtent2D minImageExtent; - VkExtent2D maxImageExtent; - uint32_t maxImageArrayLayers; - VkSurfaceTransformFlagsKHR supportedTransforms; - VkSurfaceTransformFlagBitsKHR currentTransform; - VkCompositeAlphaFlagsKHR supportedCompositeAlpha; - VkImageUsageFlags supportedUsageFlags; -} VkSurfaceCapabilitiesKHR; +typedef struct VkPhysicalDeviceShaderSubgroupRotateFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderSubgroupRotate; + VkBool32 shaderSubgroupRotateClustered; +} VkPhysicalDeviceShaderSubgroupRotateFeatures; -typedef struct VkSurfaceFormatKHR { - VkFormat format; - VkColorSpaceKHR colorSpace; -} VkSurfaceFormatKHR; +typedef struct VkPhysicalDeviceShaderFloatControls2Features { + VkStructureType sType; + void* pNext; + VkBool32 shaderFloatControls2; +} VkPhysicalDeviceShaderFloatControls2Features; -typedef void (VKAPI_PTR *PFN_vkDestroySurfaceKHR)(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes); +typedef struct VkPhysicalDeviceShaderExpectAssumeFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderExpectAssume; +} VkPhysicalDeviceShaderExpectAssumeFeatures; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR( - VkInstance instance, - VkSurfaceKHR surface, - const VkAllocationCallbacks* pAllocator); +typedef struct VkPhysicalDeviceLineRasterizationFeatures { + VkStructureType sType; + void* pNext; + VkBool32 rectangularLines; + VkBool32 bresenhamLines; + 
VkBool32 smoothLines; + VkBool32 stippledRectangularLines; + VkBool32 stippledBresenhamLines; + VkBool32 stippledSmoothLines; +} VkPhysicalDeviceLineRasterizationFeatures; -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR( - VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - VkSurfaceKHR surface, - VkBool32* pSupported); +typedef struct VkPhysicalDeviceLineRasterizationProperties { + VkStructureType sType; + void* pNext; + uint32_t lineSubPixelPrecisionBits; +} VkPhysicalDeviceLineRasterizationProperties; -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR( - VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); +typedef struct VkPipelineRasterizationLineStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkLineRasterizationMode lineRasterizationMode; + VkBool32 stippledLineEnable; + uint32_t lineStippleFactor; + uint16_t lineStipplePattern; +} VkPipelineRasterizationLineStateCreateInfo; -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR( - VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t* pSurfaceFormatCount, - VkSurfaceFormatKHR* pSurfaceFormats); +typedef struct VkPhysicalDeviceVertexAttributeDivisorProperties { + VkStructureType sType; + void* pNext; + uint32_t maxVertexAttribDivisor; + VkBool32 supportsNonZeroFirstInstance; +} VkPhysicalDeviceVertexAttributeDivisorProperties; -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR( - VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t* pPresentModeCount, - VkPresentModeKHR* pPresentModes); -#endif +typedef struct VkVertexInputBindingDivisorDescription { + uint32_t binding; + uint32_t divisor; +} VkVertexInputBindingDivisorDescription; +typedef struct VkPipelineVertexInputDivisorStateCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t vertexBindingDivisorCount; + const VkVertexInputBindingDivisorDescription* pVertexBindingDivisors; +} VkPipelineVertexInputDivisorStateCreateInfo; -#define VK_KHR_swapchain 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSwapchainKHR) -#define VK_KHR_SWAPCHAIN_SPEC_VERSION 70 -#define VK_KHR_SWAPCHAIN_EXTENSION_NAME "VK_KHR_swapchain" +typedef struct VkPhysicalDeviceVertexAttributeDivisorFeatures { + VkStructureType sType; + void* pNext; + VkBool32 vertexAttributeInstanceRateDivisor; + VkBool32 vertexAttributeInstanceRateZeroDivisor; +} VkPhysicalDeviceVertexAttributeDivisorFeatures; -typedef enum VkSwapchainCreateFlagBitsKHR { - VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = 0x00000001, - VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR = 0x00000002, - VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR = 0x00000004, - VK_SWAPCHAIN_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkSwapchainCreateFlagBitsKHR; -typedef VkFlags VkSwapchainCreateFlagsKHR; +typedef struct VkPhysicalDeviceIndexTypeUint8Features { + VkStructureType sType; + void* pNext; + VkBool32 indexTypeUint8; +} VkPhysicalDeviceIndexTypeUint8Features; -typedef enum VkDeviceGroupPresentModeFlagBitsKHR { - VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR = 0x00000001, - VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR = 0x00000002, - VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR = 0x00000004, - VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR = 0x00000008, - VK_DEVICE_GROUP_PRESENT_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkDeviceGroupPresentModeFlagBitsKHR; -typedef VkFlags VkDeviceGroupPresentModeFlagsKHR; -typedef struct VkSwapchainCreateInfoKHR { - 
VkStructureType sType; - const void* pNext; - VkSwapchainCreateFlagsKHR flags; - VkSurfaceKHR surface; - uint32_t minImageCount; - VkFormat imageFormat; - VkColorSpaceKHR imageColorSpace; - VkExtent2D imageExtent; - uint32_t imageArrayLayers; - VkImageUsageFlags imageUsage; - VkSharingMode imageSharingMode; - uint32_t queueFamilyIndexCount; - const uint32_t* pQueueFamilyIndices; - VkSurfaceTransformFlagBitsKHR preTransform; - VkCompositeAlphaFlagBitsKHR compositeAlpha; - VkPresentModeKHR presentMode; - VkBool32 clipped; - VkSwapchainKHR oldSwapchain; -} VkSwapchainCreateInfoKHR; +typedef struct VkMemoryMapInfo { + VkStructureType sType; + const void* pNext; + VkMemoryMapFlags flags; + VkDeviceMemory memory; + VkDeviceSize offset; + VkDeviceSize size; +} VkMemoryMapInfo; -typedef struct VkPresentInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t waitSemaphoreCount; - const VkSemaphore* pWaitSemaphores; - uint32_t swapchainCount; - const VkSwapchainKHR* pSwapchains; - const uint32_t* pImageIndices; - VkResult* pResults; -} VkPresentInfoKHR; +typedef struct VkMemoryUnmapInfo { + VkStructureType sType; + const void* pNext; + VkMemoryUnmapFlags flags; + VkDeviceMemory memory; +} VkMemoryUnmapInfo; -typedef struct VkImageSwapchainCreateInfoKHR { +typedef struct VkPhysicalDeviceMaintenance5Features { VkStructureType sType; - const void* pNext; - VkSwapchainKHR swapchain; -} VkImageSwapchainCreateInfoKHR; + void* pNext; + VkBool32 maintenance5; +} VkPhysicalDeviceMaintenance5Features; -typedef struct VkBindImageMemorySwapchainInfoKHR { +typedef struct VkPhysicalDeviceMaintenance5Properties { VkStructureType sType; - const void* pNext; - VkSwapchainKHR swapchain; - uint32_t imageIndex; -} VkBindImageMemorySwapchainInfoKHR; + void* pNext; + VkBool32 earlyFragmentMultisampleCoverageAfterSampleCounting; + VkBool32 earlyFragmentSampleMaskTestBeforeSampleCounting; + VkBool32 depthStencilSwizzleOneSupport; + VkBool32 polygonModePointSize; + VkBool32 nonStrictSinglePixelWideLinesUseParallelogram; + VkBool32 nonStrictWideLinesUseParallelogram; +} VkPhysicalDeviceMaintenance5Properties; -typedef struct VkAcquireNextImageInfoKHR { +typedef struct VkRenderingAreaInfo { VkStructureType sType; const void* pNext; - VkSwapchainKHR swapchain; - uint64_t timeout; - VkSemaphore semaphore; - VkFence fence; - uint32_t deviceMask; -} VkAcquireNextImageInfoKHR; + uint32_t viewMask; + uint32_t colorAttachmentCount; + const VkFormat* pColorAttachmentFormats; + VkFormat depthAttachmentFormat; + VkFormat stencilAttachmentFormat; +} VkRenderingAreaInfo; -typedef struct VkDeviceGroupPresentCapabilitiesKHR { - VkStructureType sType; - void* pNext; - uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE]; - VkDeviceGroupPresentModeFlagsKHR modes; -} VkDeviceGroupPresentCapabilitiesKHR; +typedef struct VkImageSubresource2 { + VkStructureType sType; + void* pNext; + VkImageSubresource imageSubresource; +} VkImageSubresource2; -typedef struct VkDeviceGroupPresentInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t swapchainCount; - const uint32_t* pDeviceMasks; - VkDeviceGroupPresentModeFlagBitsKHR mode; -} VkDeviceGroupPresentInfoKHR; +typedef struct VkDeviceImageSubresourceInfo { + VkStructureType sType; + const void* pNext; + const VkImageCreateInfo* pCreateInfo; + const VkImageSubresource2* pSubresource; +} VkDeviceImageSubresourceInfo; -typedef struct VkDeviceGroupSwapchainCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkDeviceGroupPresentModeFlagsKHR modes; -} 
VkDeviceGroupSwapchainCreateInfoKHR; +typedef struct VkSubresourceLayout2 { + VkStructureType sType; + void* pNext; + VkSubresourceLayout subresourceLayout; +} VkSubresourceLayout2; -typedef VkResult (VKAPI_PTR *PFN_vkCreateSwapchainKHR)(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain); -typedef void (VKAPI_PTR *PFN_vkDestroySwapchainKHR)(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainImagesKHR)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages); -typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImageKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex); -typedef VkResult (VKAPI_PTR *PFN_vkQueuePresentKHR)(VkQueue queue, const VkPresentInfoKHR* pPresentInfo); -typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupPresentCapabilitiesKHR)(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); -typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModesKHR)(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDevicePresentRectanglesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects); -typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImage2KHR)(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex); +typedef struct VkPipelineCreateFlags2CreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags2 flags; +} VkPipelineCreateFlags2CreateInfo; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR( - VkDevice device, - const VkSwapchainCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSwapchainKHR* pSwapchain); +typedef struct VkBufferUsageFlags2CreateInfo { + VkStructureType sType; + const void* pNext; + VkBufferUsageFlags2 usage; +} VkBufferUsageFlags2CreateInfo; -VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR( - VkDevice device, - VkSwapchainKHR swapchain, - const VkAllocationCallbacks* pAllocator); +typedef struct VkPhysicalDevicePushDescriptorProperties { + VkStructureType sType; + void* pNext; + uint32_t maxPushDescriptors; +} VkPhysicalDevicePushDescriptorProperties; -VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR( - VkDevice device, - VkSwapchainKHR swapchain, - uint32_t* pSwapchainImageCount, - VkImage* pSwapchainImages); +typedef struct VkPhysicalDeviceDynamicRenderingLocalReadFeatures { + VkStructureType sType; + void* pNext; + VkBool32 dynamicRenderingLocalRead; +} VkPhysicalDeviceDynamicRenderingLocalReadFeatures; -VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR( - VkDevice device, - VkSwapchainKHR swapchain, - uint64_t timeout, - VkSemaphore semaphore, - VkFence fence, - uint32_t* pImageIndex); +typedef struct VkRenderingAttachmentLocationInfo { + VkStructureType sType; + const void* pNext; + uint32_t colorAttachmentCount; + const uint32_t* pColorAttachmentLocations; +} VkRenderingAttachmentLocationInfo; -VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR( - VkQueue queue, - const VkPresentInfoKHR* pPresentInfo); +typedef struct VkRenderingInputAttachmentIndexInfo { + VkStructureType sType; + const void* pNext; + uint32_t colorAttachmentCount; + const uint32_t* pColorAttachmentInputIndices; + const uint32_t* 
pDepthInputAttachmentIndex; + const uint32_t* pStencilInputAttachmentIndex; +} VkRenderingInputAttachmentIndexInfo; -VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHR( - VkDevice device, - VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); +typedef struct VkPhysicalDeviceMaintenance6Features { + VkStructureType sType; + void* pNext; + VkBool32 maintenance6; +} VkPhysicalDeviceMaintenance6Features; -VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR( - VkDevice device, - VkSurfaceKHR surface, - VkDeviceGroupPresentModeFlagsKHR* pModes); +typedef struct VkPhysicalDeviceMaintenance6Properties { + VkStructureType sType; + void* pNext; + VkBool32 blockTexelViewCompatibleMultipleLayers; + uint32_t maxCombinedImageSamplerDescriptorCount; + VkBool32 fragmentShadingRateClampCombinerInputs; +} VkPhysicalDeviceMaintenance6Properties; -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR( - VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t* pRectCount, - VkRect2D* pRects); +typedef struct VkBindMemoryStatus { + VkStructureType sType; + const void* pNext; + VkResult* pResult; +} VkBindMemoryStatus; -VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHR( - VkDevice device, - const VkAcquireNextImageInfoKHR* pAcquireInfo, - uint32_t* pImageIndex); -#endif +typedef struct VkBindDescriptorSetsInfo { + VkStructureType sType; + const void* pNext; + VkShaderStageFlags stageFlags; + VkPipelineLayout layout; + uint32_t firstSet; + uint32_t descriptorSetCount; + const VkDescriptorSet* pDescriptorSets; + uint32_t dynamicOffsetCount; + const uint32_t* pDynamicOffsets; +} VkBindDescriptorSetsInfo; + +typedef struct VkPushConstantsInfo { + VkStructureType sType; + const void* pNext; + VkPipelineLayout layout; + VkShaderStageFlags stageFlags; + uint32_t offset; + uint32_t size; + const void* pValues; +} VkPushConstantsInfo; +typedef struct VkPushDescriptorSetInfo { + VkStructureType sType; + const void* pNext; + VkShaderStageFlags stageFlags; + VkPipelineLayout layout; + uint32_t set; + uint32_t descriptorWriteCount; + const VkWriteDescriptorSet* pDescriptorWrites; +} VkPushDescriptorSetInfo; + +typedef struct VkPushDescriptorSetWithTemplateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorUpdateTemplate descriptorUpdateTemplate; + VkPipelineLayout layout; + uint32_t set; + const void* pData; +} VkPushDescriptorSetWithTemplateInfo; -#define VK_KHR_display 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR) -#define VK_KHR_DISPLAY_SPEC_VERSION 23 -#define VK_KHR_DISPLAY_EXTENSION_NAME "VK_KHR_display" -typedef VkFlags VkDisplayModeCreateFlagsKHR; +typedef struct VkPhysicalDevicePipelineProtectedAccessFeatures { + VkStructureType sType; + void* pNext; + VkBool32 pipelineProtectedAccess; +} VkPhysicalDevicePipelineProtectedAccessFeatures; -typedef enum VkDisplayPlaneAlphaFlagBitsKHR { - VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, - VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR = 0x00000002, - VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR = 0x00000004, - VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR = 0x00000008, - VK_DISPLAY_PLANE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkDisplayPlaneAlphaFlagBitsKHR; -typedef VkFlags VkDisplayPlaneAlphaFlagsKHR; -typedef VkFlags VkDisplaySurfaceCreateFlagsKHR; -typedef struct VkDisplayModeParametersKHR { - VkExtent2D visibleRegion; - uint32_t refreshRate; -} VkDisplayModeParametersKHR; - -typedef struct 
VkDisplayModeCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkDisplayModeCreateFlagsKHR flags; - VkDisplayModeParametersKHR parameters; -} VkDisplayModeCreateInfoKHR; - -typedef struct VkDisplayModePropertiesKHR { - VkDisplayModeKHR displayMode; - VkDisplayModeParametersKHR parameters; -} VkDisplayModePropertiesKHR; - -typedef struct VkDisplayPlaneCapabilitiesKHR { - VkDisplayPlaneAlphaFlagsKHR supportedAlpha; - VkOffset2D minSrcPosition; - VkOffset2D maxSrcPosition; - VkExtent2D minSrcExtent; - VkExtent2D maxSrcExtent; - VkOffset2D minDstPosition; - VkOffset2D maxDstPosition; - VkExtent2D minDstExtent; - VkExtent2D maxDstExtent; -} VkDisplayPlaneCapabilitiesKHR; - -typedef struct VkDisplayPlanePropertiesKHR { - VkDisplayKHR currentDisplay; - uint32_t currentStackIndex; -} VkDisplayPlanePropertiesKHR; - -typedef struct VkDisplayPropertiesKHR { - VkDisplayKHR display; - const char* displayName; - VkExtent2D physicalDimensions; - VkExtent2D physicalResolution; - VkSurfaceTransformFlagsKHR supportedTransforms; - VkBool32 planeReorderPossible; - VkBool32 persistentContent; -} VkDisplayPropertiesKHR; - -typedef struct VkDisplaySurfaceCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkDisplaySurfaceCreateFlagsKHR flags; - VkDisplayModeKHR displayMode; - uint32_t planeIndex; - uint32_t planeStackIndex; - VkSurfaceTransformFlagBitsKHR transform; - float globalAlpha; - VkDisplayPlaneAlphaFlagBitsKHR alphaMode; - VkExtent2D imageExtent; -} VkDisplaySurfaceCreateInfoKHR; - -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneSupportedDisplaysKHR)(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays); -typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModePropertiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayModeKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode); -typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities); -typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayPlaneSurfaceKHR)(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR( - VkPhysicalDevice physicalDevice, - uint32_t* pPropertyCount, - VkDisplayPropertiesKHR* pProperties); +typedef struct VkPhysicalDevicePipelineRobustnessFeatures { + VkStructureType sType; + void* pNext; + VkBool32 pipelineRobustness; +} VkPhysicalDevicePipelineRobustnessFeatures; -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR( - VkPhysicalDevice physicalDevice, - uint32_t* pPropertyCount, - VkDisplayPlanePropertiesKHR* pProperties); +typedef struct VkPhysicalDevicePipelineRobustnessProperties { + VkStructureType sType; + void* pNext; + 
VkPipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers; + VkPipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers; + VkPipelineRobustnessBufferBehavior defaultRobustnessVertexInputs; + VkPipelineRobustnessImageBehavior defaultRobustnessImages; +} VkPhysicalDevicePipelineRobustnessProperties; -VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR( - VkPhysicalDevice physicalDevice, - uint32_t planeIndex, - uint32_t* pDisplayCount, - VkDisplayKHR* pDisplays); +typedef struct VkPipelineRobustnessCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineRobustnessBufferBehavior storageBuffers; + VkPipelineRobustnessBufferBehavior uniformBuffers; + VkPipelineRobustnessBufferBehavior vertexInputs; + VkPipelineRobustnessImageBehavior images; +} VkPipelineRobustnessCreateInfo; -VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR( - VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - uint32_t* pPropertyCount, - VkDisplayModePropertiesKHR* pProperties); +typedef struct VkPhysicalDeviceHostImageCopyFeatures { + VkStructureType sType; + void* pNext; + VkBool32 hostImageCopy; +} VkPhysicalDeviceHostImageCopyFeatures; -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR( - VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - const VkDisplayModeCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkDisplayModeKHR* pMode); +typedef struct VkPhysicalDeviceHostImageCopyProperties { + VkStructureType sType; + void* pNext; + uint32_t copySrcLayoutCount; + VkImageLayout* pCopySrcLayouts; + uint32_t copyDstLayoutCount; + VkImageLayout* pCopyDstLayouts; + uint8_t optimalTilingLayoutUUID[VK_UUID_SIZE]; + VkBool32 identicalMemoryTypeRequirements; +} VkPhysicalDeviceHostImageCopyProperties; + +typedef struct VkMemoryToImageCopy { + VkStructureType sType; + const void* pNext; + const void* pHostPointer; + uint32_t memoryRowLength; + uint32_t memoryImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkMemoryToImageCopy; -VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR( - VkPhysicalDevice physicalDevice, - VkDisplayModeKHR mode, - uint32_t planeIndex, - VkDisplayPlaneCapabilitiesKHR* pCapabilities); +typedef struct VkImageToMemoryCopy { + VkStructureType sType; + const void* pNext; + void* pHostPointer; + uint32_t memoryRowLength; + uint32_t memoryImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkImageToMemoryCopy; -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR( - VkInstance instance, - const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); -#endif +typedef struct VkCopyMemoryToImageInfo { + VkStructureType sType; + const void* pNext; + VkHostImageCopyFlags flags; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkMemoryToImageCopy* pRegions; +} VkCopyMemoryToImageInfo; + +typedef struct VkCopyImageToMemoryInfo { + VkStructureType sType; + const void* pNext; + VkHostImageCopyFlags flags; + VkImage srcImage; + VkImageLayout srcImageLayout; + uint32_t regionCount; + const VkImageToMemoryCopy* pRegions; +} VkCopyImageToMemoryInfo; + +typedef struct VkCopyImageToImageInfo { + VkStructureType sType; + const void* pNext; + VkHostImageCopyFlags flags; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + 
const VkImageCopy2* pRegions; +} VkCopyImageToImageInfo; +typedef struct VkHostImageLayoutTransitionInfo { + VkStructureType sType; + const void* pNext; + VkImage image; + VkImageLayout oldLayout; + VkImageLayout newLayout; + VkImageSubresourceRange subresourceRange; +} VkHostImageLayoutTransitionInfo; -#define VK_KHR_display_swapchain 1 -#define VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION 10 -#define VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME "VK_KHR_display_swapchain" -typedef struct VkDisplayPresentInfoKHR { +typedef struct VkSubresourceHostMemcpySize { VkStructureType sType; - const void* pNext; - VkRect2D srcRect; - VkRect2D dstRect; - VkBool32 persistent; -} VkDisplayPresentInfoKHR; + void* pNext; + VkDeviceSize size; +} VkSubresourceHostMemcpySize; -typedef VkResult (VKAPI_PTR *PFN_vkCreateSharedSwapchainsKHR)(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains); +typedef struct VkHostImageCopyDevicePerformanceQuery { + VkStructureType sType; + void* pNext; + VkBool32 optimalDeviceAccess; + VkBool32 identicalMemoryLayout; +} VkHostImageCopyDevicePerformanceQuery; + +typedef void (VKAPI_PTR *PFN_vkCmdSetLineStipple)(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern); +typedef VkResult (VKAPI_PTR *PFN_vkMapMemory2)(VkDevice device, const VkMemoryMapInfo* pMemoryMapInfo, void** ppData); +typedef VkResult (VKAPI_PTR *PFN_vkUnmapMemory2)(VkDevice device, const VkMemoryUnmapInfo* pMemoryUnmapInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer2)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType); +typedef void (VKAPI_PTR *PFN_vkGetRenderingAreaGranularity)(VkDevice device, const VkRenderingAreaInfo* pRenderingAreaInfo, VkExtent2D* pGranularity); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSubresourceLayout)(VkDevice device, const VkDeviceImageSubresourceInfo* pInfo, VkSubresourceLayout2* pLayout); +typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2)(VkDevice device, VkImage image, const VkImageSubresource2* pSubresource, VkSubresourceLayout2* pLayout); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSet)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplate)(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdSetRenderingAttachmentLocations)(VkCommandBuffer commandBuffer, const VkRenderingAttachmentLocationInfo* pLocationInfo); +typedef void (VKAPI_PTR *PFN_vkCmdSetRenderingInputAttachmentIndices)(VkCommandBuffer commandBuffer, const VkRenderingInputAttachmentIndexInfo* pInputAttachmentIndexInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorSets2)(VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfo* pBindDescriptorSetsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushConstants2)(VkCommandBuffer commandBuffer, const VkPushConstantsInfo* pPushConstantsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSet2)(VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfo* pPushDescriptorSetInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplate2)(VkCommandBuffer commandBuffer, const VkPushDescriptorSetWithTemplateInfo* 
pPushDescriptorSetWithTemplateInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToImage)(VkDevice device, const VkCopyMemoryToImageInfo* pCopyMemoryToImageInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToMemory)(VkDevice device, const VkCopyImageToMemoryInfo* pCopyImageToMemoryInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToImage)(VkDevice device, const VkCopyImageToImageInfo* pCopyImageToImageInfo); +typedef VkResult (VKAPI_PTR *PFN_vkTransitionImageLayout)(VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfo* pTransitions); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateSharedSwapchainsKHR( - VkDevice device, - uint32_t swapchainCount, - const VkSwapchainCreateInfoKHR* pCreateInfos, - const VkAllocationCallbacks* pAllocator, - VkSwapchainKHR* pSwapchains); -#endif +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStipple( + VkCommandBuffer commandBuffer, + uint32_t lineStippleFactor, + uint16_t lineStipplePattern); +VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory2( + VkDevice device, + const VkMemoryMapInfo* pMemoryMapInfo, + void** ppData); -#define VK_KHR_sampler_mirror_clamp_to_edge 1 -#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION 3 -#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME "VK_KHR_sampler_mirror_clamp_to_edge" +VKAPI_ATTR VkResult VKAPI_CALL vkUnmapMemory2( + VkDevice device, + const VkMemoryUnmapInfo* pMemoryUnmapInfo); +VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer2( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkDeviceSize size, + VkIndexType indexType); -#define VK_KHR_dynamic_rendering 1 -#define VK_KHR_DYNAMIC_RENDERING_SPEC_VERSION 1 -#define VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME "VK_KHR_dynamic_rendering" -typedef VkRenderingFlags VkRenderingFlagsKHR; +VKAPI_ATTR void VKAPI_CALL vkGetRenderingAreaGranularity( + VkDevice device, + const VkRenderingAreaInfo* pRenderingAreaInfo, + VkExtent2D* pGranularity); -typedef VkRenderingFlagBits VkRenderingFlagBitsKHR; +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSubresourceLayout( + VkDevice device, + const VkDeviceImageSubresourceInfo* pInfo, + VkSubresourceLayout2* pLayout); -typedef VkRenderingInfo VkRenderingInfoKHR; +VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2( + VkDevice device, + VkImage image, + const VkImageSubresource2* pSubresource, + VkSubresourceLayout2* pLayout); -typedef VkRenderingAttachmentInfo VkRenderingAttachmentInfoKHR; +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSet( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet* pDescriptorWrites); -typedef VkPipelineRenderingCreateInfo VkPipelineRenderingCreateInfoKHR; +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplate( + VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void* pData); -typedef VkPhysicalDeviceDynamicRenderingFeatures VkPhysicalDeviceDynamicRenderingFeaturesKHR; +VKAPI_ATTR void VKAPI_CALL vkCmdSetRenderingAttachmentLocations( + VkCommandBuffer commandBuffer, + const VkRenderingAttachmentLocationInfo* pLocationInfo); -typedef VkCommandBufferInheritanceRenderingInfo VkCommandBufferInheritanceRenderingInfoKHR; +VKAPI_ATTR void VKAPI_CALL vkCmdSetRenderingInputAttachmentIndices( + VkCommandBuffer commandBuffer, + const VkRenderingInputAttachmentIndexInfo* pInputAttachmentIndexInfo); -typedef struct 
VkRenderingFragmentShadingRateAttachmentInfoKHR { - VkStructureType sType; - const void* pNext; - VkImageView imageView; - VkImageLayout imageLayout; - VkExtent2D shadingRateAttachmentTexelSize; -} VkRenderingFragmentShadingRateAttachmentInfoKHR; +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets2( + VkCommandBuffer commandBuffer, + const VkBindDescriptorSetsInfo* pBindDescriptorSetsInfo); -typedef struct VkRenderingFragmentDensityMapAttachmentInfoEXT { - VkStructureType sType; - const void* pNext; - VkImageView imageView; - VkImageLayout imageLayout; -} VkRenderingFragmentDensityMapAttachmentInfoEXT; +VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants2( + VkCommandBuffer commandBuffer, + const VkPushConstantsInfo* pPushConstantsInfo); -typedef struct VkAttachmentSampleCountInfoAMD { - VkStructureType sType; - const void* pNext; - uint32_t colorAttachmentCount; - const VkSampleCountFlagBits* pColorAttachmentSamples; - VkSampleCountFlagBits depthStencilAttachmentSamples; -} VkAttachmentSampleCountInfoAMD; +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSet2( + VkCommandBuffer commandBuffer, + const VkPushDescriptorSetInfo* pPushDescriptorSetInfo); -typedef VkAttachmentSampleCountInfoAMD VkAttachmentSampleCountInfoNV; +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplate2( + VkCommandBuffer commandBuffer, + const VkPushDescriptorSetWithTemplateInfo* pPushDescriptorSetWithTemplateInfo); -typedef struct VkMultiviewPerViewAttributesInfoNVX { - VkStructureType sType; - const void* pNext; - VkBool32 perViewAttributes; - VkBool32 perViewAttributesPositionXOnly; -} VkMultiviewPerViewAttributesInfoNVX; +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToImage( + VkDevice device, + const VkCopyMemoryToImageInfo* pCopyMemoryToImageInfo); -typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderingKHR)(VkCommandBuffer commandBuffer, const VkRenderingInfo* pRenderingInfo); -typedef void (VKAPI_PTR *PFN_vkCmdEndRenderingKHR)(VkCommandBuffer commandBuffer); +VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToMemory( + VkDevice device, + const VkCopyImageToMemoryInfo* pCopyImageToMemoryInfo); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderingKHR( - VkCommandBuffer commandBuffer, - const VkRenderingInfo* pRenderingInfo); +VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToImage( + VkDevice device, + const VkCopyImageToImageInfo* pCopyImageToImageInfo); -VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderingKHR( - VkCommandBuffer commandBuffer); +VKAPI_ATTR VkResult VKAPI_CALL vkTransitionImageLayout( + VkDevice device, + uint32_t transitionCount, + const VkHostImageLayoutTransitionInfo* pTransitions); #endif -#define VK_KHR_multiview 1 -#define VK_KHR_MULTIVIEW_SPEC_VERSION 1 -#define VK_KHR_MULTIVIEW_EXTENSION_NAME "VK_KHR_multiview" -typedef VkRenderPassMultiviewCreateInfo VkRenderPassMultiviewCreateInfoKHR; - -typedef VkPhysicalDeviceMultiviewFeatures VkPhysicalDeviceMultiviewFeaturesKHR; - -typedef VkPhysicalDeviceMultiviewProperties VkPhysicalDeviceMultiviewPropertiesKHR; +// VK_KHR_surface is a preprocessor guard. Do not pass it to API calls. 
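/*
 * Editorial note, not part of the generated header: a minimal sketch of how the
 * Vulkan 1.4 host image copy entry points declared above (vkTransitionImageLayout,
 * vkCopyMemoryToImage) fit together. It assumes a VkDevice `device` and a VkImage
 * `image` created elsewhere with VK_IMAGE_USAGE_HOST_TRANSFER_BIT on a device with
 * the hostImageCopy feature enabled; `pixels`, `width` and `height` are placeholder
 * inputs supplied by the caller. The sType values are the core (non-EXT) names
 * promoted into Vulkan 1.4.
 */
static VkResult upload_pixels_host_copy(VkDevice device, VkImage image,
                                        const void* pixels,
                                        uint32_t width, uint32_t height)
{
    /* Move the image into a layout the implementation lists in
     * VkPhysicalDeviceHostImageCopyProperties::pCopyDstLayouts;
     * VK_IMAGE_LAYOUT_GENERAL is used here for simplicity. */
    VkImageSubresourceRange range = {
        .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
        .baseMipLevel = 0, .levelCount = 1,
        .baseArrayLayer = 0, .layerCount = 1,
    };
    VkHostImageLayoutTransitionInfo transition = {
        .sType = VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO,
        .image = image,
        .oldLayout = VK_IMAGE_LAYOUT_UNDEFINED,
        .newLayout = VK_IMAGE_LAYOUT_GENERAL,
        .subresourceRange = range,
    };
    VkResult result = vkTransitionImageLayout(device, 1, &transition);
    if (result != VK_SUCCESS) return result;

    /* Copy tightly packed host memory straight into the image, no staging buffer. */
    VkMemoryToImageCopy region = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY,
        .pHostPointer = pixels,
        .memoryRowLength = 0,    /* 0 = tightly packed rows */
        .memoryImageHeight = 0,
        .imageSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 },
        .imageOffset = { 0, 0, 0 },
        .imageExtent = { width, height, 1 },
    };
    VkCopyMemoryToImageInfo copy_info = {
        .sType = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO,
        .dstImage = image,
        .dstImageLayout = VK_IMAGE_LAYOUT_GENERAL,
        .regionCount = 1,
        .pRegions = &region,
    };
    return vkCopyMemoryToImage(device, &copy_info);
}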
+#define VK_KHR_surface 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR) +#define VK_KHR_SURFACE_SPEC_VERSION 25 +#define VK_KHR_SURFACE_EXTENSION_NAME "VK_KHR_surface" +typedef enum VkPresentModeKHR { + VK_PRESENT_MODE_IMMEDIATE_KHR = 0, + VK_PRESENT_MODE_MAILBOX_KHR = 1, + VK_PRESENT_MODE_FIFO_KHR = 2, + VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3, + VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000, + VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001, + VK_PRESENT_MODE_FIFO_LATEST_READY_EXT = 1000361000, + VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPresentModeKHR; +typedef enum VkColorSpaceKHR { + VK_COLOR_SPACE_SRGB_NONLINEAR_KHR = 0, + VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT = 1000104001, + VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT = 1000104002, + VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT = 1000104003, + VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT = 1000104004, + VK_COLOR_SPACE_BT709_LINEAR_EXT = 1000104005, + VK_COLOR_SPACE_BT709_NONLINEAR_EXT = 1000104006, + VK_COLOR_SPACE_BT2020_LINEAR_EXT = 1000104007, + VK_COLOR_SPACE_HDR10_ST2084_EXT = 1000104008, + // VK_COLOR_SPACE_DOLBYVISION_EXT is deprecated, but no reason was given in the API XML + VK_COLOR_SPACE_DOLBYVISION_EXT = 1000104009, + VK_COLOR_SPACE_HDR10_HLG_EXT = 1000104010, + VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT = 1000104011, + VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT = 1000104012, + VK_COLOR_SPACE_PASS_THROUGH_EXT = 1000104013, + VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT = 1000104014, + VK_COLOR_SPACE_DISPLAY_NATIVE_AMD = 1000213000, + // VK_COLORSPACE_SRGB_NONLINEAR_KHR is a deprecated alias + VK_COLORSPACE_SRGB_NONLINEAR_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, + // VK_COLOR_SPACE_DCI_P3_LINEAR_EXT is a deprecated alias + VK_COLOR_SPACE_DCI_P3_LINEAR_EXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT, + VK_COLOR_SPACE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkColorSpaceKHR; -#define VK_KHR_get_physical_device_properties2 1 -#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION 2 -#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_physical_device_properties2" -typedef VkPhysicalDeviceFeatures2 VkPhysicalDeviceFeatures2KHR; - -typedef VkPhysicalDeviceProperties2 VkPhysicalDeviceProperties2KHR; - -typedef VkFormatProperties2 VkFormatProperties2KHR; - -typedef VkImageFormatProperties2 VkImageFormatProperties2KHR; - -typedef VkPhysicalDeviceImageFormatInfo2 VkPhysicalDeviceImageFormatInfo2KHR; - -typedef VkQueueFamilyProperties2 VkQueueFamilyProperties2KHR; - -typedef VkPhysicalDeviceMemoryProperties2 VkPhysicalDeviceMemoryProperties2KHR; +typedef enum VkSurfaceTransformFlagBitsKHR { + VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR = 0x00000001, + VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR = 0x00000002, + VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR = 0x00000004, + VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR = 0x00000008, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR = 0x00000010, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR = 0x00000020, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR = 0x00000040, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR = 0x00000080, + VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR = 0x00000100, + VK_SURFACE_TRANSFORM_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkSurfaceTransformFlagBitsKHR; -typedef VkSparseImageFormatProperties2 VkSparseImageFormatProperties2KHR; +typedef enum VkCompositeAlphaFlagBitsKHR { + VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, + VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR = 0x00000002, + VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR = 0x00000004, + 
VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR = 0x00000008, + VK_COMPOSITE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkCompositeAlphaFlagBitsKHR; +typedef VkFlags VkCompositeAlphaFlagsKHR; +typedef VkFlags VkSurfaceTransformFlagsKHR; +typedef struct VkSurfaceCapabilitiesKHR { + uint32_t minImageCount; + uint32_t maxImageCount; + VkExtent2D currentExtent; + VkExtent2D minImageExtent; + VkExtent2D maxImageExtent; + uint32_t maxImageArrayLayers; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkSurfaceTransformFlagBitsKHR currentTransform; + VkCompositeAlphaFlagsKHR supportedCompositeAlpha; + VkImageUsageFlags supportedUsageFlags; +} VkSurfaceCapabilitiesKHR; -typedef VkPhysicalDeviceSparseImageFormatInfo2 VkPhysicalDeviceSparseImageFormatInfo2KHR; +typedef struct VkSurfaceFormatKHR { + VkFormat format; + VkColorSpaceKHR colorSpace; +} VkSurfaceFormatKHR; -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2KHR)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkDestroySurfaceKHR)(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2KHR( - VkPhysicalDevice physicalDevice, - VkPhysicalDeviceFeatures2* pFeatures); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2KHR( - VkPhysicalDevice physicalDevice, - VkPhysicalDeviceProperties2* pProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2KHR( - VkPhysicalDevice physicalDevice, - VkFormat format, - VkFormatProperties2* pFormatProperties); +VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR( + VkInstance instance, 
+ VkSurfaceKHR surface, + const VkAllocationCallbacks* pAllocator); -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2KHR( +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, - VkImageFormatProperties2* pImageFormatProperties); + uint32_t queueFamilyIndex, + VkSurfaceKHR surface, + VkBool32* pSupported); -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2KHR( +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, - uint32_t* pQueueFamilyPropertyCount, - VkQueueFamilyProperties2* pQueueFamilyProperties); + VkSurfaceKHR surface, + VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2KHR( +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceMemoryProperties2* pMemoryProperties); + VkSurfaceKHR surface, + uint32_t* pSurfaceFormatCount, + VkSurfaceFormatKHR* pSurfaceFormats); -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2KHR( +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, - uint32_t* pPropertyCount, - VkSparseImageFormatProperties2* pProperties); + VkSurfaceKHR surface, + uint32_t* pPresentModeCount, + VkPresentModeKHR* pPresentModes); #endif -#define VK_KHR_device_group 1 -#define VK_KHR_DEVICE_GROUP_SPEC_VERSION 4 -#define VK_KHR_DEVICE_GROUP_EXTENSION_NAME "VK_KHR_device_group" -typedef VkPeerMemoryFeatureFlags VkPeerMemoryFeatureFlagsKHR; - -typedef VkPeerMemoryFeatureFlagBits VkPeerMemoryFeatureFlagBitsKHR; +// VK_KHR_swapchain is a preprocessor guard. Do not pass it to API calls. 
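/*
 * Editorial note, not part of the generated header: a sketch of the usual
 * VK_KHR_surface query sequence declared above. `physicalDevice`,
 * `queueFamilyIndex` and `surface` are assumed to have been obtained elsewhere
 * (vkEnumeratePhysicalDevices plus a platform surface extension), and error
 * handling is abbreviated.
 */
#include <stdlib.h>
static VkResult query_surface_info(VkPhysicalDevice physicalDevice,
                                   uint32_t queueFamilyIndex,
                                   VkSurfaceKHR surface)
{
    /* 1. Can this queue family present to the surface at all? */
    VkBool32 supported = VK_FALSE;
    VkResult result = vkGetPhysicalDeviceSurfaceSupportKHR(
        physicalDevice, queueFamilyIndex, surface, &supported);
    if (result != VK_SUCCESS) return result;
    if (supported != VK_TRUE) return VK_ERROR_INITIALIZATION_FAILED;

    /* 2. Capabilities: image counts, extents, supported transforms and alpha. */
    VkSurfaceCapabilitiesKHR caps;
    result = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, &caps);
    if (result != VK_SUCCESS) return result;

    /* 3. Formats and present modes follow the standard two-call enumeration
     *    pattern: query the count, then fill a caller-allocated array.
     *    (Real code should check the malloc results.) */
    uint32_t formatCount = 0;
    vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, &formatCount, NULL);
    VkSurfaceFormatKHR* formats = malloc(formatCount * sizeof(*formats));
    result = vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface,
                                                  &formatCount, formats);

    uint32_t modeCount = 0;
    vkGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, &modeCount, NULL);
    VkPresentModeKHR* modes = malloc(modeCount * sizeof(*modes));
    vkGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, &modeCount, modes);

    /* ... choose a format and present mode here; VK_PRESENT_MODE_FIFO_KHR is
     *     the one mode the specification guarantees to be available ... */
    free(formats);
    free(modes);
    return result;
}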
+#define VK_KHR_swapchain 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSwapchainKHR) +#define VK_KHR_SWAPCHAIN_SPEC_VERSION 70 +#define VK_KHR_SWAPCHAIN_EXTENSION_NAME "VK_KHR_swapchain" -typedef VkMemoryAllocateFlags VkMemoryAllocateFlagsKHR; +typedef enum VkSwapchainCreateFlagBitsKHR { + VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = 0x00000001, + VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR = 0x00000002, + VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR = 0x00000004, + VK_SWAPCHAIN_CREATE_DEFERRED_MEMORY_ALLOCATION_BIT_EXT = 0x00000008, + VK_SWAPCHAIN_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkSwapchainCreateFlagBitsKHR; +typedef VkFlags VkSwapchainCreateFlagsKHR; -typedef VkMemoryAllocateFlagBits VkMemoryAllocateFlagBitsKHR; +typedef enum VkDeviceGroupPresentModeFlagBitsKHR { + VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR = 0x00000001, + VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR = 0x00000002, + VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR = 0x00000004, + VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR = 0x00000008, + VK_DEVICE_GROUP_PRESENT_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkDeviceGroupPresentModeFlagBitsKHR; +typedef VkFlags VkDeviceGroupPresentModeFlagsKHR; +typedef struct VkSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainCreateFlagsKHR flags; + VkSurfaceKHR surface; + uint32_t minImageCount; + VkFormat imageFormat; + VkColorSpaceKHR imageColorSpace; + VkExtent2D imageExtent; + uint32_t imageArrayLayers; + VkImageUsageFlags imageUsage; + VkSharingMode imageSharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + VkSurfaceTransformFlagBitsKHR preTransform; + VkCompositeAlphaFlagBitsKHR compositeAlpha; + VkPresentModeKHR presentMode; + VkBool32 clipped; + VkSwapchainKHR oldSwapchain; +} VkSwapchainCreateInfoKHR; -typedef VkMemoryAllocateFlagsInfo VkMemoryAllocateFlagsInfoKHR; +typedef struct VkPresentInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + uint32_t swapchainCount; + const VkSwapchainKHR* pSwapchains; + const uint32_t* pImageIndices; + VkResult* pResults; +} VkPresentInfoKHR; -typedef VkDeviceGroupRenderPassBeginInfo VkDeviceGroupRenderPassBeginInfoKHR; +typedef struct VkImageSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; +} VkImageSwapchainCreateInfoKHR; -typedef VkDeviceGroupCommandBufferBeginInfo VkDeviceGroupCommandBufferBeginInfoKHR; +typedef struct VkBindImageMemorySwapchainInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; + uint32_t imageIndex; +} VkBindImageMemorySwapchainInfoKHR; -typedef VkDeviceGroupSubmitInfo VkDeviceGroupSubmitInfoKHR; +typedef struct VkAcquireNextImageInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; + uint64_t timeout; + VkSemaphore semaphore; + VkFence fence; + uint32_t deviceMask; +} VkAcquireNextImageInfoKHR; -typedef VkDeviceGroupBindSparseInfo VkDeviceGroupBindSparseInfoKHR; +typedef struct VkDeviceGroupPresentCapabilitiesKHR { + VkStructureType sType; + void* pNext; + uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE]; + VkDeviceGroupPresentModeFlagsKHR modes; +} VkDeviceGroupPresentCapabilitiesKHR; -typedef VkBindBufferMemoryDeviceGroupInfo VkBindBufferMemoryDeviceGroupInfoKHR; +typedef struct VkDeviceGroupPresentInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const uint32_t* pDeviceMasks; + VkDeviceGroupPresentModeFlagBitsKHR mode; 
+} VkDeviceGroupPresentInfoKHR; -typedef VkBindImageMemoryDeviceGroupInfo VkBindImageMemoryDeviceGroupInfoKHR; +typedef struct VkDeviceGroupSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceGroupPresentModeFlagsKHR modes; +} VkDeviceGroupSwapchainCreateInfoKHR; -typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); -typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMaskKHR)(VkCommandBuffer commandBuffer, uint32_t deviceMask); -typedef void (VKAPI_PTR *PFN_vkCmdDispatchBaseKHR)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); +typedef VkResult (VKAPI_PTR *PFN_vkCreateSwapchainKHR)(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain); +typedef void (VKAPI_PTR *PFN_vkDestroySwapchainKHR)(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainImagesKHR)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImageKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex); +typedef VkResult (VKAPI_PTR *PFN_vkQueuePresentKHR)(VkQueue queue, const VkPresentInfoKHR* pPresentInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupPresentCapabilitiesKHR)(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModesKHR)(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDevicePresentRectanglesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImage2KHR)(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeaturesKHR( +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR( VkDevice device, - uint32_t heapIndex, - uint32_t localDeviceIndex, - uint32_t remoteDeviceIndex, - VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); + const VkSwapchainCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSwapchainKHR* pSwapchain); -VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMaskKHR( - VkCommandBuffer commandBuffer, - uint32_t deviceMask); +VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR( + VkDevice device, + VkSwapchainKHR swapchain, + const VkAllocationCallbacks* pAllocator); -VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBaseKHR( - VkCommandBuffer commandBuffer, - uint32_t baseGroupX, - uint32_t baseGroupY, - uint32_t baseGroupZ, - uint32_t groupCountX, - uint32_t groupCountY, - uint32_t groupCountZ); -#endif +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint32_t* pSwapchainImageCount, + VkImage* pSwapchainImages); +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint64_t timeout, + VkSemaphore semaphore, + VkFence fence, + uint32_t* pImageIndex); -#define VK_KHR_shader_draw_parameters 1 -#define 
VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION 1 -#define VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME "VK_KHR_shader_draw_parameters" +VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR( + VkQueue queue, + const VkPresentInfoKHR* pPresentInfo); +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHR( + VkDevice device, + VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); -#define VK_KHR_maintenance1 1 -#define VK_KHR_MAINTENANCE_1_SPEC_VERSION 2 -#define VK_KHR_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_maintenance1" -#define VK_KHR_MAINTENANCE1_SPEC_VERSION VK_KHR_MAINTENANCE_1_SPEC_VERSION -#define VK_KHR_MAINTENANCE1_EXTENSION_NAME VK_KHR_MAINTENANCE_1_EXTENSION_NAME -typedef VkCommandPoolTrimFlags VkCommandPoolTrimFlagsKHR; +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR( + VkDevice device, + VkSurfaceKHR surface, + VkDeviceGroupPresentModeFlagsKHR* pModes); -typedef void (VKAPI_PTR *PFN_vkTrimCommandPoolKHR)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags); +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pRectCount, + VkRect2D* pRects); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkTrimCommandPoolKHR( +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHR( VkDevice device, - VkCommandPool commandPool, - VkCommandPoolTrimFlags flags); + const VkAcquireNextImageInfoKHR* pAcquireInfo, + uint32_t* pImageIndex); #endif -#define VK_KHR_device_group_creation 1 -#define VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION 1 -#define VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME "VK_KHR_device_group_creation" -#define VK_MAX_DEVICE_GROUP_SIZE_KHR VK_MAX_DEVICE_GROUP_SIZE -typedef VkPhysicalDeviceGroupProperties VkPhysicalDeviceGroupPropertiesKHR; - -typedef VkDeviceGroupDeviceCreateInfo VkDeviceGroupDeviceCreateInfoKHR; - -typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroupsKHR)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); +// VK_KHR_display is a preprocessor guard. Do not pass it to API calls. 
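Not part of the generated header: the VK_KHR_swapchain entry points declared above are typically driven in a per-frame acquire/submit/present loop. A minimal sketch, assuming the device, queue, swapchain and semaphores were created elsewhere; handling of VK_ERROR_OUT_OF_DATE_KHR and the actual rendering submission are omitted, and the helper name is illustrative.

/* Illustrative sketch only -- not part of the generated header. */
#include <stdint.h>
#include <vulkan/vulkan.h>

/* One frame: acquire a swapchain image, (submission omitted), then present it. */
static VkResult present_one_frame(VkDevice device, VkQueue queue, VkSwapchainKHR swapchain,
                                  VkSemaphore imageAvailable, VkSemaphore renderFinished)
{
    uint32_t imageIndex = 0;
    VkResult res = vkAcquireNextImageKHR(device, swapchain, UINT64_MAX,
                                         imageAvailable, VK_NULL_HANDLE, &imageIndex);
    if (res != VK_SUCCESS && res != VK_SUBOPTIMAL_KHR)
        return res;

    /* ... record and submit command buffers that wait on imageAvailable
       and signal renderFinished ... */

    VkPresentInfoKHR presentInfo = {
        .sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
        .waitSemaphoreCount = 1,
        .pWaitSemaphores = &renderFinished,
        .swapchainCount = 1,
        .pSwapchains = &swapchain,
        .pImageIndices = &imageIndex,
    };
    return vkQueuePresentKHR(queue, &presentInfo);
}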
+#define VK_KHR_display 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR) +#define VK_KHR_DISPLAY_SPEC_VERSION 23 +#define VK_KHR_DISPLAY_EXTENSION_NAME "VK_KHR_display" +typedef VkFlags VkDisplayModeCreateFlagsKHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroupsKHR( - VkInstance instance, - uint32_t* pPhysicalDeviceGroupCount, - VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); -#endif +typedef enum VkDisplayPlaneAlphaFlagBitsKHR { + VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, + VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR = 0x00000002, + VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR = 0x00000004, + VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR = 0x00000008, + VK_DISPLAY_PLANE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkDisplayPlaneAlphaFlagBitsKHR; +typedef VkFlags VkDisplayPlaneAlphaFlagsKHR; +typedef VkFlags VkDisplaySurfaceCreateFlagsKHR; +typedef struct VkDisplayModeParametersKHR { + VkExtent2D visibleRegion; + uint32_t refreshRate; +} VkDisplayModeParametersKHR; +typedef struct VkDisplayModeCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDisplayModeCreateFlagsKHR flags; + VkDisplayModeParametersKHR parameters; +} VkDisplayModeCreateInfoKHR; -#define VK_KHR_external_memory_capabilities 1 -#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_memory_capabilities" -#define VK_LUID_SIZE_KHR VK_LUID_SIZE -typedef VkExternalMemoryHandleTypeFlags VkExternalMemoryHandleTypeFlagsKHR; +typedef struct VkDisplayModePropertiesKHR { + VkDisplayModeKHR displayMode; + VkDisplayModeParametersKHR parameters; +} VkDisplayModePropertiesKHR; -typedef VkExternalMemoryHandleTypeFlagBits VkExternalMemoryHandleTypeFlagBitsKHR; +typedef struct VkDisplayPlaneCapabilitiesKHR { + VkDisplayPlaneAlphaFlagsKHR supportedAlpha; + VkOffset2D minSrcPosition; + VkOffset2D maxSrcPosition; + VkExtent2D minSrcExtent; + VkExtent2D maxSrcExtent; + VkOffset2D minDstPosition; + VkOffset2D maxDstPosition; + VkExtent2D minDstExtent; + VkExtent2D maxDstExtent; +} VkDisplayPlaneCapabilitiesKHR; -typedef VkExternalMemoryFeatureFlags VkExternalMemoryFeatureFlagsKHR; +typedef struct VkDisplayPlanePropertiesKHR { + VkDisplayKHR currentDisplay; + uint32_t currentStackIndex; +} VkDisplayPlanePropertiesKHR; -typedef VkExternalMemoryFeatureFlagBits VkExternalMemoryFeatureFlagBitsKHR; +typedef struct VkDisplayPropertiesKHR { + VkDisplayKHR display; + const char* displayName; + VkExtent2D physicalDimensions; + VkExtent2D physicalResolution; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkBool32 planeReorderPossible; + VkBool32 persistentContent; +} VkDisplayPropertiesKHR; -typedef VkExternalMemoryProperties VkExternalMemoryPropertiesKHR; +typedef struct VkDisplaySurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDisplaySurfaceCreateFlagsKHR flags; + VkDisplayModeKHR displayMode; + uint32_t planeIndex; + uint32_t planeStackIndex; + VkSurfaceTransformFlagBitsKHR transform; + float globalAlpha; + VkDisplayPlaneAlphaFlagBitsKHR alphaMode; + VkExtent2D imageExtent; +} VkDisplaySurfaceCreateInfoKHR; -typedef VkPhysicalDeviceExternalImageFormatInfo VkPhysicalDeviceExternalImageFormatInfoKHR; +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR 
*PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneSupportedDisplaysKHR)(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModePropertiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayModeKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayPlaneSurfaceKHR)(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); -typedef VkExternalImageFormatProperties VkExternalImageFormatPropertiesKHR; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPropertiesKHR* pProperties); -typedef VkPhysicalDeviceExternalBufferInfo VkPhysicalDeviceExternalBufferInfoKHR; +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPlanePropertiesKHR* pProperties); -typedef VkExternalBufferProperties VkExternalBufferPropertiesKHR; +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR( + VkPhysicalDevice physicalDevice, + uint32_t planeIndex, + uint32_t* pDisplayCount, + VkDisplayKHR* pDisplays); -typedef VkPhysicalDeviceIDProperties VkPhysicalDeviceIDPropertiesKHR; +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t* pPropertyCount, + VkDisplayModePropertiesKHR* pProperties); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties); +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + const VkDisplayModeCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDisplayModeKHR* pMode); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferPropertiesKHR( +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, - VkExternalBufferProperties* pExternalBufferProperties); + VkDisplayModeKHR mode, + uint32_t planeIndex, + VkDisplayPlaneCapabilitiesKHR* pCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR( + VkInstance instance, + const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); #endif -#define VK_KHR_external_memory 1 -#define VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME "VK_KHR_external_memory" -#define VK_QUEUE_FAMILY_EXTERNAL_KHR VK_QUEUE_FAMILY_EXTERNAL -typedef 
VkExternalMemoryImageCreateInfo VkExternalMemoryImageCreateInfoKHR; +// VK_KHR_display_swapchain is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_display_swapchain 1 +#define VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION 10 +#define VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME "VK_KHR_display_swapchain" +typedef struct VkDisplayPresentInfoKHR { + VkStructureType sType; + const void* pNext; + VkRect2D srcRect; + VkRect2D dstRect; + VkBool32 persistent; +} VkDisplayPresentInfoKHR; -typedef VkExternalMemoryBufferCreateInfo VkExternalMemoryBufferCreateInfoKHR; +typedef VkResult (VKAPI_PTR *PFN_vkCreateSharedSwapchainsKHR)(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains); -typedef VkExportMemoryAllocateInfo VkExportMemoryAllocateInfoKHR; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSharedSwapchainsKHR( + VkDevice device, + uint32_t swapchainCount, + const VkSwapchainCreateInfoKHR* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkSwapchainKHR* pSwapchains); +#endif +// VK_KHR_sampler_mirror_clamp_to_edge is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_sampler_mirror_clamp_to_edge 1 +#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION 3 +#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME "VK_KHR_sampler_mirror_clamp_to_edge" -#define VK_KHR_external_memory_fd 1 -#define VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME "VK_KHR_external_memory_fd" -typedef struct VkImportMemoryFdInfoKHR { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagBits handleType; - int fd; -} VkImportMemoryFdInfoKHR; -typedef struct VkMemoryFdPropertiesKHR { +// VK_KHR_video_queue is a preprocessor guard. Do not pass it to API calls. 
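Not part of the generated header: the VK_KHR_display queries declared above use the same two-call enumeration idiom as the surface queries. A sketch that lists the displays reported for a physical device, assuming the extension was enabled on the instance; the helper name is illustrative.

/* Illustrative sketch only -- not part of the generated header. */
#include <stdio.h>
#include <stdlib.h>
#include <vulkan/vulkan.h>

static void list_displays(VkPhysicalDevice gpu)
{
    uint32_t count = 0;
    if (vkGetPhysicalDeviceDisplayPropertiesKHR(gpu, &count, NULL) != VK_SUCCESS || count == 0)
        return;

    VkDisplayPropertiesKHR* props = (VkDisplayPropertiesKHR*)calloc(count, sizeof(*props));
    if (!props)
        return;

    if (vkGetPhysicalDeviceDisplayPropertiesKHR(gpu, &count, props) == VK_SUCCESS) {
        for (uint32_t i = 0; i < count; ++i)
            printf("display %u: %s (%ux%u)\n", i,
                   props[i].displayName ? props[i].displayName : "<unnamed>",
                   props[i].physicalResolution.width,
                   props[i].physicalResolution.height);
    }
    free(props);
}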
+#define VK_KHR_video_queue 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionKHR) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionParametersKHR) +#define VK_KHR_VIDEO_QUEUE_SPEC_VERSION 8 +#define VK_KHR_VIDEO_QUEUE_EXTENSION_NAME "VK_KHR_video_queue" + +typedef enum VkQueryResultStatusKHR { + VK_QUERY_RESULT_STATUS_ERROR_KHR = -1, + VK_QUERY_RESULT_STATUS_NOT_READY_KHR = 0, + VK_QUERY_RESULT_STATUS_COMPLETE_KHR = 1, + VK_QUERY_RESULT_STATUS_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_KHR = -1000299000, + VK_QUERY_RESULT_STATUS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkQueryResultStatusKHR; + +typedef enum VkVideoCodecOperationFlagBitsKHR { + VK_VIDEO_CODEC_OPERATION_NONE_KHR = 0, + VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_KHR = 0x00010000, + VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_KHR = 0x00020000, + VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR = 0x00000001, + VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR = 0x00000002, + VK_VIDEO_CODEC_OPERATION_DECODE_AV1_BIT_KHR = 0x00000004, + VK_VIDEO_CODEC_OPERATION_ENCODE_AV1_BIT_KHR = 0x00040000, + VK_VIDEO_CODEC_OPERATION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoCodecOperationFlagBitsKHR; +typedef VkFlags VkVideoCodecOperationFlagsKHR; + +typedef enum VkVideoChromaSubsamplingFlagBitsKHR { + VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_KHR = 0, + VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR = 0x00000001, + VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR = 0x00000002, + VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR = 0x00000004, + VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR = 0x00000008, + VK_VIDEO_CHROMA_SUBSAMPLING_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoChromaSubsamplingFlagBitsKHR; +typedef VkFlags VkVideoChromaSubsamplingFlagsKHR; + +typedef enum VkVideoComponentBitDepthFlagBitsKHR { + VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR = 0, + VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR = 0x00000001, + VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR = 0x00000004, + VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR = 0x00000010, + VK_VIDEO_COMPONENT_BIT_DEPTH_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoComponentBitDepthFlagBitsKHR; +typedef VkFlags VkVideoComponentBitDepthFlagsKHR; + +typedef enum VkVideoCapabilityFlagBitsKHR { + VK_VIDEO_CAPABILITY_PROTECTED_CONTENT_BIT_KHR = 0x00000001, + VK_VIDEO_CAPABILITY_SEPARATE_REFERENCE_IMAGES_BIT_KHR = 0x00000002, + VK_VIDEO_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoCapabilityFlagBitsKHR; +typedef VkFlags VkVideoCapabilityFlagsKHR; + +typedef enum VkVideoSessionCreateFlagBitsKHR { + VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR = 0x00000001, + VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_PARAMETER_OPTIMIZATIONS_BIT_KHR = 0x00000002, + VK_VIDEO_SESSION_CREATE_INLINE_QUERIES_BIT_KHR = 0x00000004, + VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR = 0x00000008, + VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_EMPHASIS_MAP_BIT_KHR = 0x00000010, + VK_VIDEO_SESSION_CREATE_INLINE_SESSION_PARAMETERS_BIT_KHR = 0x00000020, + VK_VIDEO_SESSION_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoSessionCreateFlagBitsKHR; +typedef VkFlags VkVideoSessionCreateFlagsKHR; + +typedef enum VkVideoSessionParametersCreateFlagBitsKHR { + VK_VIDEO_SESSION_PARAMETERS_CREATE_QUANTIZATION_MAP_COMPATIBLE_BIT_KHR = 0x00000001, + VK_VIDEO_SESSION_PARAMETERS_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoSessionParametersCreateFlagBitsKHR; +typedef VkFlags VkVideoSessionParametersCreateFlagsKHR; +typedef VkFlags VkVideoBeginCodingFlagsKHR; +typedef VkFlags VkVideoEndCodingFlagsKHR; + +typedef enum VkVideoCodingControlFlagBitsKHR { + 
VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR = 0x00000001, + VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_BIT_KHR = 0x00000002, + VK_VIDEO_CODING_CONTROL_ENCODE_QUALITY_LEVEL_BIT_KHR = 0x00000004, + VK_VIDEO_CODING_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoCodingControlFlagBitsKHR; +typedef VkFlags VkVideoCodingControlFlagsKHR; +typedef struct VkQueueFamilyQueryResultStatusPropertiesKHR { VkStructureType sType; void* pNext; - uint32_t memoryTypeBits; -} VkMemoryFdPropertiesKHR; - -typedef struct VkMemoryGetFdInfoKHR { - VkStructureType sType; - const void* pNext; - VkDeviceMemory memory; - VkExternalMemoryHandleTypeFlagBits handleType; -} VkMemoryGetFdInfoKHR; - -typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdKHR)(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd); -typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdPropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdKHR( - VkDevice device, - const VkMemoryGetFdInfoKHR* pGetFdInfo, - int* pFd); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR( - VkDevice device, - VkExternalMemoryHandleTypeFlagBits handleType, - int fd, - VkMemoryFdPropertiesKHR* pMemoryFdProperties); -#endif + VkBool32 queryResultStatusSupport; +} VkQueueFamilyQueryResultStatusPropertiesKHR; +typedef struct VkQueueFamilyVideoPropertiesKHR { + VkStructureType sType; + void* pNext; + VkVideoCodecOperationFlagsKHR videoCodecOperations; +} VkQueueFamilyVideoPropertiesKHR; -#define VK_KHR_external_semaphore_capabilities 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_semaphore_capabilities" -typedef VkExternalSemaphoreHandleTypeFlags VkExternalSemaphoreHandleTypeFlagsKHR; +typedef struct VkVideoProfileInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoCodecOperationFlagBitsKHR videoCodecOperation; + VkVideoChromaSubsamplingFlagsKHR chromaSubsampling; + VkVideoComponentBitDepthFlagsKHR lumaBitDepth; + VkVideoComponentBitDepthFlagsKHR chromaBitDepth; +} VkVideoProfileInfoKHR; -typedef VkExternalSemaphoreHandleTypeFlagBits VkExternalSemaphoreHandleTypeFlagBitsKHR; +typedef struct VkVideoProfileListInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t profileCount; + const VkVideoProfileInfoKHR* pProfiles; +} VkVideoProfileListInfoKHR; -typedef VkExternalSemaphoreFeatureFlags VkExternalSemaphoreFeatureFlagsKHR; +typedef struct VkVideoCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoCapabilityFlagsKHR flags; + VkDeviceSize minBitstreamBufferOffsetAlignment; + VkDeviceSize minBitstreamBufferSizeAlignment; + VkExtent2D pictureAccessGranularity; + VkExtent2D minCodedExtent; + VkExtent2D maxCodedExtent; + uint32_t maxDpbSlots; + uint32_t maxActiveReferencePictures; + VkExtensionProperties stdHeaderVersion; +} VkVideoCapabilitiesKHR; + +typedef struct VkPhysicalDeviceVideoFormatInfoKHR { + VkStructureType sType; + const void* pNext; + VkImageUsageFlags imageUsage; +} VkPhysicalDeviceVideoFormatInfoKHR; -typedef VkExternalSemaphoreFeatureFlagBits VkExternalSemaphoreFeatureFlagBitsKHR; +typedef struct VkVideoFormatPropertiesKHR { + VkStructureType sType; + void* pNext; + VkFormat format; + VkComponentMapping componentMapping; + VkImageCreateFlags imageCreateFlags; + VkImageType imageType; + VkImageTiling imageTiling; + VkImageUsageFlags imageUsageFlags; +} 
VkVideoFormatPropertiesKHR; -typedef VkPhysicalDeviceExternalSemaphoreInfo VkPhysicalDeviceExternalSemaphoreInfoKHR; +typedef struct VkVideoPictureResourceInfoKHR { + VkStructureType sType; + const void* pNext; + VkOffset2D codedOffset; + VkExtent2D codedExtent; + uint32_t baseArrayLayer; + VkImageView imageViewBinding; +} VkVideoPictureResourceInfoKHR; -typedef VkExternalSemaphoreProperties VkExternalSemaphorePropertiesKHR; +typedef struct VkVideoReferenceSlotInfoKHR { + VkStructureType sType; + const void* pNext; + int32_t slotIndex; + const VkVideoPictureResourceInfoKHR* pPictureResource; +} VkVideoReferenceSlotInfoKHR; -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties); +typedef struct VkVideoSessionMemoryRequirementsKHR { + VkStructureType sType; + void* pNext; + uint32_t memoryBindIndex; + VkMemoryRequirements memoryRequirements; +} VkVideoSessionMemoryRequirementsKHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, - VkExternalSemaphoreProperties* pExternalSemaphoreProperties); -#endif +typedef struct VkBindVideoSessionMemoryInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t memoryBindIndex; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + VkDeviceSize memorySize; +} VkBindVideoSessionMemoryInfoKHR; +typedef struct VkVideoSessionCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t queueFamilyIndex; + VkVideoSessionCreateFlagsKHR flags; + const VkVideoProfileInfoKHR* pVideoProfile; + VkFormat pictureFormat; + VkExtent2D maxCodedExtent; + VkFormat referencePictureFormat; + uint32_t maxDpbSlots; + uint32_t maxActiveReferencePictures; + const VkExtensionProperties* pStdHeaderVersion; +} VkVideoSessionCreateInfoKHR; + +typedef struct VkVideoSessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoSessionParametersCreateFlagsKHR flags; + VkVideoSessionParametersKHR videoSessionParametersTemplate; + VkVideoSessionKHR videoSession; +} VkVideoSessionParametersCreateInfoKHR; -#define VK_KHR_external_semaphore 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_KHR_external_semaphore" -typedef VkSemaphoreImportFlags VkSemaphoreImportFlagsKHR; +typedef struct VkVideoSessionParametersUpdateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t updateSequenceCount; +} VkVideoSessionParametersUpdateInfoKHR; -typedef VkSemaphoreImportFlagBits VkSemaphoreImportFlagBitsKHR; +typedef struct VkVideoBeginCodingInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoBeginCodingFlagsKHR flags; + VkVideoSessionKHR videoSession; + VkVideoSessionParametersKHR videoSessionParameters; + uint32_t referenceSlotCount; + const VkVideoReferenceSlotInfoKHR* pReferenceSlots; +} VkVideoBeginCodingInfoKHR; + +typedef struct VkVideoEndCodingInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEndCodingFlagsKHR flags; +} VkVideoEndCodingInfoKHR; -typedef VkExportSemaphoreCreateInfo VkExportSemaphoreCreateInfoKHR; +typedef struct VkVideoCodingControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoCodingControlFlagsKHR flags; +} VkVideoCodingControlInfoKHR; + +typedef VkResult (VKAPI_PTR 
*PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR)(VkPhysicalDevice physicalDevice, const VkVideoProfileInfoKHR* pVideoProfile, VkVideoCapabilitiesKHR* pCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoFormatInfoKHR* pVideoFormatInfo, uint32_t* pVideoFormatPropertyCount, VkVideoFormatPropertiesKHR* pVideoFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreateVideoSessionKHR)(VkDevice device, const VkVideoSessionCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionKHR* pVideoSession); +typedef void (VKAPI_PTR *PFN_vkDestroyVideoSessionKHR)(VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetVideoSessionMemoryRequirementsKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t* pMemoryRequirementsCount, VkVideoSessionMemoryRequirementsKHR* pMemoryRequirements); +typedef VkResult (VKAPI_PTR *PFN_vkBindVideoSessionMemoryKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t bindSessionMemoryInfoCount, const VkBindVideoSessionMemoryInfoKHR* pBindSessionMemoryInfos); +typedef VkResult (VKAPI_PTR *PFN_vkCreateVideoSessionParametersKHR)(VkDevice device, const VkVideoSessionParametersCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionParametersKHR* pVideoSessionParameters); +typedef VkResult (VKAPI_PTR *PFN_vkUpdateVideoSessionParametersKHR)(VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo); +typedef void (VKAPI_PTR *PFN_vkDestroyVideoSessionParametersKHR)(VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdBeginVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR* pBeginInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR* pEndCodingInfo); +typedef void (VKAPI_PTR *PFN_vkCmdControlVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR* pCodingControlInfo); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + const VkVideoProfileInfoKHR* pVideoProfile, + VkVideoCapabilitiesKHR* pCapabilities); +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoFormatPropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoFormatInfoKHR* pVideoFormatInfo, + uint32_t* pVideoFormatPropertyCount, + VkVideoFormatPropertiesKHR* pVideoFormatProperties); -#define VK_KHR_external_semaphore_fd 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME "VK_KHR_external_semaphore_fd" -typedef struct VkImportSemaphoreFdInfoKHR { - VkStructureType sType; - const void* pNext; - VkSemaphore semaphore; - VkSemaphoreImportFlags flags; - VkExternalSemaphoreHandleTypeFlagBits handleType; - int fd; -} VkImportSemaphoreFdInfoKHR; +VKAPI_ATTR VkResult VKAPI_CALL vkCreateVideoSessionKHR( + VkDevice device, + const VkVideoSessionCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkVideoSessionKHR* pVideoSession); -typedef struct VkSemaphoreGetFdInfoKHR { - VkStructureType sType; - const void* pNext; - VkSemaphore semaphore; - VkExternalSemaphoreHandleTypeFlagBits handleType; -} VkSemaphoreGetFdInfoKHR; +VKAPI_ATTR void 
VKAPI_CALL vkDestroyVideoSessionKHR( + VkDevice device, + VkVideoSessionKHR videoSession, + const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreFdKHR)(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); -typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreFdKHR)(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd); +VKAPI_ATTR VkResult VKAPI_CALL vkGetVideoSessionMemoryRequirementsKHR( + VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t* pMemoryRequirementsCount, + VkVideoSessionMemoryRequirementsKHR* pMemoryRequirements); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR( +VKAPI_ATTR VkResult VKAPI_CALL vkBindVideoSessionMemoryKHR( VkDevice device, - const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); + VkVideoSessionKHR videoSession, + uint32_t bindSessionMemoryInfoCount, + const VkBindVideoSessionMemoryInfoKHR* pBindSessionMemoryInfos); -VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR( +VKAPI_ATTR VkResult VKAPI_CALL vkCreateVideoSessionParametersKHR( VkDevice device, - const VkSemaphoreGetFdInfoKHR* pGetFdInfo, - int* pFd); -#endif + const VkVideoSessionParametersCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkVideoSessionParametersKHR* pVideoSessionParameters); +VKAPI_ATTR VkResult VKAPI_CALL vkUpdateVideoSessionParametersKHR( + VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo); -#define VK_KHR_push_descriptor 1 -#define VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION 2 -#define VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME "VK_KHR_push_descriptor" -typedef struct VkPhysicalDevicePushDescriptorPropertiesKHR { - VkStructureType sType; - void* pNext; - uint32_t maxPushDescriptors; -} VkPhysicalDevicePushDescriptorPropertiesKHR; +VKAPI_ATTR void VKAPI_CALL vkDestroyVideoSessionParametersKHR( + VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkAllocationCallbacks* pAllocator); -typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetKHR)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites); -typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplateKHR)(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData); +VKAPI_ATTR void VKAPI_CALL vkCmdBeginVideoCodingKHR( + VkCommandBuffer commandBuffer, + const VkVideoBeginCodingInfoKHR* pBeginInfo); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetKHR( +VKAPI_ATTR void VKAPI_CALL vkCmdEndVideoCodingKHR( VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipelineLayout layout, - uint32_t set, - uint32_t descriptorWriteCount, - const VkWriteDescriptorSet* pDescriptorWrites); + const VkVideoEndCodingInfoKHR* pEndCodingInfo); -VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplateKHR( +VKAPI_ATTR void VKAPI_CALL vkCmdControlVideoCodingKHR( VkCommandBuffer commandBuffer, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - VkPipelineLayout layout, - uint32_t set, - const void* pData); + const VkVideoCodingControlInfoKHR* pCodingControlInfo); #endif -#define VK_KHR_shader_float16_int8 1 -#define VK_KHR_SHADER_FLOAT16_INT8_SPEC_VERSION 1 -#define VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME "VK_KHR_shader_float16_int8" -typedef 
VkPhysicalDeviceShaderFloat16Int8Features VkPhysicalDeviceShaderFloat16Int8FeaturesKHR; - -typedef VkPhysicalDeviceShaderFloat16Int8Features VkPhysicalDeviceFloat16Int8FeaturesKHR; - +// VK_KHR_video_decode_queue is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_decode_queue 1 +#define VK_KHR_VIDEO_DECODE_QUEUE_SPEC_VERSION 8 +#define VK_KHR_VIDEO_DECODE_QUEUE_EXTENSION_NAME "VK_KHR_video_decode_queue" + +typedef enum VkVideoDecodeCapabilityFlagBitsKHR { + VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_COINCIDE_BIT_KHR = 0x00000001, + VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_DISTINCT_BIT_KHR = 0x00000002, + VK_VIDEO_DECODE_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoDecodeCapabilityFlagBitsKHR; +typedef VkFlags VkVideoDecodeCapabilityFlagsKHR; + +typedef enum VkVideoDecodeUsageFlagBitsKHR { + VK_VIDEO_DECODE_USAGE_DEFAULT_KHR = 0, + VK_VIDEO_DECODE_USAGE_TRANSCODING_BIT_KHR = 0x00000001, + VK_VIDEO_DECODE_USAGE_OFFLINE_BIT_KHR = 0x00000002, + VK_VIDEO_DECODE_USAGE_STREAMING_BIT_KHR = 0x00000004, + VK_VIDEO_DECODE_USAGE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoDecodeUsageFlagBitsKHR; +typedef VkFlags VkVideoDecodeUsageFlagsKHR; +typedef VkFlags VkVideoDecodeFlagsKHR; +typedef struct VkVideoDecodeCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoDecodeCapabilityFlagsKHR flags; +} VkVideoDecodeCapabilitiesKHR; +typedef struct VkVideoDecodeUsageInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoDecodeUsageFlagsKHR videoUsageHints; +} VkVideoDecodeUsageInfoKHR; -#define VK_KHR_16bit_storage 1 -#define VK_KHR_16BIT_STORAGE_SPEC_VERSION 1 -#define VK_KHR_16BIT_STORAGE_EXTENSION_NAME "VK_KHR_16bit_storage" -typedef VkPhysicalDevice16BitStorageFeatures VkPhysicalDevice16BitStorageFeaturesKHR; +typedef struct VkVideoDecodeInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoDecodeFlagsKHR flags; + VkBuffer srcBuffer; + VkDeviceSize srcBufferOffset; + VkDeviceSize srcBufferRange; + VkVideoPictureResourceInfoKHR dstPictureResource; + const VkVideoReferenceSlotInfoKHR* pSetupReferenceSlot; + uint32_t referenceSlotCount; + const VkVideoReferenceSlotInfoKHR* pReferenceSlots; +} VkVideoDecodeInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdDecodeVideoKHR)(VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR* pDecodeInfo); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDecodeVideoKHR( + VkCommandBuffer commandBuffer, + const VkVideoDecodeInfoKHR* pDecodeInfo); +#endif -#define VK_KHR_incremental_present 1 -#define VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION 2 -#define VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME "VK_KHR_incremental_present" -typedef struct VkRectLayerKHR { - VkOffset2D offset; - VkExtent2D extent; - uint32_t layer; -} VkRectLayerKHR; +// VK_KHR_video_encode_h264 is a preprocessor guard. Do not pass it to API calls. 
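Not part of the generated header: decode work from VK_KHR_video_queue / VK_KHR_video_decode_queue above is recorded inside a video coding scope. A heavily simplified sketch, assuming the video session, bitstream buffer and destination picture were created and bound elsewhere; the mandatory codec-specific pNext chains (e.g. VkVideoDecodeH264PictureInfoKHR), DPB reference slots and image-layout transitions are omitted, and in a real application these device-extension entry points would be fetched with vkGetDeviceProcAddr rather than called directly.

/* Illustrative sketch only -- not part of the generated header. */
#include <vulkan/vulkan.h>

/* Records one decode operation; the caller owns begin/end of the command buffer. */
static void record_decode(VkCommandBuffer cmd, VkVideoSessionKHR session,
                          VkVideoSessionParametersKHR params,
                          VkBuffer bitstream, VkDeviceSize offset, VkDeviceSize range,
                          VkImageView dstView, VkExtent2D codedExtent)
{
    VkVideoBeginCodingInfoKHR begin = {
        .sType = VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR,
        .videoSession = session,
        .videoSessionParameters = params,
    };
    vkCmdBeginVideoCodingKHR(cmd, &begin);

    VkVideoPictureResourceInfoKHR dst = {
        .sType = VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_INFO_KHR,
        .codedExtent = codedExtent,
        .imageViewBinding = dstView,
    };
    VkVideoDecodeInfoKHR decode = {
        .sType = VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR,
        /* .pNext would carry e.g. VkVideoDecodeH264PictureInfoKHR for H.264. */
        .srcBuffer = bitstream,
        .srcBufferOffset = offset,
        .srcBufferRange = range,
        .dstPictureResource = dst,
    };
    vkCmdDecodeVideoKHR(cmd, &decode);

    VkVideoEndCodingInfoKHR end = { .sType = VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR };
    vkCmdEndVideoCodingKHR(cmd, &end);
}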
+#define VK_KHR_video_encode_h264 1 +#include "vk_video/vulkan_video_codec_h264std.h" +#include "vk_video/vulkan_video_codec_h264std_encode.h" +#define VK_KHR_VIDEO_ENCODE_H264_SPEC_VERSION 14 +#define VK_KHR_VIDEO_ENCODE_H264_EXTENSION_NAME "VK_KHR_video_encode_h264" + +typedef enum VkVideoEncodeH264CapabilityFlagBitsKHR { + VK_VIDEO_ENCODE_H264_CAPABILITY_HRD_COMPLIANCE_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H264_CAPABILITY_PREDICTION_WEIGHT_TABLE_GENERATED_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H264_CAPABILITY_ROW_UNALIGNED_SLICE_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H264_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L0_LIST_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_KHR = 0x00000020, + VK_VIDEO_ENCODE_H264_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR = 0x00000040, + VK_VIDEO_ENCODE_H264_CAPABILITY_PER_SLICE_CONSTANT_QP_BIT_KHR = 0x00000080, + VK_VIDEO_ENCODE_H264_CAPABILITY_GENERATE_PREFIX_NALU_BIT_KHR = 0x00000100, + VK_VIDEO_ENCODE_H264_CAPABILITY_MB_QP_DIFF_WRAPAROUND_BIT_KHR = 0x00000200, + VK_VIDEO_ENCODE_H264_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH264CapabilityFlagBitsKHR; +typedef VkFlags VkVideoEncodeH264CapabilityFlagsKHR; + +typedef enum VkVideoEncodeH264StdFlagBitsKHR { + VK_VIDEO_ENCODE_H264_STD_SEPARATE_COLOR_PLANE_FLAG_SET_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H264_STD_QPPRIME_Y_ZERO_TRANSFORM_BYPASS_FLAG_SET_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H264_STD_SCALING_MATRIX_PRESENT_FLAG_SET_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H264_STD_CHROMA_QP_INDEX_OFFSET_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H264_STD_SECOND_CHROMA_QP_INDEX_OFFSET_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_H264_STD_PIC_INIT_QP_MINUS26_BIT_KHR = 0x00000020, + VK_VIDEO_ENCODE_H264_STD_WEIGHTED_PRED_FLAG_SET_BIT_KHR = 0x00000040, + VK_VIDEO_ENCODE_H264_STD_WEIGHTED_BIPRED_IDC_EXPLICIT_BIT_KHR = 0x00000080, + VK_VIDEO_ENCODE_H264_STD_WEIGHTED_BIPRED_IDC_IMPLICIT_BIT_KHR = 0x00000100, + VK_VIDEO_ENCODE_H264_STD_TRANSFORM_8X8_MODE_FLAG_SET_BIT_KHR = 0x00000200, + VK_VIDEO_ENCODE_H264_STD_DIRECT_SPATIAL_MV_PRED_FLAG_UNSET_BIT_KHR = 0x00000400, + VK_VIDEO_ENCODE_H264_STD_ENTROPY_CODING_MODE_FLAG_UNSET_BIT_KHR = 0x00000800, + VK_VIDEO_ENCODE_H264_STD_ENTROPY_CODING_MODE_FLAG_SET_BIT_KHR = 0x00001000, + VK_VIDEO_ENCODE_H264_STD_DIRECT_8X8_INFERENCE_FLAG_UNSET_BIT_KHR = 0x00002000, + VK_VIDEO_ENCODE_H264_STD_CONSTRAINED_INTRA_PRED_FLAG_SET_BIT_KHR = 0x00004000, + VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_DISABLED_BIT_KHR = 0x00008000, + VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_ENABLED_BIT_KHR = 0x00010000, + VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_PARTIAL_BIT_KHR = 0x00020000, + VK_VIDEO_ENCODE_H264_STD_SLICE_QP_DELTA_BIT_KHR = 0x00080000, + VK_VIDEO_ENCODE_H264_STD_DIFFERENT_SLICE_QP_DELTA_BIT_KHR = 0x00100000, + VK_VIDEO_ENCODE_H264_STD_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH264StdFlagBitsKHR; +typedef VkFlags VkVideoEncodeH264StdFlagsKHR; + +typedef enum VkVideoEncodeH264RateControlFlagBitsKHR { + VK_VIDEO_ENCODE_H264_RATE_CONTROL_ATTEMPT_HRD_COMPLIANCE_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H264_RATE_CONTROL_REGULAR_GOP_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H264_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H264_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H264_RATE_CONTROL_TEMPORAL_LAYER_PATTERN_DYADIC_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_H264_RATE_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} 
VkVideoEncodeH264RateControlFlagBitsKHR; +typedef VkFlags VkVideoEncodeH264RateControlFlagsKHR; +typedef struct VkVideoEncodeH264CapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeH264CapabilityFlagsKHR flags; + StdVideoH264LevelIdc maxLevelIdc; + uint32_t maxSliceCount; + uint32_t maxPPictureL0ReferenceCount; + uint32_t maxBPictureL0ReferenceCount; + uint32_t maxL1ReferenceCount; + uint32_t maxTemporalLayerCount; + VkBool32 expectDyadicTemporalLayerPattern; + int32_t minQp; + int32_t maxQp; + VkBool32 prefersGopRemainingFrames; + VkBool32 requiresGopRemainingFrames; + VkVideoEncodeH264StdFlagsKHR stdSyntaxFlags; +} VkVideoEncodeH264CapabilitiesKHR; + +typedef struct VkVideoEncodeH264QpKHR { + int32_t qpI; + int32_t qpP; + int32_t qpB; +} VkVideoEncodeH264QpKHR; + +typedef struct VkVideoEncodeH264QualityLevelPropertiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeH264RateControlFlagsKHR preferredRateControlFlags; + uint32_t preferredGopFrameCount; + uint32_t preferredIdrPeriod; + uint32_t preferredConsecutiveBFrameCount; + uint32_t preferredTemporalLayerCount; + VkVideoEncodeH264QpKHR preferredConstantQp; + uint32_t preferredMaxL0ReferenceCount; + uint32_t preferredMaxL1ReferenceCount; + VkBool32 preferredStdEntropyCodingModeFlag; +} VkVideoEncodeH264QualityLevelPropertiesKHR; + +typedef struct VkVideoEncodeH264SessionCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useMaxLevelIdc; + StdVideoH264LevelIdc maxLevelIdc; +} VkVideoEncodeH264SessionCreateInfoKHR; -typedef struct VkPresentRegionKHR { - uint32_t rectangleCount; - const VkRectLayerKHR* pRectangles; -} VkPresentRegionKHR; +typedef struct VkVideoEncodeH264SessionParametersAddInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t stdSPSCount; + const StdVideoH264SequenceParameterSet* pStdSPSs; + uint32_t stdPPSCount; + const StdVideoH264PictureParameterSet* pStdPPSs; +} VkVideoEncodeH264SessionParametersAddInfoKHR; -typedef struct VkPresentRegionsKHR { - VkStructureType sType; - const void* pNext; - uint32_t swapchainCount; - const VkPresentRegionKHR* pRegions; -} VkPresentRegionsKHR; +typedef struct VkVideoEncodeH264SessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t maxStdSPSCount; + uint32_t maxStdPPSCount; + const VkVideoEncodeH264SessionParametersAddInfoKHR* pParametersAddInfo; +} VkVideoEncodeH264SessionParametersCreateInfoKHR; +typedef struct VkVideoEncodeH264SessionParametersGetInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 writeStdSPS; + VkBool32 writeStdPPS; + uint32_t stdSPSId; + uint32_t stdPPSId; +} VkVideoEncodeH264SessionParametersGetInfoKHR; +typedef struct VkVideoEncodeH264SessionParametersFeedbackInfoKHR { + VkStructureType sType; + void* pNext; + VkBool32 hasStdSPSOverrides; + VkBool32 hasStdPPSOverrides; +} VkVideoEncodeH264SessionParametersFeedbackInfoKHR; -#define VK_KHR_descriptor_update_template 1 -typedef VkDescriptorUpdateTemplate VkDescriptorUpdateTemplateKHR; +typedef struct VkVideoEncodeH264NaluSliceInfoKHR { + VkStructureType sType; + const void* pNext; + int32_t constantQp; + const StdVideoEncodeH264SliceHeader* pStdSliceHeader; +} VkVideoEncodeH264NaluSliceInfoKHR; -#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION 1 -#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME "VK_KHR_descriptor_update_template" -typedef VkDescriptorUpdateTemplateType VkDescriptorUpdateTemplateTypeKHR; +typedef struct VkVideoEncodeH264PictureInfoKHR { + VkStructureType sType; + const 
void* pNext; + uint32_t naluSliceEntryCount; + const VkVideoEncodeH264NaluSliceInfoKHR* pNaluSliceEntries; + const StdVideoEncodeH264PictureInfo* pStdPictureInfo; + VkBool32 generatePrefixNalu; +} VkVideoEncodeH264PictureInfoKHR; -typedef VkDescriptorUpdateTemplateCreateFlags VkDescriptorUpdateTemplateCreateFlagsKHR; +typedef struct VkVideoEncodeH264DpbSlotInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoEncodeH264ReferenceInfo* pStdReferenceInfo; +} VkVideoEncodeH264DpbSlotInfoKHR; -typedef VkDescriptorUpdateTemplateEntry VkDescriptorUpdateTemplateEntryKHR; +typedef struct VkVideoEncodeH264ProfileInfoKHR { + VkStructureType sType; + const void* pNext; + StdVideoH264ProfileIdc stdProfileIdc; +} VkVideoEncodeH264ProfileInfoKHR; -typedef VkDescriptorUpdateTemplateCreateInfo VkDescriptorUpdateTemplateCreateInfoKHR; +typedef struct VkVideoEncodeH264RateControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeH264RateControlFlagsKHR flags; + uint32_t gopFrameCount; + uint32_t idrPeriod; + uint32_t consecutiveBFrameCount; + uint32_t temporalLayerCount; +} VkVideoEncodeH264RateControlInfoKHR; + +typedef struct VkVideoEncodeH264FrameSizeKHR { + uint32_t frameISize; + uint32_t framePSize; + uint32_t frameBSize; +} VkVideoEncodeH264FrameSizeKHR; + +typedef struct VkVideoEncodeH264RateControlLayerInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useMinQp; + VkVideoEncodeH264QpKHR minQp; + VkBool32 useMaxQp; + VkVideoEncodeH264QpKHR maxQp; + VkBool32 useMaxFrameSize; + VkVideoEncodeH264FrameSizeKHR maxFrameSize; +} VkVideoEncodeH264RateControlLayerInfoKHR; -typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplateKHR)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); -typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplateKHR)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator); -typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplateKHR)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData); +typedef struct VkVideoEncodeH264GopRemainingFrameInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useGopRemainingFrames; + uint32_t gopRemainingI; + uint32_t gopRemainingP; + uint32_t gopRemainingB; +} VkVideoEncodeH264GopRemainingFrameInfoKHR; + + + +// VK_KHR_video_encode_h265 is a preprocessor guard. Do not pass it to API calls. 
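Not part of the generated header: codec-specific structures such as VkVideoEncodeH264ProfileInfoKHR above are never passed on their own; they extend the generic video structures through pNext. A sketch of querying H.264 encode capabilities for one profile, assuming the implementation supports H.264 encode; the helper name and the chosen profile/chroma values are illustrative.

/* Illustrative sketch only -- not part of the generated header. */
#include <vulkan/vulkan.h>

static VkResult query_h264_encode_caps(VkPhysicalDevice gpu)
{
    /* Codec-specific profile, chained into the generic profile. */
    VkVideoEncodeH264ProfileInfoKHR h264Profile = {
        .sType = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_INFO_KHR,
        .stdProfileIdc = STD_VIDEO_H264_PROFILE_IDC_HIGH,
    };
    VkVideoProfileInfoKHR profile = {
        .sType = VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR,
        .pNext = &h264Profile,
        .videoCodecOperation = VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_KHR,
        .chromaSubsampling = VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR,
        .lumaBitDepth = VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR,
        .chromaBitDepth = VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR,
    };

    /* Codec-specific capabilities come back through the output pNext chain. */
    VkVideoEncodeH264CapabilitiesKHR h264Caps = {
        .sType = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_KHR,
    };
    VkVideoCapabilitiesKHR caps = {
        .sType = VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR,
        .pNext = &h264Caps,
    };

    VkResult res = vkGetPhysicalDeviceVideoCapabilitiesKHR(gpu, &profile, &caps);
    /* On success, caps.maxDpbSlots, h264Caps.maxLevelIdc, etc. are now valid. */
    return res;
}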
+#define VK_KHR_video_encode_h265 1 +#include "vk_video/vulkan_video_codec_h265std.h" +#include "vk_video/vulkan_video_codec_h265std_encode.h" +#define VK_KHR_VIDEO_ENCODE_H265_SPEC_VERSION 14 +#define VK_KHR_VIDEO_ENCODE_H265_EXTENSION_NAME "VK_KHR_video_encode_h265" + +typedef enum VkVideoEncodeH265CapabilityFlagBitsKHR { + VK_VIDEO_ENCODE_H265_CAPABILITY_HRD_COMPLIANCE_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H265_CAPABILITY_PREDICTION_WEIGHT_TABLE_GENERATED_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H265_CAPABILITY_ROW_UNALIGNED_SLICE_SEGMENT_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H265_CAPABILITY_DIFFERENT_SLICE_SEGMENT_TYPE_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L0_LIST_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_KHR = 0x00000020, + VK_VIDEO_ENCODE_H265_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR = 0x00000040, + VK_VIDEO_ENCODE_H265_CAPABILITY_PER_SLICE_SEGMENT_CONSTANT_QP_BIT_KHR = 0x00000080, + VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILES_PER_SLICE_SEGMENT_BIT_KHR = 0x00000100, + VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_SEGMENTS_PER_TILE_BIT_KHR = 0x00000200, + VK_VIDEO_ENCODE_H265_CAPABILITY_CU_QP_DIFF_WRAPAROUND_BIT_KHR = 0x00000400, + VK_VIDEO_ENCODE_H265_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH265CapabilityFlagBitsKHR; +typedef VkFlags VkVideoEncodeH265CapabilityFlagsKHR; + +typedef enum VkVideoEncodeH265StdFlagBitsKHR { + VK_VIDEO_ENCODE_H265_STD_SEPARATE_COLOR_PLANE_FLAG_SET_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H265_STD_SAMPLE_ADAPTIVE_OFFSET_ENABLED_FLAG_SET_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H265_STD_SCALING_LIST_DATA_PRESENT_FLAG_SET_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H265_STD_PCM_ENABLED_FLAG_SET_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H265_STD_SPS_TEMPORAL_MVP_ENABLED_FLAG_SET_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_H265_STD_INIT_QP_MINUS26_BIT_KHR = 0x00000020, + VK_VIDEO_ENCODE_H265_STD_WEIGHTED_PRED_FLAG_SET_BIT_KHR = 0x00000040, + VK_VIDEO_ENCODE_H265_STD_WEIGHTED_BIPRED_FLAG_SET_BIT_KHR = 0x00000080, + VK_VIDEO_ENCODE_H265_STD_LOG2_PARALLEL_MERGE_LEVEL_MINUS2_BIT_KHR = 0x00000100, + VK_VIDEO_ENCODE_H265_STD_SIGN_DATA_HIDING_ENABLED_FLAG_SET_BIT_KHR = 0x00000200, + VK_VIDEO_ENCODE_H265_STD_TRANSFORM_SKIP_ENABLED_FLAG_SET_BIT_KHR = 0x00000400, + VK_VIDEO_ENCODE_H265_STD_TRANSFORM_SKIP_ENABLED_FLAG_UNSET_BIT_KHR = 0x00000800, + VK_VIDEO_ENCODE_H265_STD_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_FLAG_SET_BIT_KHR = 0x00001000, + VK_VIDEO_ENCODE_H265_STD_TRANSQUANT_BYPASS_ENABLED_FLAG_SET_BIT_KHR = 0x00002000, + VK_VIDEO_ENCODE_H265_STD_CONSTRAINED_INTRA_PRED_FLAG_SET_BIT_KHR = 0x00004000, + VK_VIDEO_ENCODE_H265_STD_ENTROPY_CODING_SYNC_ENABLED_FLAG_SET_BIT_KHR = 0x00008000, + VK_VIDEO_ENCODE_H265_STD_DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_SET_BIT_KHR = 0x00010000, + VK_VIDEO_ENCODE_H265_STD_DEPENDENT_SLICE_SEGMENTS_ENABLED_FLAG_SET_BIT_KHR = 0x00020000, + VK_VIDEO_ENCODE_H265_STD_DEPENDENT_SLICE_SEGMENT_FLAG_SET_BIT_KHR = 0x00040000, + VK_VIDEO_ENCODE_H265_STD_SLICE_QP_DELTA_BIT_KHR = 0x00080000, + VK_VIDEO_ENCODE_H265_STD_DIFFERENT_SLICE_QP_DELTA_BIT_KHR = 0x00100000, + VK_VIDEO_ENCODE_H265_STD_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH265StdFlagBitsKHR; +typedef VkFlags VkVideoEncodeH265StdFlagsKHR; + +typedef enum VkVideoEncodeH265CtbSizeFlagBitsKHR { + VK_VIDEO_ENCODE_H265_CTB_SIZE_16_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H265_CTB_SIZE_32_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_KHR = 0x00000004, + 
VK_VIDEO_ENCODE_H265_CTB_SIZE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH265CtbSizeFlagBitsKHR; +typedef VkFlags VkVideoEncodeH265CtbSizeFlagsKHR; + +typedef enum VkVideoEncodeH265TransformBlockSizeFlagBitsKHR { + VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_4_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_8_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_16_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_32_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH265TransformBlockSizeFlagBitsKHR; +typedef VkFlags VkVideoEncodeH265TransformBlockSizeFlagsKHR; + +typedef enum VkVideoEncodeH265RateControlFlagBitsKHR { + VK_VIDEO_ENCODE_H265_RATE_CONTROL_ATTEMPT_HRD_COMPLIANCE_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H265_RATE_CONTROL_REGULAR_GOP_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H265_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H265_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H265_RATE_CONTROL_TEMPORAL_SUB_LAYER_PATTERN_DYADIC_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_H265_RATE_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH265RateControlFlagBitsKHR; +typedef VkFlags VkVideoEncodeH265RateControlFlagsKHR; +typedef struct VkVideoEncodeH265CapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeH265CapabilityFlagsKHR flags; + StdVideoH265LevelIdc maxLevelIdc; + uint32_t maxSliceSegmentCount; + VkExtent2D maxTiles; + VkVideoEncodeH265CtbSizeFlagsKHR ctbSizes; + VkVideoEncodeH265TransformBlockSizeFlagsKHR transformBlockSizes; + uint32_t maxPPictureL0ReferenceCount; + uint32_t maxBPictureL0ReferenceCount; + uint32_t maxL1ReferenceCount; + uint32_t maxSubLayerCount; + VkBool32 expectDyadicTemporalSubLayerPattern; + int32_t minQp; + int32_t maxQp; + VkBool32 prefersGopRemainingFrames; + VkBool32 requiresGopRemainingFrames; + VkVideoEncodeH265StdFlagsKHR stdSyntaxFlags; +} VkVideoEncodeH265CapabilitiesKHR; + +typedef struct VkVideoEncodeH265SessionCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useMaxLevelIdc; + StdVideoH265LevelIdc maxLevelIdc; +} VkVideoEncodeH265SessionCreateInfoKHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplateKHR( - VkDevice device, - const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); +typedef struct VkVideoEncodeH265QpKHR { + int32_t qpI; + int32_t qpP; + int32_t qpB; +} VkVideoEncodeH265QpKHR; -VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplateKHR( - VkDevice device, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - const VkAllocationCallbacks* pAllocator); +typedef struct VkVideoEncodeH265QualityLevelPropertiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeH265RateControlFlagsKHR preferredRateControlFlags; + uint32_t preferredGopFrameCount; + uint32_t preferredIdrPeriod; + uint32_t preferredConsecutiveBFrameCount; + uint32_t preferredSubLayerCount; + VkVideoEncodeH265QpKHR preferredConstantQp; + uint32_t preferredMaxL0ReferenceCount; + uint32_t preferredMaxL1ReferenceCount; +} VkVideoEncodeH265QualityLevelPropertiesKHR; + +typedef struct VkVideoEncodeH265SessionParametersAddInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t stdVPSCount; + const StdVideoH265VideoParameterSet* pStdVPSs; + uint32_t stdSPSCount; + const 
StdVideoH265SequenceParameterSet* pStdSPSs; + uint32_t stdPPSCount; + const StdVideoH265PictureParameterSet* pStdPPSs; +} VkVideoEncodeH265SessionParametersAddInfoKHR; + +typedef struct VkVideoEncodeH265SessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t maxStdVPSCount; + uint32_t maxStdSPSCount; + uint32_t maxStdPPSCount; + const VkVideoEncodeH265SessionParametersAddInfoKHR* pParametersAddInfo; +} VkVideoEncodeH265SessionParametersCreateInfoKHR; -VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplateKHR( - VkDevice device, - VkDescriptorSet descriptorSet, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - const void* pData); -#endif +typedef struct VkVideoEncodeH265SessionParametersGetInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 writeStdVPS; + VkBool32 writeStdSPS; + VkBool32 writeStdPPS; + uint32_t stdVPSId; + uint32_t stdSPSId; + uint32_t stdPPSId; +} VkVideoEncodeH265SessionParametersGetInfoKHR; +typedef struct VkVideoEncodeH265SessionParametersFeedbackInfoKHR { + VkStructureType sType; + void* pNext; + VkBool32 hasStdVPSOverrides; + VkBool32 hasStdSPSOverrides; + VkBool32 hasStdPPSOverrides; +} VkVideoEncodeH265SessionParametersFeedbackInfoKHR; -#define VK_KHR_imageless_framebuffer 1 -#define VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION 1 -#define VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME "VK_KHR_imageless_framebuffer" -typedef VkPhysicalDeviceImagelessFramebufferFeatures VkPhysicalDeviceImagelessFramebufferFeaturesKHR; +typedef struct VkVideoEncodeH265NaluSliceSegmentInfoKHR { + VkStructureType sType; + const void* pNext; + int32_t constantQp; + const StdVideoEncodeH265SliceSegmentHeader* pStdSliceSegmentHeader; +} VkVideoEncodeH265NaluSliceSegmentInfoKHR; -typedef VkFramebufferAttachmentsCreateInfo VkFramebufferAttachmentsCreateInfoKHR; +typedef struct VkVideoEncodeH265PictureInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t naluSliceSegmentEntryCount; + const VkVideoEncodeH265NaluSliceSegmentInfoKHR* pNaluSliceSegmentEntries; + const StdVideoEncodeH265PictureInfo* pStdPictureInfo; +} VkVideoEncodeH265PictureInfoKHR; -typedef VkFramebufferAttachmentImageInfo VkFramebufferAttachmentImageInfoKHR; +typedef struct VkVideoEncodeH265DpbSlotInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoEncodeH265ReferenceInfo* pStdReferenceInfo; +} VkVideoEncodeH265DpbSlotInfoKHR; -typedef VkRenderPassAttachmentBeginInfo VkRenderPassAttachmentBeginInfoKHR; +typedef struct VkVideoEncodeH265ProfileInfoKHR { + VkStructureType sType; + const void* pNext; + StdVideoH265ProfileIdc stdProfileIdc; +} VkVideoEncodeH265ProfileInfoKHR; +typedef struct VkVideoEncodeH265RateControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeH265RateControlFlagsKHR flags; + uint32_t gopFrameCount; + uint32_t idrPeriod; + uint32_t consecutiveBFrameCount; + uint32_t subLayerCount; +} VkVideoEncodeH265RateControlInfoKHR; + +typedef struct VkVideoEncodeH265FrameSizeKHR { + uint32_t frameISize; + uint32_t framePSize; + uint32_t frameBSize; +} VkVideoEncodeH265FrameSizeKHR; + +typedef struct VkVideoEncodeH265RateControlLayerInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useMinQp; + VkVideoEncodeH265QpKHR minQp; + VkBool32 useMaxQp; + VkVideoEncodeH265QpKHR maxQp; + VkBool32 useMaxFrameSize; + VkVideoEncodeH265FrameSizeKHR maxFrameSize; +} VkVideoEncodeH265RateControlLayerInfoKHR; +typedef struct VkVideoEncodeH265GopRemainingFrameInfoKHR { + VkStructureType sType; + const void* pNext; 
+ VkBool32 useGopRemainingFrames; + uint32_t gopRemainingI; + uint32_t gopRemainingP; + uint32_t gopRemainingB; +} VkVideoEncodeH265GopRemainingFrameInfoKHR; + + + +// VK_KHR_video_decode_h264 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_decode_h264 1 +#include "vk_video/vulkan_video_codec_h264std_decode.h" +#define VK_KHR_VIDEO_DECODE_H264_SPEC_VERSION 9 +#define VK_KHR_VIDEO_DECODE_H264_EXTENSION_NAME "VK_KHR_video_decode_h264" + +typedef enum VkVideoDecodeH264PictureLayoutFlagBitsKHR { + VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_PROGRESSIVE_KHR = 0, + VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_INTERLEAVED_LINES_BIT_KHR = 0x00000001, + VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_SEPARATE_PLANES_BIT_KHR = 0x00000002, + VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoDecodeH264PictureLayoutFlagBitsKHR; +typedef VkFlags VkVideoDecodeH264PictureLayoutFlagsKHR; +typedef struct VkVideoDecodeH264ProfileInfoKHR { + VkStructureType sType; + const void* pNext; + StdVideoH264ProfileIdc stdProfileIdc; + VkVideoDecodeH264PictureLayoutFlagBitsKHR pictureLayout; +} VkVideoDecodeH264ProfileInfoKHR; -#define VK_KHR_create_renderpass2 1 -#define VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION 1 -#define VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME "VK_KHR_create_renderpass2" -typedef VkRenderPassCreateInfo2 VkRenderPassCreateInfo2KHR; +typedef struct VkVideoDecodeH264CapabilitiesKHR { + VkStructureType sType; + void* pNext; + StdVideoH264LevelIdc maxLevelIdc; + VkOffset2D fieldOffsetGranularity; +} VkVideoDecodeH264CapabilitiesKHR; -typedef VkAttachmentDescription2 VkAttachmentDescription2KHR; +typedef struct VkVideoDecodeH264SessionParametersAddInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t stdSPSCount; + const StdVideoH264SequenceParameterSet* pStdSPSs; + uint32_t stdPPSCount; + const StdVideoH264PictureParameterSet* pStdPPSs; +} VkVideoDecodeH264SessionParametersAddInfoKHR; -typedef VkAttachmentReference2 VkAttachmentReference2KHR; +typedef struct VkVideoDecodeH264SessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t maxStdSPSCount; + uint32_t maxStdPPSCount; + const VkVideoDecodeH264SessionParametersAddInfoKHR* pParametersAddInfo; +} VkVideoDecodeH264SessionParametersCreateInfoKHR; -typedef VkSubpassDescription2 VkSubpassDescription2KHR; +typedef struct VkVideoDecodeH264PictureInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH264PictureInfo* pStdPictureInfo; + uint32_t sliceCount; + const uint32_t* pSliceOffsets; +} VkVideoDecodeH264PictureInfoKHR; -typedef VkSubpassDependency2 VkSubpassDependency2KHR; +typedef struct VkVideoDecodeH264DpbSlotInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH264ReferenceInfo* pStdReferenceInfo; +} VkVideoDecodeH264DpbSlotInfoKHR; -typedef VkSubpassBeginInfo VkSubpassBeginInfoKHR; -typedef VkSubpassEndInfo VkSubpassEndInfoKHR; -typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass2KHR)(VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); -typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo); -typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo); -typedef void (VKAPI_PTR 
*PFN_vkCmdEndRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo); +// VK_KHR_dynamic_rendering is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_dynamic_rendering 1 +#define VK_KHR_DYNAMIC_RENDERING_SPEC_VERSION 1 +#define VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME "VK_KHR_dynamic_rendering" +typedef VkRenderingFlags VkRenderingFlagsKHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2KHR( - VkDevice device, - const VkRenderPassCreateInfo2* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkRenderPass* pRenderPass); +typedef VkRenderingFlagBits VkRenderingFlagBitsKHR; -VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2KHR( - VkCommandBuffer commandBuffer, - const VkRenderPassBeginInfo* pRenderPassBegin, - const VkSubpassBeginInfo* pSubpassBeginInfo); +typedef VkRenderingInfo VkRenderingInfoKHR; -VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2KHR( - VkCommandBuffer commandBuffer, - const VkSubpassBeginInfo* pSubpassBeginInfo, - const VkSubpassEndInfo* pSubpassEndInfo); +typedef VkRenderingAttachmentInfo VkRenderingAttachmentInfoKHR; -VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2KHR( - VkCommandBuffer commandBuffer, - const VkSubpassEndInfo* pSubpassEndInfo); -#endif +typedef VkPipelineRenderingCreateInfo VkPipelineRenderingCreateInfoKHR; +typedef VkPhysicalDeviceDynamicRenderingFeatures VkPhysicalDeviceDynamicRenderingFeaturesKHR; -#define VK_KHR_shared_presentable_image 1 -#define VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION 1 -#define VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME "VK_KHR_shared_presentable_image" -typedef struct VkSharedPresentSurfaceCapabilitiesKHR { - VkStructureType sType; - void* pNext; - VkImageUsageFlags sharedPresentSupportedUsageFlags; -} VkSharedPresentSurfaceCapabilitiesKHR; +typedef VkCommandBufferInheritanceRenderingInfo VkCommandBufferInheritanceRenderingInfoKHR; -typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainStatusKHR)(VkDevice device, VkSwapchainKHR swapchain); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderingKHR)(VkCommandBuffer commandBuffer, const VkRenderingInfo* pRenderingInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderingKHR)(VkCommandBuffer commandBuffer); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainStatusKHR( - VkDevice device, - VkSwapchainKHR swapchain); +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderingKHR( + VkCommandBuffer commandBuffer, + const VkRenderingInfo* pRenderingInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderingKHR( + VkCommandBuffer commandBuffer); #endif -#define VK_KHR_external_fence_capabilities 1 -#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_fence_capabilities" -typedef VkExternalFenceHandleTypeFlags VkExternalFenceHandleTypeFlagsKHR; +// VK_KHR_multiview is a preprocessor guard. Do not pass it to API calls. 
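
/*
 * Illustrative usage sketch, not part of the generated header or of this patch:
 * recording work between the VK_KHR_dynamic_rendering entry points declared
 * above. The command buffer, image view, extent, and the choice of a single
 * color attachment are assumptions for the example; in a real application the
 * KHR entry points are typically loaded via vkGetDeviceProcAddr.
 */
static void exampleRecordDynamicRendering(VkCommandBuffer cmd,
                                          VkImageView colorView,
                                          VkExtent2D extent)
{
    VkRenderingAttachmentInfoKHR color = { 0 };
    color.sType       = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO_KHR;
    color.imageView   = colorView;
    color.imageLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    color.loadOp      = VK_ATTACHMENT_LOAD_OP_CLEAR;   /* zero-initialized clearValue -> black */
    color.storeOp     = VK_ATTACHMENT_STORE_OP_STORE;

    VkRenderingInfoKHR info = { 0 };
    info.sType                = VK_STRUCTURE_TYPE_RENDERING_INFO_KHR;
    info.renderArea.extent    = extent;
    info.layerCount           = 1;
    info.colorAttachmentCount = 1;
    info.pColorAttachments    = &color;

    vkCmdBeginRenderingKHR(cmd, &info);
    /* ... bind pipeline and issue draw calls here ... */
    vkCmdEndRenderingKHR(cmd);
}
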
+#define VK_KHR_multiview 1 +#define VK_KHR_MULTIVIEW_SPEC_VERSION 1 +#define VK_KHR_MULTIVIEW_EXTENSION_NAME "VK_KHR_multiview" +typedef VkRenderPassMultiviewCreateInfo VkRenderPassMultiviewCreateInfoKHR; -typedef VkExternalFenceHandleTypeFlagBits VkExternalFenceHandleTypeFlagBitsKHR; +typedef VkPhysicalDeviceMultiviewFeatures VkPhysicalDeviceMultiviewFeaturesKHR; -typedef VkExternalFenceFeatureFlags VkExternalFenceFeatureFlagsKHR; +typedef VkPhysicalDeviceMultiviewProperties VkPhysicalDeviceMultiviewPropertiesKHR; -typedef VkExternalFenceFeatureFlagBits VkExternalFenceFeatureFlagBitsKHR; -typedef VkPhysicalDeviceExternalFenceInfo VkPhysicalDeviceExternalFenceInfoKHR; -typedef VkExternalFenceProperties VkExternalFencePropertiesKHR; +// VK_KHR_get_physical_device_properties2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_get_physical_device_properties2 1 +#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION 2 +#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_physical_device_properties2" +typedef VkPhysicalDeviceFeatures2 VkPhysicalDeviceFeatures2KHR; -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties); +typedef VkPhysicalDeviceProperties2 VkPhysicalDeviceProperties2KHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFencePropertiesKHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, - VkExternalFenceProperties* pExternalFenceProperties); -#endif +typedef VkFormatProperties2 VkFormatProperties2KHR; +typedef VkImageFormatProperties2 VkImageFormatProperties2KHR; -#define VK_KHR_external_fence 1 -#define VK_KHR_EXTERNAL_FENCE_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME "VK_KHR_external_fence" -typedef VkFenceImportFlags VkFenceImportFlagsKHR; +typedef VkPhysicalDeviceImageFormatInfo2 VkPhysicalDeviceImageFormatInfo2KHR; -typedef VkFenceImportFlagBits VkFenceImportFlagBitsKHR; +typedef VkQueueFamilyProperties2 VkQueueFamilyProperties2KHR; -typedef VkExportFenceCreateInfo VkExportFenceCreateInfoKHR; +typedef VkPhysicalDeviceMemoryProperties2 VkPhysicalDeviceMemoryProperties2KHR; +typedef VkSparseImageFormatProperties2 VkSparseImageFormatProperties2KHR; +typedef VkPhysicalDeviceSparseImageFormatInfo2 VkPhysicalDeviceSparseImageFormatInfo2KHR; -#define VK_KHR_external_fence_fd 1 -#define VK_KHR_EXTERNAL_FENCE_FD_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME "VK_KHR_external_fence_fd" -typedef struct VkImportFenceFdInfoKHR { - VkStructureType sType; - const void* pNext; - VkFence fence; - VkFenceImportFlags flags; - VkExternalFenceHandleTypeFlagBits handleType; - int fd; -} VkImportFenceFdInfoKHR; +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2KHR)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties); +typedef 
void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties); -typedef struct VkFenceGetFdInfoKHR { - VkStructureType sType; - const void* pNext; - VkFence fence; - VkExternalFenceHandleTypeFlagBits handleType; -} VkFenceGetFdInfoKHR; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2* pFeatures); -typedef VkResult (VKAPI_PTR *PFN_vkImportFenceFdKHR)(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo); -typedef VkResult (VKAPI_PTR *PFN_vkGetFenceFdKHR)(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd); +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2* pProperties); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceFdKHR( - VkDevice device, - const VkImportFenceFdInfoKHR* pImportFenceFdInfo); +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2* pFormatProperties); -VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceFdKHR( - VkDevice device, - const VkFenceGetFdInfoKHR* pGetFdInfo, - int* pFd); -#endif +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, + VkImageFormatProperties2* pImageFormatProperties); +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2KHR( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties2* pQueueFamilyProperties); -#define VK_KHR_performance_query 1 -#define VK_KHR_PERFORMANCE_QUERY_SPEC_VERSION 1 -#define VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME "VK_KHR_performance_query" +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2* pMemoryProperties); -typedef enum VkPerformanceCounterUnitKHR { - VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR = 0, - VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR = 1, - VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR = 2, - VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR = 3, - VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR = 4, - VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR = 5, - VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR = 6, - VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR = 7, - VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR = 8, - VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR = 9, - VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR = 10, - VK_PERFORMANCE_COUNTER_UNIT_MAX_ENUM_KHR = 0x7FFFFFFF -} VkPerformanceCounterUnitKHR; +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties2* pProperties); +#endif -typedef enum VkPerformanceCounterScopeKHR { - VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR = 0, - 
VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR = 1, - VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR = 2, - VK_QUERY_SCOPE_COMMAND_BUFFER_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR, - VK_QUERY_SCOPE_RENDER_PASS_KHR = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR, - VK_QUERY_SCOPE_COMMAND_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR, - VK_PERFORMANCE_COUNTER_SCOPE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkPerformanceCounterScopeKHR; -typedef enum VkPerformanceCounterStorageKHR { - VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR = 0, - VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR = 1, - VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR = 2, - VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR = 3, - VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR = 4, - VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR = 5, - VK_PERFORMANCE_COUNTER_STORAGE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkPerformanceCounterStorageKHR; +// VK_KHR_device_group is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_device_group 1 +#define VK_KHR_DEVICE_GROUP_SPEC_VERSION 4 +#define VK_KHR_DEVICE_GROUP_EXTENSION_NAME "VK_KHR_device_group" +typedef VkPeerMemoryFeatureFlags VkPeerMemoryFeatureFlagsKHR; -typedef enum VkPerformanceCounterDescriptionFlagBitsKHR { - VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR = 0x00000001, - VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR = 0x00000002, - VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR, - VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR, - VK_PERFORMANCE_COUNTER_DESCRIPTION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkPerformanceCounterDescriptionFlagBitsKHR; -typedef VkFlags VkPerformanceCounterDescriptionFlagsKHR; +typedef VkPeerMemoryFeatureFlagBits VkPeerMemoryFeatureFlagBitsKHR; -typedef enum VkAcquireProfilingLockFlagBitsKHR { - VK_ACQUIRE_PROFILING_LOCK_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkAcquireProfilingLockFlagBitsKHR; -typedef VkFlags VkAcquireProfilingLockFlagsKHR; -typedef struct VkPhysicalDevicePerformanceQueryFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 performanceCounterQueryPools; - VkBool32 performanceCounterMultipleQueryPools; -} VkPhysicalDevicePerformanceQueryFeaturesKHR; +typedef VkMemoryAllocateFlags VkMemoryAllocateFlagsKHR; -typedef struct VkPhysicalDevicePerformanceQueryPropertiesKHR { - VkStructureType sType; - void* pNext; - VkBool32 allowCommandBufferQueryCopies; -} VkPhysicalDevicePerformanceQueryPropertiesKHR; +typedef VkMemoryAllocateFlagBits VkMemoryAllocateFlagBitsKHR; -typedef struct VkPerformanceCounterKHR { - VkStructureType sType; - void* pNext; - VkPerformanceCounterUnitKHR unit; - VkPerformanceCounterScopeKHR scope; - VkPerformanceCounterStorageKHR storage; - uint8_t uuid[VK_UUID_SIZE]; -} VkPerformanceCounterKHR; +typedef VkMemoryAllocateFlagsInfo VkMemoryAllocateFlagsInfoKHR; -typedef struct VkPerformanceCounterDescriptionKHR { - VkStructureType sType; - void* pNext; - VkPerformanceCounterDescriptionFlagsKHR flags; - char name[VK_MAX_DESCRIPTION_SIZE]; - char category[VK_MAX_DESCRIPTION_SIZE]; - char description[VK_MAX_DESCRIPTION_SIZE]; -} VkPerformanceCounterDescriptionKHR; - -typedef struct VkQueryPoolPerformanceCreateInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t queueFamilyIndex; - uint32_t counterIndexCount; - const uint32_t* pCounterIndices; -} VkQueryPoolPerformanceCreateInfoKHR; +typedef VkDeviceGroupRenderPassBeginInfo 
VkDeviceGroupRenderPassBeginInfoKHR; -typedef union VkPerformanceCounterResultKHR { - int32_t int32; - int64_t int64; - uint32_t uint32; - uint64_t uint64; - float float32; - double float64; -} VkPerformanceCounterResultKHR; +typedef VkDeviceGroupCommandBufferBeginInfo VkDeviceGroupCommandBufferBeginInfoKHR; -typedef struct VkAcquireProfilingLockInfoKHR { - VkStructureType sType; - const void* pNext; - VkAcquireProfilingLockFlagsKHR flags; - uint64_t timeout; -} VkAcquireProfilingLockInfoKHR; +typedef VkDeviceGroupSubmitInfo VkDeviceGroupSubmitInfoKHR; -typedef struct VkPerformanceQuerySubmitInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t counterPassIndex; -} VkPerformanceQuerySubmitInfoKHR; +typedef VkDeviceGroupBindSparseInfo VkDeviceGroupBindSparseInfoKHR; -typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR)(VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses); -typedef VkResult (VKAPI_PTR *PFN_vkAcquireProfilingLockKHR)(VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo); -typedef void (VKAPI_PTR *PFN_vkReleaseProfilingLockKHR)(VkDevice device); +typedef VkBindBufferMemoryDeviceGroupInfo VkBindBufferMemoryDeviceGroupInfoKHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( - VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - uint32_t* pCounterCount, - VkPerformanceCounterKHR* pCounters, - VkPerformanceCounterDescriptionKHR* pCounterDescriptions); +typedef VkBindImageMemoryDeviceGroupInfo VkBindImageMemoryDeviceGroupInfoKHR; -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( - VkPhysicalDevice physicalDevice, - const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, - uint32_t* pNumPasses); +typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); +typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMaskKHR)(VkCommandBuffer commandBuffer, uint32_t deviceMask); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchBaseKHR)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); -VKAPI_ATTR VkResult VKAPI_CALL vkAcquireProfilingLockKHR( +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, - const VkAcquireProfilingLockInfoKHR* pInfo); - -VKAPI_ATTR void VKAPI_CALL vkReleaseProfilingLockKHR( - VkDevice device); -#endif - - -#define VK_KHR_maintenance2 1 -#define VK_KHR_MAINTENANCE_2_SPEC_VERSION 1 -#define VK_KHR_MAINTENANCE_2_EXTENSION_NAME "VK_KHR_maintenance2" -#define VK_KHR_MAINTENANCE2_SPEC_VERSION VK_KHR_MAINTENANCE_2_SPEC_VERSION -#define VK_KHR_MAINTENANCE2_EXTENSION_NAME VK_KHR_MAINTENANCE_2_EXTENSION_NAME -typedef VkPointClippingBehavior VkPointClippingBehaviorKHR; + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); -typedef VkTessellationDomainOrigin 
VkTessellationDomainOriginKHR; +VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMaskKHR( + VkCommandBuffer commandBuffer, + uint32_t deviceMask); -typedef VkPhysicalDevicePointClippingProperties VkPhysicalDevicePointClippingPropertiesKHR; +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBaseKHR( + VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); +#endif -typedef VkRenderPassInputAttachmentAspectCreateInfo VkRenderPassInputAttachmentAspectCreateInfoKHR; -typedef VkInputAttachmentAspectReference VkInputAttachmentAspectReferenceKHR; +// VK_KHR_shader_draw_parameters is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_draw_parameters 1 +#define VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION 1 +#define VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME "VK_KHR_shader_draw_parameters" -typedef VkImageViewUsageCreateInfo VkImageViewUsageCreateInfoKHR; -typedef VkPipelineTessellationDomainOriginStateCreateInfo VkPipelineTessellationDomainOriginStateCreateInfoKHR; +// VK_KHR_maintenance1 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_maintenance1 1 +#define VK_KHR_MAINTENANCE_1_SPEC_VERSION 2 +#define VK_KHR_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_maintenance1" +// VK_KHR_MAINTENANCE1_SPEC_VERSION is a deprecated alias +#define VK_KHR_MAINTENANCE1_SPEC_VERSION VK_KHR_MAINTENANCE_1_SPEC_VERSION +// VK_KHR_MAINTENANCE1_EXTENSION_NAME is a deprecated alias +#define VK_KHR_MAINTENANCE1_EXTENSION_NAME VK_KHR_MAINTENANCE_1_EXTENSION_NAME +typedef VkCommandPoolTrimFlags VkCommandPoolTrimFlagsKHR; +typedef void (VKAPI_PTR *PFN_vkTrimCommandPoolKHR)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkTrimCommandPoolKHR( + VkDevice device, + VkCommandPool commandPool, + VkCommandPoolTrimFlags flags); +#endif -#define VK_KHR_get_surface_capabilities2 1 -#define VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION 1 -#define VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME "VK_KHR_get_surface_capabilities2" -typedef struct VkPhysicalDeviceSurfaceInfo2KHR { - VkStructureType sType; - const void* pNext; - VkSurfaceKHR surface; -} VkPhysicalDeviceSurfaceInfo2KHR; -typedef struct VkSurfaceCapabilities2KHR { - VkStructureType sType; - void* pNext; - VkSurfaceCapabilitiesKHR surfaceCapabilities; -} VkSurfaceCapabilities2KHR; +// VK_KHR_device_group_creation is a preprocessor guard. Do not pass it to API calls. 
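
/*
 * Illustrative usage sketch, not part of the generated header or of this patch:
 * loading and calling vkTrimCommandPoolKHR (VK_KHR_maintenance1, declared
 * above) through vkGetDeviceProcAddr. The device and command pool handles are
 * assumed to exist for the example.
 */
static void exampleTrimCommandPool(VkDevice device, VkCommandPool pool)
{
    PFN_vkTrimCommandPoolKHR pfnTrim =
        (PFN_vkTrimCommandPoolKHR)vkGetDeviceProcAddr(device, "vkTrimCommandPoolKHR");
    if (pfnTrim != NULL) {
        /* Returns unused internal pool allocations to the system;
           the flags parameter is reserved and must be 0. */
        pfnTrim(device, pool, 0);
    }
}
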
+#define VK_KHR_device_group_creation 1 +#define VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION 1 +#define VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME "VK_KHR_device_group_creation" +#define VK_MAX_DEVICE_GROUP_SIZE_KHR VK_MAX_DEVICE_GROUP_SIZE +typedef VkPhysicalDeviceGroupProperties VkPhysicalDeviceGroupPropertiesKHR; -typedef struct VkSurfaceFormat2KHR { - VkStructureType sType; - void* pNext; - VkSurfaceFormatKHR surfaceFormat; -} VkSurfaceFormat2KHR; +typedef VkDeviceGroupDeviceCreateInfo VkDeviceGroupDeviceCreateInfoKHR; -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormats2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats); +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroupsKHR)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2KHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, - VkSurfaceCapabilities2KHR* pSurfaceCapabilities); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormats2KHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, - uint32_t* pSurfaceFormatCount, - VkSurfaceFormat2KHR* pSurfaceFormats); +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroupsKHR( + VkInstance instance, + uint32_t* pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); #endif -#define VK_KHR_variable_pointers 1 -#define VK_KHR_VARIABLE_POINTERS_SPEC_VERSION 1 -#define VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME "VK_KHR_variable_pointers" -typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointerFeaturesKHR; - -typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointersFeaturesKHR; - +// VK_KHR_external_memory_capabilities is a preprocessor guard. Do not pass it to API calls. 
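
/*
 * Illustrative usage sketch, not part of the generated header or of this patch:
 * the usual two-call enumeration pattern with vkEnumeratePhysicalDeviceGroupsKHR,
 * declared above. The instance handle and the fixed array bound of 8 are
 * assumptions for the example.
 */
static uint32_t exampleCountDeviceGroups(VkInstance instance)
{
    uint32_t count = 0;
    /* First call: query how many physical-device groups exist. */
    vkEnumeratePhysicalDeviceGroupsKHR(instance, &count, NULL);

    VkPhysicalDeviceGroupPropertiesKHR groups[8];
    if (count > 8) count = 8;
    for (uint32_t i = 0; i < count; ++i) {
        groups[i].sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR;
        groups[i].pNext = NULL;
    }
    /* Second call: fill the property structures. */
    vkEnumeratePhysicalDeviceGroupsKHR(instance, &count, groups);
    return count;
}
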
+#define VK_KHR_external_memory_capabilities 1 +#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_memory_capabilities" +#define VK_LUID_SIZE_KHR VK_LUID_SIZE +typedef VkExternalMemoryHandleTypeFlags VkExternalMemoryHandleTypeFlagsKHR; +typedef VkExternalMemoryHandleTypeFlagBits VkExternalMemoryHandleTypeFlagBitsKHR; -#define VK_KHR_get_display_properties2 1 -#define VK_KHR_GET_DISPLAY_PROPERTIES_2_SPEC_VERSION 1 -#define VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_display_properties2" -typedef struct VkDisplayProperties2KHR { - VkStructureType sType; - void* pNext; - VkDisplayPropertiesKHR displayProperties; -} VkDisplayProperties2KHR; +typedef VkExternalMemoryFeatureFlags VkExternalMemoryFeatureFlagsKHR; -typedef struct VkDisplayPlaneProperties2KHR { - VkStructureType sType; - void* pNext; - VkDisplayPlanePropertiesKHR displayPlaneProperties; -} VkDisplayPlaneProperties2KHR; +typedef VkExternalMemoryFeatureFlagBits VkExternalMemoryFeatureFlagBitsKHR; -typedef struct VkDisplayModeProperties2KHR { - VkStructureType sType; - void* pNext; - VkDisplayModePropertiesKHR displayModeProperties; -} VkDisplayModeProperties2KHR; +typedef VkExternalMemoryProperties VkExternalMemoryPropertiesKHR; -typedef struct VkDisplayPlaneInfo2KHR { - VkStructureType sType; - const void* pNext; - VkDisplayModeKHR mode; - uint32_t planeIndex; -} VkDisplayPlaneInfo2KHR; +typedef VkPhysicalDeviceExternalImageFormatInfo VkPhysicalDeviceExternalImageFormatInfoKHR; -typedef struct VkDisplayPlaneCapabilities2KHR { - VkStructureType sType; - void* pNext; - VkDisplayPlaneCapabilitiesKHR capabilities; -} VkDisplayPlaneCapabilities2KHR; +typedef VkExternalImageFormatProperties VkExternalImageFormatPropertiesKHR; -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModeProperties2KHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities); +typedef VkPhysicalDeviceExternalBufferInfo VkPhysicalDeviceExternalBufferInfoKHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayProperties2KHR( - VkPhysicalDevice physicalDevice, - uint32_t* pPropertyCount, - VkDisplayProperties2KHR* pProperties); +typedef VkExternalBufferProperties VkExternalBufferPropertiesKHR; -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlaneProperties2KHR( - VkPhysicalDevice physicalDevice, - uint32_t* pPropertyCount, - VkDisplayPlaneProperties2KHR* pProperties); +typedef VkPhysicalDeviceIDProperties VkPhysicalDeviceIDPropertiesKHR; -VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModeProperties2KHR( - VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - uint32_t* pPropertyCount, - VkDisplayModeProperties2KHR* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, 
VkExternalBufferProperties* pExternalBufferProperties); -VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilities2KHR( +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, - const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, - VkDisplayPlaneCapabilities2KHR* pCapabilities); + const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, + VkExternalBufferProperties* pExternalBufferProperties); #endif -#define VK_KHR_dedicated_allocation 1 -#define VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION 3 -#define VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_KHR_dedicated_allocation" -typedef VkMemoryDedicatedRequirements VkMemoryDedicatedRequirementsKHR; - -typedef VkMemoryDedicatedAllocateInfo VkMemoryDedicatedAllocateInfoKHR; - +// VK_KHR_external_memory is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_memory 1 +#define VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME "VK_KHR_external_memory" +#define VK_QUEUE_FAMILY_EXTERNAL_KHR VK_QUEUE_FAMILY_EXTERNAL +typedef VkExternalMemoryImageCreateInfo VkExternalMemoryImageCreateInfoKHR; +typedef VkExternalMemoryBufferCreateInfo VkExternalMemoryBufferCreateInfoKHR; -#define VK_KHR_storage_buffer_storage_class 1 -#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION 1 -#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME "VK_KHR_storage_buffer_storage_class" +typedef VkExportMemoryAllocateInfo VkExportMemoryAllocateInfoKHR; -#define VK_KHR_relaxed_block_layout 1 -#define VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION 1 -#define VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME "VK_KHR_relaxed_block_layout" +// VK_KHR_external_memory_fd is a preprocessor guard. Do not pass it to API calls. 
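
/*
 * Illustrative usage sketch, not part of the generated header or of this patch:
 * using vkGetPhysicalDeviceExternalBufferPropertiesKHR (declared above) to ask
 * whether a transfer-source buffer can be exported as an opaque POSIX fd. The
 * physical device handle and the chosen usage bit are assumptions for the example.
 */
static VkBool32 exampleSupportsOpaqueFdExport(VkPhysicalDevice physicalDevice)
{
    VkPhysicalDeviceExternalBufferInfoKHR info = { 0 };
    info.sType      = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR;
    info.usage      = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
    info.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;

    VkExternalBufferPropertiesKHR props = { 0 };
    props.sType = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR;

    vkGetPhysicalDeviceExternalBufferPropertiesKHR(physicalDevice, &info, &props);
    return (props.externalMemoryProperties.externalMemoryFeatures &
            VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR) != 0;
}
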
+#define VK_KHR_external_memory_fd 1 +#define VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME "VK_KHR_external_memory_fd" +typedef struct VkImportMemoryFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + int fd; +} VkImportMemoryFdInfoKHR; -#define VK_KHR_get_memory_requirements2 1 -#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION 1 -#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME "VK_KHR_get_memory_requirements2" -typedef VkBufferMemoryRequirementsInfo2 VkBufferMemoryRequirementsInfo2KHR; - -typedef VkImageMemoryRequirementsInfo2 VkImageMemoryRequirementsInfo2KHR; - -typedef VkImageSparseMemoryRequirementsInfo2 VkImageSparseMemoryRequirementsInfo2KHR; - -typedef VkMemoryRequirements2 VkMemoryRequirements2KHR; +typedef struct VkMemoryFdPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryFdPropertiesKHR; -typedef VkSparseImageMemoryRequirements2 VkSparseImageMemoryRequirements2KHR; +typedef struct VkMemoryGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetFdInfoKHR; -typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2KHR)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); -typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2KHR)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); -typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2KHR)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdKHR)(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdPropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2KHR( - VkDevice device, - const VkImageMemoryRequirementsInfo2* pInfo, - VkMemoryRequirements2* pMemoryRequirements); - -VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2KHR( +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdKHR( VkDevice device, - const VkBufferMemoryRequirementsInfo2* pInfo, - VkMemoryRequirements2* pMemoryRequirements); + const VkMemoryGetFdInfoKHR* pGetFdInfo, + int* pFd); -VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2KHR( +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR( VkDevice device, - const VkImageSparseMemoryRequirementsInfo2* pInfo, - uint32_t* pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + VkExternalMemoryHandleTypeFlagBits handleType, + int fd, + VkMemoryFdPropertiesKHR* pMemoryFdProperties); #endif -#define VK_KHR_image_format_list 1 -#define VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION 1 -#define VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME "VK_KHR_image_format_list" -typedef VkImageFormatListCreateInfo VkImageFormatListCreateInfoKHR; - - - -#define VK_KHR_sampler_ycbcr_conversion 1 -typedef VkSamplerYcbcrConversion VkSamplerYcbcrConversionKHR; +// VK_KHR_external_semaphore_capabilities is a preprocessor guard. Do not pass it to API calls. 
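
/*
 * Illustrative usage sketch, not part of the generated header or of this patch:
 * exporting a VkDeviceMemory allocation as a POSIX fd with vkGetMemoryFdKHR,
 * declared above. The memory is assumed to have been allocated with a matching
 * VkExportMemoryAllocateInfoKHR chained into its allocate info.
 */
static int exampleExportMemoryFd(VkDevice device, VkDeviceMemory memory)
{
    VkMemoryGetFdInfoKHR getInfo = { 0 };
    getInfo.sType      = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR;
    getInfo.memory     = memory;
    getInfo.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR;

    int fd = -1;
    if (vkGetMemoryFdKHR(device, &getInfo, &fd) != VK_SUCCESS) {
        return -1;
    }
    /* On success the caller owns fd and is responsible for closing it. */
    return fd;
}
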
+#define VK_KHR_external_semaphore_capabilities 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_semaphore_capabilities" +typedef VkExternalSemaphoreHandleTypeFlags VkExternalSemaphoreHandleTypeFlagsKHR; -#define VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION 14 -#define VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME "VK_KHR_sampler_ycbcr_conversion" -typedef VkSamplerYcbcrModelConversion VkSamplerYcbcrModelConversionKHR; +typedef VkExternalSemaphoreHandleTypeFlagBits VkExternalSemaphoreHandleTypeFlagBitsKHR; -typedef VkSamplerYcbcrRange VkSamplerYcbcrRangeKHR; +typedef VkExternalSemaphoreFeatureFlags VkExternalSemaphoreFeatureFlagsKHR; -typedef VkChromaLocation VkChromaLocationKHR; +typedef VkExternalSemaphoreFeatureFlagBits VkExternalSemaphoreFeatureFlagBitsKHR; -typedef VkSamplerYcbcrConversionCreateInfo VkSamplerYcbcrConversionCreateInfoKHR; +typedef VkPhysicalDeviceExternalSemaphoreInfo VkPhysicalDeviceExternalSemaphoreInfoKHR; -typedef VkSamplerYcbcrConversionInfo VkSamplerYcbcrConversionInfoKHR; +typedef VkExternalSemaphoreProperties VkExternalSemaphorePropertiesKHR; -typedef VkBindImagePlaneMemoryInfo VkBindImagePlaneMemoryInfoKHR; +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties); -typedef VkImagePlaneMemoryRequirementsInfo VkImagePlaneMemoryRequirementsInfoKHR; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, + VkExternalSemaphoreProperties* pExternalSemaphoreProperties); +#endif -typedef VkPhysicalDeviceSamplerYcbcrConversionFeatures VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR; -typedef VkSamplerYcbcrConversionImageFormatProperties VkSamplerYcbcrConversionImageFormatPropertiesKHR; +// VK_KHR_external_semaphore is a preprocessor guard. Do not pass it to API calls. 
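
/*
 * Illustrative usage sketch, not part of the generated header or of this patch:
 * querying with vkGetPhysicalDeviceExternalSemaphorePropertiesKHR (declared
 * above) whether semaphores can be exported as sync file descriptors. The
 * physical device handle is assumed for the example.
 */
static VkBool32 exampleSemaphoreSyncFdExportable(VkPhysicalDevice physicalDevice)
{
    VkPhysicalDeviceExternalSemaphoreInfoKHR info = { 0 };
    info.sType      = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR;
    info.handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR;

    VkExternalSemaphorePropertiesKHR props = { 0 };
    props.sType = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR;

    vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(physicalDevice, &info, &props);
    return (props.externalSemaphoreFeatures &
            VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR) != 0;
}
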
+#define VK_KHR_external_semaphore 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_KHR_external_semaphore" +typedef VkSemaphoreImportFlags VkSemaphoreImportFlagsKHR; -typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversionKHR)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion); -typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversionKHR)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator); +typedef VkSemaphoreImportFlagBits VkSemaphoreImportFlagBitsKHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversionKHR( - VkDevice device, - const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSamplerYcbcrConversion* pYcbcrConversion); +typedef VkExportSemaphoreCreateInfo VkExportSemaphoreCreateInfoKHR; -VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversionKHR( - VkDevice device, - VkSamplerYcbcrConversion ycbcrConversion, - const VkAllocationCallbacks* pAllocator); -#endif -#define VK_KHR_bind_memory2 1 -#define VK_KHR_BIND_MEMORY_2_SPEC_VERSION 1 -#define VK_KHR_BIND_MEMORY_2_EXTENSION_NAME "VK_KHR_bind_memory2" -typedef VkBindBufferMemoryInfo VkBindBufferMemoryInfoKHR; +// VK_KHR_external_semaphore_fd is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_semaphore_fd 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME "VK_KHR_external_semaphore_fd" +typedef struct VkImportSemaphoreFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkSemaphoreImportFlags flags; + VkExternalSemaphoreHandleTypeFlagBits handleType; + int fd; +} VkImportSemaphoreFdInfoKHR; -typedef VkBindImageMemoryInfo VkBindImageMemoryInfoKHR; +typedef struct VkSemaphoreGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkSemaphoreGetFdInfoKHR; -typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos); -typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos); +typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreFdKHR)(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreFdKHR)(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2KHR( +VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR( VkDevice device, - uint32_t bindInfoCount, - const VkBindBufferMemoryInfo* pBindInfos); + const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); -VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2KHR( +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR( VkDevice device, - uint32_t bindInfoCount, - const VkBindImageMemoryInfo* pBindInfos); + const VkSemaphoreGetFdInfoKHR* pGetFdInfo, + int* pFd); #endif -#define VK_KHR_maintenance3 1 -#define VK_KHR_MAINTENANCE_3_SPEC_VERSION 1 -#define VK_KHR_MAINTENANCE_3_EXTENSION_NAME "VK_KHR_maintenance3" -#define VK_KHR_MAINTENANCE3_SPEC_VERSION VK_KHR_MAINTENANCE_3_SPEC_VERSION -#define VK_KHR_MAINTENANCE3_EXTENSION_NAME VK_KHR_MAINTENANCE_3_EXTENSION_NAME -typedef 
VkPhysicalDeviceMaintenance3Properties VkPhysicalDeviceMaintenance3PropertiesKHR; - -typedef VkDescriptorSetLayoutSupport VkDescriptorSetLayoutSupportKHR; - -typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupportKHR)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupportKHR( - VkDevice device, - const VkDescriptorSetLayoutCreateInfo* pCreateInfo, - VkDescriptorSetLayoutSupport* pSupport); -#endif - +// VK_KHR_push_descriptor is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_push_descriptor 1 +#define VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION 2 +#define VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME "VK_KHR_push_descriptor" +typedef VkPhysicalDevicePushDescriptorProperties VkPhysicalDevicePushDescriptorPropertiesKHR; -#define VK_KHR_draw_indirect_count 1 -#define VK_KHR_DRAW_INDIRECT_COUNT_SPEC_VERSION 1 -#define VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_KHR_draw_indirect_count" -typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); -typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetKHR)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplateKHR)(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountKHR( +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride); + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet* pDescriptorWrites); -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountKHR( +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride); + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void* pData); #endif -#define VK_KHR_shader_subgroup_extended_types 1 -#define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION 1 -#define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME "VK_KHR_shader_subgroup_extended_types" -typedef VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR; +// VK_KHR_shader_float16_int8 is a preprocessor guard. Do not pass it to API calls. 
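
/*
 * Illustrative usage sketch, not part of the generated header or of this patch:
 * pushing a single uniform-buffer descriptor with vkCmdPushDescriptorSetKHR,
 * declared above. The command buffer, pipeline layout, buffer, and the use of
 * set 0 / binding 0 are assumptions for the example.
 */
static void examplePushUniformBuffer(VkCommandBuffer cmd,
                                     VkPipelineLayout layout,
                                     VkBuffer ubo)
{
    VkDescriptorBufferInfo bufferInfo = { 0 };
    bufferInfo.buffer = ubo;
    bufferInfo.offset = 0;
    bufferInfo.range  = VK_WHOLE_SIZE;

    VkWriteDescriptorSet write = { 0 };
    write.sType           = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    write.dstBinding      = 0;   /* dstSet is ignored for push descriptors */
    write.descriptorCount = 1;
    write.descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    write.pBufferInfo     = &bufferInfo;

    vkCmdPushDescriptorSetKHR(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, layout,
                              0 /* set */, 1, &write);
}
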
+#define VK_KHR_shader_float16_int8 1 +#define VK_KHR_SHADER_FLOAT16_INT8_SPEC_VERSION 1 +#define VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME "VK_KHR_shader_float16_int8" +typedef VkPhysicalDeviceShaderFloat16Int8Features VkPhysicalDeviceShaderFloat16Int8FeaturesKHR; +typedef VkPhysicalDeviceShaderFloat16Int8Features VkPhysicalDeviceFloat16Int8FeaturesKHR; -#define VK_KHR_8bit_storage 1 -#define VK_KHR_8BIT_STORAGE_SPEC_VERSION 1 -#define VK_KHR_8BIT_STORAGE_EXTENSION_NAME "VK_KHR_8bit_storage" -typedef VkPhysicalDevice8BitStorageFeatures VkPhysicalDevice8BitStorageFeaturesKHR; +// VK_KHR_16bit_storage is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_16bit_storage 1 +#define VK_KHR_16BIT_STORAGE_SPEC_VERSION 1 +#define VK_KHR_16BIT_STORAGE_EXTENSION_NAME "VK_KHR_16bit_storage" +typedef VkPhysicalDevice16BitStorageFeatures VkPhysicalDevice16BitStorageFeaturesKHR; -#define VK_KHR_shader_atomic_int64 1 -#define VK_KHR_SHADER_ATOMIC_INT64_SPEC_VERSION 1 -#define VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME "VK_KHR_shader_atomic_int64" -typedef VkPhysicalDeviceShaderAtomicInt64Features VkPhysicalDeviceShaderAtomicInt64FeaturesKHR; +// VK_KHR_incremental_present is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_incremental_present 1 +#define VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION 2 +#define VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME "VK_KHR_incremental_present" +typedef struct VkRectLayerKHR { + VkOffset2D offset; + VkExtent2D extent; + uint32_t layer; +} VkRectLayerKHR; +typedef struct VkPresentRegionKHR { + uint32_t rectangleCount; + const VkRectLayerKHR* pRectangles; +} VkPresentRegionKHR; -#define VK_KHR_shader_clock 1 -#define VK_KHR_SHADER_CLOCK_SPEC_VERSION 1 -#define VK_KHR_SHADER_CLOCK_EXTENSION_NAME "VK_KHR_shader_clock" -typedef struct VkPhysicalDeviceShaderClockFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 shaderSubgroupClock; - VkBool32 shaderDeviceClock; -} VkPhysicalDeviceShaderClockFeaturesKHR; +typedef struct VkPresentRegionsKHR { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkPresentRegionKHR* pRegions; +} VkPresentRegionsKHR; -#define VK_KHR_global_priority 1 -#define VK_MAX_GLOBAL_PRIORITY_SIZE_KHR 16U -#define VK_KHR_GLOBAL_PRIORITY_SPEC_VERSION 1 -#define VK_KHR_GLOBAL_PRIORITY_EXTENSION_NAME "VK_KHR_global_priority" +// VK_KHR_descriptor_update_template is a preprocessor guard. Do not pass it to API calls. 
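
/*
 * Illustrative usage sketch, not part of the generated header or of this patch:
 * chaining VkPresentRegionsKHR (VK_KHR_incremental_present, declared above)
 * into a VkPresentInfoKHR so the presentation engine knows only a sub-rectangle
 * changed. The queue, swapchain, semaphore, image index, and the 128x128 dirty
 * rectangle are assumptions for the example.
 */
static VkResult examplePresentDirtyRect(VkQueue queue, VkSwapchainKHR swapchain,
                                        VkSemaphore renderDone, uint32_t imageIndex)
{
    VkRectLayerKHR dirty = { 0 };
    dirty.extent.width  = 128;
    dirty.extent.height = 128;
    dirty.layer         = 0;

    VkPresentRegionKHR region = { 0 };
    region.rectangleCount = 1;
    region.pRectangles    = &dirty;

    VkPresentRegionsKHR regions = { 0 };
    regions.sType          = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR;
    regions.swapchainCount = 1;
    regions.pRegions       = &region;

    VkPresentInfoKHR present = { 0 };
    present.sType              = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
    present.pNext              = &regions;
    present.waitSemaphoreCount = 1;
    present.pWaitSemaphores    = &renderDone;
    present.swapchainCount     = 1;
    present.pSwapchains        = &swapchain;
    present.pImageIndices      = &imageIndex;

    return vkQueuePresentKHR(queue, &present);
}
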
+#define VK_KHR_descriptor_update_template 1 +typedef VkDescriptorUpdateTemplate VkDescriptorUpdateTemplateKHR; -typedef enum VkQueueGlobalPriorityKHR { - VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR = 128, - VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR = 256, - VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR = 512, - VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR = 1024, - VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT = VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR, - VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR, - VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT = VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR, - VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR, - VK_QUEUE_GLOBAL_PRIORITY_MAX_ENUM_KHR = 0x7FFFFFFF -} VkQueueGlobalPriorityKHR; -typedef struct VkDeviceQueueGlobalPriorityCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkQueueGlobalPriorityKHR globalPriority; -} VkDeviceQueueGlobalPriorityCreateInfoKHR; +#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION 1 +#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME "VK_KHR_descriptor_update_template" +typedef VkDescriptorUpdateTemplateType VkDescriptorUpdateTemplateTypeKHR; -typedef struct VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 globalPriorityQuery; -} VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR; +typedef VkDescriptorUpdateTemplateCreateFlags VkDescriptorUpdateTemplateCreateFlagsKHR; -typedef struct VkQueueFamilyGlobalPriorityPropertiesKHR { - VkStructureType sType; - void* pNext; - uint32_t priorityCount; - VkQueueGlobalPriorityKHR priorities[VK_MAX_GLOBAL_PRIORITY_SIZE_KHR]; -} VkQueueFamilyGlobalPriorityPropertiesKHR; +typedef VkDescriptorUpdateTemplateEntry VkDescriptorUpdateTemplateEntryKHR; +typedef VkDescriptorUpdateTemplateCreateInfo VkDescriptorUpdateTemplateCreateInfoKHR; +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplateKHR)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplateKHR)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplateKHR)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData); -#define VK_KHR_driver_properties 1 -#define VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION 1 -#define VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME "VK_KHR_driver_properties" -#define VK_MAX_DRIVER_NAME_SIZE_KHR VK_MAX_DRIVER_NAME_SIZE -#define VK_MAX_DRIVER_INFO_SIZE_KHR VK_MAX_DRIVER_INFO_SIZE -typedef VkDriverId VkDriverIdKHR; - -typedef VkConformanceVersion VkConformanceVersionKHR; - -typedef VkPhysicalDeviceDriverProperties VkPhysicalDeviceDriverPropertiesKHR; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplateKHR( + VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplateKHR( + VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks* pAllocator); +VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplateKHR( + VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void* pData); +#endif -#define 
VK_KHR_shader_float_controls 1 -#define VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION 4 -#define VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME "VK_KHR_shader_float_controls" -typedef VkShaderFloatControlsIndependence VkShaderFloatControlsIndependenceKHR; -typedef VkPhysicalDeviceFloatControlsProperties VkPhysicalDeviceFloatControlsPropertiesKHR; +// VK_KHR_imageless_framebuffer is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_imageless_framebuffer 1 +#define VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION 1 +#define VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME "VK_KHR_imageless_framebuffer" +typedef VkPhysicalDeviceImagelessFramebufferFeatures VkPhysicalDeviceImagelessFramebufferFeaturesKHR; +typedef VkFramebufferAttachmentsCreateInfo VkFramebufferAttachmentsCreateInfoKHR; +typedef VkFramebufferAttachmentImageInfo VkFramebufferAttachmentImageInfoKHR; -#define VK_KHR_depth_stencil_resolve 1 -#define VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION 1 -#define VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME "VK_KHR_depth_stencil_resolve" -typedef VkResolveModeFlagBits VkResolveModeFlagBitsKHR; +typedef VkRenderPassAttachmentBeginInfo VkRenderPassAttachmentBeginInfoKHR; -typedef VkResolveModeFlags VkResolveModeFlagsKHR; -typedef VkSubpassDescriptionDepthStencilResolve VkSubpassDescriptionDepthStencilResolveKHR; -typedef VkPhysicalDeviceDepthStencilResolveProperties VkPhysicalDeviceDepthStencilResolvePropertiesKHR; +// VK_KHR_create_renderpass2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_create_renderpass2 1 +#define VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION 1 +#define VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME "VK_KHR_create_renderpass2" +typedef VkRenderPassCreateInfo2 VkRenderPassCreateInfo2KHR; +typedef VkAttachmentDescription2 VkAttachmentDescription2KHR; +typedef VkAttachmentReference2 VkAttachmentReference2KHR; -#define VK_KHR_swapchain_mutable_format 1 -#define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION 1 -#define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME "VK_KHR_swapchain_mutable_format" +typedef VkSubpassDescription2 VkSubpassDescription2KHR; +typedef VkSubpassDependency2 VkSubpassDependency2KHR; -#define VK_KHR_timeline_semaphore 1 -#define VK_KHR_TIMELINE_SEMAPHORE_SPEC_VERSION 2 -#define VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME "VK_KHR_timeline_semaphore" -typedef VkSemaphoreType VkSemaphoreTypeKHR; +typedef VkSubpassBeginInfo VkSubpassBeginInfoKHR; -typedef VkSemaphoreWaitFlagBits VkSemaphoreWaitFlagBitsKHR; +typedef VkSubpassEndInfo VkSubpassEndInfoKHR; -typedef VkSemaphoreWaitFlags VkSemaphoreWaitFlagsKHR; +typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass2KHR)(VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo); +typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo); -typedef VkPhysicalDeviceTimelineSemaphoreFeatures VkPhysicalDeviceTimelineSemaphoreFeaturesKHR; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2KHR( + VkDevice device, + const VkRenderPassCreateInfo2* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + 
VkRenderPass* pRenderPass); -typedef VkPhysicalDeviceTimelineSemaphoreProperties VkPhysicalDeviceTimelineSemaphorePropertiesKHR; +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2KHR( + VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo* pRenderPassBegin, + const VkSubpassBeginInfo* pSubpassBeginInfo); -typedef VkSemaphoreTypeCreateInfo VkSemaphoreTypeCreateInfoKHR; +VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2KHR( + VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo* pSubpassBeginInfo, + const VkSubpassEndInfo* pSubpassEndInfo); -typedef VkTimelineSemaphoreSubmitInfo VkTimelineSemaphoreSubmitInfoKHR; +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2KHR( + VkCommandBuffer commandBuffer, + const VkSubpassEndInfo* pSubpassEndInfo); +#endif -typedef VkSemaphoreWaitInfo VkSemaphoreWaitInfoKHR; -typedef VkSemaphoreSignalInfo VkSemaphoreSignalInfoKHR; +// VK_KHR_shared_presentable_image is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shared_presentable_image 1 +#define VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION 1 +#define VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME "VK_KHR_shared_presentable_image" +typedef struct VkSharedPresentSurfaceCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkImageUsageFlags sharedPresentSupportedUsageFlags; +} VkSharedPresentSurfaceCapabilitiesKHR; -typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreCounterValueKHR)(VkDevice device, VkSemaphore semaphore, uint64_t* pValue); -typedef VkResult (VKAPI_PTR *PFN_vkWaitSemaphoresKHR)(VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout); -typedef VkResult (VKAPI_PTR *PFN_vkSignalSemaphoreKHR)(VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainStatusKHR)(VkDevice device, VkSwapchainKHR swapchain); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValueKHR( +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainStatusKHR( VkDevice device, - VkSemaphore semaphore, - uint64_t* pValue); + VkSwapchainKHR swapchain); +#endif -VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphoresKHR( - VkDevice device, - const VkSemaphoreWaitInfo* pWaitInfo, - uint64_t timeout); -VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphoreKHR( - VkDevice device, - const VkSemaphoreSignalInfo* pSignalInfo); -#endif +// VK_KHR_external_fence_capabilities is a preprocessor guard. Do not pass it to API calls. 
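
/*
 * Illustrative usage sketch, not part of the generated header or of this patch:
 * polling a shared-presentable swapchain with vkGetSwapchainStatusKHR, declared
 * above. The device and swapchain handles are assumed for the example.
 */
static VkBool32 exampleSwapchainStillOptimal(VkDevice device, VkSwapchainKHR swapchain)
{
    VkResult status = vkGetSwapchainStatusKHR(device, swapchain);
    /* VK_SUBOPTIMAL_KHR or VK_ERROR_OUT_OF_DATE_KHR suggest recreating the
       swapchain; VK_SUCCESS means it still matches the surface. */
    return status == VK_SUCCESS;
}
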
+#define VK_KHR_external_fence_capabilities 1 +#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_fence_capabilities" +typedef VkExternalFenceHandleTypeFlags VkExternalFenceHandleTypeFlagsKHR; +typedef VkExternalFenceHandleTypeFlagBits VkExternalFenceHandleTypeFlagBitsKHR; -#define VK_KHR_vulkan_memory_model 1 -#define VK_KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION 3 -#define VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME "VK_KHR_vulkan_memory_model" -typedef VkPhysicalDeviceVulkanMemoryModelFeatures VkPhysicalDeviceVulkanMemoryModelFeaturesKHR; +typedef VkExternalFenceFeatureFlags VkExternalFenceFeatureFlagsKHR; +typedef VkExternalFenceFeatureFlagBits VkExternalFenceFeatureFlagBitsKHR; +typedef VkPhysicalDeviceExternalFenceInfo VkPhysicalDeviceExternalFenceInfoKHR; -#define VK_KHR_shader_terminate_invocation 1 -#define VK_KHR_SHADER_TERMINATE_INVOCATION_SPEC_VERSION 1 -#define VK_KHR_SHADER_TERMINATE_INVOCATION_EXTENSION_NAME "VK_KHR_shader_terminate_invocation" -typedef VkPhysicalDeviceShaderTerminateInvocationFeatures VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR; +typedef VkExternalFenceProperties VkExternalFencePropertiesKHR; +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFencePropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, + VkExternalFenceProperties* pExternalFenceProperties); +#endif -#define VK_KHR_fragment_shading_rate 1 -#define VK_KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION 2 -#define VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME "VK_KHR_fragment_shading_rate" -typedef enum VkFragmentShadingRateCombinerOpKHR { - VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR = 0, - VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR = 1, - VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR = 2, - VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR = 3, - VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR = 4, - VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_ENUM_KHR = 0x7FFFFFFF -} VkFragmentShadingRateCombinerOpKHR; -typedef struct VkFragmentShadingRateAttachmentInfoKHR { - VkStructureType sType; - const void* pNext; - const VkAttachmentReference2* pFragmentShadingRateAttachment; - VkExtent2D shadingRateAttachmentTexelSize; -} VkFragmentShadingRateAttachmentInfoKHR; +// VK_KHR_external_fence is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_external_fence 1 +#define VK_KHR_EXTERNAL_FENCE_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME "VK_KHR_external_fence" +typedef VkFenceImportFlags VkFenceImportFlagsKHR; -typedef struct VkPipelineFragmentShadingRateStateCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkExtent2D fragmentSize; - VkFragmentShadingRateCombinerOpKHR combinerOps[2]; -} VkPipelineFragmentShadingRateStateCreateInfoKHR; +typedef VkFenceImportFlagBits VkFenceImportFlagBitsKHR; -typedef struct VkPhysicalDeviceFragmentShadingRateFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 pipelineFragmentShadingRate; - VkBool32 primitiveFragmentShadingRate; - VkBool32 attachmentFragmentShadingRate; -} VkPhysicalDeviceFragmentShadingRateFeaturesKHR; +typedef VkExportFenceCreateInfo VkExportFenceCreateInfoKHR; -typedef struct VkPhysicalDeviceFragmentShadingRatePropertiesKHR { - VkStructureType sType; - void* pNext; - VkExtent2D minFragmentShadingRateAttachmentTexelSize; - VkExtent2D maxFragmentShadingRateAttachmentTexelSize; - uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio; - VkBool32 primitiveFragmentShadingRateWithMultipleViewports; - VkBool32 layeredShadingRateAttachments; - VkBool32 fragmentShadingRateNonTrivialCombinerOps; - VkExtent2D maxFragmentSize; - uint32_t maxFragmentSizeAspectRatio; - uint32_t maxFragmentShadingRateCoverageSamples; - VkSampleCountFlagBits maxFragmentShadingRateRasterizationSamples; - VkBool32 fragmentShadingRateWithShaderDepthStencilWrites; - VkBool32 fragmentShadingRateWithSampleMask; - VkBool32 fragmentShadingRateWithShaderSampleMask; - VkBool32 fragmentShadingRateWithConservativeRasterization; - VkBool32 fragmentShadingRateWithFragmentShaderInterlock; - VkBool32 fragmentShadingRateWithCustomSampleLocations; - VkBool32 fragmentShadingRateStrictMultiplyCombiner; -} VkPhysicalDeviceFragmentShadingRatePropertiesKHR; -typedef struct VkPhysicalDeviceFragmentShadingRateKHR { - VkStructureType sType; - void* pNext; - VkSampleCountFlags sampleCounts; - VkExtent2D fragmentSize; -} VkPhysicalDeviceFragmentShadingRateKHR; -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pFragmentShadingRateCount, VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates); -typedef void (VKAPI_PTR *PFN_vkCmdSetFragmentShadingRateKHR)(VkCommandBuffer commandBuffer, const VkExtent2D* pFragmentSize, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); +// VK_KHR_external_fence_fd is a preprocessor guard. Do not pass it to API calls. 
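
/*
 * Illustrative usage sketch, not part of the generated header or of this patch:
 * creating a fence that can later be exported, by chaining
 * VkExportFenceCreateInfoKHR (declared above) into VkFenceCreateInfo. The
 * device handle and the sync-fd handle type are assumptions for the example.
 */
static VkResult exampleCreateExportableFence(VkDevice device, VkFence* pFence)
{
    VkExportFenceCreateInfoKHR exportInfo = { 0 };
    exportInfo.sType       = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR;
    exportInfo.handleTypes = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR;

    VkFenceCreateInfo createInfo = { 0 };
    createInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    createInfo.pNext = &exportInfo;

    return vkCreateFence(device, &createInfo, NULL, pFence);
}
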
+#define VK_KHR_external_fence_fd 1 +#define VK_KHR_EXTERNAL_FENCE_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME "VK_KHR_external_fence_fd" +typedef struct VkImportFenceFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkFenceImportFlags flags; + VkExternalFenceHandleTypeFlagBits handleType; + int fd; +} VkImportFenceFdInfoKHR; + +typedef struct VkFenceGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkExternalFenceHandleTypeFlagBits handleType; +} VkFenceGetFdInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkImportFenceFdKHR)(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetFenceFdKHR)(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceFragmentShadingRatesKHR( - VkPhysicalDevice physicalDevice, - uint32_t* pFragmentShadingRateCount, - VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates); +VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceFdKHR( + VkDevice device, + const VkImportFenceFdInfoKHR* pImportFenceFdInfo); -VKAPI_ATTR void VKAPI_CALL vkCmdSetFragmentShadingRateKHR( - VkCommandBuffer commandBuffer, - const VkExtent2D* pFragmentSize, - const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); +VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceFdKHR( + VkDevice device, + const VkFenceGetFdInfoKHR* pGetFdInfo, + int* pFd); #endif -#define VK_KHR_spirv_1_4 1 -#define VK_KHR_SPIRV_1_4_SPEC_VERSION 1 -#define VK_KHR_SPIRV_1_4_EXTENSION_NAME "VK_KHR_spirv_1_4" - +// VK_KHR_performance_query is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_performance_query 1 +#define VK_KHR_PERFORMANCE_QUERY_SPEC_VERSION 1 +#define VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME "VK_KHR_performance_query" -#define VK_KHR_surface_protected_capabilities 1 -#define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION 1 -#define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME "VK_KHR_surface_protected_capabilities" -typedef struct VkSurfaceProtectedCapabilitiesKHR { - VkStructureType sType; - const void* pNext; - VkBool32 supportsProtected; -} VkSurfaceProtectedCapabilitiesKHR; - - - -#define VK_KHR_separate_depth_stencil_layouts 1 -#define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION 1 -#define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME "VK_KHR_separate_depth_stencil_layouts" -typedef VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR; - -typedef VkAttachmentReferenceStencilLayout VkAttachmentReferenceStencilLayoutKHR; +typedef enum VkPerformanceCounterUnitKHR { + VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR = 0, + VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR = 1, + VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR = 2, + VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR = 3, + VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR = 4, + VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR = 5, + VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR = 6, + VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR = 7, + VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR = 8, + VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR = 9, + VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR = 10, + VK_PERFORMANCE_COUNTER_UNIT_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterUnitKHR; -typedef VkAttachmentDescriptionStencilLayout VkAttachmentDescriptionStencilLayoutKHR; +typedef enum VkPerformanceCounterScopeKHR { + VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR = 0, + VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR = 1, + 
VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR = 2, + // VK_QUERY_SCOPE_COMMAND_BUFFER_KHR is a deprecated alias + VK_QUERY_SCOPE_COMMAND_BUFFER_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR, + // VK_QUERY_SCOPE_RENDER_PASS_KHR is a deprecated alias + VK_QUERY_SCOPE_RENDER_PASS_KHR = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR, + // VK_QUERY_SCOPE_COMMAND_KHR is a deprecated alias + VK_QUERY_SCOPE_COMMAND_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR, + VK_PERFORMANCE_COUNTER_SCOPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterScopeKHR; +typedef enum VkPerformanceCounterStorageKHR { + VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR = 0, + VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR = 1, + VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR = 2, + VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR = 3, + VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR = 4, + VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR = 5, + VK_PERFORMANCE_COUNTER_STORAGE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterStorageKHR; +typedef enum VkPerformanceCounterDescriptionFlagBitsKHR { + VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR = 0x00000001, + VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR = 0x00000002, + // VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR is a deprecated alias + VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR, + // VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR is a deprecated alias + VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR, + VK_PERFORMANCE_COUNTER_DESCRIPTION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterDescriptionFlagBitsKHR; +typedef VkFlags VkPerformanceCounterDescriptionFlagsKHR; -#define VK_KHR_present_wait 1 -#define VK_KHR_PRESENT_WAIT_SPEC_VERSION 1 -#define VK_KHR_PRESENT_WAIT_EXTENSION_NAME "VK_KHR_present_wait" -typedef struct VkPhysicalDevicePresentWaitFeaturesKHR { +typedef enum VkAcquireProfilingLockFlagBitsKHR { + VK_ACQUIRE_PROFILING_LOCK_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAcquireProfilingLockFlagBitsKHR; +typedef VkFlags VkAcquireProfilingLockFlagsKHR; +typedef struct VkPhysicalDevicePerformanceQueryFeaturesKHR { VkStructureType sType; void* pNext; - VkBool32 presentWait; -} VkPhysicalDevicePresentWaitFeaturesKHR; - -typedef VkResult (VKAPI_PTR *PFN_vkWaitForPresentKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkWaitForPresentKHR( - VkDevice device, - VkSwapchainKHR swapchain, - uint64_t presentId, - uint64_t timeout); -#endif - - -#define VK_KHR_uniform_buffer_standard_layout 1 -#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION 1 -#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME "VK_KHR_uniform_buffer_standard_layout" -typedef VkPhysicalDeviceUniformBufferStandardLayoutFeatures VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR; + VkBool32 performanceCounterQueryPools; + VkBool32 performanceCounterMultipleQueryPools; +} VkPhysicalDevicePerformanceQueryFeaturesKHR; +typedef struct VkPhysicalDevicePerformanceQueryPropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 allowCommandBufferQueryCopies; +} VkPhysicalDevicePerformanceQueryPropertiesKHR; +typedef struct VkPerformanceCounterKHR { + VkStructureType sType; + void* pNext; + VkPerformanceCounterUnitKHR unit; + VkPerformanceCounterScopeKHR scope; + 
VkPerformanceCounterStorageKHR storage; + uint8_t uuid[VK_UUID_SIZE]; +} VkPerformanceCounterKHR; -#define VK_KHR_buffer_device_address 1 -#define VK_KHR_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 1 -#define VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_KHR_buffer_device_address" -typedef VkPhysicalDeviceBufferDeviceAddressFeatures VkPhysicalDeviceBufferDeviceAddressFeaturesKHR; +typedef struct VkPerformanceCounterDescriptionKHR { + VkStructureType sType; + void* pNext; + VkPerformanceCounterDescriptionFlagsKHR flags; + char name[VK_MAX_DESCRIPTION_SIZE]; + char category[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; +} VkPerformanceCounterDescriptionKHR; -typedef VkBufferDeviceAddressInfo VkBufferDeviceAddressInfoKHR; +typedef struct VkQueryPoolPerformanceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t queueFamilyIndex; + uint32_t counterIndexCount; + const uint32_t* pCounterIndices; +} VkQueryPoolPerformanceCreateInfoKHR; -typedef VkBufferOpaqueCaptureAddressCreateInfo VkBufferOpaqueCaptureAddressCreateInfoKHR; +typedef union VkPerformanceCounterResultKHR { + int32_t int32; + int64_t int64; + uint32_t uint32; + uint64_t uint64; + float float32; + double float64; +} VkPerformanceCounterResultKHR; -typedef VkMemoryOpaqueCaptureAddressAllocateInfo VkMemoryOpaqueCaptureAddressAllocateInfoKHR; +typedef struct VkAcquireProfilingLockInfoKHR { + VkStructureType sType; + const void* pNext; + VkAcquireProfilingLockFlagsKHR flags; + uint64_t timeout; +} VkAcquireProfilingLockInfoKHR; -typedef VkDeviceMemoryOpaqueCaptureAddressInfo VkDeviceMemoryOpaqueCaptureAddressInfoKHR; +typedef struct VkPerformanceQuerySubmitInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t counterPassIndex; +} VkPerformanceQuerySubmitInfoKHR; -typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressKHR)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); -typedef uint64_t (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureAddressKHR)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); -typedef uint64_t (VKAPI_PTR *PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR)(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR)(VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireProfilingLockKHR)(VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkReleaseProfilingLockKHR)(VkDevice device); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressKHR( - VkDevice device, - const VkBufferDeviceAddressInfo* pInfo); +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + uint32_t* pCounterCount, + VkPerformanceCounterKHR* pCounters, + VkPerformanceCounterDescriptionKHR* pCounterDescriptions); -VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddressKHR( - VkDevice device, - const VkBufferDeviceAddressInfo* pInfo); +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + 
VkPhysicalDevice physicalDevice, + const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, + uint32_t* pNumPasses); -VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddressKHR( +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireProfilingLockKHR( VkDevice device, - const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); + const VkAcquireProfilingLockInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkReleaseProfilingLockKHR( + VkDevice device); #endif -#define VK_KHR_deferred_host_operations 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeferredOperationKHR) -#define VK_KHR_DEFERRED_HOST_OPERATIONS_SPEC_VERSION 4 -#define VK_KHR_DEFERRED_HOST_OPERATIONS_EXTENSION_NAME "VK_KHR_deferred_host_operations" -typedef VkResult (VKAPI_PTR *PFN_vkCreateDeferredOperationKHR)(VkDevice device, const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation); -typedef void (VKAPI_PTR *PFN_vkDestroyDeferredOperationKHR)(VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator); -typedef uint32_t (VKAPI_PTR *PFN_vkGetDeferredOperationMaxConcurrencyKHR)(VkDevice device, VkDeferredOperationKHR operation); -typedef VkResult (VKAPI_PTR *PFN_vkGetDeferredOperationResultKHR)(VkDevice device, VkDeferredOperationKHR operation); -typedef VkResult (VKAPI_PTR *PFN_vkDeferredOperationJoinKHR)(VkDevice device, VkDeferredOperationKHR operation); +// VK_KHR_maintenance2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_maintenance2 1 +#define VK_KHR_MAINTENANCE_2_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_2_EXTENSION_NAME "VK_KHR_maintenance2" +// VK_KHR_MAINTENANCE2_SPEC_VERSION is a deprecated alias +#define VK_KHR_MAINTENANCE2_SPEC_VERSION VK_KHR_MAINTENANCE_2_SPEC_VERSION +// VK_KHR_MAINTENANCE2_EXTENSION_NAME is a deprecated alias +#define VK_KHR_MAINTENANCE2_EXTENSION_NAME VK_KHR_MAINTENANCE_2_EXTENSION_NAME +typedef VkPointClippingBehavior VkPointClippingBehaviorKHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDeferredOperationKHR( - VkDevice device, - const VkAllocationCallbacks* pAllocator, - VkDeferredOperationKHR* pDeferredOperation); +typedef VkTessellationDomainOrigin VkTessellationDomainOriginKHR; -VKAPI_ATTR void VKAPI_CALL vkDestroyDeferredOperationKHR( - VkDevice device, - VkDeferredOperationKHR operation, - const VkAllocationCallbacks* pAllocator); +typedef VkPhysicalDevicePointClippingProperties VkPhysicalDevicePointClippingPropertiesKHR; -VKAPI_ATTR uint32_t VKAPI_CALL vkGetDeferredOperationMaxConcurrencyKHR( - VkDevice device, - VkDeferredOperationKHR operation); +typedef VkRenderPassInputAttachmentAspectCreateInfo VkRenderPassInputAttachmentAspectCreateInfoKHR; -VKAPI_ATTR VkResult VKAPI_CALL vkGetDeferredOperationResultKHR( - VkDevice device, - VkDeferredOperationKHR operation); +typedef VkInputAttachmentAspectReference VkInputAttachmentAspectReferenceKHR; -VKAPI_ATTR VkResult VKAPI_CALL vkDeferredOperationJoinKHR( - VkDevice device, - VkDeferredOperationKHR operation); -#endif +typedef VkImageViewUsageCreateInfo VkImageViewUsageCreateInfoKHR; +typedef VkPipelineTessellationDomainOriginStateCreateInfo VkPipelineTessellationDomainOriginStateCreateInfoKHR; -#define VK_KHR_pipeline_executable_properties 1 -#define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION 1 -#define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME "VK_KHR_pipeline_executable_properties" -typedef enum VkPipelineExecutableStatisticFormatKHR { - VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR = 0, - 
VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR = 1, - VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR = 2, - VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR = 3, - VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_MAX_ENUM_KHR = 0x7FFFFFFF -} VkPipelineExecutableStatisticFormatKHR; -typedef struct VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 pipelineExecutableInfo; -} VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR; -typedef struct VkPipelineInfoKHR { +// VK_KHR_get_surface_capabilities2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_get_surface_capabilities2 1 +#define VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION 1 +#define VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME "VK_KHR_get_surface_capabilities2" +typedef struct VkPhysicalDeviceSurfaceInfo2KHR { VkStructureType sType; const void* pNext; - VkPipeline pipeline; -} VkPipelineInfoKHR; + VkSurfaceKHR surface; +} VkPhysicalDeviceSurfaceInfo2KHR; -typedef struct VkPipelineExecutablePropertiesKHR { +typedef struct VkSurfaceCapabilities2KHR { + VkStructureType sType; + void* pNext; + VkSurfaceCapabilitiesKHR surfaceCapabilities; +} VkSurfaceCapabilities2KHR; + +typedef struct VkSurfaceFormat2KHR { VkStructureType sType; void* pNext; - VkShaderStageFlags stages; - char name[VK_MAX_DESCRIPTION_SIZE]; - char description[VK_MAX_DESCRIPTION_SIZE]; - uint32_t subgroupSize; -} VkPipelineExecutablePropertiesKHR; + VkSurfaceFormatKHR surfaceFormat; +} VkSurfaceFormat2KHR; -typedef struct VkPipelineExecutableInfoKHR { - VkStructureType sType; - const void* pNext; - VkPipeline pipeline; - uint32_t executableIndex; -} VkPipelineExecutableInfoKHR; +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormats2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats); -typedef union VkPipelineExecutableStatisticValueKHR { - VkBool32 b32; - int64_t i64; - uint64_t u64; - double f64; -} VkPipelineExecutableStatisticValueKHR; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + VkSurfaceCapabilities2KHR* pSurfaceCapabilities); -typedef struct VkPipelineExecutableStatisticKHR { - VkStructureType sType; - void* pNext; - char name[VK_MAX_DESCRIPTION_SIZE]; - char description[VK_MAX_DESCRIPTION_SIZE]; - VkPipelineExecutableStatisticFormatKHR format; - VkPipelineExecutableStatisticValueKHR value; -} VkPipelineExecutableStatisticKHR; +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormats2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + uint32_t* pSurfaceFormatCount, + VkSurfaceFormat2KHR* pSurfaceFormats); +#endif -typedef struct VkPipelineExecutableInternalRepresentationKHR { - VkStructureType sType; - void* pNext; - char name[VK_MAX_DESCRIPTION_SIZE]; - char description[VK_MAX_DESCRIPTION_SIZE]; - VkBool32 isText; - size_t dataSize; - void* pData; -} VkPipelineExecutableInternalRepresentationKHR; -typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutablePropertiesKHR)(VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, 
VkPipelineExecutablePropertiesKHR* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutableStatisticsKHR)(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics); -typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutableInternalRepresentationsKHR)(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations); +// VK_KHR_variable_pointers is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_variable_pointers 1 +#define VK_KHR_VARIABLE_POINTERS_SPEC_VERSION 1 +#define VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME "VK_KHR_variable_pointers" +typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointerFeaturesKHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutablePropertiesKHR( - VkDevice device, - const VkPipelineInfoKHR* pPipelineInfo, - uint32_t* pExecutableCount, - VkPipelineExecutablePropertiesKHR* pProperties); +typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointersFeaturesKHR; -VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableStatisticsKHR( - VkDevice device, - const VkPipelineExecutableInfoKHR* pExecutableInfo, - uint32_t* pStatisticCount, - VkPipelineExecutableStatisticKHR* pStatistics); -VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableInternalRepresentationsKHR( - VkDevice device, - const VkPipelineExecutableInfoKHR* pExecutableInfo, - uint32_t* pInternalRepresentationCount, - VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations); -#endif +// VK_KHR_get_display_properties2 is a preprocessor guard. Do not pass it to API calls. 
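//
// [Editorial usage sketch — not part of the generated header.] One way the
// VK_KHR_get_surface_capabilities2 entry points declared above might be used; assumes
// <vulkan/vulkan.h> is included and the extension is enabled on the instance. The helper name
// is illustrative; the second call follows the usual count-then-fill enumeration pattern.
static VkResult exampleQuerySurfaceCaps2(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface,
                                         VkSurfaceCapabilities2KHR* outCaps,
                                         uint32_t* outFormatCount)
{
    const VkPhysicalDeviceSurfaceInfo2KHR surfaceInfo = {
        .sType   = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR,
        .surface = surface,
    };
    outCaps->sType = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR;
    outCaps->pNext = NULL; // extended capability structs could be chained here
    VkResult result = vkGetPhysicalDeviceSurfaceCapabilities2KHR(physicalDevice, &surfaceInfo, outCaps);
    if (result != VK_SUCCESS)
        return result;
    // With a NULL array, only the number of VkSurfaceFormat2KHR entries is written.
    return vkGetPhysicalDeviceSurfaceFormats2KHR(physicalDevice, &surfaceInfo, outFormatCount, NULL);
}
//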
+#define VK_KHR_get_display_properties2 1 +#define VK_KHR_GET_DISPLAY_PROPERTIES_2_SPEC_VERSION 1 +#define VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_display_properties2" +typedef struct VkDisplayProperties2KHR { + VkStructureType sType; + void* pNext; + VkDisplayPropertiesKHR displayProperties; +} VkDisplayProperties2KHR; -#define VK_KHR_shader_integer_dot_product 1 -#define VK_KHR_SHADER_INTEGER_DOT_PRODUCT_SPEC_VERSION 1 -#define VK_KHR_SHADER_INTEGER_DOT_PRODUCT_EXTENSION_NAME "VK_KHR_shader_integer_dot_product" -typedef VkPhysicalDeviceShaderIntegerDotProductFeatures VkPhysicalDeviceShaderIntegerDotProductFeaturesKHR; +typedef struct VkDisplayPlaneProperties2KHR { + VkStructureType sType; + void* pNext; + VkDisplayPlanePropertiesKHR displayPlaneProperties; +} VkDisplayPlaneProperties2KHR; -typedef VkPhysicalDeviceShaderIntegerDotProductProperties VkPhysicalDeviceShaderIntegerDotProductPropertiesKHR; +typedef struct VkDisplayModeProperties2KHR { + VkStructureType sType; + void* pNext; + VkDisplayModePropertiesKHR displayModeProperties; +} VkDisplayModeProperties2KHR; +typedef struct VkDisplayPlaneInfo2KHR { + VkStructureType sType; + const void* pNext; + VkDisplayModeKHR mode; + uint32_t planeIndex; +} VkDisplayPlaneInfo2KHR; +typedef struct VkDisplayPlaneCapabilities2KHR { + VkStructureType sType; + void* pNext; + VkDisplayPlaneCapabilitiesKHR capabilities; +} VkDisplayPlaneCapabilities2KHR; -#define VK_KHR_pipeline_library 1 -#define VK_KHR_PIPELINE_LIBRARY_SPEC_VERSION 1 -#define VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME "VK_KHR_pipeline_library" -typedef struct VkPipelineLibraryCreateInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t libraryCount; - const VkPipeline* pLibraries; -} VkPipelineLibraryCreateInfoKHR; +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModeProperties2KHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayProperties2KHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayProperties2KHR* pProperties); +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPlaneProperties2KHR* pProperties); -#define VK_KHR_shader_non_semantic_info 1 -#define VK_KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION 1 -#define VK_KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME "VK_KHR_shader_non_semantic_info" +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModeProperties2KHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t* pPropertyCount, + VkDisplayModeProperties2KHR* pProperties); +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilities2KHR( + VkPhysicalDevice physicalDevice, + const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, + VkDisplayPlaneCapabilities2KHR* pCapabilities); +#endif -#define VK_KHR_present_id 1 -#define 
VK_KHR_PRESENT_ID_SPEC_VERSION 1 -#define VK_KHR_PRESENT_ID_EXTENSION_NAME "VK_KHR_present_id" -typedef struct VkPresentIdKHR { - VkStructureType sType; - const void* pNext; - uint32_t swapchainCount; - const uint64_t* pPresentIds; -} VkPresentIdKHR; -typedef struct VkPhysicalDevicePresentIdFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 presentId; -} VkPhysicalDevicePresentIdFeaturesKHR; +// VK_KHR_dedicated_allocation is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_dedicated_allocation 1 +#define VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION 3 +#define VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_KHR_dedicated_allocation" +typedef VkMemoryDedicatedRequirements VkMemoryDedicatedRequirementsKHR; +typedef VkMemoryDedicatedAllocateInfo VkMemoryDedicatedAllocateInfoKHR; -#define VK_KHR_synchronization2 1 -#define VK_KHR_SYNCHRONIZATION_2_SPEC_VERSION 1 -#define VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME "VK_KHR_synchronization2" -typedef VkPipelineStageFlags2 VkPipelineStageFlags2KHR; -typedef VkPipelineStageFlagBits2 VkPipelineStageFlagBits2KHR; +// VK_KHR_storage_buffer_storage_class is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_storage_buffer_storage_class 1 +#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION 1 +#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME "VK_KHR_storage_buffer_storage_class" -typedef VkAccessFlags2 VkAccessFlags2KHR; -typedef VkAccessFlagBits2 VkAccessFlagBits2KHR; +// VK_KHR_relaxed_block_layout is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_relaxed_block_layout 1 +#define VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION 1 +#define VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME "VK_KHR_relaxed_block_layout" -typedef VkSubmitFlagBits VkSubmitFlagBitsKHR; -typedef VkSubmitFlags VkSubmitFlagsKHR; +// VK_KHR_get_memory_requirements2 is a preprocessor guard. Do not pass it to API calls. 
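//
// [Editorial usage sketch — not part of the generated header.] Count-then-fill enumeration with
// vkGetPhysicalDeviceDisplayProperties2KHR from the VK_KHR_get_display_properties2 block above.
// Assumes <vulkan/vulkan.h> and <stdlib.h> are included; the helper name is illustrative.
static VkResult exampleEnumerateDisplays2(VkPhysicalDevice physicalDevice, uint32_t* outCount)
{
    VkResult result = vkGetPhysicalDeviceDisplayProperties2KHR(physicalDevice, outCount, NULL);
    if (result != VK_SUCCESS || *outCount == 0)
        return result;
    VkDisplayProperties2KHR* props =
        (VkDisplayProperties2KHR*)calloc(*outCount, sizeof(*props));
    if (!props)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    for (uint32_t i = 0; i < *outCount; ++i)
        props[i].sType = VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR; // pNext stays NULL via calloc
    result = vkGetPhysicalDeviceDisplayProperties2KHR(physicalDevice, outCount, props);
    // ... inspect props[i].displayProperties here ...
    free(props);
    return result;
}
//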
+#define VK_KHR_get_memory_requirements2 1 +#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION 1 +#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME "VK_KHR_get_memory_requirements2" +typedef VkBufferMemoryRequirementsInfo2 VkBufferMemoryRequirementsInfo2KHR; -typedef VkMemoryBarrier2 VkMemoryBarrier2KHR; +typedef VkImageMemoryRequirementsInfo2 VkImageMemoryRequirementsInfo2KHR; -typedef VkBufferMemoryBarrier2 VkBufferMemoryBarrier2KHR; +typedef VkImageSparseMemoryRequirementsInfo2 VkImageSparseMemoryRequirementsInfo2KHR; -typedef VkImageMemoryBarrier2 VkImageMemoryBarrier2KHR; +typedef VkMemoryRequirements2 VkMemoryRequirements2KHR; -typedef VkDependencyInfo VkDependencyInfoKHR; +typedef VkSparseImageMemoryRequirements2 VkSparseImageMemoryRequirements2KHR; -typedef VkSubmitInfo2 VkSubmitInfo2KHR; +typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2KHR)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2KHR)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2KHR)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); -typedef VkSemaphoreSubmitInfo VkSemaphoreSubmitInfoKHR; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2KHR( + VkDevice device, + const VkImageMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); -typedef VkCommandBufferSubmitInfo VkCommandBufferSubmitInfoKHR; +VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2KHR( + VkDevice device, + const VkBufferMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); -typedef VkPhysicalDeviceSynchronization2Features VkPhysicalDeviceSynchronization2FeaturesKHR; +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2KHR( + VkDevice device, + const VkImageSparseMemoryRequirementsInfo2* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +#endif -typedef struct VkQueueFamilyCheckpointProperties2NV { - VkStructureType sType; - void* pNext; - VkPipelineStageFlags2 checkpointExecutionStageMask; -} VkQueueFamilyCheckpointProperties2NV; -typedef struct VkCheckpointData2NV { - VkStructureType sType; - void* pNext; - VkPipelineStageFlags2 stage; - void* pCheckpointMarker; -} VkCheckpointData2NV; +// VK_KHR_image_format_list is a preprocessor guard. Do not pass it to API calls. 
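//
// [Editorial usage sketch — not part of the generated header.] Shows how the
// VK_KHR_get_memory_requirements2 query above is typically combined with the
// VK_KHR_dedicated_allocation output struct. Assumes <vulkan/vulkan.h> is included; the helper
// name is illustrative.
static VkBool32 exampleBufferPrefersDedicatedAllocation(VkDevice device, VkBuffer buffer,
                                                        VkMemoryRequirements2KHR* outReqs)
{
    VkMemoryDedicatedRequirementsKHR dedicated = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR,
    };
    outReqs->sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR;
    outReqs->pNext = &dedicated; // chained output struct filled by the driver
    const VkBufferMemoryRequirementsInfo2KHR info = {
        .sType  = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR,
        .buffer = buffer,
    };
    vkGetBufferMemoryRequirements2KHR(device, &info, outReqs);
    return dedicated.prefersDedicatedAllocation || dedicated.requiresDedicatedAllocation;
}
//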
+#define VK_KHR_image_format_list 1 +#define VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION 1 +#define VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME "VK_KHR_image_format_list" +typedef VkImageFormatListCreateInfo VkImageFormatListCreateInfoKHR; -typedef void (VKAPI_PTR *PFN_vkCmdSetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo* pDependencyInfo); -typedef void (VKAPI_PTR *PFN_vkCmdResetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask); -typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents2KHR)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfo* pDependencyInfos); -typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier2KHR)(VkCommandBuffer commandBuffer, const VkDependencyInfo* pDependencyInfo); -typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp2KHR)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query); -typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit2KHR)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2* pSubmits, VkFence fence); -typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarker2AMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); -typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointData2NV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointData2NV* pCheckpointData); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent2KHR( - VkCommandBuffer commandBuffer, - VkEvent event, - const VkDependencyInfo* pDependencyInfo); -VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent2KHR( - VkCommandBuffer commandBuffer, - VkEvent event, - VkPipelineStageFlags2 stageMask); +// VK_KHR_sampler_ycbcr_conversion is a preprocessor guard. Do not pass it to API calls. 
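//
// [Editorial usage sketch — not part of the generated header.] Chaining the
// VkImageFormatListCreateInfoKHR declared above onto an otherwise initialized VkImageCreateInfo
// so a VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT image declares exactly which view formats it will use.
// Assumes <vulkan/vulkan.h> is included; the helper name and the format pair are illustrative.
static const VkFormat exampleViewFormats[2] = { VK_FORMAT_R8G8B8A8_UNORM, VK_FORMAT_R8G8B8A8_SRGB };

static void exampleChainImageFormatList(VkImageCreateInfo* createInfo,
                                        VkImageFormatListCreateInfoKHR* formatList)
{
    formatList->sType           = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR;
    formatList->pNext           = createInfo->pNext;   // preserve any existing chain
    formatList->viewFormatCount = 2;
    formatList->pViewFormats    = exampleViewFormats;
    createInfo->pNext  = formatList;
    createInfo->flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
}
//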
+#define VK_KHR_sampler_ycbcr_conversion 1 +typedef VkSamplerYcbcrConversion VkSamplerYcbcrConversionKHR; -VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents2KHR( - VkCommandBuffer commandBuffer, - uint32_t eventCount, - const VkEvent* pEvents, - const VkDependencyInfo* pDependencyInfos); +#define VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION 14 +#define VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME "VK_KHR_sampler_ycbcr_conversion" +typedef VkSamplerYcbcrModelConversion VkSamplerYcbcrModelConversionKHR; -VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier2KHR( - VkCommandBuffer commandBuffer, - const VkDependencyInfo* pDependencyInfo); +typedef VkSamplerYcbcrRange VkSamplerYcbcrRangeKHR; -VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp2KHR( - VkCommandBuffer commandBuffer, - VkPipelineStageFlags2 stage, - VkQueryPool queryPool, - uint32_t query); +typedef VkChromaLocation VkChromaLocationKHR; -VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit2KHR( - VkQueue queue, - uint32_t submitCount, - const VkSubmitInfo2* pSubmits, - VkFence fence); +typedef VkSamplerYcbcrConversionCreateInfo VkSamplerYcbcrConversionCreateInfoKHR; -VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarker2AMD( - VkCommandBuffer commandBuffer, - VkPipelineStageFlags2 stage, - VkBuffer dstBuffer, - VkDeviceSize dstOffset, - uint32_t marker); - -VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointData2NV( - VkQueue queue, - uint32_t* pCheckpointDataCount, - VkCheckpointData2NV* pCheckpointData); -#endif - - -#define VK_KHR_fragment_shader_barycentric 1 -#define VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION 1 -#define VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME "VK_KHR_fragment_shader_barycentric" -typedef struct VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 fragmentShaderBarycentric; -} VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR; - -typedef struct VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR { - VkStructureType sType; - void* pNext; - VkBool32 triStripVertexOrderIndependentOfProvokingVertex; -} VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR; - - - -#define VK_KHR_shader_subgroup_uniform_control_flow 1 -#define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_SPEC_VERSION 1 -#define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_EXTENSION_NAME "VK_KHR_shader_subgroup_uniform_control_flow" -typedef struct VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 shaderSubgroupUniformControlFlow; -} VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; +typedef VkSamplerYcbcrConversionInfo VkSamplerYcbcrConversionInfoKHR; +typedef VkBindImagePlaneMemoryInfo VkBindImagePlaneMemoryInfoKHR; +typedef VkImagePlaneMemoryRequirementsInfo VkImagePlaneMemoryRequirementsInfoKHR; -#define VK_KHR_zero_initialize_workgroup_memory 1 -#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION 1 -#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_EXTENSION_NAME "VK_KHR_zero_initialize_workgroup_memory" -typedef VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR; +typedef VkPhysicalDeviceSamplerYcbcrConversionFeatures VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR; +typedef VkSamplerYcbcrConversionImageFormatProperties VkSamplerYcbcrConversionImageFormatPropertiesKHR; +typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversionKHR)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, 
VkSamplerYcbcrConversion* pYcbcrConversion); +typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversionKHR)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator); -#define VK_KHR_workgroup_memory_explicit_layout 1 -#define VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_SPEC_VERSION 1 -#define VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_EXTENSION_NAME "VK_KHR_workgroup_memory_explicit_layout" -typedef struct VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 workgroupMemoryExplicitLayout; - VkBool32 workgroupMemoryExplicitLayoutScalarBlockLayout; - VkBool32 workgroupMemoryExplicitLayout8BitAccess; - VkBool32 workgroupMemoryExplicitLayout16BitAccess; -} VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversionKHR( + VkDevice device, + const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSamplerYcbcrConversion* pYcbcrConversion); +VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversionKHR( + VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks* pAllocator); +#endif -#define VK_KHR_copy_commands2 1 -#define VK_KHR_COPY_COMMANDS_2_SPEC_VERSION 1 -#define VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME "VK_KHR_copy_commands2" -typedef VkCopyBufferInfo2 VkCopyBufferInfo2KHR; +// VK_KHR_bind_memory2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_bind_memory2 1 +#define VK_KHR_BIND_MEMORY_2_SPEC_VERSION 1 +#define VK_KHR_BIND_MEMORY_2_EXTENSION_NAME "VK_KHR_bind_memory2" +typedef VkBindBufferMemoryInfo VkBindBufferMemoryInfoKHR; -typedef VkCopyImageInfo2 VkCopyImageInfo2KHR; +typedef VkBindImageMemoryInfo VkBindImageMemoryInfoKHR; -typedef VkCopyBufferToImageInfo2 VkCopyBufferToImageInfo2KHR; +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos); +typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos); -typedef VkCopyImageToBufferInfo2 VkCopyImageToBufferInfo2KHR; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2KHR( + VkDevice device, + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo* pBindInfos); -typedef VkBlitImageInfo2 VkBlitImageInfo2KHR; +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2KHR( + VkDevice device, + uint32_t bindInfoCount, + const VkBindImageMemoryInfo* pBindInfos); +#endif -typedef VkResolveImageInfo2 VkResolveImageInfo2KHR; -typedef VkBufferCopy2 VkBufferCopy2KHR; +// VK_KHR_maintenance3 is a preprocessor guard. Do not pass it to API calls. 
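//
// [Editorial usage sketch — not part of the generated header.] Batch-binding two images with a
// single vkBindImageMemory2KHR call from the VK_KHR_bind_memory2 block above. Assumes
// <vulkan/vulkan.h> is included; the helper name is illustrative.
static VkResult exampleBindTwoImages(VkDevice device,
                                     VkImage imageA, VkDeviceMemory memA, VkDeviceSize offsetA,
                                     VkImage imageB, VkDeviceMemory memB, VkDeviceSize offsetB)
{
    const VkBindImageMemoryInfoKHR binds[2] = {
        { .sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR,
          .image = imageA, .memory = memA, .memoryOffset = offsetA },
        { .sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR,
          .image = imageB, .memory = memB, .memoryOffset = offsetB },
    };
    return vkBindImageMemory2KHR(device, 2, binds);
}
//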
+#define VK_KHR_maintenance3 1 +#define VK_KHR_MAINTENANCE_3_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_3_EXTENSION_NAME "VK_KHR_maintenance3" +// VK_KHR_MAINTENANCE3_SPEC_VERSION is a deprecated alias +#define VK_KHR_MAINTENANCE3_SPEC_VERSION VK_KHR_MAINTENANCE_3_SPEC_VERSION +// VK_KHR_MAINTENANCE3_EXTENSION_NAME is a deprecated alias +#define VK_KHR_MAINTENANCE3_EXTENSION_NAME VK_KHR_MAINTENANCE_3_EXTENSION_NAME +typedef VkPhysicalDeviceMaintenance3Properties VkPhysicalDeviceMaintenance3PropertiesKHR; -typedef VkImageCopy2 VkImageCopy2KHR; +typedef VkDescriptorSetLayoutSupport VkDescriptorSetLayoutSupportKHR; -typedef VkImageBlit2 VkImageBlit2KHR; +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupportKHR)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport); -typedef VkBufferImageCopy2 VkBufferImageCopy2KHR; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupportKHR( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + VkDescriptorSetLayoutSupport* pSupport); +#endif -typedef VkImageResolve2 VkImageResolve2KHR; -typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2* pCopyBufferInfo); -typedef void (VKAPI_PTR *PFN_vkCmdCopyImage2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageInfo2* pCopyImageInfo); -typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage2KHR)(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); -typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo); -typedef void (VKAPI_PTR *PFN_vkCmdBlitImage2KHR)(VkCommandBuffer commandBuffer, const VkBlitImageInfo2* pBlitImageInfo); -typedef void (VKAPI_PTR *PFN_vkCmdResolveImage2KHR)(VkCommandBuffer commandBuffer, const VkResolveImageInfo2* pResolveImageInfo); +// VK_KHR_draw_indirect_count is a preprocessor guard. Do not pass it to API calls. 
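//
// [Editorial usage sketch — not part of the generated header.] Using the VK_KHR_maintenance3
// query declared above to ask whether a descriptor set layout can actually be created before
// committing to it. Assumes <vulkan/vulkan.h> is included; the helper name is illustrative.
static VkBool32 exampleLayoutIsSupported(VkDevice device,
                                         const VkDescriptorSetLayoutCreateInfo* createInfo)
{
    VkDescriptorSetLayoutSupportKHR support = {
        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR,
    };
    vkGetDescriptorSetLayoutSupportKHR(device, createInfo, &support);
    return support.supported;
}
//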
+#define VK_KHR_draw_indirect_count 1 +#define VK_KHR_DRAW_INDIRECT_COUNT_SPEC_VERSION 1 +#define VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_KHR_draw_indirect_count" +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer2KHR( +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, - const VkCopyBufferInfo2* pCopyBufferInfo); + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); -VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage2KHR( +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, - const VkCopyImageInfo2* pCopyImageInfo); + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif -VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage2KHR( - VkCommandBuffer commandBuffer, - const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); -VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer2KHR( - VkCommandBuffer commandBuffer, - const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo); +// VK_KHR_shader_subgroup_extended_types is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_subgroup_extended_types 1 +#define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION 1 +#define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME "VK_KHR_shader_subgroup_extended_types" +typedef VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR; -VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage2KHR( - VkCommandBuffer commandBuffer, - const VkBlitImageInfo2* pBlitImageInfo); -VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2KHR( - VkCommandBuffer commandBuffer, - const VkResolveImageInfo2* pResolveImageInfo); -#endif +// VK_KHR_8bit_storage is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_8bit_storage 1 +#define VK_KHR_8BIT_STORAGE_SPEC_VERSION 1 +#define VK_KHR_8BIT_STORAGE_EXTENSION_NAME "VK_KHR_8bit_storage" +typedef VkPhysicalDevice8BitStorageFeatures VkPhysicalDevice8BitStorageFeaturesKHR; -#define VK_KHR_format_feature_flags2 1 -#define VK_KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION 2 -#define VK_KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME "VK_KHR_format_feature_flags2" -typedef VkFormatFeatureFlags2 VkFormatFeatureFlags2KHR; -typedef VkFormatFeatureFlagBits2 VkFormatFeatureFlagBits2KHR; -typedef VkFormatProperties3 VkFormatProperties3KHR; +// VK_KHR_shader_atomic_int64 is a preprocessor guard. Do not pass it to API calls. 
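//
// [Editorial usage sketch — not part of the generated header.] Recording a GPU-driven draw with
// vkCmdDrawIndexedIndirectCountKHR from the VK_KHR_draw_indirect_count block above: the draw
// count is read from countBuffer at offset 0 and clamped to maxDrawCount. Assumes
// <vulkan/vulkan.h> is included and index/vertex state is already bound; the helper name is
// illustrative.
static void exampleDrawCulledBatch(VkCommandBuffer cmd, VkBuffer argsBuffer, VkBuffer countBuffer,
                                   uint32_t maxDrawCount)
{
    vkCmdDrawIndexedIndirectCountKHR(cmd, argsBuffer, /*offset*/ 0, countBuffer,
                                     /*countBufferOffset*/ 0, maxDrawCount,
                                     (uint32_t)sizeof(VkDrawIndexedIndirectCommand));
}
//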
+#define VK_KHR_shader_atomic_int64 1 +#define VK_KHR_SHADER_ATOMIC_INT64_SPEC_VERSION 1 +#define VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME "VK_KHR_shader_atomic_int64" +typedef VkPhysicalDeviceShaderAtomicInt64Features VkPhysicalDeviceShaderAtomicInt64FeaturesKHR; -#define VK_KHR_ray_tracing_maintenance1 1 -#define VK_KHR_RAY_TRACING_MAINTENANCE_1_SPEC_VERSION 1 -#define VK_KHR_RAY_TRACING_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_ray_tracing_maintenance1" -typedef struct VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR { +// VK_KHR_shader_clock is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_clock 1 +#define VK_KHR_SHADER_CLOCK_SPEC_VERSION 1 +#define VK_KHR_SHADER_CLOCK_EXTENSION_NAME "VK_KHR_shader_clock" +typedef struct VkPhysicalDeviceShaderClockFeaturesKHR { VkStructureType sType; void* pNext; - VkBool32 rayTracingMaintenance1; - VkBool32 rayTracingPipelineTraceRaysIndirect2; -} VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR; + VkBool32 shaderSubgroupClock; + VkBool32 shaderDeviceClock; +} VkPhysicalDeviceShaderClockFeaturesKHR; -typedef struct VkTraceRaysIndirectCommand2KHR { - VkDeviceAddress raygenShaderRecordAddress; - VkDeviceSize raygenShaderRecordSize; - VkDeviceAddress missShaderBindingTableAddress; - VkDeviceSize missShaderBindingTableSize; - VkDeviceSize missShaderBindingTableStride; - VkDeviceAddress hitShaderBindingTableAddress; - VkDeviceSize hitShaderBindingTableSize; - VkDeviceSize hitShaderBindingTableStride; - VkDeviceAddress callableShaderBindingTableAddress; - VkDeviceSize callableShaderBindingTableSize; - VkDeviceSize callableShaderBindingTableStride; - uint32_t width; - uint32_t height; - uint32_t depth; -} VkTraceRaysIndirectCommand2KHR; -typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysIndirect2KHR)(VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysIndirect2KHR( - VkCommandBuffer commandBuffer, - VkDeviceAddress indirectDeviceAddress); -#endif +// VK_KHR_video_decode_h265 is a preprocessor guard. Do not pass it to API calls. 
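//
// [Editorial usage sketch — not part of the generated header.] Querying the
// VkPhysicalDeviceShaderClockFeaturesKHR struct declared above by chaining it into
// VkPhysicalDeviceFeatures2 (Vulkan 1.1, or the 2KHR variant with
// VK_KHR_get_physical_device_properties2). Assumes <vulkan/vulkan.h> is included; the helper
// name is illustrative.
static VkBool32 exampleHasShaderDeviceClock(VkPhysicalDevice physicalDevice)
{
    VkPhysicalDeviceShaderClockFeaturesKHR clockFeatures = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR,
    };
    VkPhysicalDeviceFeatures2 features2 = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
        .pNext = &clockFeatures,
    };
    vkGetPhysicalDeviceFeatures2(physicalDevice, &features2);
    return clockFeatures.shaderDeviceClock;
}
//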
+#define VK_KHR_video_decode_h265 1 +#include "vk_video/vulkan_video_codec_h265std_decode.h" +#define VK_KHR_VIDEO_DECODE_H265_SPEC_VERSION 8 +#define VK_KHR_VIDEO_DECODE_H265_EXTENSION_NAME "VK_KHR_video_decode_h265" +typedef struct VkVideoDecodeH265ProfileInfoKHR { + VkStructureType sType; + const void* pNext; + StdVideoH265ProfileIdc stdProfileIdc; +} VkVideoDecodeH265ProfileInfoKHR; +typedef struct VkVideoDecodeH265CapabilitiesKHR { + VkStructureType sType; + void* pNext; + StdVideoH265LevelIdc maxLevelIdc; +} VkVideoDecodeH265CapabilitiesKHR; -#define VK_KHR_portability_enumeration 1 -#define VK_KHR_PORTABILITY_ENUMERATION_SPEC_VERSION 1 -#define VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME "VK_KHR_portability_enumeration" +typedef struct VkVideoDecodeH265SessionParametersAddInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t stdVPSCount; + const StdVideoH265VideoParameterSet* pStdVPSs; + uint32_t stdSPSCount; + const StdVideoH265SequenceParameterSet* pStdSPSs; + uint32_t stdPPSCount; + const StdVideoH265PictureParameterSet* pStdPPSs; +} VkVideoDecodeH265SessionParametersAddInfoKHR; + +typedef struct VkVideoDecodeH265SessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t maxStdVPSCount; + uint32_t maxStdSPSCount; + uint32_t maxStdPPSCount; + const VkVideoDecodeH265SessionParametersAddInfoKHR* pParametersAddInfo; +} VkVideoDecodeH265SessionParametersCreateInfoKHR; +typedef struct VkVideoDecodeH265PictureInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH265PictureInfo* pStdPictureInfo; + uint32_t sliceSegmentCount; + const uint32_t* pSliceSegmentOffsets; +} VkVideoDecodeH265PictureInfoKHR; -#define VK_KHR_maintenance4 1 -#define VK_KHR_MAINTENANCE_4_SPEC_VERSION 2 -#define VK_KHR_MAINTENANCE_4_EXTENSION_NAME "VK_KHR_maintenance4" -typedef VkPhysicalDeviceMaintenance4Features VkPhysicalDeviceMaintenance4FeaturesKHR; +typedef struct VkVideoDecodeH265DpbSlotInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH265ReferenceInfo* pStdReferenceInfo; +} VkVideoDecodeH265DpbSlotInfoKHR; -typedef VkPhysicalDeviceMaintenance4Properties VkPhysicalDeviceMaintenance4PropertiesKHR; -typedef VkDeviceBufferMemoryRequirements VkDeviceBufferMemoryRequirementsKHR; -typedef VkDeviceImageMemoryRequirements VkDeviceImageMemoryRequirementsKHR; +// VK_KHR_global_priority is a preprocessor guard. Do not pass it to API calls. 
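//
// [Editorial usage sketch — not part of the generated header.] Filling the
// VK_KHR_video_decode_h265 session-parameter structs declared above from already-parsed H.265
// SPS/PPS data; the result would normally be chained into the VK_KHR_video_queue
// VkVideoSessionParametersCreateInfoKHR. Assumes <vulkan/vulkan.h> is included; the helper name,
// the single-SPS/PPS counts, and the omission of VPS entries are illustrative.
static void exampleFillH265DecodeParams(const StdVideoH265SequenceParameterSet* sps,
                                        const StdVideoH265PictureParameterSet* pps,
                                        VkVideoDecodeH265SessionParametersAddInfoKHR* addInfo,
                                        VkVideoDecodeH265SessionParametersCreateInfoKHR* createInfo)
{
    addInfo->sType       = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR;
    addInfo->pNext       = NULL;
    addInfo->stdVPSCount = 0;  addInfo->pStdVPSs = NULL;
    addInfo->stdSPSCount = 1;  addInfo->pStdSPSs = sps;
    addInfo->stdPPSCount = 1;  addInfo->pStdPPSs = pps;

    createInfo->sType              = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR;
    createInfo->pNext              = NULL;
    createInfo->maxStdVPSCount     = 1;
    createInfo->maxStdSPSCount     = 1;
    createInfo->maxStdPPSCount     = 1;
    createInfo->pParametersAddInfo = addInfo;
}
//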
+#define VK_KHR_global_priority 1 +#define VK_KHR_GLOBAL_PRIORITY_SPEC_VERSION 1 +#define VK_KHR_GLOBAL_PRIORITY_EXTENSION_NAME "VK_KHR_global_priority" +#define VK_MAX_GLOBAL_PRIORITY_SIZE_KHR VK_MAX_GLOBAL_PRIORITY_SIZE +typedef VkQueueGlobalPriority VkQueueGlobalPriorityKHR; -typedef void (VKAPI_PTR *PFN_vkGetDeviceBufferMemoryRequirementsKHR)(VkDevice device, const VkDeviceBufferMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements); -typedef void (VKAPI_PTR *PFN_vkGetDeviceImageMemoryRequirementsKHR)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements); -typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSparseMemoryRequirementsKHR)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +typedef VkDeviceQueueGlobalPriorityCreateInfo VkDeviceQueueGlobalPriorityCreateInfoKHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetDeviceBufferMemoryRequirementsKHR( - VkDevice device, - const VkDeviceBufferMemoryRequirements* pInfo, - VkMemoryRequirements2* pMemoryRequirements); +typedef VkPhysicalDeviceGlobalPriorityQueryFeatures VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR; -VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageMemoryRequirementsKHR( - VkDevice device, - const VkDeviceImageMemoryRequirements* pInfo, - VkMemoryRequirements2* pMemoryRequirements); +typedef VkQueueFamilyGlobalPriorityProperties VkQueueFamilyGlobalPriorityPropertiesKHR; -VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirementsKHR( - VkDevice device, - const VkDeviceImageMemoryRequirements* pInfo, - uint32_t* pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); -#endif -#define VK_EXT_debug_report 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT) -#define VK_EXT_DEBUG_REPORT_SPEC_VERSION 10 -#define VK_EXT_DEBUG_REPORT_EXTENSION_NAME "VK_EXT_debug_report" +// VK_KHR_driver_properties is a preprocessor guard. Do not pass it to API calls. 
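//
// [Editorial usage sketch — not part of the generated header.] Requesting an elevated scheduling
// priority for a queue by chaining the VK_KHR_global_priority create-info declared above onto a
// VkDeviceQueueCreateInfo before vkCreateDevice. Assumes <vulkan/vulkan.h> is included; the
// helper name and the choice of VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR are illustrative.
static void exampleRequestHighPriorityQueue(VkDeviceQueueCreateInfo* queueInfo,
                                            VkDeviceQueueGlobalPriorityCreateInfoKHR* priorityInfo)
{
    priorityInfo->sType          = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR;
    priorityInfo->pNext          = queueInfo->pNext; // preserve any existing chain
    priorityInfo->globalPriority = VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR;
    queueInfo->pNext = priorityInfo;
}
//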
+#define VK_KHR_driver_properties 1 +#define VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION 1 +#define VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME "VK_KHR_driver_properties" +#define VK_MAX_DRIVER_NAME_SIZE_KHR VK_MAX_DRIVER_NAME_SIZE +#define VK_MAX_DRIVER_INFO_SIZE_KHR VK_MAX_DRIVER_INFO_SIZE +typedef VkDriverId VkDriverIdKHR; -typedef enum VkDebugReportObjectTypeEXT { - VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT = 0, - VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT = 1, - VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT = 2, - VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT = 3, - VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT = 4, - VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT = 5, - VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT = 6, - VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT = 7, - VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT = 8, - VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT = 9, - VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT = 10, - VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT = 11, - VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT = 12, - VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT = 13, - VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT = 14, - VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT = 15, - VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT = 16, - VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT = 17, - VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT = 18, - VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT = 19, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT = 20, - VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT = 21, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT = 22, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT = 23, - VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT = 24, - VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT = 25, - VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT = 26, - VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT = 27, - VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT = 28, - VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT = 29, - VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT = 30, - VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT = 33, - VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT = 1000156000, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT = 1000085000, - VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT = 1000029000, - VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT = 1000029001, - VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT = 1000150000, - VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT = 1000165000, - VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT = 1000366000, - VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, - VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, - VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, - VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDebugReportObjectTypeEXT; +typedef VkConformanceVersion VkConformanceVersionKHR; -typedef enum VkDebugReportFlagBitsEXT { - VK_DEBUG_REPORT_INFORMATION_BIT_EXT = 0x00000001, - VK_DEBUG_REPORT_WARNING_BIT_EXT = 0x00000002, - VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT = 0x00000004, - VK_DEBUG_REPORT_ERROR_BIT_EXT = 0x00000008, - VK_DEBUG_REPORT_DEBUG_BIT_EXT = 0x00000010, - VK_DEBUG_REPORT_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDebugReportFlagBitsEXT; -typedef VkFlags VkDebugReportFlagsEXT; 
-typedef VkBool32 (VKAPI_PTR *PFN_vkDebugReportCallbackEXT)( - VkDebugReportFlagsEXT flags, - VkDebugReportObjectTypeEXT objectType, - uint64_t object, - size_t location, - int32_t messageCode, - const char* pLayerPrefix, - const char* pMessage, - void* pUserData); +typedef VkPhysicalDeviceDriverProperties VkPhysicalDeviceDriverPropertiesKHR; -typedef struct VkDebugReportCallbackCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkDebugReportFlagsEXT flags; - PFN_vkDebugReportCallbackEXT pfnCallback; - void* pUserData; -} VkDebugReportCallbackCreateInfoEXT; -typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugReportCallbackEXT)(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback); -typedef void (VKAPI_PTR *PFN_vkDestroyDebugReportCallbackEXT)(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator); -typedef void (VKAPI_PTR *PFN_vkDebugReportMessageEXT)(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT( - VkInstance instance, - const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkDebugReportCallbackEXT* pCallback); +// VK_KHR_shader_float_controls is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_float_controls 1 +#define VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION 4 +#define VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME "VK_KHR_shader_float_controls" +typedef VkShaderFloatControlsIndependence VkShaderFloatControlsIndependenceKHR; -VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT( - VkInstance instance, - VkDebugReportCallbackEXT callback, - const VkAllocationCallbacks* pAllocator); +typedef VkPhysicalDeviceFloatControlsProperties VkPhysicalDeviceFloatControlsPropertiesKHR; -VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT( - VkInstance instance, - VkDebugReportFlagsEXT flags, - VkDebugReportObjectTypeEXT objectType, - uint64_t object, - size_t location, - int32_t messageCode, - const char* pLayerPrefix, - const char* pMessage); -#endif -#define VK_NV_glsl_shader 1 -#define VK_NV_GLSL_SHADER_SPEC_VERSION 1 -#define VK_NV_GLSL_SHADER_EXTENSION_NAME "VK_NV_glsl_shader" +// VK_KHR_depth_stencil_resolve is a preprocessor guard. Do not pass it to API calls. 
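//
// [Editorial usage sketch — not part of the generated header.] Reading the
// VK_KHR_driver_properties data declared above via VkPhysicalDeviceProperties2 (Vulkan 1.1, or
// the 2KHR variant with VK_KHR_get_physical_device_properties2). Assumes <vulkan/vulkan.h> and
// <stdio.h> are included; the helper name is illustrative.
static void examplePrintDriverInfo(VkPhysicalDevice physicalDevice)
{
    VkPhysicalDeviceDriverPropertiesKHR driverProps = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR,
    };
    VkPhysicalDeviceProperties2 props2 = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
        .pNext = &driverProps,
    };
    vkGetPhysicalDeviceProperties2(physicalDevice, &props2);
    printf("driver %s (%s), conformance %u.%u.%u.%u\n",
           driverProps.driverName, driverProps.driverInfo,
           driverProps.conformanceVersion.major, driverProps.conformanceVersion.minor,
           driverProps.conformanceVersion.subminor, driverProps.conformanceVersion.patch);
}
//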
+#define VK_KHR_depth_stencil_resolve 1 +#define VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION 1 +#define VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME "VK_KHR_depth_stencil_resolve" +typedef VkResolveModeFlagBits VkResolveModeFlagBitsKHR; +typedef VkResolveModeFlags VkResolveModeFlagsKHR; -#define VK_EXT_depth_range_unrestricted 1 -#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION 1 -#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME "VK_EXT_depth_range_unrestricted" +typedef VkSubpassDescriptionDepthStencilResolve VkSubpassDescriptionDepthStencilResolveKHR; +typedef VkPhysicalDeviceDepthStencilResolveProperties VkPhysicalDeviceDepthStencilResolvePropertiesKHR; -#define VK_IMG_filter_cubic 1 -#define VK_IMG_FILTER_CUBIC_SPEC_VERSION 1 -#define VK_IMG_FILTER_CUBIC_EXTENSION_NAME "VK_IMG_filter_cubic" -#define VK_AMD_rasterization_order 1 -#define VK_AMD_RASTERIZATION_ORDER_SPEC_VERSION 1 -#define VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME "VK_AMD_rasterization_order" +// VK_KHR_swapchain_mutable_format is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_swapchain_mutable_format 1 +#define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION 1 +#define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME "VK_KHR_swapchain_mutable_format" -typedef enum VkRasterizationOrderAMD { - VK_RASTERIZATION_ORDER_STRICT_AMD = 0, - VK_RASTERIZATION_ORDER_RELAXED_AMD = 1, - VK_RASTERIZATION_ORDER_MAX_ENUM_AMD = 0x7FFFFFFF -} VkRasterizationOrderAMD; -typedef struct VkPipelineRasterizationStateRasterizationOrderAMD { - VkStructureType sType; - const void* pNext; - VkRasterizationOrderAMD rasterizationOrder; -} VkPipelineRasterizationStateRasterizationOrderAMD; +// VK_KHR_timeline_semaphore is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_timeline_semaphore 1 +#define VK_KHR_TIMELINE_SEMAPHORE_SPEC_VERSION 2 +#define VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME "VK_KHR_timeline_semaphore" +typedef VkSemaphoreType VkSemaphoreTypeKHR; +typedef VkSemaphoreWaitFlagBits VkSemaphoreWaitFlagBitsKHR; -#define VK_AMD_shader_trinary_minmax 1 -#define VK_AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION 1 -#define VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME "VK_AMD_shader_trinary_minmax" +typedef VkSemaphoreWaitFlags VkSemaphoreWaitFlagsKHR; +typedef VkPhysicalDeviceTimelineSemaphoreFeatures VkPhysicalDeviceTimelineSemaphoreFeaturesKHR; -#define VK_AMD_shader_explicit_vertex_parameter 1 -#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION 1 -#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME "VK_AMD_shader_explicit_vertex_parameter" +typedef VkPhysicalDeviceTimelineSemaphoreProperties VkPhysicalDeviceTimelineSemaphorePropertiesKHR; +typedef VkSemaphoreTypeCreateInfo VkSemaphoreTypeCreateInfoKHR; -#define VK_EXT_debug_marker 1 -#define VK_EXT_DEBUG_MARKER_SPEC_VERSION 4 -#define VK_EXT_DEBUG_MARKER_EXTENSION_NAME "VK_EXT_debug_marker" -typedef struct VkDebugMarkerObjectNameInfoEXT { - VkStructureType sType; - const void* pNext; - VkDebugReportObjectTypeEXT objectType; - uint64_t object; - const char* pObjectName; -} VkDebugMarkerObjectNameInfoEXT; +typedef VkTimelineSemaphoreSubmitInfo VkTimelineSemaphoreSubmitInfoKHR; -typedef struct VkDebugMarkerObjectTagInfoEXT { - VkStructureType sType; - const void* pNext; - VkDebugReportObjectTypeEXT objectType; - uint64_t object; - uint64_t tagName; - size_t tagSize; - const void* pTag; -} VkDebugMarkerObjectTagInfoEXT; +typedef VkSemaphoreWaitInfo VkSemaphoreWaitInfoKHR; -typedef struct VkDebugMarkerMarkerInfoEXT { - VkStructureType sType; - 
const void* pNext; - const char* pMarkerName; - float color[4]; -} VkDebugMarkerMarkerInfoEXT; +typedef VkSemaphoreSignalInfo VkSemaphoreSignalInfoKHR; -typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectTagEXT)(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo); -typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectNameEXT)(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo); -typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerBeginEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); -typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerEndEXT)(VkCommandBuffer commandBuffer); -typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerInsertEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreCounterValueKHR)(VkDevice device, VkSemaphore semaphore, uint64_t* pValue); +typedef VkResult (VKAPI_PTR *PFN_vkWaitSemaphoresKHR)(VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout); +typedef VkResult (VKAPI_PTR *PFN_vkSignalSemaphoreKHR)(VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectTagEXT( +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValueKHR( VkDevice device, - const VkDebugMarkerObjectTagInfoEXT* pTagInfo); + VkSemaphore semaphore, + uint64_t* pValue); -VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectNameEXT( +VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphoresKHR( VkDevice device, - const VkDebugMarkerObjectNameInfoEXT* pNameInfo); - -VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerBeginEXT( - VkCommandBuffer commandBuffer, - const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); - -VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerEndEXT( - VkCommandBuffer commandBuffer); + const VkSemaphoreWaitInfo* pWaitInfo, + uint64_t timeout); -VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerInsertEXT( - VkCommandBuffer commandBuffer, - const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); +VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphoreKHR( + VkDevice device, + const VkSemaphoreSignalInfo* pSignalInfo); #endif -#define VK_AMD_gcn_shader 1 -#define VK_AMD_GCN_SHADER_SPEC_VERSION 1 -#define VK_AMD_GCN_SHADER_EXTENSION_NAME "VK_AMD_gcn_shader" +// VK_KHR_vulkan_memory_model is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_vulkan_memory_model 1 +#define VK_KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION 3 +#define VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME "VK_KHR_vulkan_memory_model" +typedef VkPhysicalDeviceVulkanMemoryModelFeatures VkPhysicalDeviceVulkanMemoryModelFeaturesKHR; -#define VK_NV_dedicated_allocation 1 -#define VK_NV_DEDICATED_ALLOCATION_SPEC_VERSION 1 -#define VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_NV_dedicated_allocation" -typedef struct VkDedicatedAllocationImageCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkBool32 dedicatedAllocation; -} VkDedicatedAllocationImageCreateInfoNV; -typedef struct VkDedicatedAllocationBufferCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkBool32 dedicatedAllocation; -} VkDedicatedAllocationBufferCreateInfoNV; +// VK_KHR_shader_terminate_invocation is a preprocessor guard. Do not pass it to API calls. 
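/*
 * Illustrative usage sketch, not part of the generated header. It exercises the
 * timeline-semaphore entry points declared above (vkSignalSemaphoreKHR,
 * vkWaitSemaphoresKHR). The VkDevice is assumed to have been created with the
 * timelineSemaphore feature enabled; in practice the *KHR entry points are
 * usually fetched with vkGetDeviceProcAddr (or the Vulkan 1.2 core equivalents
 * are used), direct calls are shown here only for brevity.
 */
#include <stdint.h>
#include <vulkan/vulkan.h>

static VkResult timeline_semaphore_demo(VkDevice device)
{
    VkSemaphoreTypeCreateInfo typeInfo = {
        .sType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
        .semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE,
        .initialValue = 0 };
    VkSemaphoreCreateInfo createInfo = {
        .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
        .pNext = &typeInfo };
    VkSemaphore timeline;
    VkResult res = vkCreateSemaphore(device, &createInfo, NULL, &timeline);
    if (res != VK_SUCCESS) return res;

    /* Advance the counter to 1 from the host... */
    VkSemaphoreSignalInfo signalInfo = {
        .sType = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO,
        .semaphore = timeline,
        .value = 1 };
    vkSignalSemaphoreKHR(device, &signalInfo);

    /* ...and block until the counter reaches at least 1. */
    uint64_t waitValue = 1;
    VkSemaphoreWaitInfo waitInfo = {
        .sType = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
        .semaphoreCount = 1,
        .pSemaphores = &timeline,
        .pValues = &waitValue };
    res = vkWaitSemaphoresKHR(device, &waitInfo, UINT64_MAX);

    vkDestroySemaphore(device, timeline, NULL);
    return res;
}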
+#define VK_KHR_shader_terminate_invocation 1 +#define VK_KHR_SHADER_TERMINATE_INVOCATION_SPEC_VERSION 1 +#define VK_KHR_SHADER_TERMINATE_INVOCATION_EXTENSION_NAME "VK_KHR_shader_terminate_invocation" +typedef VkPhysicalDeviceShaderTerminateInvocationFeatures VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR; -typedef struct VkDedicatedAllocationMemoryAllocateInfoNV { - VkStructureType sType; - const void* pNext; - VkImage image; - VkBuffer buffer; -} VkDedicatedAllocationMemoryAllocateInfoNV; +// VK_KHR_fragment_shading_rate is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_fragment_shading_rate 1 +#define VK_KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION 2 +#define VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME "VK_KHR_fragment_shading_rate" -#define VK_EXT_transform_feedback 1 -#define VK_EXT_TRANSFORM_FEEDBACK_SPEC_VERSION 1 -#define VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME "VK_EXT_transform_feedback" -typedef VkFlags VkPipelineRasterizationStateStreamCreateFlagsEXT; -typedef struct VkPhysicalDeviceTransformFeedbackFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 transformFeedback; - VkBool32 geometryStreams; -} VkPhysicalDeviceTransformFeedbackFeaturesEXT; +typedef enum VkFragmentShadingRateCombinerOpKHR { + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR = 0, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR = 1, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR = 2, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR = 3, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR = 4, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_ENUM_KHR = 0x7FFFFFFF +} VkFragmentShadingRateCombinerOpKHR; +typedef struct VkFragmentShadingRateAttachmentInfoKHR { + VkStructureType sType; + const void* pNext; + const VkAttachmentReference2* pFragmentShadingRateAttachment; + VkExtent2D shadingRateAttachmentTexelSize; +} VkFragmentShadingRateAttachmentInfoKHR; -typedef struct VkPhysicalDeviceTransformFeedbackPropertiesEXT { +typedef struct VkPipelineFragmentShadingRateStateCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkExtent2D fragmentSize; + VkFragmentShadingRateCombinerOpKHR combinerOps[2]; +} VkPipelineFragmentShadingRateStateCreateInfoKHR; + +typedef struct VkPhysicalDeviceFragmentShadingRateFeaturesKHR { VkStructureType sType; void* pNext; - uint32_t maxTransformFeedbackStreams; - uint32_t maxTransformFeedbackBuffers; - VkDeviceSize maxTransformFeedbackBufferSize; - uint32_t maxTransformFeedbackStreamDataSize; - uint32_t maxTransformFeedbackBufferDataSize; - uint32_t maxTransformFeedbackBufferDataStride; - VkBool32 transformFeedbackQueries; - VkBool32 transformFeedbackStreamsLinesTriangles; - VkBool32 transformFeedbackRasterizationStreamSelect; - VkBool32 transformFeedbackDraw; -} VkPhysicalDeviceTransformFeedbackPropertiesEXT; + VkBool32 pipelineFragmentShadingRate; + VkBool32 primitiveFragmentShadingRate; + VkBool32 attachmentFragmentShadingRate; +} VkPhysicalDeviceFragmentShadingRateFeaturesKHR; -typedef struct VkPipelineRasterizationStateStreamCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkPipelineRasterizationStateStreamCreateFlagsEXT flags; - uint32_t rasterizationStream; -} VkPipelineRasterizationStateStreamCreateInfoEXT; +typedef struct VkPhysicalDeviceFragmentShadingRatePropertiesKHR { + VkStructureType sType; + void* pNext; + VkExtent2D minFragmentShadingRateAttachmentTexelSize; + VkExtent2D maxFragmentShadingRateAttachmentTexelSize; + uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio; + VkBool32 
primitiveFragmentShadingRateWithMultipleViewports; + VkBool32 layeredShadingRateAttachments; + VkBool32 fragmentShadingRateNonTrivialCombinerOps; + VkExtent2D maxFragmentSize; + uint32_t maxFragmentSizeAspectRatio; + uint32_t maxFragmentShadingRateCoverageSamples; + VkSampleCountFlagBits maxFragmentShadingRateRasterizationSamples; + VkBool32 fragmentShadingRateWithShaderDepthStencilWrites; + VkBool32 fragmentShadingRateWithSampleMask; + VkBool32 fragmentShadingRateWithShaderSampleMask; + VkBool32 fragmentShadingRateWithConservativeRasterization; + VkBool32 fragmentShadingRateWithFragmentShaderInterlock; + VkBool32 fragmentShadingRateWithCustomSampleLocations; + VkBool32 fragmentShadingRateStrictMultiplyCombiner; +} VkPhysicalDeviceFragmentShadingRatePropertiesKHR; -typedef void (VKAPI_PTR *PFN_vkCmdBindTransformFeedbackBuffersEXT)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes); -typedef void (VKAPI_PTR *PFN_vkCmdBeginTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets); -typedef void (VKAPI_PTR *PFN_vkCmdEndTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets); -typedef void (VKAPI_PTR *PFN_vkCmdBeginQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index); -typedef void (VKAPI_PTR *PFN_vkCmdEndQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index); -typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectByteCountEXT)(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride); +typedef struct VkPhysicalDeviceFragmentShadingRateKHR { + VkStructureType sType; + void* pNext; + VkSampleCountFlags sampleCounts; + VkExtent2D fragmentSize; +} VkPhysicalDeviceFragmentShadingRateKHR; + +typedef struct VkRenderingFragmentShadingRateAttachmentInfoKHR { + VkStructureType sType; + const void* pNext; + VkImageView imageView; + VkImageLayout imageLayout; + VkExtent2D shadingRateAttachmentTexelSize; +} VkRenderingFragmentShadingRateAttachmentInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pFragmentShadingRateCount, VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates); +typedef void (VKAPI_PTR *PFN_vkCmdSetFragmentShadingRateKHR)(VkCommandBuffer commandBuffer, const VkExtent2D* pFragmentSize, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdBindTransformFeedbackBuffersEXT( - VkCommandBuffer commandBuffer, - uint32_t firstBinding, - uint32_t bindingCount, - const VkBuffer* pBuffers, - const VkDeviceSize* pOffsets, - const VkDeviceSize* pSizes); +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceFragmentShadingRatesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pFragmentShadingRateCount, + VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates); -VKAPI_ATTR void VKAPI_CALL vkCmdBeginTransformFeedbackEXT( +VKAPI_ATTR void VKAPI_CALL vkCmdSetFragmentShadingRateKHR( VkCommandBuffer commandBuffer, - uint32_t firstCounterBuffer, - 
uint32_t counterBufferCount, - const VkBuffer* pCounterBuffers, - const VkDeviceSize* pCounterBufferOffsets); + const VkExtent2D* pFragmentSize, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); +#endif -VKAPI_ATTR void VKAPI_CALL vkCmdEndTransformFeedbackEXT( - VkCommandBuffer commandBuffer, - uint32_t firstCounterBuffer, - uint32_t counterBufferCount, - const VkBuffer* pCounterBuffers, - const VkDeviceSize* pCounterBufferOffsets); -VKAPI_ATTR void VKAPI_CALL vkCmdBeginQueryIndexedEXT( - VkCommandBuffer commandBuffer, - VkQueryPool queryPool, - uint32_t query, - VkQueryControlFlags flags, - uint32_t index); +// VK_KHR_dynamic_rendering_local_read is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_dynamic_rendering_local_read 1 +#define VK_KHR_DYNAMIC_RENDERING_LOCAL_READ_SPEC_VERSION 1 +#define VK_KHR_DYNAMIC_RENDERING_LOCAL_READ_EXTENSION_NAME "VK_KHR_dynamic_rendering_local_read" +typedef VkPhysicalDeviceDynamicRenderingLocalReadFeatures VkPhysicalDeviceDynamicRenderingLocalReadFeaturesKHR; -VKAPI_ATTR void VKAPI_CALL vkCmdEndQueryIndexedEXT( +typedef VkRenderingAttachmentLocationInfo VkRenderingAttachmentLocationInfoKHR; + +typedef VkRenderingInputAttachmentIndexInfo VkRenderingInputAttachmentIndexInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdSetRenderingAttachmentLocationsKHR)(VkCommandBuffer commandBuffer, const VkRenderingAttachmentLocationInfo* pLocationInfo); +typedef void (VKAPI_PTR *PFN_vkCmdSetRenderingInputAttachmentIndicesKHR)(VkCommandBuffer commandBuffer, const VkRenderingInputAttachmentIndexInfo* pInputAttachmentIndexInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetRenderingAttachmentLocationsKHR( VkCommandBuffer commandBuffer, - VkQueryPool queryPool, - uint32_t query, - uint32_t index); + const VkRenderingAttachmentLocationInfo* pLocationInfo); -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectByteCountEXT( +VKAPI_ATTR void VKAPI_CALL vkCmdSetRenderingInputAttachmentIndicesKHR( VkCommandBuffer commandBuffer, - uint32_t instanceCount, - uint32_t firstInstance, - VkBuffer counterBuffer, - VkDeviceSize counterBufferOffset, - uint32_t counterOffset, - uint32_t vertexStride); + const VkRenderingInputAttachmentIndexInfo* pInputAttachmentIndexInfo); #endif -#define VK_NVX_binary_import 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuModuleNVX) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuFunctionNVX) -#define VK_NVX_BINARY_IMPORT_SPEC_VERSION 1 -#define VK_NVX_BINARY_IMPORT_EXTENSION_NAME "VK_NVX_binary_import" -typedef struct VkCuModuleCreateInfoNVX { +// VK_KHR_shader_quad_control is a preprocessor guard. Do not pass it to API calls. 
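/*
 * Illustrative usage sketch, not part of the generated header. It shows the
 * dynamic, per-draw form of VK_KHR_fragment_shading_rate declared above
 * (vkCmdSetFragmentShadingRateKHR). The command buffer is assumed to be in the
 * recording state and the pipelineFragmentShadingRate feature enabled.
 */
#include <vulkan/vulkan.h>

static void set_coarse_shading_rate(VkCommandBuffer cmd)
{
    /* Request 2x2 coarse pixels. combinerOps[0] combines this rate with the
     * primitive rate, combinerOps[1] combines the result with the attachment
     * rate; KEEP keeps the first operand, so the final rate is simply 2x2. */
    VkExtent2D fragmentSize = { 2, 2 };
    VkFragmentShadingRateCombinerOpKHR ops[2] = {
        VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR,
        VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR };
    vkCmdSetFragmentShadingRateKHR(cmd, &fragmentSize, ops);
}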
+#define VK_KHR_shader_quad_control 1 +#define VK_KHR_SHADER_QUAD_CONTROL_SPEC_VERSION 1 +#define VK_KHR_SHADER_QUAD_CONTROL_EXTENSION_NAME "VK_KHR_shader_quad_control" +typedef struct VkPhysicalDeviceShaderQuadControlFeaturesKHR { VkStructureType sType; - const void* pNext; - size_t dataSize; - const void* pData; -} VkCuModuleCreateInfoNVX; + void* pNext; + VkBool32 shaderQuadControl; +} VkPhysicalDeviceShaderQuadControlFeaturesKHR; -typedef struct VkCuFunctionCreateInfoNVX { - VkStructureType sType; - const void* pNext; - VkCuModuleNVX module; - const char* pName; -} VkCuFunctionCreateInfoNVX; -typedef struct VkCuLaunchInfoNVX { - VkStructureType sType; - const void* pNext; - VkCuFunctionNVX function; - uint32_t gridDimX; - uint32_t gridDimY; - uint32_t gridDimZ; - uint32_t blockDimX; - uint32_t blockDimY; - uint32_t blockDimZ; - uint32_t sharedMemBytes; - size_t paramCount; - const void* const * pParams; - size_t extraCount; - const void* const * pExtras; -} VkCuLaunchInfoNVX; -typedef VkResult (VKAPI_PTR *PFN_vkCreateCuModuleNVX)(VkDevice device, const VkCuModuleCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCuModuleNVX* pModule); -typedef VkResult (VKAPI_PTR *PFN_vkCreateCuFunctionNVX)(VkDevice device, const VkCuFunctionCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCuFunctionNVX* pFunction); -typedef void (VKAPI_PTR *PFN_vkDestroyCuModuleNVX)(VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks* pAllocator); -typedef void (VKAPI_PTR *PFN_vkDestroyCuFunctionNVX)(VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks* pAllocator); -typedef void (VKAPI_PTR *PFN_vkCmdCuLaunchKernelNVX)(VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX* pLaunchInfo); +// VK_KHR_spirv_1_4 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_spirv_1_4 1 +#define VK_KHR_SPIRV_1_4_SPEC_VERSION 1 +#define VK_KHR_SPIRV_1_4_EXTENSION_NAME "VK_KHR_spirv_1_4" -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateCuModuleNVX( - VkDevice device, - const VkCuModuleCreateInfoNVX* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkCuModuleNVX* pModule); -VKAPI_ATTR VkResult VKAPI_CALL vkCreateCuFunctionNVX( - VkDevice device, - const VkCuFunctionCreateInfoNVX* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkCuFunctionNVX* pFunction); +// VK_KHR_surface_protected_capabilities is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_surface_protected_capabilities 1 +#define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME "VK_KHR_surface_protected_capabilities" +typedef struct VkSurfaceProtectedCapabilitiesKHR { + VkStructureType sType; + const void* pNext; + VkBool32 supportsProtected; +} VkSurfaceProtectedCapabilitiesKHR; -VKAPI_ATTR void VKAPI_CALL vkDestroyCuModuleNVX( - VkDevice device, - VkCuModuleNVX module, - const VkAllocationCallbacks* pAllocator); -VKAPI_ATTR void VKAPI_CALL vkDestroyCuFunctionNVX( - VkDevice device, - VkCuFunctionNVX function, - const VkAllocationCallbacks* pAllocator); -VKAPI_ATTR void VKAPI_CALL vkCmdCuLaunchKernelNVX( - VkCommandBuffer commandBuffer, - const VkCuLaunchInfoNVX* pLaunchInfo); -#endif +// VK_KHR_separate_depth_stencil_layouts is a preprocessor guard. Do not pass it to API calls. 
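/*
 * Illustrative usage sketch, not part of the generated header.
 * VkSurfaceProtectedCapabilitiesKHR (declared above) is returned by chaining it
 * into VkSurfaceCapabilities2KHR from VK_KHR_get_surface_capabilities2, which is
 * assumed to be enabled on the instance along with an already-created surface.
 */
#include <vulkan/vulkan.h>

static VkBool32 surface_supports_protected(VkPhysicalDevice gpu, VkSurfaceKHR surface)
{
    VkSurfaceProtectedCapabilitiesKHR protectedCaps = {
        .sType = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR };
    VkSurfaceCapabilities2KHR caps2 = {
        .sType = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR,
        .pNext = &protectedCaps };
    VkPhysicalDeviceSurfaceInfo2KHR surfaceInfo = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR,
        .surface = surface };

    if (vkGetPhysicalDeviceSurfaceCapabilities2KHR(gpu, &surfaceInfo, &caps2) != VK_SUCCESS)
        return VK_FALSE;

    /* VK_TRUE means a swapchain may be created with
     * VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR for this surface. */
    return protectedCaps.supportsProtected;
}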
+#define VK_KHR_separate_depth_stencil_layouts 1 +#define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION 1 +#define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME "VK_KHR_separate_depth_stencil_layouts" +typedef VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR; +typedef VkAttachmentReferenceStencilLayout VkAttachmentReferenceStencilLayoutKHR; -#define VK_NVX_image_view_handle 1 -#define VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION 2 -#define VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME "VK_NVX_image_view_handle" -typedef struct VkImageViewHandleInfoNVX { - VkStructureType sType; - const void* pNext; - VkImageView imageView; - VkDescriptorType descriptorType; - VkSampler sampler; -} VkImageViewHandleInfoNVX; +typedef VkAttachmentDescriptionStencilLayout VkAttachmentDescriptionStencilLayoutKHR; -typedef struct VkImageViewAddressPropertiesNVX { + + +// VK_KHR_present_wait is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_present_wait 1 +#define VK_KHR_PRESENT_WAIT_SPEC_VERSION 1 +#define VK_KHR_PRESENT_WAIT_EXTENSION_NAME "VK_KHR_present_wait" +typedef struct VkPhysicalDevicePresentWaitFeaturesKHR { VkStructureType sType; void* pNext; - VkDeviceAddress deviceAddress; - VkDeviceSize size; -} VkImageViewAddressPropertiesNVX; + VkBool32 presentWait; +} VkPhysicalDevicePresentWaitFeaturesKHR; -typedef uint32_t (VKAPI_PTR *PFN_vkGetImageViewHandleNVX)(VkDevice device, const VkImageViewHandleInfoNVX* pInfo); -typedef VkResult (VKAPI_PTR *PFN_vkGetImageViewAddressNVX)(VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkWaitForPresentKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR uint32_t VKAPI_CALL vkGetImageViewHandleNVX( - VkDevice device, - const VkImageViewHandleInfoNVX* pInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetImageViewAddressNVX( +VKAPI_ATTR VkResult VKAPI_CALL vkWaitForPresentKHR( VkDevice device, - VkImageView imageView, - VkImageViewAddressPropertiesNVX* pProperties); + VkSwapchainKHR swapchain, + uint64_t presentId, + uint64_t timeout); #endif -#define VK_AMD_draw_indirect_count 1 -#define VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION 2 -#define VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_AMD_draw_indirect_count" -typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); -typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +// VK_KHR_uniform_buffer_standard_layout is a preprocessor guard. Do not pass it to API calls. 
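/*
 * Illustrative usage sketch, not part of the generated header. It shows the
 * intended use of vkWaitForPresentKHR declared above: the id passed here is
 * assumed to have been attached to an earlier vkQueuePresentKHR call through
 * VkPresentIdKHR (VK_KHR_present_id, declared later in this header; see the
 * sketch further below).
 */
#include <stdint.h>
#include <vulkan/vulkan.h>

static void wait_for_frame_on_screen(VkDevice device, VkSwapchainKHR swapchain,
                                     uint64_t presentedId)
{
    /* Block (up to one second here) until the presentation engine has shown
     * the image that was queued with this present id. */
    VkResult res = vkWaitForPresentKHR(device, swapchain, presentedId,
                                       1000ull * 1000ull * 1000ull);
    (void)res; /* VK_TIMEOUT simply means the frame is not on screen yet. */
}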
+#define VK_KHR_uniform_buffer_standard_layout 1 +#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION 1 +#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME "VK_KHR_uniform_buffer_standard_layout" +typedef VkPhysicalDeviceUniformBufferStandardLayoutFeatures VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountAMD( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride); -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountAMD( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride); -#endif +// VK_KHR_buffer_device_address is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_buffer_device_address 1 +#define VK_KHR_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 1 +#define VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_KHR_buffer_device_address" +typedef VkPhysicalDeviceBufferDeviceAddressFeatures VkPhysicalDeviceBufferDeviceAddressFeaturesKHR; -#define VK_AMD_negative_viewport_height 1 -#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION 1 -#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME "VK_AMD_negative_viewport_height" +typedef VkBufferDeviceAddressInfo VkBufferDeviceAddressInfoKHR; +typedef VkBufferOpaqueCaptureAddressCreateInfo VkBufferOpaqueCaptureAddressCreateInfoKHR; -#define VK_AMD_gpu_shader_half_float 1 -#define VK_AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION 2 -#define VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME "VK_AMD_gpu_shader_half_float" +typedef VkMemoryOpaqueCaptureAddressAllocateInfo VkMemoryOpaqueCaptureAddressAllocateInfoKHR; +typedef VkDeviceMemoryOpaqueCaptureAddressInfo VkDeviceMemoryOpaqueCaptureAddressInfoKHR; -#define VK_AMD_shader_ballot 1 -#define VK_AMD_SHADER_BALLOT_SPEC_VERSION 1 -#define VK_AMD_SHADER_BALLOT_EXTENSION_NAME "VK_AMD_shader_ballot" +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressKHR)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureAddressKHR)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR)(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressKHR( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); -#define VK_AMD_texture_gather_bias_lod 1 -#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION 1 -#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME "VK_AMD_texture_gather_bias_lod" -typedef struct VkTextureLODGatherFormatPropertiesAMD { - VkStructureType sType; - void* pNext; - VkBool32 supportsTextureGatherLODBiasAMD; -} VkTextureLODGatherFormatPropertiesAMD; +VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddressKHR( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); +VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddressKHR( + VkDevice device, + const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); +#endif -#define VK_AMD_shader_info 1 -#define VK_AMD_SHADER_INFO_SPEC_VERSION 1 -#define VK_AMD_SHADER_INFO_EXTENSION_NAME "VK_AMD_shader_info" +// VK_KHR_deferred_host_operations is a preprocessor guard. Do not pass it to API calls. 
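/*
 * Illustrative usage sketch, not part of the generated header. It queries a GPU
 * virtual address with vkGetBufferDeviceAddressKHR (declared above). The buffer
 * is assumed to have been created with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT
 * and bound to memory allocated with VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT set
 * in a chained VkMemoryAllocateFlagsInfo.
 */
#include <vulkan/vulkan.h>

static VkDeviceAddress get_buffer_address(VkDevice device, VkBuffer buffer)
{
    VkBufferDeviceAddressInfo info = {
        .sType = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
        .buffer = buffer };
    /* The returned address can be written into a uniform or push constant and
     * dereferenced in shaders via SPV_KHR_physical_storage_buffer. */
    return vkGetBufferDeviceAddressKHR(device, &info);
}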
+#define VK_KHR_deferred_host_operations 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeferredOperationKHR) +#define VK_KHR_DEFERRED_HOST_OPERATIONS_SPEC_VERSION 4 +#define VK_KHR_DEFERRED_HOST_OPERATIONS_EXTENSION_NAME "VK_KHR_deferred_host_operations" +typedef VkResult (VKAPI_PTR *PFN_vkCreateDeferredOperationKHR)(VkDevice device, const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation); +typedef void (VKAPI_PTR *PFN_vkDestroyDeferredOperationKHR)(VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator); +typedef uint32_t (VKAPI_PTR *PFN_vkGetDeferredOperationMaxConcurrencyKHR)(VkDevice device, VkDeferredOperationKHR operation); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeferredOperationResultKHR)(VkDevice device, VkDeferredOperationKHR operation); +typedef VkResult (VKAPI_PTR *PFN_vkDeferredOperationJoinKHR)(VkDevice device, VkDeferredOperationKHR operation); -typedef enum VkShaderInfoTypeAMD { - VK_SHADER_INFO_TYPE_STATISTICS_AMD = 0, - VK_SHADER_INFO_TYPE_BINARY_AMD = 1, - VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD = 2, - VK_SHADER_INFO_TYPE_MAX_ENUM_AMD = 0x7FFFFFFF -} VkShaderInfoTypeAMD; -typedef struct VkShaderResourceUsageAMD { - uint32_t numUsedVgprs; - uint32_t numUsedSgprs; - uint32_t ldsSizePerLocalWorkGroup; - size_t ldsUsageSizeInBytes; - size_t scratchMemUsageInBytes; -} VkShaderResourceUsageAMD; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDeferredOperationKHR( + VkDevice device, + const VkAllocationCallbacks* pAllocator, + VkDeferredOperationKHR* pDeferredOperation); -typedef struct VkShaderStatisticsInfoAMD { - VkShaderStageFlags shaderStageMask; - VkShaderResourceUsageAMD resourceUsage; - uint32_t numPhysicalVgprs; - uint32_t numPhysicalSgprs; - uint32_t numAvailableVgprs; - uint32_t numAvailableSgprs; - uint32_t computeWorkGroupSize[3]; -} VkShaderStatisticsInfoAMD; +VKAPI_ATTR void VKAPI_CALL vkDestroyDeferredOperationKHR( + VkDevice device, + VkDeferredOperationKHR operation, + const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkGetShaderInfoAMD)(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo); +VKAPI_ATTR uint32_t VKAPI_CALL vkGetDeferredOperationMaxConcurrencyKHR( + VkDevice device, + VkDeferredOperationKHR operation); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetShaderInfoAMD( +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeferredOperationResultKHR( VkDevice device, - VkPipeline pipeline, - VkShaderStageFlagBits shaderStage, - VkShaderInfoTypeAMD infoType, - size_t* pInfoSize, - void* pInfo); + VkDeferredOperationKHR operation); + +VKAPI_ATTR VkResult VKAPI_CALL vkDeferredOperationJoinKHR( + VkDevice device, + VkDeferredOperationKHR operation); #endif -#define VK_AMD_shader_image_load_store_lod 1 -#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_SPEC_VERSION 1 -#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME "VK_AMD_shader_image_load_store_lod" +// VK_KHR_pipeline_executable_properties is a preprocessor guard. Do not pass it to API calls. 
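/*
 * Illustrative usage sketch, not part of the generated header. It shows the
 * join/poll pattern for the VkDeferredOperationKHR entry points declared above.
 * The deferrable command itself (for example an acceleration-structure build
 * from VK_KHR_acceleration_structure) is outside this section and is only
 * hinted at in the comments; error handling is minimal.
 */
#include <vulkan/vulkan.h>

static VkResult run_deferred(VkDevice device)
{
    VkDeferredOperationKHR op;
    VkResult res = vkCreateDeferredOperationKHR(device, NULL, &op);
    if (res != VK_SUCCESS) return res;

    /* ...pass 'op' to a deferrable command here; if it returns
     * VK_OPERATION_DEFERRED_KHR, threads execute the work by joining: */

    /* Single-threaded drive loop: VK_THREAD_IDLE_KHR means "try again";
     * VK_SUCCESS or VK_THREAD_DONE_KHR means this thread can stop. */
    do {
        res = vkDeferredOperationJoinKHR(device, op);
    } while (res == VK_THREAD_IDLE_KHR);

    /* Fetch the result of the original deferred command, then clean up. */
    res = vkGetDeferredOperationResultKHR(device, op);
    vkDestroyDeferredOperationKHR(device, op, NULL);
    return res;
}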
+#define VK_KHR_pipeline_executable_properties 1 +#define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION 1 +#define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME "VK_KHR_pipeline_executable_properties" +typedef enum VkPipelineExecutableStatisticFormatKHR { + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR = 0, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR = 1, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR = 2, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR = 3, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPipelineExecutableStatisticFormatKHR; +typedef struct VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 pipelineExecutableInfo; +} VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR; -#define VK_NV_corner_sampled_image 1 -#define VK_NV_CORNER_SAMPLED_IMAGE_SPEC_VERSION 2 -#define VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME "VK_NV_corner_sampled_image" -typedef struct VkPhysicalDeviceCornerSampledImageFeaturesNV { +typedef struct VkPipelineInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipeline pipeline; +} VkPipelineInfoKHR; + +typedef struct VkPipelineExecutablePropertiesKHR { + VkStructureType sType; + void* pNext; + VkShaderStageFlags stages; + char name[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; + uint32_t subgroupSize; +} VkPipelineExecutablePropertiesKHR; + +typedef struct VkPipelineExecutableInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipeline pipeline; + uint32_t executableIndex; +} VkPipelineExecutableInfoKHR; + +typedef union VkPipelineExecutableStatisticValueKHR { + VkBool32 b32; + int64_t i64; + uint64_t u64; + double f64; +} VkPipelineExecutableStatisticValueKHR; + +typedef struct VkPipelineExecutableStatisticKHR { + VkStructureType sType; + void* pNext; + char name[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; + VkPipelineExecutableStatisticFormatKHR format; + VkPipelineExecutableStatisticValueKHR value; +} VkPipelineExecutableStatisticKHR; + +typedef struct VkPipelineExecutableInternalRepresentationKHR { VkStructureType sType; void* pNext; - VkBool32 cornerSampledImage; -} VkPhysicalDeviceCornerSampledImageFeaturesNV; + char name[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; + VkBool32 isText; + size_t dataSize; + void* pData; +} VkPipelineExecutableInternalRepresentationKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutablePropertiesKHR)(VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutableStatisticsKHR)(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutableInternalRepresentationsKHR)(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutablePropertiesKHR( + VkDevice device, + const VkPipelineInfoKHR* pPipelineInfo, + uint32_t* pExecutableCount, + VkPipelineExecutablePropertiesKHR* pProperties); +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableStatisticsKHR( + VkDevice device, + const VkPipelineExecutableInfoKHR* pExecutableInfo, + uint32_t* 
pStatisticCount, + VkPipelineExecutableStatisticKHR* pStatistics); -#define VK_IMG_format_pvrtc 1 -#define VK_IMG_FORMAT_PVRTC_SPEC_VERSION 1 -#define VK_IMG_FORMAT_PVRTC_EXTENSION_NAME "VK_IMG_format_pvrtc" +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableInternalRepresentationsKHR( + VkDevice device, + const VkPipelineExecutableInfoKHR* pExecutableInfo, + uint32_t* pInternalRepresentationCount, + VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations); +#endif -#define VK_NV_external_memory_capabilities 1 -#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 -#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_NV_external_memory_capabilities" +// VK_KHR_map_memory2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_map_memory2 1 +#define VK_KHR_MAP_MEMORY_2_SPEC_VERSION 1 +#define VK_KHR_MAP_MEMORY_2_EXTENSION_NAME "VK_KHR_map_memory2" +typedef VkMemoryUnmapFlagBits VkMemoryUnmapFlagBitsKHR; -typedef enum VkExternalMemoryHandleTypeFlagBitsNV { - VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV = 0x00000001, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV = 0x00000002, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV = 0x00000004, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV = 0x00000008, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF -} VkExternalMemoryHandleTypeFlagBitsNV; -typedef VkFlags VkExternalMemoryHandleTypeFlagsNV; +typedef VkMemoryUnmapFlags VkMemoryUnmapFlagsKHR; -typedef enum VkExternalMemoryFeatureFlagBitsNV { - VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV = 0x00000001, - VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV = 0x00000002, - VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV = 0x00000004, - VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF -} VkExternalMemoryFeatureFlagBitsNV; -typedef VkFlags VkExternalMemoryFeatureFlagsNV; -typedef struct VkExternalImageFormatPropertiesNV { - VkImageFormatProperties imageFormatProperties; - VkExternalMemoryFeatureFlagsNV externalMemoryFeatures; - VkExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes; - VkExternalMemoryHandleTypeFlagsNV compatibleHandleTypes; -} VkExternalImageFormatPropertiesNV; +typedef VkMemoryMapInfo VkMemoryMapInfoKHR; -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); +typedef VkMemoryUnmapInfo VkMemoryUnmapInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkMapMemory2KHR)(VkDevice device, const VkMemoryMapInfo* pMemoryMapInfo, void** ppData); +typedef VkResult (VKAPI_PTR *PFN_vkUnmapMemory2KHR)(VkDevice device, const VkMemoryUnmapInfo* pMemoryUnmapInfo); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceExternalImageFormatPropertiesNV( - VkPhysicalDevice physicalDevice, - VkFormat format, - VkImageType type, - VkImageTiling tiling, - VkImageUsageFlags usage, - VkImageCreateFlags flags, - VkExternalMemoryHandleTypeFlagsNV externalHandleType, - VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); +VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory2KHR( + VkDevice device, + const VkMemoryMapInfo* pMemoryMapInfo, + void** ppData); + +VKAPI_ATTR VkResult VKAPI_CALL vkUnmapMemory2KHR( + VkDevice device, + const VkMemoryUnmapInfo* pMemoryUnmapInfo); #endif -#define 
VK_NV_external_memory 1 -#define VK_NV_EXTERNAL_MEMORY_SPEC_VERSION 1 -#define VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME "VK_NV_external_memory" -typedef struct VkExternalMemoryImageCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagsNV handleTypes; -} VkExternalMemoryImageCreateInfoNV; +// VK_KHR_shader_integer_dot_product is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_integer_dot_product 1 +#define VK_KHR_SHADER_INTEGER_DOT_PRODUCT_SPEC_VERSION 1 +#define VK_KHR_SHADER_INTEGER_DOT_PRODUCT_EXTENSION_NAME "VK_KHR_shader_integer_dot_product" +typedef VkPhysicalDeviceShaderIntegerDotProductFeatures VkPhysicalDeviceShaderIntegerDotProductFeaturesKHR; -typedef struct VkExportMemoryAllocateInfoNV { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagsNV handleTypes; -} VkExportMemoryAllocateInfoNV; +typedef VkPhysicalDeviceShaderIntegerDotProductProperties VkPhysicalDeviceShaderIntegerDotProductPropertiesKHR; -#define VK_EXT_validation_flags 1 -#define VK_EXT_VALIDATION_FLAGS_SPEC_VERSION 2 -#define VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME "VK_EXT_validation_flags" +// VK_KHR_pipeline_library is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_pipeline_library 1 +#define VK_KHR_PIPELINE_LIBRARY_SPEC_VERSION 1 +#define VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME "VK_KHR_pipeline_library" +typedef struct VkPipelineLibraryCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t libraryCount; + const VkPipeline* pLibraries; +} VkPipelineLibraryCreateInfoKHR; -typedef enum VkValidationCheckEXT { - VK_VALIDATION_CHECK_ALL_EXT = 0, - VK_VALIDATION_CHECK_SHADERS_EXT = 1, - VK_VALIDATION_CHECK_MAX_ENUM_EXT = 0x7FFFFFFF -} VkValidationCheckEXT; -typedef struct VkValidationFlagsEXT { - VkStructureType sType; - const void* pNext; - uint32_t disabledValidationCheckCount; - const VkValidationCheckEXT* pDisabledValidationChecks; -} VkValidationFlagsEXT; +// VK_KHR_shader_non_semantic_info is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_non_semantic_info 1 +#define VK_KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION 1 +#define VK_KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME "VK_KHR_shader_non_semantic_info" -#define VK_EXT_shader_subgroup_ballot 1 -#define VK_EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION 1 -#define VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME "VK_EXT_shader_subgroup_ballot" +// VK_KHR_present_id is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_present_id 1 +#define VK_KHR_PRESENT_ID_SPEC_VERSION 1 +#define VK_KHR_PRESENT_ID_EXTENSION_NAME "VK_KHR_present_id" +typedef struct VkPresentIdKHR { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const uint64_t* pPresentIds; +} VkPresentIdKHR; -#define VK_EXT_shader_subgroup_vote 1 -#define VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION 1 -#define VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME "VK_EXT_shader_subgroup_vote" +typedef struct VkPhysicalDevicePresentIdFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 presentId; +} VkPhysicalDevicePresentIdFeaturesKHR; -#define VK_EXT_texture_compression_astc_hdr 1 -#define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION 1 -#define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME "VK_EXT_texture_compression_astc_hdr" -typedef VkPhysicalDeviceTextureCompressionASTCHDRFeatures VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT; +// VK_KHR_video_encode_queue is a preprocessor guard. Do not pass it to API calls. 
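/*
 * Illustrative usage sketch, not part of the generated header. It tags a present
 * operation with an id through VkPresentIdKHR (declared above) so that a later
 * vkWaitForPresentKHR on the same swapchain can wait for that frame. The queue,
 * swapchain and semaphore handles are assumed to exist and the presentId
 * feature to be enabled.
 */
#include <stdint.h>
#include <vulkan/vulkan.h>

static VkResult present_with_id(VkQueue queue, VkSwapchainKHR swapchain,
                                uint32_t imageIndex, VkSemaphore renderDone,
                                uint64_t frameId)
{
    VkPresentIdKHR presentId = {
        .sType = VK_STRUCTURE_TYPE_PRESENT_ID_KHR,
        .swapchainCount = 1,
        .pPresentIds = &frameId };
    VkPresentInfoKHR presentInfo = {
        .sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
        .pNext = &presentId,
        .waitSemaphoreCount = 1,
        .pWaitSemaphores = &renderDone,
        .swapchainCount = 1,
        .pSwapchains = &swapchain,
        .pImageIndices = &imageIndex };
    return vkQueuePresentKHR(queue, &presentInfo);
}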
+#define VK_KHR_video_encode_queue 1 +#define VK_KHR_VIDEO_ENCODE_QUEUE_SPEC_VERSION 12 +#define VK_KHR_VIDEO_ENCODE_QUEUE_EXTENSION_NAME "VK_KHR_video_encode_queue" + +typedef enum VkVideoEncodeTuningModeKHR { + VK_VIDEO_ENCODE_TUNING_MODE_DEFAULT_KHR = 0, + VK_VIDEO_ENCODE_TUNING_MODE_HIGH_QUALITY_KHR = 1, + VK_VIDEO_ENCODE_TUNING_MODE_LOW_LATENCY_KHR = 2, + VK_VIDEO_ENCODE_TUNING_MODE_ULTRA_LOW_LATENCY_KHR = 3, + VK_VIDEO_ENCODE_TUNING_MODE_LOSSLESS_KHR = 4, + VK_VIDEO_ENCODE_TUNING_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeTuningModeKHR; + +typedef enum VkVideoEncodeFlagBitsKHR { + VK_VIDEO_ENCODE_WITH_QUANTIZATION_DELTA_MAP_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_WITH_EMPHASIS_MAP_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeFlagBitsKHR; +typedef VkFlags VkVideoEncodeFlagsKHR; + +typedef enum VkVideoEncodeCapabilityFlagBitsKHR { + VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_CAPABILITY_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_DETECTION_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_CAPABILITY_QUANTIZATION_DELTA_MAP_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_CAPABILITY_EMPHASIS_MAP_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeCapabilityFlagBitsKHR; +typedef VkFlags VkVideoEncodeCapabilityFlagsKHR; + +typedef enum VkVideoEncodeRateControlModeFlagBitsKHR { + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DEFAULT_KHR = 0, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DISABLED_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeRateControlModeFlagBitsKHR; +typedef VkFlags VkVideoEncodeRateControlModeFlagsKHR; + +typedef enum VkVideoEncodeFeedbackFlagBitsKHR { + VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BUFFER_OFFSET_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BYTES_WRITTEN_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_HAS_OVERRIDES_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_FEEDBACK_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeFeedbackFlagBitsKHR; +typedef VkFlags VkVideoEncodeFeedbackFlagsKHR; + +typedef enum VkVideoEncodeUsageFlagBitsKHR { + VK_VIDEO_ENCODE_USAGE_DEFAULT_KHR = 0, + VK_VIDEO_ENCODE_USAGE_TRANSCODING_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_USAGE_STREAMING_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_USAGE_RECORDING_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_USAGE_CONFERENCING_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_USAGE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeUsageFlagBitsKHR; +typedef VkFlags VkVideoEncodeUsageFlagsKHR; + +typedef enum VkVideoEncodeContentFlagBitsKHR { + VK_VIDEO_ENCODE_CONTENT_DEFAULT_KHR = 0, + VK_VIDEO_ENCODE_CONTENT_CAMERA_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_CONTENT_DESKTOP_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_CONTENT_RENDERED_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_CONTENT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeContentFlagBitsKHR; +typedef VkFlags VkVideoEncodeContentFlagsKHR; +typedef VkFlags VkVideoEncodeRateControlFlagsKHR; +typedef struct VkVideoEncodeInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeFlagsKHR flags; + VkBuffer dstBuffer; + VkDeviceSize dstBufferOffset; + VkDeviceSize dstBufferRange; + VkVideoPictureResourceInfoKHR srcPictureResource; + const VkVideoReferenceSlotInfoKHR* pSetupReferenceSlot; + uint32_t referenceSlotCount; + const 
VkVideoReferenceSlotInfoKHR* pReferenceSlots; + uint32_t precedingExternallyEncodedBytes; +} VkVideoEncodeInfoKHR; + +typedef struct VkVideoEncodeCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeCapabilityFlagsKHR flags; + VkVideoEncodeRateControlModeFlagsKHR rateControlModes; + uint32_t maxRateControlLayers; + uint64_t maxBitrate; + uint32_t maxQualityLevels; + VkExtent2D encodeInputPictureGranularity; + VkVideoEncodeFeedbackFlagsKHR supportedEncodeFeedbackFlags; +} VkVideoEncodeCapabilitiesKHR; + +typedef struct VkQueryPoolVideoEncodeFeedbackCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeFeedbackFlagsKHR encodeFeedbackFlags; +} VkQueryPoolVideoEncodeFeedbackCreateInfoKHR; +typedef struct VkVideoEncodeUsageInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeUsageFlagsKHR videoUsageHints; + VkVideoEncodeContentFlagsKHR videoContentHints; + VkVideoEncodeTuningModeKHR tuningMode; +} VkVideoEncodeUsageInfoKHR; -#define VK_EXT_astc_decode_mode 1 -#define VK_EXT_ASTC_DECODE_MODE_SPEC_VERSION 1 -#define VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME "VK_EXT_astc_decode_mode" -typedef struct VkImageViewASTCDecodeModeEXT { +typedef struct VkVideoEncodeRateControlLayerInfoKHR { VkStructureType sType; const void* pNext; - VkFormat decodeMode; -} VkImageViewASTCDecodeModeEXT; - -typedef struct VkPhysicalDeviceASTCDecodeFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 decodeModeSharedExponent; -} VkPhysicalDeviceASTCDecodeFeaturesEXT; + uint64_t averageBitrate; + uint64_t maxBitrate; + uint32_t frameRateNumerator; + uint32_t frameRateDenominator; +} VkVideoEncodeRateControlLayerInfoKHR; +typedef struct VkVideoEncodeRateControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeRateControlFlagsKHR flags; + VkVideoEncodeRateControlModeFlagBitsKHR rateControlMode; + uint32_t layerCount; + const VkVideoEncodeRateControlLayerInfoKHR* pLayers; + uint32_t virtualBufferSizeInMs; + uint32_t initialVirtualBufferSizeInMs; +} VkVideoEncodeRateControlInfoKHR; + +typedef struct VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR { + VkStructureType sType; + const void* pNext; + const VkVideoProfileInfoKHR* pVideoProfile; + uint32_t qualityLevel; +} VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR; +typedef struct VkVideoEncodeQualityLevelPropertiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeRateControlModeFlagBitsKHR preferredRateControlMode; + uint32_t preferredRateControlLayerCount; +} VkVideoEncodeQualityLevelPropertiesKHR; -#define VK_EXT_pipeline_robustness 1 -#define VK_EXT_PIPELINE_ROBUSTNESS_SPEC_VERSION 1 -#define VK_EXT_PIPELINE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_pipeline_robustness" +typedef struct VkVideoEncodeQualityLevelInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t qualityLevel; +} VkVideoEncodeQualityLevelInfoKHR; -typedef enum VkPipelineRobustnessBufferBehaviorEXT { - VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT_EXT = 0, - VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED_EXT = 1, - VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT = 2, - VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT = 3, - VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_MAX_ENUM_EXT = 0x7FFFFFFF -} VkPipelineRobustnessBufferBehaviorEXT; +typedef struct VkVideoEncodeSessionParametersGetInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoSessionParametersKHR videoSessionParameters; +} VkVideoEncodeSessionParametersGetInfoKHR; -typedef enum 
VkPipelineRobustnessImageBehaviorEXT { - VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT_EXT = 0, - VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED_EXT = 1, - VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_EXT = 2, - VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2_EXT = 3, - VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_MAX_ENUM_EXT = 0x7FFFFFFF -} VkPipelineRobustnessImageBehaviorEXT; -typedef struct VkPhysicalDevicePipelineRobustnessFeaturesEXT { +typedef struct VkVideoEncodeSessionParametersFeedbackInfoKHR { VkStructureType sType; void* pNext; - VkBool32 pipelineRobustness; -} VkPhysicalDevicePipelineRobustnessFeaturesEXT; + VkBool32 hasOverrides; +} VkVideoEncodeSessionParametersFeedbackInfoKHR; -typedef struct VkPhysicalDevicePipelineRobustnessPropertiesEXT { - VkStructureType sType; - void* pNext; - VkPipelineRobustnessBufferBehaviorEXT defaultRobustnessStorageBuffers; - VkPipelineRobustnessBufferBehaviorEXT defaultRobustnessUniformBuffers; - VkPipelineRobustnessBufferBehaviorEXT defaultRobustnessVertexInputs; - VkPipelineRobustnessImageBehaviorEXT defaultRobustnessImages; -} VkPhysicalDevicePipelineRobustnessPropertiesEXT; - -typedef struct VkPipelineRobustnessCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkPipelineRobustnessBufferBehaviorEXT storageBuffers; - VkPipelineRobustnessBufferBehaviorEXT uniformBuffers; - VkPipelineRobustnessBufferBehaviorEXT vertexInputs; - VkPipelineRobustnessImageBehaviorEXT images; -} VkPipelineRobustnessCreateInfoEXT; +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR* pQualityLevelInfo, VkVideoEncodeQualityLevelPropertiesKHR* pQualityLevelProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetEncodedVideoSessionParametersKHR)(VkDevice device, const VkVideoEncodeSessionParametersGetInfoKHR* pVideoSessionParametersInfo, VkVideoEncodeSessionParametersFeedbackInfoKHR* pFeedbackInfo, size_t* pDataSize, void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdEncodeVideoKHR)(VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR* pEncodeInfo); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR* pQualityLevelInfo, + VkVideoEncodeQualityLevelPropertiesKHR* pQualityLevelProperties); +VKAPI_ATTR VkResult VKAPI_CALL vkGetEncodedVideoSessionParametersKHR( + VkDevice device, + const VkVideoEncodeSessionParametersGetInfoKHR* pVideoSessionParametersInfo, + VkVideoEncodeSessionParametersFeedbackInfoKHR* pFeedbackInfo, + size_t* pDataSize, + void* pData); -#define VK_EXT_conditional_rendering 1 -#define VK_EXT_CONDITIONAL_RENDERING_SPEC_VERSION 2 -#define VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME "VK_EXT_conditional_rendering" +VKAPI_ATTR void VKAPI_CALL vkCmdEncodeVideoKHR( + VkCommandBuffer commandBuffer, + const VkVideoEncodeInfoKHR* pEncodeInfo); +#endif -typedef enum VkConditionalRenderingFlagBitsEXT { - VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT = 0x00000001, - VK_CONDITIONAL_RENDERING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkConditionalRenderingFlagBitsEXT; -typedef VkFlags VkConditionalRenderingFlagsEXT; -typedef struct VkConditionalRenderingBeginInfoEXT { - VkStructureType sType; - const void* pNext; - VkBuffer buffer; - VkDeviceSize offset; - VkConditionalRenderingFlagsEXT flags; -} VkConditionalRenderingBeginInfoEXT; -typedef struct 
VkPhysicalDeviceConditionalRenderingFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 conditionalRendering; - VkBool32 inheritedConditionalRendering; -} VkPhysicalDeviceConditionalRenderingFeaturesEXT; +// VK_KHR_synchronization2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_synchronization2 1 +#define VK_KHR_SYNCHRONIZATION_2_SPEC_VERSION 1 +#define VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME "VK_KHR_synchronization2" +typedef VkPipelineStageFlags2 VkPipelineStageFlags2KHR; -typedef struct VkCommandBufferInheritanceConditionalRenderingInfoEXT { - VkStructureType sType; - const void* pNext; - VkBool32 conditionalRenderingEnable; -} VkCommandBufferInheritanceConditionalRenderingInfoEXT; +typedef VkPipelineStageFlagBits2 VkPipelineStageFlagBits2KHR; -typedef void (VKAPI_PTR *PFN_vkCmdBeginConditionalRenderingEXT)(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin); -typedef void (VKAPI_PTR *PFN_vkCmdEndConditionalRenderingEXT)(VkCommandBuffer commandBuffer); +typedef VkAccessFlags2 VkAccessFlags2KHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdBeginConditionalRenderingEXT( - VkCommandBuffer commandBuffer, - const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin); +typedef VkAccessFlagBits2 VkAccessFlagBits2KHR; -VKAPI_ATTR void VKAPI_CALL vkCmdEndConditionalRenderingEXT( - VkCommandBuffer commandBuffer); -#endif +typedef VkSubmitFlagBits VkSubmitFlagBitsKHR; +typedef VkSubmitFlags VkSubmitFlagsKHR; -#define VK_NV_clip_space_w_scaling 1 -#define VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION 1 -#define VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME "VK_NV_clip_space_w_scaling" -typedef struct VkViewportWScalingNV { - float xcoeff; - float ycoeff; -} VkViewportWScalingNV; +typedef VkMemoryBarrier2 VkMemoryBarrier2KHR; -typedef struct VkPipelineViewportWScalingStateCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkBool32 viewportWScalingEnable; - uint32_t viewportCount; - const VkViewportWScalingNV* pViewportWScalings; -} VkPipelineViewportWScalingStateCreateInfoNV; +typedef VkBufferMemoryBarrier2 VkBufferMemoryBarrier2KHR; -typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings); +typedef VkImageMemoryBarrier2 VkImageMemoryBarrier2KHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingNV( - VkCommandBuffer commandBuffer, - uint32_t firstViewport, - uint32_t viewportCount, - const VkViewportWScalingNV* pViewportWScalings); -#endif +typedef VkDependencyInfo VkDependencyInfoKHR; +typedef VkSubmitInfo2 VkSubmitInfo2KHR; -#define VK_EXT_direct_mode_display 1 -#define VK_EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION 1 -#define VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME "VK_EXT_direct_mode_display" -typedef VkResult (VKAPI_PTR *PFN_vkReleaseDisplayEXT)(VkPhysicalDevice physicalDevice, VkDisplayKHR display); +typedef VkSemaphoreSubmitInfo VkSemaphoreSubmitInfoKHR; + +typedef VkCommandBufferSubmitInfo VkCommandBufferSubmitInfoKHR; + +typedef VkPhysicalDeviceSynchronization2Features VkPhysicalDeviceSynchronization2FeaturesKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdSetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo* pDependencyInfo); +typedef void (VKAPI_PTR *PFN_vkCmdResetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask); +typedef void (VKAPI_PTR 
*PFN_vkCmdWaitEvents2KHR)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfo* pDependencyInfos); +typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier2KHR)(VkCommandBuffer commandBuffer, const VkDependencyInfo* pDependencyInfo); +typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp2KHR)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit2KHR)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2* pSubmits, VkFence fence); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkReleaseDisplayEXT( - VkPhysicalDevice physicalDevice, - VkDisplayKHR display); -#endif +VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent2KHR( + VkCommandBuffer commandBuffer, + VkEvent event, + const VkDependencyInfo* pDependencyInfo); +VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent2KHR( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags2 stageMask); -#define VK_EXT_display_surface_counter 1 -#define VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION 1 -#define VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME "VK_EXT_display_surface_counter" +VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents2KHR( + VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent* pEvents, + const VkDependencyInfo* pDependencyInfos); -typedef enum VkSurfaceCounterFlagBitsEXT { - VK_SURFACE_COUNTER_VBLANK_BIT_EXT = 0x00000001, - VK_SURFACE_COUNTER_VBLANK_EXT = VK_SURFACE_COUNTER_VBLANK_BIT_EXT, - VK_SURFACE_COUNTER_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkSurfaceCounterFlagBitsEXT; -typedef VkFlags VkSurfaceCounterFlagsEXT; -typedef struct VkSurfaceCapabilities2EXT { - VkStructureType sType; - void* pNext; - uint32_t minImageCount; - uint32_t maxImageCount; - VkExtent2D currentExtent; - VkExtent2D minImageExtent; - VkExtent2D maxImageExtent; - uint32_t maxImageArrayLayers; - VkSurfaceTransformFlagsKHR supportedTransforms; - VkSurfaceTransformFlagBitsKHR currentTransform; - VkCompositeAlphaFlagsKHR supportedCompositeAlpha; - VkImageUsageFlags supportedUsageFlags; - VkSurfaceCounterFlagsEXT supportedSurfaceCounters; -} VkSurfaceCapabilities2EXT; +VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier2KHR( + VkCommandBuffer commandBuffer, + const VkDependencyInfo* pDependencyInfo); -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities); +VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp2KHR( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags2 stage, + VkQueryPool queryPool, + uint32_t query); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT( - VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - VkSurfaceCapabilities2EXT* pSurfaceCapabilities); +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit2KHR( + VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo2* pSubmits, + VkFence fence); #endif -#define VK_EXT_display_control 1 -#define VK_EXT_DISPLAY_CONTROL_SPEC_VERSION 1 -#define VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME "VK_EXT_display_control" +// VK_KHR_fragment_shader_barycentric is a preprocessor guard. Do not pass it to API calls. 
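/*
 * Illustrative usage sketch, not part of the generated header. It records a
 * single image layout transition with vkCmdPipelineBarrier2KHR (declared above).
 * The command buffer is assumed to be recording and the synchronization2
 * feature enabled; the Vulkan 1.3 core equivalents work identically.
 */
#include <vulkan/vulkan.h>

static void transition_to_shader_read(VkCommandBuffer cmd, VkImage image)
{
    VkImageMemoryBarrier2 barrier = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2,
        .srcStageMask = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT,
        .srcAccessMask = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT,
        .dstStageMask = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT,
        .dstAccessMask = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT,
        .oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
        .newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
        .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .image = image,
        .subresourceRange = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 } };

    VkDependencyInfo dep = {
        .sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO,
        .imageMemoryBarrierCount = 1,
        .pImageMemoryBarriers = &barrier };

    /* Stage and access masks now travel with the barrier itself rather than
     * as separate command arguments, as in the original vkCmdPipelineBarrier. */
    vkCmdPipelineBarrier2KHR(cmd, &dep);
}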
+#define VK_KHR_fragment_shader_barycentric 1 +#define VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION 1 +#define VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME "VK_KHR_fragment_shader_barycentric" +typedef struct VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 fragmentShaderBarycentric; +} VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR; -typedef enum VkDisplayPowerStateEXT { - VK_DISPLAY_POWER_STATE_OFF_EXT = 0, - VK_DISPLAY_POWER_STATE_SUSPEND_EXT = 1, - VK_DISPLAY_POWER_STATE_ON_EXT = 2, - VK_DISPLAY_POWER_STATE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDisplayPowerStateEXT; +typedef struct VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 triStripVertexOrderIndependentOfProvokingVertex; +} VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR; -typedef enum VkDeviceEventTypeEXT { - VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT = 0, - VK_DEVICE_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDeviceEventTypeEXT; -typedef enum VkDisplayEventTypeEXT { - VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT = 0, - VK_DISPLAY_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDisplayEventTypeEXT; -typedef struct VkDisplayPowerInfoEXT { - VkStructureType sType; - const void* pNext; - VkDisplayPowerStateEXT powerState; -} VkDisplayPowerInfoEXT; -typedef struct VkDeviceEventInfoEXT { - VkStructureType sType; - const void* pNext; - VkDeviceEventTypeEXT deviceEvent; -} VkDeviceEventInfoEXT; +// VK_KHR_shader_subgroup_uniform_control_flow is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_subgroup_uniform_control_flow 1 +#define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_SPEC_VERSION 1 +#define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_EXTENSION_NAME "VK_KHR_shader_subgroup_uniform_control_flow" +typedef struct VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderSubgroupUniformControlFlow; +} VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; -typedef struct VkDisplayEventInfoEXT { - VkStructureType sType; - const void* pNext; - VkDisplayEventTypeEXT displayEvent; -} VkDisplayEventInfoEXT; -typedef struct VkSwapchainCounterCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkSurfaceCounterFlagsEXT surfaceCounters; -} VkSwapchainCounterCreateInfoEXT; -typedef VkResult (VKAPI_PTR *PFN_vkDisplayPowerControlEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo); -typedef VkResult (VKAPI_PTR *PFN_vkRegisterDeviceEventEXT)(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); -typedef VkResult (VKAPI_PTR *PFN_vkRegisterDisplayEventEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); -typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainCounterEXT)(VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue); +// VK_KHR_zero_initialize_workgroup_memory is a preprocessor guard. Do not pass it to API calls. 
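/*
 * Illustrative usage sketch, not part of the generated header. It shows the
 * standard pNext feature query for the two feature structs declared above,
 * using vkGetPhysicalDeviceFeatures2 (Vulkan 1.1+).
 */
#include <vulkan/vulkan.h>

static VkBool32 has_barycentrics(VkPhysicalDevice gpu)
{
    VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR bary = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR };
    VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR sucf = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR,
        .pNext = &bary };
    VkPhysicalDeviceFeatures2 features2 = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
        .pNext = &sucf };

    vkGetPhysicalDeviceFeatures2(gpu, &features2);

    /* The same chained structs can later be passed unchanged through
     * VkDeviceCreateInfo::pNext to enable exactly what was reported. */
    return bary.fragmentShaderBarycentric;
}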
+#define VK_KHR_zero_initialize_workgroup_memory 1 +#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION 1 +#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_EXTENSION_NAME "VK_KHR_zero_initialize_workgroup_memory" +typedef VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkDisplayPowerControlEXT( - VkDevice device, - VkDisplayKHR display, - const VkDisplayPowerInfoEXT* pDisplayPowerInfo); -VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDeviceEventEXT( - VkDevice device, - const VkDeviceEventInfoEXT* pDeviceEventInfo, - const VkAllocationCallbacks* pAllocator, - VkFence* pFence); -VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDisplayEventEXT( - VkDevice device, - VkDisplayKHR display, - const VkDisplayEventInfoEXT* pDisplayEventInfo, - const VkAllocationCallbacks* pAllocator, - VkFence* pFence); +// VK_KHR_workgroup_memory_explicit_layout is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_workgroup_memory_explicit_layout 1 +#define VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_SPEC_VERSION 1 +#define VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_EXTENSION_NAME "VK_KHR_workgroup_memory_explicit_layout" +typedef struct VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 workgroupMemoryExplicitLayout; + VkBool32 workgroupMemoryExplicitLayoutScalarBlockLayout; + VkBool32 workgroupMemoryExplicitLayout8BitAccess; + VkBool32 workgroupMemoryExplicitLayout16BitAccess; +} VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; -VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainCounterEXT( - VkDevice device, - VkSwapchainKHR swapchain, - VkSurfaceCounterFlagBitsEXT counter, - uint64_t* pCounterValue); -#endif -#define VK_GOOGLE_display_timing 1 -#define VK_GOOGLE_DISPLAY_TIMING_SPEC_VERSION 1 -#define VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME "VK_GOOGLE_display_timing" -typedef struct VkRefreshCycleDurationGOOGLE { - uint64_t refreshDuration; -} VkRefreshCycleDurationGOOGLE; +// VK_KHR_copy_commands2 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_copy_commands2 1 +#define VK_KHR_COPY_COMMANDS_2_SPEC_VERSION 1 +#define VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME "VK_KHR_copy_commands2" +typedef VkCopyBufferInfo2 VkCopyBufferInfo2KHR; -typedef struct VkPastPresentationTimingGOOGLE { - uint32_t presentID; - uint64_t desiredPresentTime; - uint64_t actualPresentTime; - uint64_t earliestPresentTime; - uint64_t presentMargin; -} VkPastPresentationTimingGOOGLE; +typedef VkCopyImageInfo2 VkCopyImageInfo2KHR; -typedef struct VkPresentTimeGOOGLE { - uint32_t presentID; - uint64_t desiredPresentTime; -} VkPresentTimeGOOGLE; +typedef VkCopyBufferToImageInfo2 VkCopyBufferToImageInfo2KHR; -typedef struct VkPresentTimesInfoGOOGLE { - VkStructureType sType; - const void* pNext; - uint32_t swapchainCount; - const VkPresentTimeGOOGLE* pTimes; -} VkPresentTimesInfoGOOGLE; +typedef VkCopyImageToBufferInfo2 VkCopyImageToBufferInfo2KHR; -typedef VkResult (VKAPI_PTR *PFN_vkGetRefreshCycleDurationGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetPastPresentationTimingGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings); +typedef VkBlitImageInfo2 VkBlitImageInfo2KHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetRefreshCycleDurationGOOGLE( - VkDevice device, - VkSwapchainKHR swapchain, - VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); +typedef VkResolveImageInfo2 VkResolveImageInfo2KHR; -VKAPI_ATTR VkResult VKAPI_CALL vkGetPastPresentationTimingGOOGLE( - VkDevice device, - VkSwapchainKHR swapchain, - uint32_t* pPresentationTimingCount, - VkPastPresentationTimingGOOGLE* pPresentationTimings); -#endif +typedef VkBufferCopy2 VkBufferCopy2KHR; +typedef VkImageCopy2 VkImageCopy2KHR; -#define VK_NV_sample_mask_override_coverage 1 -#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION 1 -#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME "VK_NV_sample_mask_override_coverage" +typedef VkImageBlit2 VkImageBlit2KHR; +typedef VkBufferImageCopy2 VkBufferImageCopy2KHR; -#define VK_NV_geometry_shader_passthrough 1 -#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION 1 -#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME "VK_NV_geometry_shader_passthrough" +typedef VkImageResolve2 VkImageResolve2KHR; +typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2* pCopyBufferInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImage2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageInfo2* pCopyImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage2KHR)(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBlitImage2KHR)(VkCommandBuffer commandBuffer, const VkBlitImageInfo2* pBlitImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdResolveImage2KHR)(VkCommandBuffer commandBuffer, const VkResolveImageInfo2* pResolveImageInfo); -#define VK_NV_viewport_array2 1 -#define VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION 1 -#define VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME "VK_NV_viewport_array2" -#define VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION -#define VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void 
VKAPI_CALL vkCmdCopyBuffer2KHR( + VkCommandBuffer commandBuffer, + const VkCopyBufferInfo2* pCopyBufferInfo); +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage2KHR( + VkCommandBuffer commandBuffer, + const VkCopyImageInfo2* pCopyImageInfo); -#define VK_NVX_multiview_per_view_attributes 1 -#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION 1 -#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME "VK_NVX_multiview_per_view_attributes" -typedef struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX { - VkStructureType sType; - void* pNext; - VkBool32 perViewPositionAllComponents; -} VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage2KHR( + VkCommandBuffer commandBuffer, + const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer2KHR( + VkCommandBuffer commandBuffer, + const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo); +VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage2KHR( + VkCommandBuffer commandBuffer, + const VkBlitImageInfo2* pBlitImageInfo); -#define VK_NV_viewport_swizzle 1 -#define VK_NV_VIEWPORT_SWIZZLE_SPEC_VERSION 1 -#define VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME "VK_NV_viewport_swizzle" +VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2KHR( + VkCommandBuffer commandBuffer, + const VkResolveImageInfo2* pResolveImageInfo); +#endif -typedef enum VkViewportCoordinateSwizzleNV { - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV = 0, - VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV = 1, - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV = 2, - VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV = 3, - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV = 4, - VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV = 5, - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV = 6, - VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV = 7, - VK_VIEWPORT_COORDINATE_SWIZZLE_MAX_ENUM_NV = 0x7FFFFFFF -} VkViewportCoordinateSwizzleNV; -typedef VkFlags VkPipelineViewportSwizzleStateCreateFlagsNV; -typedef struct VkViewportSwizzleNV { - VkViewportCoordinateSwizzleNV x; - VkViewportCoordinateSwizzleNV y; - VkViewportCoordinateSwizzleNV z; - VkViewportCoordinateSwizzleNV w; -} VkViewportSwizzleNV; -typedef struct VkPipelineViewportSwizzleStateCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkPipelineViewportSwizzleStateCreateFlagsNV flags; - uint32_t viewportCount; - const VkViewportSwizzleNV* pViewportSwizzles; -} VkPipelineViewportSwizzleStateCreateInfoNV; +// VK_KHR_format_feature_flags2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_format_feature_flags2 1 +#define VK_KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION 2 +#define VK_KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME "VK_KHR_format_feature_flags2" +typedef VkFormatFeatureFlags2 VkFormatFeatureFlags2KHR; +typedef VkFormatFeatureFlagBits2 VkFormatFeatureFlagBits2KHR; +typedef VkFormatProperties3 VkFormatProperties3KHR; -#define VK_EXT_discard_rectangles 1 -#define VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION 1 -#define VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME "VK_EXT_discard_rectangles" -typedef enum VkDiscardRectangleModeEXT { - VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT = 0, - VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT = 1, - VK_DISCARD_RECTANGLE_MODE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDiscardRectangleModeEXT; -typedef VkFlags VkPipelineDiscardRectangleStateCreateFlagsEXT; -typedef struct VkPhysicalDeviceDiscardRectanglePropertiesEXT { + +// VK_KHR_ray_tracing_maintenance1 is a preprocessor guard. Do not pass it to API calls. 
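A minimal sketch of the extensible copy path from VK_KHR_copy_commands2 above, assuming the extension is enabled; on Vulkan 1.3 the unsuffixed vkCmdCopyBuffer2 is equivalent.

#include <vulkan/vulkan.h>

/* Copy 'size' bytes from the start of 'src' to the start of 'dst'. */
static void copy_whole_buffer(VkCommandBuffer cmd, VkBuffer src, VkBuffer dst,
                              VkDeviceSize size)
{
    VkBufferCopy2 region = {0};
    region.sType = VK_STRUCTURE_TYPE_BUFFER_COPY_2;
    region.srcOffset = 0;
    region.dstOffset = 0;
    region.size = size;

    VkCopyBufferInfo2 info = {0};
    info.sType = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2;
    info.srcBuffer = src;
    info.dstBuffer = dst;
    info.regionCount = 1;
    info.pRegions = &region;

    vkCmdCopyBuffer2KHR(cmd, &info);
}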
+#define VK_KHR_ray_tracing_maintenance1 1 +#define VK_KHR_RAY_TRACING_MAINTENANCE_1_SPEC_VERSION 1 +#define VK_KHR_RAY_TRACING_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_ray_tracing_maintenance1" +typedef struct VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR { VkStructureType sType; void* pNext; - uint32_t maxDiscardRectangles; -} VkPhysicalDeviceDiscardRectanglePropertiesEXT; - -typedef struct VkPipelineDiscardRectangleStateCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkPipelineDiscardRectangleStateCreateFlagsEXT flags; - VkDiscardRectangleModeEXT discardRectangleMode; - uint32_t discardRectangleCount; - const VkRect2D* pDiscardRectangles; -} VkPipelineDiscardRectangleStateCreateInfoEXT; + VkBool32 rayTracingMaintenance1; + VkBool32 rayTracingPipelineTraceRaysIndirect2; +} VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR; -typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleEXT)(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles); +typedef struct VkTraceRaysIndirectCommand2KHR { + VkDeviceAddress raygenShaderRecordAddress; + VkDeviceSize raygenShaderRecordSize; + VkDeviceAddress missShaderBindingTableAddress; + VkDeviceSize missShaderBindingTableSize; + VkDeviceSize missShaderBindingTableStride; + VkDeviceAddress hitShaderBindingTableAddress; + VkDeviceSize hitShaderBindingTableSize; + VkDeviceSize hitShaderBindingTableStride; + VkDeviceAddress callableShaderBindingTableAddress; + VkDeviceSize callableShaderBindingTableSize; + VkDeviceSize callableShaderBindingTableStride; + uint32_t width; + uint32_t height; + uint32_t depth; +} VkTraceRaysIndirectCommand2KHR; + +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysIndirect2KHR)(VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleEXT( +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysIndirect2KHR( VkCommandBuffer commandBuffer, - uint32_t firstDiscardRectangle, - uint32_t discardRectangleCount, - const VkRect2D* pDiscardRectangles); + VkDeviceAddress indirectDeviceAddress); #endif -#define VK_EXT_conservative_rasterization 1 -#define VK_EXT_CONSERVATIVE_RASTERIZATION_SPEC_VERSION 1 -#define VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME "VK_EXT_conservative_rasterization" +// VK_KHR_portability_enumeration is a preprocessor guard. Do not pass it to API calls. 
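A sketch of launching an indirect trace with VkTraceRaysIndirectCommand2KHR above. Assumptions: 'mapped' points at host-visible memory backing a buffer created with INDIRECT_BUFFER and SHADER_DEVICE_ADDRESS usage, 'indirectAddr' is that buffer's device address, and the raygen SBT address/size come from the application's pipeline setup; unused miss/hit/callable regions are left zero for brevity, and the host write is assumed to happen (with proper synchronization) before the command buffer executes.

#include <string.h>
#include <vulkan/vulkan.h>

static void trace_rays_indirect(VkCommandBuffer cmd,
                                void* mapped,
                                VkDeviceAddress indirectAddr,
                                VkDeviceAddress raygenAddr, VkDeviceSize raygenSize,
                                uint32_t width, uint32_t height)
{
    VkTraceRaysIndirectCommand2KHR c = {0};
    c.raygenShaderRecordAddress = raygenAddr;
    c.raygenShaderRecordSize    = raygenSize;
    c.width  = width;
    c.height = height;
    c.depth  = 1;

    /* Write the launch parameters into the indirect buffer. */
    memcpy(mapped, &c, sizeof(c));

    vkCmdTraceRaysIndirect2KHR(cmd, indirectAddr);
}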
+#define VK_KHR_portability_enumeration 1 +#define VK_KHR_PORTABILITY_ENUMERATION_SPEC_VERSION 1 +#define VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME "VK_KHR_portability_enumeration" -typedef enum VkConservativeRasterizationModeEXT { - VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT = 0, - VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT = 1, - VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT = 2, - VK_CONSERVATIVE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkConservativeRasterizationModeEXT; -typedef VkFlags VkPipelineRasterizationConservativeStateCreateFlagsEXT; -typedef struct VkPhysicalDeviceConservativeRasterizationPropertiesEXT { - VkStructureType sType; - void* pNext; - float primitiveOverestimationSize; - float maxExtraPrimitiveOverestimationSize; - float extraPrimitiveOverestimationSizeGranularity; - VkBool32 primitiveUnderestimation; - VkBool32 conservativePointAndLineRasterization; - VkBool32 degenerateTrianglesRasterized; - VkBool32 degenerateLinesRasterized; - VkBool32 fullyCoveredFragmentShaderInputVariable; - VkBool32 conservativeRasterizationPostDepthCoverage; -} VkPhysicalDeviceConservativeRasterizationPropertiesEXT; -typedef struct VkPipelineRasterizationConservativeStateCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkPipelineRasterizationConservativeStateCreateFlagsEXT flags; - VkConservativeRasterizationModeEXT conservativeRasterizationMode; - float extraPrimitiveOverestimationSize; -} VkPipelineRasterizationConservativeStateCreateInfoEXT; +// VK_KHR_maintenance4 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_maintenance4 1 +#define VK_KHR_MAINTENANCE_4_SPEC_VERSION 2 +#define VK_KHR_MAINTENANCE_4_EXTENSION_NAME "VK_KHR_maintenance4" +typedef VkPhysicalDeviceMaintenance4Features VkPhysicalDeviceMaintenance4FeaturesKHR; +typedef VkPhysicalDeviceMaintenance4Properties VkPhysicalDeviceMaintenance4PropertiesKHR; +typedef VkDeviceBufferMemoryRequirements VkDeviceBufferMemoryRequirementsKHR; -#define VK_EXT_depth_clip_enable 1 -#define VK_EXT_DEPTH_CLIP_ENABLE_SPEC_VERSION 1 -#define VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME "VK_EXT_depth_clip_enable" -typedef VkFlags VkPipelineRasterizationDepthClipStateCreateFlagsEXT; -typedef struct VkPhysicalDeviceDepthClipEnableFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 depthClipEnable; -} VkPhysicalDeviceDepthClipEnableFeaturesEXT; +typedef VkDeviceImageMemoryRequirements VkDeviceImageMemoryRequirementsKHR; -typedef struct VkPipelineRasterizationDepthClipStateCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkPipelineRasterizationDepthClipStateCreateFlagsEXT flags; - VkBool32 depthClipEnable; -} VkPipelineRasterizationDepthClipStateCreateInfoEXT; +typedef void (VKAPI_PTR *PFN_vkGetDeviceBufferMemoryRequirementsKHR)(VkDevice device, const VkDeviceBufferMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageMemoryRequirementsKHR)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSparseMemoryRequirementsKHR)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDeviceBufferMemoryRequirementsKHR( + VkDevice device, + const VkDeviceBufferMemoryRequirements* pInfo, + VkMemoryRequirements2* pMemoryRequirements); +VKAPI_ATTR 
void VKAPI_CALL vkGetDeviceImageMemoryRequirementsKHR( + VkDevice device, + const VkDeviceImageMemoryRequirements* pInfo, + VkMemoryRequirements2* pMemoryRequirements); -#define VK_EXT_swapchain_colorspace 1 -#define VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION 4 -#define VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME "VK_EXT_swapchain_colorspace" +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirementsKHR( + VkDevice device, + const VkDeviceImageMemoryRequirements* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +#endif -#define VK_EXT_hdr_metadata 1 -#define VK_EXT_HDR_METADATA_SPEC_VERSION 2 -#define VK_EXT_HDR_METADATA_EXTENSION_NAME "VK_EXT_hdr_metadata" -typedef struct VkXYColorEXT { - float x; - float y; -} VkXYColorEXT; +// VK_KHR_shader_subgroup_rotate is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_subgroup_rotate 1 +#define VK_KHR_SHADER_SUBGROUP_ROTATE_SPEC_VERSION 2 +#define VK_KHR_SHADER_SUBGROUP_ROTATE_EXTENSION_NAME "VK_KHR_shader_subgroup_rotate" +typedef VkPhysicalDeviceShaderSubgroupRotateFeatures VkPhysicalDeviceShaderSubgroupRotateFeaturesKHR; -typedef struct VkHdrMetadataEXT { - VkStructureType sType; - const void* pNext; - VkXYColorEXT displayPrimaryRed; - VkXYColorEXT displayPrimaryGreen; - VkXYColorEXT displayPrimaryBlue; - VkXYColorEXT whitePoint; - float maxLuminance; - float minLuminance; - float maxContentLightLevel; - float maxFrameAverageLightLevel; -} VkHdrMetadataEXT; -typedef void (VKAPI_PTR *PFN_vkSetHdrMetadataEXT)(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkSetHdrMetadataEXT( - VkDevice device, - uint32_t swapchainCount, - const VkSwapchainKHR* pSwapchains, - const VkHdrMetadataEXT* pMetadata); -#endif +// VK_KHR_shader_maximal_reconvergence is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_maximal_reconvergence 1 +#define VK_KHR_SHADER_MAXIMAL_RECONVERGENCE_SPEC_VERSION 1 +#define VK_KHR_SHADER_MAXIMAL_RECONVERGENCE_EXTENSION_NAME "VK_KHR_shader_maximal_reconvergence" +typedef struct VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderMaximalReconvergence; +} VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR; -#define VK_EXT_external_memory_dma_buf 1 -#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_SPEC_VERSION 1 -#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME "VK_EXT_external_memory_dma_buf" +// VK_KHR_maintenance5 is a preprocessor guard. Do not pass it to API calls. 
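A small sketch of the maintenance4 query declared above: memory requirements can be obtained from a VkBufferCreateInfo alone, without creating the buffer first. The STORAGE_BUFFER usage is just an example value.

#include <vulkan/vulkan.h>

static VkDeviceSize required_size_for_buffer(VkDevice device, VkDeviceSize byteSize)
{
    VkBufferCreateInfo bufInfo = {0};
    bufInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    bufInfo.size = byteSize;
    bufInfo.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
    bufInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;

    VkDeviceBufferMemoryRequirements query = {0};
    query.sType = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS;
    query.pCreateInfo = &bufInfo;

    VkMemoryRequirements2 reqs = {0};
    reqs.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;

    vkGetDeviceBufferMemoryRequirementsKHR(device, &query, &reqs);
    return reqs.memoryRequirements.size;
}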
+#define VK_KHR_maintenance5 1 +#define VK_KHR_MAINTENANCE_5_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_5_EXTENSION_NAME "VK_KHR_maintenance5" +typedef VkPipelineCreateFlags2 VkPipelineCreateFlags2KHR; -#define VK_EXT_queue_family_foreign 1 -#define VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION 1 -#define VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME "VK_EXT_queue_family_foreign" -#define VK_QUEUE_FAMILY_FOREIGN_EXT (~2U) +typedef VkPipelineCreateFlagBits2 VkPipelineCreateFlagBits2KHR; +typedef VkBufferUsageFlags2 VkBufferUsageFlags2KHR; -#define VK_EXT_debug_utils 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugUtilsMessengerEXT) -#define VK_EXT_DEBUG_UTILS_SPEC_VERSION 2 -#define VK_EXT_DEBUG_UTILS_EXTENSION_NAME "VK_EXT_debug_utils" -typedef VkFlags VkDebugUtilsMessengerCallbackDataFlagsEXT; +typedef VkBufferUsageFlagBits2 VkBufferUsageFlagBits2KHR; -typedef enum VkDebugUtilsMessageSeverityFlagBitsEXT { - VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT = 0x00000001, - VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT = 0x00000010, - VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT = 0x00000100, - VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT = 0x00001000, - VK_DEBUG_UTILS_MESSAGE_SEVERITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDebugUtilsMessageSeverityFlagBitsEXT; +typedef VkPhysicalDeviceMaintenance5Features VkPhysicalDeviceMaintenance5FeaturesKHR; -typedef enum VkDebugUtilsMessageTypeFlagBitsEXT { - VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT = 0x00000001, - VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT = 0x00000002, - VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT = 0x00000004, - VK_DEBUG_UTILS_MESSAGE_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDebugUtilsMessageTypeFlagBitsEXT; -typedef VkFlags VkDebugUtilsMessageTypeFlagsEXT; -typedef VkFlags VkDebugUtilsMessageSeverityFlagsEXT; -typedef VkFlags VkDebugUtilsMessengerCreateFlagsEXT; -typedef struct VkDebugUtilsLabelEXT { - VkStructureType sType; - const void* pNext; - const char* pLabelName; - float color[4]; -} VkDebugUtilsLabelEXT; +typedef VkPhysicalDeviceMaintenance5Properties VkPhysicalDeviceMaintenance5PropertiesKHR; -typedef struct VkDebugUtilsObjectNameInfoEXT { - VkStructureType sType; - const void* pNext; - VkObjectType objectType; - uint64_t objectHandle; - const char* pObjectName; -} VkDebugUtilsObjectNameInfoEXT; +typedef VkRenderingAreaInfo VkRenderingAreaInfoKHR; -typedef struct VkDebugUtilsMessengerCallbackDataEXT { - VkStructureType sType; - const void* pNext; - VkDebugUtilsMessengerCallbackDataFlagsEXT flags; - const char* pMessageIdName; - int32_t messageIdNumber; - const char* pMessage; - uint32_t queueLabelCount; - const VkDebugUtilsLabelEXT* pQueueLabels; - uint32_t cmdBufLabelCount; - const VkDebugUtilsLabelEXT* pCmdBufLabels; - uint32_t objectCount; - const VkDebugUtilsObjectNameInfoEXT* pObjects; -} VkDebugUtilsMessengerCallbackDataEXT; +typedef VkDeviceImageSubresourceInfo VkDeviceImageSubresourceInfoKHR; -typedef VkBool32 (VKAPI_PTR *PFN_vkDebugUtilsMessengerCallbackEXT)( - VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, - VkDebugUtilsMessageTypeFlagsEXT messageTypes, - const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, - void* pUserData); +typedef VkImageSubresource2 VkImageSubresource2KHR; -typedef struct VkDebugUtilsMessengerCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkDebugUtilsMessengerCreateFlagsEXT flags; - VkDebugUtilsMessageSeverityFlagsEXT messageSeverity; - VkDebugUtilsMessageTypeFlagsEXT messageType; - PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback; - void* 
pUserData; -} VkDebugUtilsMessengerCreateInfoEXT; +typedef VkSubresourceLayout2 VkSubresourceLayout2KHR; -typedef struct VkDebugUtilsObjectTagInfoEXT { - VkStructureType sType; - const void* pNext; - VkObjectType objectType; - uint64_t objectHandle; - uint64_t tagName; - size_t tagSize; - const void* pTag; -} VkDebugUtilsObjectTagInfoEXT; +typedef VkPipelineCreateFlags2CreateInfo VkPipelineCreateFlags2CreateInfoKHR; -typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectNameEXT)(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo); -typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectTagEXT)(VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo); -typedef void (VKAPI_PTR *PFN_vkQueueBeginDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo); -typedef void (VKAPI_PTR *PFN_vkQueueEndDebugUtilsLabelEXT)(VkQueue queue); -typedef void (VKAPI_PTR *PFN_vkQueueInsertDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo); -typedef void (VKAPI_PTR *PFN_vkCmdBeginDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo); -typedef void (VKAPI_PTR *PFN_vkCmdEndDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer); -typedef void (VKAPI_PTR *PFN_vkCmdInsertDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo); -typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugUtilsMessengerEXT)(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger); -typedef void (VKAPI_PTR *PFN_vkDestroyDebugUtilsMessengerEXT)(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator); -typedef void (VKAPI_PTR *PFN_vkSubmitDebugUtilsMessageEXT)(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); +typedef VkBufferUsageFlags2CreateInfo VkBufferUsageFlags2CreateInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer2KHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType); +typedef void (VKAPI_PTR *PFN_vkGetRenderingAreaGranularityKHR)(VkDevice device, const VkRenderingAreaInfo* pRenderingAreaInfo, VkExtent2D* pGranularity); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSubresourceLayoutKHR)(VkDevice device, const VkDeviceImageSubresourceInfo* pInfo, VkSubresourceLayout2* pLayout); +typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2KHR)(VkDevice device, VkImage image, const VkImageSubresource2* pSubresource, VkSubresourceLayout2* pLayout); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectNameEXT( +VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer2KHR( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkDeviceSize size, + VkIndexType indexType); + +VKAPI_ATTR void VKAPI_CALL vkGetRenderingAreaGranularityKHR( VkDevice device, - const VkDebugUtilsObjectNameInfoEXT* pNameInfo); + const VkRenderingAreaInfo* pRenderingAreaInfo, + VkExtent2D* pGranularity); -VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectTagEXT( +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSubresourceLayoutKHR( VkDevice device, - const VkDebugUtilsObjectTagInfoEXT* pTagInfo); + const VkDeviceImageSubresourceInfo* pInfo, + VkSubresourceLayout2* pLayout); -VKAPI_ATTR void VKAPI_CALL vkQueueBeginDebugUtilsLabelEXT( - VkQueue queue, - const VkDebugUtilsLabelEXT* 
pLabelInfo); +VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2KHR( + VkDevice device, + VkImage image, + const VkImageSubresource2* pSubresource, + VkSubresourceLayout2* pLayout); +#endif -VKAPI_ATTR void VKAPI_CALL vkQueueEndDebugUtilsLabelEXT( - VkQueue queue); -VKAPI_ATTR void VKAPI_CALL vkQueueInsertDebugUtilsLabelEXT( - VkQueue queue, - const VkDebugUtilsLabelEXT* pLabelInfo); +// VK_KHR_ray_tracing_position_fetch is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_ray_tracing_position_fetch 1 +#define VK_KHR_RAY_TRACING_POSITION_FETCH_SPEC_VERSION 1 +#define VK_KHR_RAY_TRACING_POSITION_FETCH_EXTENSION_NAME "VK_KHR_ray_tracing_position_fetch" +typedef struct VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 rayTracingPositionFetch; +} VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR; -VKAPI_ATTR void VKAPI_CALL vkCmdBeginDebugUtilsLabelEXT( - VkCommandBuffer commandBuffer, - const VkDebugUtilsLabelEXT* pLabelInfo); -VKAPI_ATTR void VKAPI_CALL vkCmdEndDebugUtilsLabelEXT( - VkCommandBuffer commandBuffer); -VKAPI_ATTR void VKAPI_CALL vkCmdInsertDebugUtilsLabelEXT( - VkCommandBuffer commandBuffer, - const VkDebugUtilsLabelEXT* pLabelInfo); +// VK_KHR_pipeline_binary is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_pipeline_binary 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineBinaryKHR) +#define VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR 32U +#define VK_KHR_PIPELINE_BINARY_SPEC_VERSION 1 +#define VK_KHR_PIPELINE_BINARY_EXTENSION_NAME "VK_KHR_pipeline_binary" +typedef struct VkPhysicalDevicePipelineBinaryFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 pipelineBinaries; +} VkPhysicalDevicePipelineBinaryFeaturesKHR; -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugUtilsMessengerEXT( - VkInstance instance, - const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkDebugUtilsMessengerEXT* pMessenger); +typedef struct VkPhysicalDevicePipelineBinaryPropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 pipelineBinaryInternalCache; + VkBool32 pipelineBinaryInternalCacheControl; + VkBool32 pipelineBinaryPrefersInternalCache; + VkBool32 pipelineBinaryPrecompiledInternalCache; + VkBool32 pipelineBinaryCompressedData; +} VkPhysicalDevicePipelineBinaryPropertiesKHR; -VKAPI_ATTR void VKAPI_CALL vkDestroyDebugUtilsMessengerEXT( - VkInstance instance, - VkDebugUtilsMessengerEXT messenger, - const VkAllocationCallbacks* pAllocator); +typedef struct VkDevicePipelineBinaryInternalCacheControlKHR { + VkStructureType sType; + const void* pNext; + VkBool32 disableInternalCache; +} VkDevicePipelineBinaryInternalCacheControlKHR; -VKAPI_ATTR void VKAPI_CALL vkSubmitDebugUtilsMessageEXT( - VkInstance instance, - VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, - VkDebugUtilsMessageTypeFlagsEXT messageTypes, - const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); -#endif +typedef struct VkPipelineBinaryKeyKHR { + VkStructureType sType; + void* pNext; + uint32_t keySize; + uint8_t key[VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR]; +} VkPipelineBinaryKeyKHR; +typedef struct VkPipelineBinaryDataKHR { + size_t dataSize; + void* pData; +} VkPipelineBinaryDataKHR; -#define VK_EXT_sampler_filter_minmax 1 -#define VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION 2 -#define VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME "VK_EXT_sampler_filter_minmax" -typedef VkSamplerReductionMode VkSamplerReductionModeEXT; +typedef struct VkPipelineBinaryKeysAndDataKHR { + uint32_t 
binaryCount; + const VkPipelineBinaryKeyKHR* pPipelineBinaryKeys; + const VkPipelineBinaryDataKHR* pPipelineBinaryData; +} VkPipelineBinaryKeysAndDataKHR; -typedef VkSamplerReductionModeCreateInfo VkSamplerReductionModeCreateInfoEXT; +typedef struct VkPipelineCreateInfoKHR { + VkStructureType sType; + void* pNext; +} VkPipelineCreateInfoKHR; -typedef VkPhysicalDeviceSamplerFilterMinmaxProperties VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT; +typedef struct VkPipelineBinaryCreateInfoKHR { + VkStructureType sType; + const void* pNext; + const VkPipelineBinaryKeysAndDataKHR* pKeysAndDataInfo; + VkPipeline pipeline; + const VkPipelineCreateInfoKHR* pPipelineCreateInfo; +} VkPipelineBinaryCreateInfoKHR; +typedef struct VkPipelineBinaryInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t binaryCount; + const VkPipelineBinaryKHR* pPipelineBinaries; +} VkPipelineBinaryInfoKHR; +typedef struct VkReleaseCapturedPipelineDataInfoKHR { + VkStructureType sType; + void* pNext; + VkPipeline pipeline; +} VkReleaseCapturedPipelineDataInfoKHR; -#define VK_AMD_gpu_shader_int16 1 -#define VK_AMD_GPU_SHADER_INT16_SPEC_VERSION 2 -#define VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME "VK_AMD_gpu_shader_int16" +typedef struct VkPipelineBinaryDataInfoKHR { + VkStructureType sType; + void* pNext; + VkPipelineBinaryKHR pipelineBinary; +} VkPipelineBinaryDataInfoKHR; +typedef struct VkPipelineBinaryHandlesInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t pipelineBinaryCount; + VkPipelineBinaryKHR* pPipelineBinaries; +} VkPipelineBinaryHandlesInfoKHR; -#define VK_AMD_mixed_attachment_samples 1 -#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION 1 -#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME "VK_AMD_mixed_attachment_samples" +typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineBinariesKHR)(VkDevice device, const VkPipelineBinaryCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineBinaryHandlesInfoKHR* pBinaries); +typedef void (VKAPI_PTR *PFN_vkDestroyPipelineBinaryKHR)(VkDevice device, VkPipelineBinaryKHR pipelineBinary, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineKeyKHR)(VkDevice device, const VkPipelineCreateInfoKHR* pPipelineCreateInfo, VkPipelineBinaryKeyKHR* pPipelineKey); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineBinaryDataKHR)(VkDevice device, const VkPipelineBinaryDataInfoKHR* pInfo, VkPipelineBinaryKeyKHR* pPipelineBinaryKey, size_t* pPipelineBinaryDataSize, void* pPipelineBinaryData); +typedef VkResult (VKAPI_PTR *PFN_vkReleaseCapturedPipelineDataKHR)(VkDevice device, const VkReleaseCapturedPipelineDataInfoKHR* pInfo, const VkAllocationCallbacks* pAllocator); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineBinariesKHR( + VkDevice device, + const VkPipelineBinaryCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPipelineBinaryHandlesInfoKHR* pBinaries); -#define VK_AMD_shader_fragment_mask 1 -#define VK_AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION 1 -#define VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME "VK_AMD_shader_fragment_mask" +VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineBinaryKHR( + VkDevice device, + VkPipelineBinaryKHR pipelineBinary, + const VkAllocationCallbacks* pAllocator); +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineKeyKHR( + VkDevice device, + const VkPipelineCreateInfoKHR* pPipelineCreateInfo, + VkPipelineBinaryKeyKHR* pPipelineKey); -#define VK_EXT_inline_uniform_block 1 -#define VK_EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION 1 -#define 
VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME "VK_EXT_inline_uniform_block" -typedef VkPhysicalDeviceInlineUniformBlockFeatures VkPhysicalDeviceInlineUniformBlockFeaturesEXT; +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineBinaryDataKHR( + VkDevice device, + const VkPipelineBinaryDataInfoKHR* pInfo, + VkPipelineBinaryKeyKHR* pPipelineBinaryKey, + size_t* pPipelineBinaryDataSize, + void* pPipelineBinaryData); -typedef VkPhysicalDeviceInlineUniformBlockProperties VkPhysicalDeviceInlineUniformBlockPropertiesEXT; +VKAPI_ATTR VkResult VKAPI_CALL vkReleaseCapturedPipelineDataKHR( + VkDevice device, + const VkReleaseCapturedPipelineDataInfoKHR* pInfo, + const VkAllocationCallbacks* pAllocator); +#endif -typedef VkWriteDescriptorSetInlineUniformBlock VkWriteDescriptorSetInlineUniformBlockEXT; -typedef VkDescriptorPoolInlineUniformBlockCreateInfo VkDescriptorPoolInlineUniformBlockCreateInfoEXT; +// VK_KHR_cooperative_matrix is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_cooperative_matrix 1 +#define VK_KHR_COOPERATIVE_MATRIX_SPEC_VERSION 2 +#define VK_KHR_COOPERATIVE_MATRIX_EXTENSION_NAME "VK_KHR_cooperative_matrix" + +typedef enum VkComponentTypeKHR { + VK_COMPONENT_TYPE_FLOAT16_KHR = 0, + VK_COMPONENT_TYPE_FLOAT32_KHR = 1, + VK_COMPONENT_TYPE_FLOAT64_KHR = 2, + VK_COMPONENT_TYPE_SINT8_KHR = 3, + VK_COMPONENT_TYPE_SINT16_KHR = 4, + VK_COMPONENT_TYPE_SINT32_KHR = 5, + VK_COMPONENT_TYPE_SINT64_KHR = 6, + VK_COMPONENT_TYPE_UINT8_KHR = 7, + VK_COMPONENT_TYPE_UINT16_KHR = 8, + VK_COMPONENT_TYPE_UINT32_KHR = 9, + VK_COMPONENT_TYPE_UINT64_KHR = 10, + VK_COMPONENT_TYPE_SINT8_PACKED_NV = 1000491000, + VK_COMPONENT_TYPE_UINT8_PACKED_NV = 1000491001, + VK_COMPONENT_TYPE_FLOAT_E4M3_NV = 1000491002, + VK_COMPONENT_TYPE_FLOAT_E5M2_NV = 1000491003, + VK_COMPONENT_TYPE_FLOAT16_NV = VK_COMPONENT_TYPE_FLOAT16_KHR, + VK_COMPONENT_TYPE_FLOAT32_NV = VK_COMPONENT_TYPE_FLOAT32_KHR, + VK_COMPONENT_TYPE_FLOAT64_NV = VK_COMPONENT_TYPE_FLOAT64_KHR, + VK_COMPONENT_TYPE_SINT8_NV = VK_COMPONENT_TYPE_SINT8_KHR, + VK_COMPONENT_TYPE_SINT16_NV = VK_COMPONENT_TYPE_SINT16_KHR, + VK_COMPONENT_TYPE_SINT32_NV = VK_COMPONENT_TYPE_SINT32_KHR, + VK_COMPONENT_TYPE_SINT64_NV = VK_COMPONENT_TYPE_SINT64_KHR, + VK_COMPONENT_TYPE_UINT8_NV = VK_COMPONENT_TYPE_UINT8_KHR, + VK_COMPONENT_TYPE_UINT16_NV = VK_COMPONENT_TYPE_UINT16_KHR, + VK_COMPONENT_TYPE_UINT32_NV = VK_COMPONENT_TYPE_UINT32_KHR, + VK_COMPONENT_TYPE_UINT64_NV = VK_COMPONENT_TYPE_UINT64_KHR, + VK_COMPONENT_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkComponentTypeKHR; + +typedef enum VkScopeKHR { + VK_SCOPE_DEVICE_KHR = 1, + VK_SCOPE_WORKGROUP_KHR = 2, + VK_SCOPE_SUBGROUP_KHR = 3, + VK_SCOPE_QUEUE_FAMILY_KHR = 5, + VK_SCOPE_DEVICE_NV = VK_SCOPE_DEVICE_KHR, + VK_SCOPE_WORKGROUP_NV = VK_SCOPE_WORKGROUP_KHR, + VK_SCOPE_SUBGROUP_NV = VK_SCOPE_SUBGROUP_KHR, + VK_SCOPE_QUEUE_FAMILY_NV = VK_SCOPE_QUEUE_FAMILY_KHR, + VK_SCOPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkScopeKHR; +typedef struct VkCooperativeMatrixPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t MSize; + uint32_t NSize; + uint32_t KSize; + VkComponentTypeKHR AType; + VkComponentTypeKHR BType; + VkComponentTypeKHR CType; + VkComponentTypeKHR ResultType; + VkBool32 saturatingAccumulation; + VkScopeKHR scope; +} VkCooperativeMatrixPropertiesKHR; + +typedef struct VkPhysicalDeviceCooperativeMatrixFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 cooperativeMatrix; + VkBool32 cooperativeMatrixRobustBufferAccess; +} VkPhysicalDeviceCooperativeMatrixFeaturesKHR; +typedef struct 
VkPhysicalDeviceCooperativeMatrixPropertiesKHR { + VkStructureType sType; + void* pNext; + VkShaderStageFlags cooperativeMatrixSupportedStages; +} VkPhysicalDeviceCooperativeMatrixPropertiesKHR; +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesKHR* pProperties); -#define VK_EXT_shader_stencil_export 1 -#define VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION 1 -#define VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME "VK_EXT_shader_stencil_export" +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkCooperativeMatrixPropertiesKHR* pProperties); +#endif -#define VK_EXT_sample_locations 1 -#define VK_EXT_SAMPLE_LOCATIONS_SPEC_VERSION 1 -#define VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME "VK_EXT_sample_locations" -typedef struct VkSampleLocationEXT { - float x; - float y; -} VkSampleLocationEXT; +// VK_KHR_compute_shader_derivatives is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_compute_shader_derivatives 1 +#define VK_KHR_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION 1 +#define VK_KHR_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME "VK_KHR_compute_shader_derivatives" +typedef struct VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 computeDerivativeGroupQuads; + VkBool32 computeDerivativeGroupLinear; +} VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR; -typedef struct VkSampleLocationsInfoEXT { - VkStructureType sType; - const void* pNext; - VkSampleCountFlagBits sampleLocationsPerPixel; - VkExtent2D sampleLocationGridSize; - uint32_t sampleLocationsCount; - const VkSampleLocationEXT* pSampleLocations; -} VkSampleLocationsInfoEXT; +typedef struct VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 meshAndTaskShaderDerivatives; +} VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR; -typedef struct VkAttachmentSampleLocationsEXT { - uint32_t attachmentIndex; - VkSampleLocationsInfoEXT sampleLocationsInfo; -} VkAttachmentSampleLocationsEXT; -typedef struct VkSubpassSampleLocationsEXT { - uint32_t subpassIndex; - VkSampleLocationsInfoEXT sampleLocationsInfo; -} VkSubpassSampleLocationsEXT; -typedef struct VkRenderPassSampleLocationsBeginInfoEXT { - VkStructureType sType; - const void* pNext; - uint32_t attachmentInitialSampleLocationsCount; - const VkAttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations; - uint32_t postSubpassSampleLocationsCount; - const VkSubpassSampleLocationsEXT* pPostSubpassSampleLocations; -} VkRenderPassSampleLocationsBeginInfoEXT; +// VK_KHR_video_decode_av1 is a preprocessor guard. Do not pass it to API calls. 
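A sketch of the usual two-call enumeration for the cooperative matrix shapes declared above; the caller owns the returned array and frees it.

#include <stdlib.h>
#include <vulkan/vulkan.h>

static VkCooperativeMatrixPropertiesKHR*
enumerate_coop_matrix_shapes(VkPhysicalDevice phys, uint32_t* outCount)
{
    uint32_t count = 0;
    vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR(phys, &count, NULL);

    VkCooperativeMatrixPropertiesKHR* props =
        calloc(count, sizeof(VkCooperativeMatrixPropertiesKHR));
    if (!props) {
        *outCount = 0;
        return NULL;
    }
    /* Each output struct must carry its sType before the second call. */
    for (uint32_t i = 0; i < count; ++i)
        props[i].sType = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_KHR;

    vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR(phys, &count, props);
    *outCount = count;
    return props;
}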
+#define VK_KHR_video_decode_av1 1 +#include "vk_video/vulkan_video_codec_av1std.h" +#include "vk_video/vulkan_video_codec_av1std_decode.h" +#define VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR 7U +#define VK_KHR_VIDEO_DECODE_AV1_SPEC_VERSION 1 +#define VK_KHR_VIDEO_DECODE_AV1_EXTENSION_NAME "VK_KHR_video_decode_av1" +typedef struct VkVideoDecodeAV1ProfileInfoKHR { + VkStructureType sType; + const void* pNext; + StdVideoAV1Profile stdProfile; + VkBool32 filmGrainSupport; +} VkVideoDecodeAV1ProfileInfoKHR; -typedef struct VkPipelineSampleLocationsStateCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkBool32 sampleLocationsEnable; - VkSampleLocationsInfoEXT sampleLocationsInfo; -} VkPipelineSampleLocationsStateCreateInfoEXT; +typedef struct VkVideoDecodeAV1CapabilitiesKHR { + VkStructureType sType; + void* pNext; + StdVideoAV1Level maxLevel; +} VkVideoDecodeAV1CapabilitiesKHR; -typedef struct VkPhysicalDeviceSampleLocationsPropertiesEXT { - VkStructureType sType; - void* pNext; - VkSampleCountFlags sampleLocationSampleCounts; - VkExtent2D maxSampleLocationGridSize; - float sampleLocationCoordinateRange[2]; - uint32_t sampleLocationSubPixelBits; - VkBool32 variableSampleLocations; -} VkPhysicalDeviceSampleLocationsPropertiesEXT; +typedef struct VkVideoDecodeAV1SessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoAV1SequenceHeader* pStdSequenceHeader; +} VkVideoDecodeAV1SessionParametersCreateInfoKHR; -typedef struct VkMultisamplePropertiesEXT { +typedef struct VkVideoDecodeAV1PictureInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeAV1PictureInfo* pStdPictureInfo; + int32_t referenceNameSlotIndices[VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR]; + uint32_t frameHeaderOffset; + uint32_t tileCount; + const uint32_t* pTileOffsets; + const uint32_t* pTileSizes; +} VkVideoDecodeAV1PictureInfoKHR; + +typedef struct VkVideoDecodeAV1DpbSlotInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeAV1ReferenceInfo* pStdReferenceInfo; +} VkVideoDecodeAV1DpbSlotInfoKHR; + + + +// VK_KHR_video_encode_av1 is a preprocessor guard. Do not pass it to API calls. 
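A hedged sketch of how the AV1 decode profile struct above is typically chained into a VkVideoProfileInfoKHR for a capability query; the VkVideoProfileInfoKHR / VkVideoCapabilitiesKHR / VkVideoDecodeCapabilitiesKHR names and vkGetPhysicalDeviceVideoCapabilitiesKHR come from VK_KHR_video_queue / VK_KHR_video_decode_queue and are assumed to be supported, and 8-bit 4:2:0 main profile is just an example choice.

#include <vulkan/vulkan.h>

static VkResult query_av1_decode_caps(VkPhysicalDevice phys, StdVideoAV1Level* outMaxLevel)
{
    VkVideoDecodeAV1ProfileInfoKHR av1Profile = {0};
    av1Profile.sType = VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PROFILE_INFO_KHR;
    av1Profile.stdProfile = STD_VIDEO_AV1_PROFILE_MAIN;
    av1Profile.filmGrainSupport = VK_FALSE;

    VkVideoProfileInfoKHR profile = {0};
    profile.sType = VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR;
    profile.pNext = &av1Profile;
    profile.videoCodecOperation = VK_VIDEO_CODEC_OPERATION_DECODE_AV1_BIT_KHR;
    profile.chromaSubsampling = VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR;
    profile.lumaBitDepth = VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR;
    profile.chromaBitDepth = VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR;

    VkVideoDecodeAV1CapabilitiesKHR av1Caps = {0};
    av1Caps.sType = VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_CAPABILITIES_KHR;

    VkVideoDecodeCapabilitiesKHR decodeCaps = {0};
    decodeCaps.sType = VK_STRUCTURE_TYPE_VIDEO_DECODE_CAPABILITIES_KHR;
    decodeCaps.pNext = &av1Caps;

    VkVideoCapabilitiesKHR caps = {0};
    caps.sType = VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR;
    caps.pNext = &decodeCaps;

    VkResult res = vkGetPhysicalDeviceVideoCapabilitiesKHR(phys, &profile, &caps);
    if (res == VK_SUCCESS)
        *outMaxLevel = av1Caps.maxLevel;
    return res;
}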
+#define VK_KHR_video_encode_av1 1 +#include "vk_video/vulkan_video_codec_av1std_encode.h" +#define VK_KHR_VIDEO_ENCODE_AV1_SPEC_VERSION 1 +#define VK_KHR_VIDEO_ENCODE_AV1_EXTENSION_NAME "VK_KHR_video_encode_av1" + +typedef enum VkVideoEncodeAV1PredictionModeKHR { + VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_INTRA_ONLY_KHR = 0, + VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_SINGLE_REFERENCE_KHR = 1, + VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_UNIDIRECTIONAL_COMPOUND_KHR = 2, + VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_BIDIRECTIONAL_COMPOUND_KHR = 3, + VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeAV1PredictionModeKHR; + +typedef enum VkVideoEncodeAV1RateControlGroupKHR { + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_INTRA_KHR = 0, + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_PREDICTIVE_KHR = 1, + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_BIPREDICTIVE_KHR = 2, + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeAV1RateControlGroupKHR; + +typedef enum VkVideoEncodeAV1CapabilityFlagBitsKHR { + VK_VIDEO_ENCODE_AV1_CAPABILITY_PER_RATE_CONTROL_GROUP_MIN_MAX_Q_INDEX_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_AV1_CAPABILITY_GENERATE_OBU_EXTENSION_HEADER_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_AV1_CAPABILITY_PRIMARY_REFERENCE_CDF_ONLY_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_AV1_CAPABILITY_FRAME_SIZE_OVERRIDE_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_AV1_CAPABILITY_MOTION_VECTOR_SCALING_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_AV1_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeAV1CapabilityFlagBitsKHR; +typedef VkFlags VkVideoEncodeAV1CapabilityFlagsKHR; + +typedef enum VkVideoEncodeAV1StdFlagBitsKHR { + VK_VIDEO_ENCODE_AV1_STD_UNIFORM_TILE_SPACING_FLAG_SET_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_AV1_STD_SKIP_MODE_PRESENT_UNSET_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_AV1_STD_PRIMARY_REF_FRAME_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_AV1_STD_DELTA_Q_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_AV1_STD_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeAV1StdFlagBitsKHR; +typedef VkFlags VkVideoEncodeAV1StdFlagsKHR; + +typedef enum VkVideoEncodeAV1SuperblockSizeFlagBitsKHR { + VK_VIDEO_ENCODE_AV1_SUPERBLOCK_SIZE_64_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_AV1_SUPERBLOCK_SIZE_128_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_AV1_SUPERBLOCK_SIZE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeAV1SuperblockSizeFlagBitsKHR; +typedef VkFlags VkVideoEncodeAV1SuperblockSizeFlagsKHR; + +typedef enum VkVideoEncodeAV1RateControlFlagBitsKHR { + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REGULAR_GOP_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_TEMPORAL_LAYER_PATTERN_DYADIC_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeAV1RateControlFlagBitsKHR; +typedef VkFlags VkVideoEncodeAV1RateControlFlagsKHR; +typedef struct VkPhysicalDeviceVideoEncodeAV1FeaturesKHR { VkStructureType sType; void* pNext; - VkExtent2D maxSampleLocationGridSize; -} VkMultisamplePropertiesEXT; + VkBool32 videoEncodeAV1; +} VkPhysicalDeviceVideoEncodeAV1FeaturesKHR; -typedef void (VKAPI_PTR *PFN_vkCmdSetSampleLocationsEXT)(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo); -typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT)(VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties); 
+typedef struct VkVideoEncodeAV1CapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeAV1CapabilityFlagsKHR flags; + StdVideoAV1Level maxLevel; + VkExtent2D codedPictureAlignment; + VkExtent2D maxTiles; + VkExtent2D minTileSize; + VkExtent2D maxTileSize; + VkVideoEncodeAV1SuperblockSizeFlagsKHR superblockSizes; + uint32_t maxSingleReferenceCount; + uint32_t singleReferenceNameMask; + uint32_t maxUnidirectionalCompoundReferenceCount; + uint32_t maxUnidirectionalCompoundGroup1ReferenceCount; + uint32_t unidirectionalCompoundReferenceNameMask; + uint32_t maxBidirectionalCompoundReferenceCount; + uint32_t maxBidirectionalCompoundGroup1ReferenceCount; + uint32_t maxBidirectionalCompoundGroup2ReferenceCount; + uint32_t bidirectionalCompoundReferenceNameMask; + uint32_t maxTemporalLayerCount; + uint32_t maxSpatialLayerCount; + uint32_t maxOperatingPoints; + uint32_t minQIndex; + uint32_t maxQIndex; + VkBool32 prefersGopRemainingFrames; + VkBool32 requiresGopRemainingFrames; + VkVideoEncodeAV1StdFlagsKHR stdSyntaxFlags; +} VkVideoEncodeAV1CapabilitiesKHR; + +typedef struct VkVideoEncodeAV1QIndexKHR { + uint32_t intraQIndex; + uint32_t predictiveQIndex; + uint32_t bipredictiveQIndex; +} VkVideoEncodeAV1QIndexKHR; + +typedef struct VkVideoEncodeAV1QualityLevelPropertiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeAV1RateControlFlagsKHR preferredRateControlFlags; + uint32_t preferredGopFrameCount; + uint32_t preferredKeyFramePeriod; + uint32_t preferredConsecutiveBipredictiveFrameCount; + uint32_t preferredTemporalLayerCount; + VkVideoEncodeAV1QIndexKHR preferredConstantQIndex; + uint32_t preferredMaxSingleReferenceCount; + uint32_t preferredSingleReferenceNameMask; + uint32_t preferredMaxUnidirectionalCompoundReferenceCount; + uint32_t preferredMaxUnidirectionalCompoundGroup1ReferenceCount; + uint32_t preferredUnidirectionalCompoundReferenceNameMask; + uint32_t preferredMaxBidirectionalCompoundReferenceCount; + uint32_t preferredMaxBidirectionalCompoundGroup1ReferenceCount; + uint32_t preferredMaxBidirectionalCompoundGroup2ReferenceCount; + uint32_t preferredBidirectionalCompoundReferenceNameMask; +} VkVideoEncodeAV1QualityLevelPropertiesKHR; + +typedef struct VkVideoEncodeAV1SessionCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useMaxLevel; + StdVideoAV1Level maxLevel; +} VkVideoEncodeAV1SessionCreateInfoKHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleLocationsEXT( - VkCommandBuffer commandBuffer, - const VkSampleLocationsInfoEXT* pSampleLocationsInfo); +typedef struct VkVideoEncodeAV1SessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoAV1SequenceHeader* pStdSequenceHeader; + const StdVideoEncodeAV1DecoderModelInfo* pStdDecoderModelInfo; + uint32_t stdOperatingPointCount; + const StdVideoEncodeAV1OperatingPointInfo* pStdOperatingPoints; +} VkVideoEncodeAV1SessionParametersCreateInfoKHR; -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMultisamplePropertiesEXT( - VkPhysicalDevice physicalDevice, - VkSampleCountFlagBits samples, - VkMultisamplePropertiesEXT* pMultisampleProperties); -#endif +typedef struct VkVideoEncodeAV1PictureInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeAV1PredictionModeKHR predictionMode; + VkVideoEncodeAV1RateControlGroupKHR rateControlGroup; + uint32_t constantQIndex; + const StdVideoEncodeAV1PictureInfo* pStdPictureInfo; + int32_t referenceNameSlotIndices[VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR]; + VkBool32 
primaryReferenceCdfOnly; + VkBool32 generateObuExtensionHeader; +} VkVideoEncodeAV1PictureInfoKHR; + +typedef struct VkVideoEncodeAV1DpbSlotInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoEncodeAV1ReferenceInfo* pStdReferenceInfo; +} VkVideoEncodeAV1DpbSlotInfoKHR; +typedef struct VkVideoEncodeAV1ProfileInfoKHR { + VkStructureType sType; + const void* pNext; + StdVideoAV1Profile stdProfile; +} VkVideoEncodeAV1ProfileInfoKHR; -#define VK_EXT_blend_operation_advanced 1 -#define VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION 2 -#define VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME "VK_EXT_blend_operation_advanced" +typedef struct VkVideoEncodeAV1FrameSizeKHR { + uint32_t intraFrameSize; + uint32_t predictiveFrameSize; + uint32_t bipredictiveFrameSize; +} VkVideoEncodeAV1FrameSizeKHR; -typedef enum VkBlendOverlapEXT { - VK_BLEND_OVERLAP_UNCORRELATED_EXT = 0, - VK_BLEND_OVERLAP_DISJOINT_EXT = 1, - VK_BLEND_OVERLAP_CONJOINT_EXT = 2, - VK_BLEND_OVERLAP_MAX_ENUM_EXT = 0x7FFFFFFF -} VkBlendOverlapEXT; -typedef struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT { +typedef struct VkVideoEncodeAV1GopRemainingFrameInfoKHR { VkStructureType sType; - void* pNext; - VkBool32 advancedBlendCoherentOperations; -} VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT; + const void* pNext; + VkBool32 useGopRemainingFrames; + uint32_t gopRemainingIntra; + uint32_t gopRemainingPredictive; + uint32_t gopRemainingBipredictive; +} VkVideoEncodeAV1GopRemainingFrameInfoKHR; -typedef struct VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT { +typedef struct VkVideoEncodeAV1RateControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeAV1RateControlFlagsKHR flags; + uint32_t gopFrameCount; + uint32_t keyFramePeriod; + uint32_t consecutiveBipredictiveFrameCount; + uint32_t temporalLayerCount; +} VkVideoEncodeAV1RateControlInfoKHR; + +typedef struct VkVideoEncodeAV1RateControlLayerInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useMinQIndex; + VkVideoEncodeAV1QIndexKHR minQIndex; + VkBool32 useMaxQIndex; + VkVideoEncodeAV1QIndexKHR maxQIndex; + VkBool32 useMaxFrameSize; + VkVideoEncodeAV1FrameSizeKHR maxFrameSize; +} VkVideoEncodeAV1RateControlLayerInfoKHR; + + + +// VK_KHR_video_maintenance1 is a preprocessor guard. Do not pass it to API calls. 
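A sketch of filling the AV1 rate-control structs declared above for a flat IP-only GOP of 60 frames with one temporal layer and a clamped Q-index range; the numeric values are arbitrary examples, and in a real encoder these structs are chained via pNext into the generic VkVideoEncodeRateControlInfoKHR / VkVideoEncodeRateControlLayerInfoKHR structs from VK_KHR_video_encode_queue.

#include <string.h>
#include <vulkan/vulkan.h>

static void fill_av1_rate_control(VkVideoEncodeAV1RateControlInfoKHR* rc,
                                  VkVideoEncodeAV1RateControlLayerInfoKHR* layer)
{
    memset(rc, 0, sizeof(*rc));
    rc->sType = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_RATE_CONTROL_INFO_KHR;
    rc->flags = VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REGULAR_GOP_BIT_KHR |
                VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR;
    rc->gopFrameCount = 60;
    rc->keyFramePeriod = 60;
    rc->consecutiveBipredictiveFrameCount = 0;  /* IP-only GOP */
    rc->temporalLayerCount = 1;

    memset(layer, 0, sizeof(*layer));
    layer->sType = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_RATE_CONTROL_LAYER_INFO_KHR;
    layer->useMinQIndex = VK_TRUE;
    layer->minQIndex.intraQIndex = 32;
    layer->minQIndex.predictiveQIndex = 32;
    layer->minQIndex.bipredictiveQIndex = 32;
    layer->useMaxQIndex = VK_TRUE;
    layer->maxQIndex.intraQIndex = 160;
    layer->maxQIndex.predictiveQIndex = 160;
    layer->maxQIndex.bipredictiveQIndex = 160;
    layer->useMaxFrameSize = VK_FALSE;
}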
+#define VK_KHR_video_maintenance1 1 +#define VK_KHR_VIDEO_MAINTENANCE_1_SPEC_VERSION 1 +#define VK_KHR_VIDEO_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_video_maintenance1" +typedef struct VkPhysicalDeviceVideoMaintenance1FeaturesKHR { VkStructureType sType; void* pNext; - uint32_t advancedBlendMaxColorAttachments; - VkBool32 advancedBlendIndependentBlend; - VkBool32 advancedBlendNonPremultipliedSrcColor; - VkBool32 advancedBlendNonPremultipliedDstColor; - VkBool32 advancedBlendCorrelatedOverlap; - VkBool32 advancedBlendAllOperations; -} VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT; + VkBool32 videoMaintenance1; +} VkPhysicalDeviceVideoMaintenance1FeaturesKHR; -typedef struct VkPipelineColorBlendAdvancedStateCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkBool32 srcPremultiplied; - VkBool32 dstPremultiplied; - VkBlendOverlapEXT blendOverlap; -} VkPipelineColorBlendAdvancedStateCreateInfoEXT; +typedef struct VkVideoInlineQueryInfoKHR { + VkStructureType sType; + const void* pNext; + VkQueryPool queryPool; + uint32_t firstQuery; + uint32_t queryCount; +} VkVideoInlineQueryInfoKHR; -#define VK_NV_fragment_coverage_to_color 1 -#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION 1 -#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME "VK_NV_fragment_coverage_to_color" -typedef VkFlags VkPipelineCoverageToColorStateCreateFlagsNV; -typedef struct VkPipelineCoverageToColorStateCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkPipelineCoverageToColorStateCreateFlagsNV flags; - VkBool32 coverageToColorEnable; - uint32_t coverageToColorLocation; -} VkPipelineCoverageToColorStateCreateInfoNV; +// VK_KHR_vertex_attribute_divisor is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_vertex_attribute_divisor 1 +#define VK_KHR_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION 1 +#define VK_KHR_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME "VK_KHR_vertex_attribute_divisor" +typedef VkPhysicalDeviceVertexAttributeDivisorProperties VkPhysicalDeviceVertexAttributeDivisorPropertiesKHR; +typedef VkVertexInputBindingDivisorDescription VkVertexInputBindingDivisorDescriptionKHR; +typedef VkPipelineVertexInputDivisorStateCreateInfo VkPipelineVertexInputDivisorStateCreateInfoKHR; -#define VK_NV_framebuffer_mixed_samples 1 -#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION 1 -#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME "VK_NV_framebuffer_mixed_samples" +typedef VkPhysicalDeviceVertexAttributeDivisorFeatures VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR; -typedef enum VkCoverageModulationModeNV { - VK_COVERAGE_MODULATION_MODE_NONE_NV = 0, - VK_COVERAGE_MODULATION_MODE_RGB_NV = 1, - VK_COVERAGE_MODULATION_MODE_ALPHA_NV = 2, - VK_COVERAGE_MODULATION_MODE_RGBA_NV = 3, - VK_COVERAGE_MODULATION_MODE_MAX_ENUM_NV = 0x7FFFFFFF -} VkCoverageModulationModeNV; -typedef VkFlags VkPipelineCoverageModulationStateCreateFlagsNV; -typedef struct VkPipelineCoverageModulationStateCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkPipelineCoverageModulationStateCreateFlagsNV flags; - VkCoverageModulationModeNV coverageModulationMode; - VkBool32 coverageModulationTableEnable; - uint32_t coverageModulationTableCount; - const float* pCoverageModulationTable; -} VkPipelineCoverageModulationStateCreateInfoNV; +// VK_KHR_load_store_op_none is a preprocessor guard. Do not pass it to API calls. 
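A small sketch for VkVideoInlineQueryInfoKHR above: with videoMaintenance1 enabled, a result query can execute inline with a video coding operation instead of being wrapped in vkCmdBeginQuery/vkCmdEndQuery. The filled struct would be chained via pNext into, e.g., a VkVideoDecodeInfoKHR from VK_KHR_video_decode_queue; the query pool is assumed to already exist with a suitable query type.

#include <vulkan/vulkan.h>

static VkVideoInlineQueryInfoKHR make_inline_query(VkQueryPool pool, uint32_t slot)
{
    VkVideoInlineQueryInfoKHR q = {0};
    q.sType = VK_STRUCTURE_TYPE_VIDEO_INLINE_QUERY_INFO_KHR;
    q.queryPool = pool;
    q.firstQuery = slot;
    q.queryCount = 1;
    return q;
}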
+#define VK_KHR_load_store_op_none 1 +#define VK_KHR_LOAD_STORE_OP_NONE_SPEC_VERSION 1 +#define VK_KHR_LOAD_STORE_OP_NONE_EXTENSION_NAME "VK_KHR_load_store_op_none" -#define VK_NV_fill_rectangle 1 -#define VK_NV_FILL_RECTANGLE_SPEC_VERSION 1 -#define VK_NV_FILL_RECTANGLE_EXTENSION_NAME "VK_NV_fill_rectangle" +// VK_KHR_shader_float_controls2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_float_controls2 1 +#define VK_KHR_SHADER_FLOAT_CONTROLS_2_SPEC_VERSION 1 +#define VK_KHR_SHADER_FLOAT_CONTROLS_2_EXTENSION_NAME "VK_KHR_shader_float_controls2" +typedef VkPhysicalDeviceShaderFloatControls2Features VkPhysicalDeviceShaderFloatControls2FeaturesKHR; -#define VK_NV_shader_sm_builtins 1 -#define VK_NV_SHADER_SM_BUILTINS_SPEC_VERSION 1 -#define VK_NV_SHADER_SM_BUILTINS_EXTENSION_NAME "VK_NV_shader_sm_builtins" -typedef struct VkPhysicalDeviceShaderSMBuiltinsPropertiesNV { - VkStructureType sType; - void* pNext; - uint32_t shaderSMCount; - uint32_t shaderWarpsPerSM; -} VkPhysicalDeviceShaderSMBuiltinsPropertiesNV; -typedef struct VkPhysicalDeviceShaderSMBuiltinsFeaturesNV { - VkStructureType sType; - void* pNext; - VkBool32 shaderSMBuiltins; -} VkPhysicalDeviceShaderSMBuiltinsFeaturesNV; +// VK_KHR_index_type_uint8 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_index_type_uint8 1 +#define VK_KHR_INDEX_TYPE_UINT8_SPEC_VERSION 1 +#define VK_KHR_INDEX_TYPE_UINT8_EXTENSION_NAME "VK_KHR_index_type_uint8" +typedef VkPhysicalDeviceIndexTypeUint8Features VkPhysicalDeviceIndexTypeUint8FeaturesKHR; -#define VK_EXT_post_depth_coverage 1 -#define VK_EXT_POST_DEPTH_COVERAGE_SPEC_VERSION 1 -#define VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME "VK_EXT_post_depth_coverage" +// VK_KHR_line_rasterization is a preprocessor guard. Do not pass it to API calls. 
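A one-liner sketch combining VK_KHR_index_type_uint8 (aliased above) with the maintenance5 vkCmdBindIndexBuffer2KHR command declared earlier in this hunk, binding an 8-bit index buffer with an explicit bound range; both extensions and their features are assumed to be enabled.

#include <vulkan/vulkan.h>

static void bind_u8_indices(VkCommandBuffer cmd, VkBuffer indices, VkDeviceSize bytes)
{
    vkCmdBindIndexBuffer2KHR(cmd, indices, /*offset*/ 0, /*size*/ bytes,
                             VK_INDEX_TYPE_UINT8_KHR);
}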
+#define VK_KHR_line_rasterization 1 +#define VK_KHR_LINE_RASTERIZATION_SPEC_VERSION 1 +#define VK_KHR_LINE_RASTERIZATION_EXTENSION_NAME "VK_KHR_line_rasterization" +typedef VkLineRasterizationMode VkLineRasterizationModeKHR; -#define VK_EXT_image_drm_format_modifier 1 -#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_SPEC_VERSION 2 -#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME "VK_EXT_image_drm_format_modifier" -typedef struct VkDrmFormatModifierPropertiesEXT { - uint64_t drmFormatModifier; - uint32_t drmFormatModifierPlaneCount; - VkFormatFeatureFlags drmFormatModifierTilingFeatures; -} VkDrmFormatModifierPropertiesEXT; +typedef VkPhysicalDeviceLineRasterizationFeatures VkPhysicalDeviceLineRasterizationFeaturesKHR; -typedef struct VkDrmFormatModifierPropertiesListEXT { - VkStructureType sType; - void* pNext; - uint32_t drmFormatModifierCount; - VkDrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties; -} VkDrmFormatModifierPropertiesListEXT; - -typedef struct VkPhysicalDeviceImageDrmFormatModifierInfoEXT { - VkStructureType sType; - const void* pNext; - uint64_t drmFormatModifier; - VkSharingMode sharingMode; - uint32_t queueFamilyIndexCount; - const uint32_t* pQueueFamilyIndices; -} VkPhysicalDeviceImageDrmFormatModifierInfoEXT; +typedef VkPhysicalDeviceLineRasterizationProperties VkPhysicalDeviceLineRasterizationPropertiesKHR; -typedef struct VkImageDrmFormatModifierListCreateInfoEXT { - VkStructureType sType; - const void* pNext; - uint32_t drmFormatModifierCount; - const uint64_t* pDrmFormatModifiers; -} VkImageDrmFormatModifierListCreateInfoEXT; +typedef VkPipelineRasterizationLineStateCreateInfo VkPipelineRasterizationLineStateCreateInfoKHR; -typedef struct VkImageDrmFormatModifierExplicitCreateInfoEXT { - VkStructureType sType; - const void* pNext; - uint64_t drmFormatModifier; - uint32_t drmFormatModifierPlaneCount; - const VkSubresourceLayout* pPlaneLayouts; -} VkImageDrmFormatModifierExplicitCreateInfoEXT; +typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleKHR)(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern); -typedef struct VkImageDrmFormatModifierPropertiesEXT { - VkStructureType sType; - void* pNext; - uint64_t drmFormatModifier; -} VkImageDrmFormatModifierPropertiesEXT; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleKHR( + VkCommandBuffer commandBuffer, + uint32_t lineStippleFactor, + uint16_t lineStipplePattern); +#endif -typedef struct VkDrmFormatModifierProperties2EXT { - uint64_t drmFormatModifier; - uint32_t drmFormatModifierPlaneCount; - VkFormatFeatureFlags2 drmFormatModifierTilingFeatures; -} VkDrmFormatModifierProperties2EXT; -typedef struct VkDrmFormatModifierPropertiesList2EXT { - VkStructureType sType; - void* pNext; - uint32_t drmFormatModifierCount; - VkDrmFormatModifierProperties2EXT* pDrmFormatModifierProperties; -} VkDrmFormatModifierPropertiesList2EXT; +// VK_KHR_calibrated_timestamps is a preprocessor guard. Do not pass it to API calls. 
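A sketch for the line rasterization types above: request stippled Bresenham lines at pipeline creation (the returned struct is chained into VkPipelineRasterizationStateCreateInfo::pNext) and override the stipple pattern dynamically. Assumes the relevant lineRasterization features are enabled and, for the dynamic call, that the pipeline lists VK_DYNAMIC_STATE_LINE_STIPPLE_KHR.

#include <vulkan/vulkan.h>

static VkPipelineRasterizationLineStateCreateInfoKHR stippled_line_state(void)
{
    VkPipelineRasterizationLineStateCreateInfoKHR line = {0};
    line.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_KHR;
    line.lineRasterizationMode = VK_LINE_RASTERIZATION_MODE_BRESENHAM_KHR;
    line.stippledLineEnable = VK_TRUE;
    line.lineStippleFactor = 1;
    line.lineStipplePattern = 0xF0F0;   /* dashed */
    return line;
}

static void set_dashes(VkCommandBuffer cmd)
{
    vkCmdSetLineStippleKHR(cmd, 2, 0xAAAA);
}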
+#define VK_KHR_calibrated_timestamps 1 +#define VK_KHR_CALIBRATED_TIMESTAMPS_SPEC_VERSION 1 +#define VK_KHR_CALIBRATED_TIMESTAMPS_EXTENSION_NAME "VK_KHR_calibrated_timestamps" + +typedef enum VkTimeDomainKHR { + VK_TIME_DOMAIN_DEVICE_KHR = 0, + VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR = 1, + VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_KHR = 2, + VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_KHR = 3, + VK_TIME_DOMAIN_DEVICE_EXT = VK_TIME_DOMAIN_DEVICE_KHR, + VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT = VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR, + VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_KHR, + VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_KHR, + VK_TIME_DOMAIN_MAX_ENUM_KHR = 0x7FFFFFFF +} VkTimeDomainKHR; +typedef struct VkCalibratedTimestampInfoKHR { + VkStructureType sType; + const void* pNext; + VkTimeDomainKHR timeDomain; +} VkCalibratedTimestampInfoKHR; -typedef VkResult (VKAPI_PTR *PFN_vkGetImageDrmFormatModifierPropertiesEXT)(VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR)(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainKHR* pTimeDomains); +typedef VkResult (VKAPI_PTR *PFN_vkGetCalibratedTimestampsKHR)(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoKHR* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetImageDrmFormatModifierPropertiesEXT( +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pTimeDomainCount, + VkTimeDomainKHR* pTimeDomains); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetCalibratedTimestampsKHR( VkDevice device, - VkImage image, - VkImageDrmFormatModifierPropertiesEXT* pProperties); + uint32_t timestampCount, + const VkCalibratedTimestampInfoKHR* pTimestampInfos, + uint64_t* pTimestamps, + uint64_t* pMaxDeviation); #endif -#define VK_EXT_validation_cache 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkValidationCacheEXT) -#define VK_EXT_VALIDATION_CACHE_SPEC_VERSION 1 -#define VK_EXT_VALIDATION_CACHE_EXTENSION_NAME "VK_EXT_validation_cache" +// VK_KHR_shader_expect_assume is a preprocessor guard. Do not pass it to API calls. 
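+
+/*
+ * Illustrative usage sketch (example only, compiled out) for VK_KHR_calibrated_timestamps:
+ * the usual two-call enumeration of time domains followed by a correlated device/host
+ * timestamp query. The fixed-size arrays, the clamping and the example* name are
+ * placeholders; error handling is omitted.
+ */
+#if 0
+static void exampleCalibratedTimestamps(VkPhysicalDevice physicalDevice, VkDevice device)
+{
+    uint32_t timeDomainCount = 0u;
+    vkGetPhysicalDeviceCalibrateableTimeDomainsKHR(physicalDevice, &timeDomainCount, NULL);
+
+    VkTimeDomainKHR timeDomains[8];
+    if (timeDomainCount > 8u) timeDomainCount = 8u;
+    vkGetPhysicalDeviceCalibrateableTimeDomainsKHR(physicalDevice, &timeDomainCount, timeDomains);
+
+    VkCalibratedTimestampInfoKHR infos[2];
+    infos[0].sType = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR;
+    infos[0].pNext = NULL;
+    infos[0].timeDomain = VK_TIME_DOMAIN_DEVICE_KHR;
+    infos[1] = infos[0];
+    infos[1].timeDomain = VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR;  /* POSIX platforms only */
+
+    uint64_t timestamps[2];
+    uint64_t maxDeviation = 0u;
+    vkGetCalibratedTimestampsKHR(device, 2u, infos, timestamps, &maxDeviation);
+}
+#endif
+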
+#define VK_KHR_shader_expect_assume 1 +#define VK_KHR_SHADER_EXPECT_ASSUME_SPEC_VERSION 1 +#define VK_KHR_SHADER_EXPECT_ASSUME_EXTENSION_NAME "VK_KHR_shader_expect_assume" +typedef VkPhysicalDeviceShaderExpectAssumeFeatures VkPhysicalDeviceShaderExpectAssumeFeaturesKHR; -typedef enum VkValidationCacheHeaderVersionEXT { - VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT = 1, - VK_VALIDATION_CACHE_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF -} VkValidationCacheHeaderVersionEXT; -typedef VkFlags VkValidationCacheCreateFlagsEXT; -typedef struct VkValidationCacheCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkValidationCacheCreateFlagsEXT flags; - size_t initialDataSize; - const void* pInitialData; -} VkValidationCacheCreateInfoEXT; -typedef struct VkShaderModuleValidationCacheCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkValidationCacheEXT validationCache; -} VkShaderModuleValidationCacheCreateInfoEXT; -typedef VkResult (VKAPI_PTR *PFN_vkCreateValidationCacheEXT)(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache); -typedef void (VKAPI_PTR *PFN_vkDestroyValidationCacheEXT)(VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkMergeValidationCachesEXT)(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches); -typedef VkResult (VKAPI_PTR *PFN_vkGetValidationCacheDataEXT)(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData); +// VK_KHR_maintenance6 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_maintenance6 1 +#define VK_KHR_MAINTENANCE_6_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_6_EXTENSION_NAME "VK_KHR_maintenance6" +typedef VkPhysicalDeviceMaintenance6Features VkPhysicalDeviceMaintenance6FeaturesKHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateValidationCacheEXT( - VkDevice device, - const VkValidationCacheCreateInfoEXT* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkValidationCacheEXT* pValidationCache); +typedef VkPhysicalDeviceMaintenance6Properties VkPhysicalDeviceMaintenance6PropertiesKHR; -VKAPI_ATTR void VKAPI_CALL vkDestroyValidationCacheEXT( - VkDevice device, - VkValidationCacheEXT validationCache, - const VkAllocationCallbacks* pAllocator); +typedef VkBindMemoryStatus VkBindMemoryStatusKHR; -VKAPI_ATTR VkResult VKAPI_CALL vkMergeValidationCachesEXT( - VkDevice device, - VkValidationCacheEXT dstCache, - uint32_t srcCacheCount, - const VkValidationCacheEXT* pSrcCaches); +typedef VkBindDescriptorSetsInfo VkBindDescriptorSetsInfoKHR; -VKAPI_ATTR VkResult VKAPI_CALL vkGetValidationCacheDataEXT( - VkDevice device, - VkValidationCacheEXT validationCache, - size_t* pDataSize, - void* pData); -#endif +typedef VkPushConstantsInfo VkPushConstantsInfoKHR; +typedef VkPushDescriptorSetInfo VkPushDescriptorSetInfoKHR; -#define VK_EXT_descriptor_indexing 1 -#define VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION 2 -#define VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME "VK_EXT_descriptor_indexing" -typedef VkDescriptorBindingFlagBits VkDescriptorBindingFlagBitsEXT; +typedef VkPushDescriptorSetWithTemplateInfo VkPushDescriptorSetWithTemplateInfoKHR; -typedef VkDescriptorBindingFlags VkDescriptorBindingFlagsEXT; +typedef struct VkSetDescriptorBufferOffsetsInfoEXT { + VkStructureType sType; + const void* pNext; + VkShaderStageFlags stageFlags; + VkPipelineLayout 
layout; + uint32_t firstSet; + uint32_t setCount; + const uint32_t* pBufferIndices; + const VkDeviceSize* pOffsets; +} VkSetDescriptorBufferOffsetsInfoEXT; + +typedef struct VkBindDescriptorBufferEmbeddedSamplersInfoEXT { + VkStructureType sType; + const void* pNext; + VkShaderStageFlags stageFlags; + VkPipelineLayout layout; + uint32_t set; +} VkBindDescriptorBufferEmbeddedSamplersInfoEXT; -typedef VkDescriptorSetLayoutBindingFlagsCreateInfo VkDescriptorSetLayoutBindingFlagsCreateInfoEXT; +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorSets2KHR)(VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfo* pBindDescriptorSetsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushConstants2KHR)(VkCommandBuffer commandBuffer, const VkPushConstantsInfo* pPushConstantsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSet2KHR)(VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfo* pPushDescriptorSetInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplate2KHR)(VkCommandBuffer commandBuffer, const VkPushDescriptorSetWithTemplateInfo* pPushDescriptorSetWithTemplateInfo); +typedef void (VKAPI_PTR *PFN_vkCmdSetDescriptorBufferOffsets2EXT)(VkCommandBuffer commandBuffer, const VkSetDescriptorBufferOffsetsInfoEXT* pSetDescriptorBufferOffsetsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT)(VkCommandBuffer commandBuffer, const VkBindDescriptorBufferEmbeddedSamplersInfoEXT* pBindDescriptorBufferEmbeddedSamplersInfo); -typedef VkPhysicalDeviceDescriptorIndexingFeatures VkPhysicalDeviceDescriptorIndexingFeaturesEXT; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets2KHR( + VkCommandBuffer commandBuffer, + const VkBindDescriptorSetsInfo* pBindDescriptorSetsInfo); -typedef VkPhysicalDeviceDescriptorIndexingProperties VkPhysicalDeviceDescriptorIndexingPropertiesEXT; +VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants2KHR( + VkCommandBuffer commandBuffer, + const VkPushConstantsInfo* pPushConstantsInfo); -typedef VkDescriptorSetVariableDescriptorCountAllocateInfo VkDescriptorSetVariableDescriptorCountAllocateInfoEXT; +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSet2KHR( + VkCommandBuffer commandBuffer, + const VkPushDescriptorSetInfo* pPushDescriptorSetInfo); -typedef VkDescriptorSetVariableDescriptorCountLayoutSupport VkDescriptorSetVariableDescriptorCountLayoutSupportEXT; +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplate2KHR( + VkCommandBuffer commandBuffer, + const VkPushDescriptorSetWithTemplateInfo* pPushDescriptorSetWithTemplateInfo); +VKAPI_ATTR void VKAPI_CALL vkCmdSetDescriptorBufferOffsets2EXT( + VkCommandBuffer commandBuffer, + const VkSetDescriptorBufferOffsetsInfoEXT* pSetDescriptorBufferOffsetsInfo); +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( + VkCommandBuffer commandBuffer, + const VkBindDescriptorBufferEmbeddedSamplersInfoEXT* pBindDescriptorBufferEmbeddedSamplersInfo); +#endif -#define VK_EXT_shader_viewport_index_layer 1 -#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION 1 -#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME "VK_EXT_shader_viewport_index_layer" +// VK_KHR_video_encode_quantization_map is a preprocessor guard. Do not pass it to API calls. 
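+
+/*
+ * Illustrative usage sketch (example only, compiled out) for VK_KHR_maintenance6:
+ * binding a descriptor set with vkCmdBindDescriptorSets2KHR and a VkBindDescriptorSetsInfoKHR
+ * structure instead of the positional vkCmdBindDescriptorSets parameters. The handles passed
+ * in and the example* name are placeholders supplied by the caller.
+ */
+#if 0
+static void exampleBindDescriptorSets2(VkCommandBuffer commandBuffer,
+                                       VkPipelineLayout pipelineLayout,
+                                       VkDescriptorSet descriptorSet)
+{
+    VkBindDescriptorSetsInfoKHR bindInfo = { 0 };
+    bindInfo.sType = VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO_KHR;
+    bindInfo.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
+    bindInfo.layout = pipelineLayout;
+    bindInfo.firstSet = 0u;
+    bindInfo.descriptorSetCount = 1u;
+    bindInfo.pDescriptorSets = &descriptorSet;
+
+    vkCmdBindDescriptorSets2KHR(commandBuffer, &bindInfo);
+}
+#endif
+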
+#define VK_KHR_video_encode_quantization_map 1 +#define VK_KHR_VIDEO_ENCODE_QUANTIZATION_MAP_SPEC_VERSION 2 +#define VK_KHR_VIDEO_ENCODE_QUANTIZATION_MAP_EXTENSION_NAME "VK_KHR_video_encode_quantization_map" +typedef struct VkVideoEncodeQuantizationMapCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkExtent2D maxQuantizationMapExtent; +} VkVideoEncodeQuantizationMapCapabilitiesKHR; -#define VK_NV_shading_rate_image 1 -#define VK_NV_SHADING_RATE_IMAGE_SPEC_VERSION 3 -#define VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME "VK_NV_shading_rate_image" +typedef struct VkVideoFormatQuantizationMapPropertiesKHR { + VkStructureType sType; + void* pNext; + VkExtent2D quantizationMapTexelSize; +} VkVideoFormatQuantizationMapPropertiesKHR; -typedef enum VkShadingRatePaletteEntryNV { - VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV = 0, - VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV = 1, - VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV = 2, - VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV = 3, - VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV = 4, - VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV = 5, - VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV = 6, - VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV = 7, - VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV = 8, - VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV = 9, - VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV = 10, - VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV = 11, - VK_SHADING_RATE_PALETTE_ENTRY_MAX_ENUM_NV = 0x7FFFFFFF -} VkShadingRatePaletteEntryNV; +typedef struct VkVideoEncodeQuantizationMapInfoKHR { + VkStructureType sType; + const void* pNext; + VkImageView quantizationMap; + VkExtent2D quantizationMapExtent; +} VkVideoEncodeQuantizationMapInfoKHR; -typedef enum VkCoarseSampleOrderTypeNV { - VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV = 0, - VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV = 1, - VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV = 2, - VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV = 3, - VK_COARSE_SAMPLE_ORDER_TYPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkCoarseSampleOrderTypeNV; -typedef struct VkShadingRatePaletteNV { - uint32_t shadingRatePaletteEntryCount; - const VkShadingRatePaletteEntryNV* pShadingRatePaletteEntries; -} VkShadingRatePaletteNV; +typedef struct VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkExtent2D quantizationMapTexelSize; +} VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR; -typedef struct VkPipelineViewportShadingRateImageStateCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkBool32 shadingRateImageEnable; - uint32_t viewportCount; - const VkShadingRatePaletteNV* pShadingRatePalettes; -} VkPipelineViewportShadingRateImageStateCreateInfoNV; +typedef struct VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 videoEncodeQuantizationMap; +} VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR; -typedef struct VkPhysicalDeviceShadingRateImageFeaturesNV { +typedef struct VkVideoEncodeH264QuantizationMapCapabilitiesKHR { VkStructureType sType; void* pNext; - VkBool32 shadingRateImage; - VkBool32 shadingRateCoarseSampleOrder; -} VkPhysicalDeviceShadingRateImageFeaturesNV; + int32_t minQpDelta; + int32_t maxQpDelta; +} VkVideoEncodeH264QuantizationMapCapabilitiesKHR; -typedef struct VkPhysicalDeviceShadingRateImagePropertiesNV { +typedef struct 
VkVideoEncodeH265QuantizationMapCapabilitiesKHR { VkStructureType sType; void* pNext; - VkExtent2D shadingRateTexelSize; - uint32_t shadingRatePaletteSize; - uint32_t shadingRateMaxCoarseSamples; -} VkPhysicalDeviceShadingRateImagePropertiesNV; + int32_t minQpDelta; + int32_t maxQpDelta; +} VkVideoEncodeH265QuantizationMapCapabilitiesKHR; -typedef struct VkCoarseSampleLocationNV { - uint32_t pixelX; - uint32_t pixelY; - uint32_t sample; -} VkCoarseSampleLocationNV; +typedef struct VkVideoFormatH265QuantizationMapPropertiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeH265CtbSizeFlagsKHR compatibleCtbSizes; +} VkVideoFormatH265QuantizationMapPropertiesKHR; -typedef struct VkCoarseSampleOrderCustomNV { - VkShadingRatePaletteEntryNV shadingRate; - uint32_t sampleCount; - uint32_t sampleLocationCount; - const VkCoarseSampleLocationNV* pSampleLocations; -} VkCoarseSampleOrderCustomNV; +typedef struct VkVideoEncodeAV1QuantizationMapCapabilitiesKHR { + VkStructureType sType; + void* pNext; + int32_t minQIndexDelta; + int32_t maxQIndexDelta; +} VkVideoEncodeAV1QuantizationMapCapabilitiesKHR; -typedef struct VkPipelineViewportCoarseSampleOrderStateCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkCoarseSampleOrderTypeNV sampleOrderType; - uint32_t customSampleOrderCount; - const VkCoarseSampleOrderCustomNV* pCustomSampleOrders; -} VkPipelineViewportCoarseSampleOrderStateCreateInfoNV; +typedef struct VkVideoFormatAV1QuantizationMapPropertiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeAV1SuperblockSizeFlagsKHR compatibleSuperblockSizes; +} VkVideoFormatAV1QuantizationMapPropertiesKHR; -typedef void (VKAPI_PTR *PFN_vkCmdBindShadingRateImageNV)(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout); -typedef void (VKAPI_PTR *PFN_vkCmdSetViewportShadingRatePaletteNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes); -typedef void (VKAPI_PTR *PFN_vkCmdSetCoarseSampleOrderNV)(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdBindShadingRateImageNV( - VkCommandBuffer commandBuffer, - VkImageView imageView, - VkImageLayout imageLayout); -VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportShadingRatePaletteNV( - VkCommandBuffer commandBuffer, - uint32_t firstViewport, - uint32_t viewportCount, - const VkShadingRatePaletteNV* pShadingRatePalettes); +// VK_KHR_shader_relaxed_extended_instruction is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_shader_relaxed_extended_instruction 1 +#define VK_KHR_SHADER_RELAXED_EXTENDED_INSTRUCTION_SPEC_VERSION 1 +#define VK_KHR_SHADER_RELAXED_EXTENDED_INSTRUCTION_EXTENSION_NAME "VK_KHR_shader_relaxed_extended_instruction" +typedef struct VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderRelaxedExtendedInstruction; +} VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR; -VKAPI_ATTR void VKAPI_CALL vkCmdSetCoarseSampleOrderNV( - VkCommandBuffer commandBuffer, - VkCoarseSampleOrderTypeNV sampleOrderType, - uint32_t customSampleOrderCount, - const VkCoarseSampleOrderCustomNV* pCustomSampleOrders); -#endif -#define VK_NV_ray_tracing 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureNV) -#define VK_NV_RAY_TRACING_SPEC_VERSION 3 -#define VK_NV_RAY_TRACING_EXTENSION_NAME "VK_NV_ray_tracing" -#define VK_SHADER_UNUSED_KHR (~0U) -#define VK_SHADER_UNUSED_NV VK_SHADER_UNUSED_KHR +// VK_KHR_maintenance7 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_maintenance7 1 +#define VK_KHR_MAINTENANCE_7_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_7_EXTENSION_NAME "VK_KHR_maintenance7" -typedef enum VkRayTracingShaderGroupTypeKHR { - VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR = 0, - VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR = 1, - VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR = 2, - VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR, - VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR, - VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR, - VK_RAY_TRACING_SHADER_GROUP_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkRayTracingShaderGroupTypeKHR; +typedef enum VkPhysicalDeviceLayeredApiKHR { + VK_PHYSICAL_DEVICE_LAYERED_API_VULKAN_KHR = 0, + VK_PHYSICAL_DEVICE_LAYERED_API_D3D12_KHR = 1, + VK_PHYSICAL_DEVICE_LAYERED_API_METAL_KHR = 2, + VK_PHYSICAL_DEVICE_LAYERED_API_OPENGL_KHR = 3, + VK_PHYSICAL_DEVICE_LAYERED_API_OPENGLES_KHR = 4, + VK_PHYSICAL_DEVICE_LAYERED_API_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPhysicalDeviceLayeredApiKHR; +typedef struct VkPhysicalDeviceMaintenance7FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 maintenance7; +} VkPhysicalDeviceMaintenance7FeaturesKHR; + +typedef struct VkPhysicalDeviceMaintenance7PropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 robustFragmentShadingRateAttachmentAccess; + VkBool32 separateDepthStencilAttachmentAccess; + uint32_t maxDescriptorSetTotalUniformBuffersDynamic; + uint32_t maxDescriptorSetTotalStorageBuffersDynamic; + uint32_t maxDescriptorSetTotalBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindTotalBuffersDynamic; +} VkPhysicalDeviceMaintenance7PropertiesKHR; + +typedef struct VkPhysicalDeviceLayeredApiPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t vendorID; + uint32_t deviceID; + VkPhysicalDeviceLayeredApiKHR layeredAPI; + char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE]; +} VkPhysicalDeviceLayeredApiPropertiesKHR; + +typedef struct VkPhysicalDeviceLayeredApiPropertiesListKHR { + VkStructureType sType; + void* pNext; + uint32_t layeredApiCount; + VkPhysicalDeviceLayeredApiPropertiesKHR* pLayeredApis; +} VkPhysicalDeviceLayeredApiPropertiesListKHR; + +typedef 
struct VkPhysicalDeviceLayeredApiVulkanPropertiesKHR { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceProperties2 properties; +} VkPhysicalDeviceLayeredApiVulkanPropertiesKHR; + + + +// VK_KHR_maintenance8 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_maintenance8 1 +#define VK_KHR_MAINTENANCE_8_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_8_EXTENSION_NAME "VK_KHR_maintenance8" +typedef VkFlags64 VkAccessFlags3KHR; + +// Flag bits for VkAccessFlagBits3KHR +typedef VkFlags64 VkAccessFlagBits3KHR; +static const VkAccessFlagBits3KHR VK_ACCESS_3_NONE_KHR = 0ULL; + +typedef struct VkPhysicalDeviceMaintenance8FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 maintenance8; +} VkPhysicalDeviceMaintenance8FeaturesKHR; + +typedef struct VkMemoryBarrierAccessFlags3KHR { + VkStructureType sType; + const void* pNext; + VkAccessFlags3KHR srcAccessMask3; + VkAccessFlags3KHR dstAccessMask3; +} VkMemoryBarrierAccessFlags3KHR; + + + +// VK_KHR_video_maintenance2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_maintenance2 1 +#define VK_KHR_VIDEO_MAINTENANCE_2_SPEC_VERSION 1 +#define VK_KHR_VIDEO_MAINTENANCE_2_EXTENSION_NAME "VK_KHR_video_maintenance2" +typedef struct VkPhysicalDeviceVideoMaintenance2FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 videoMaintenance2; +} VkPhysicalDeviceVideoMaintenance2FeaturesKHR; + +typedef struct VkVideoDecodeH264InlineSessionParametersInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoH264SequenceParameterSet* pStdSPS; + const StdVideoH264PictureParameterSet* pStdPPS; +} VkVideoDecodeH264InlineSessionParametersInfoKHR; + +typedef struct VkVideoDecodeH265InlineSessionParametersInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoH265VideoParameterSet* pStdVPS; + const StdVideoH265SequenceParameterSet* pStdSPS; + const StdVideoH265PictureParameterSet* pStdPPS; +} VkVideoDecodeH265InlineSessionParametersInfoKHR; + +typedef struct VkVideoDecodeAV1InlineSessionParametersInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoAV1SequenceHeader* pStdSequenceHeader; +} VkVideoDecodeAV1InlineSessionParametersInfoKHR; + + + +// VK_KHR_depth_clamp_zero_one is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_depth_clamp_zero_one 1 +#define VK_KHR_DEPTH_CLAMP_ZERO_ONE_SPEC_VERSION 1 +#define VK_KHR_DEPTH_CLAMP_ZERO_ONE_EXTENSION_NAME "VK_KHR_depth_clamp_zero_one" +typedef struct VkPhysicalDeviceDepthClampZeroOneFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 depthClampZeroOne; +} VkPhysicalDeviceDepthClampZeroOneFeaturesKHR; + + + +// VK_EXT_debug_report is a preprocessor guard. Do not pass it to API calls. 
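+
+/*
+ * Illustrative usage sketch (example only, compiled out) for VK_KHR_maintenance7:
+ * enumerating layered implementations with the usual two-call pattern on
+ * VkPhysicalDeviceLayeredApiPropertiesListKHR chained into VkPhysicalDeviceProperties2.
+ * The VK_STRUCTURE_TYPE_* tokens used for the two maintenance7 structures, the fixed-size
+ * array and the example* name are assumptions made for illustration only.
+ */
+#if 0
+static void exampleQueryLayeredApis(VkPhysicalDevice physicalDevice)
+{
+    VkPhysicalDeviceLayeredApiPropertiesListKHR apiList = { 0 };
+    apiList.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_LIST_KHR;
+
+    VkPhysicalDeviceProperties2 props2 = { 0 };
+    props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
+    props2.pNext = &apiList;
+
+    /* First call: pLayeredApis is NULL, so only layeredApiCount is written. */
+    vkGetPhysicalDeviceProperties2(physicalDevice, &props2);
+
+    VkPhysicalDeviceLayeredApiPropertiesKHR apis[4];
+    if (apiList.layeredApiCount > 4u) apiList.layeredApiCount = 4u;
+    for (uint32_t i = 0u; i < apiList.layeredApiCount; ++i) {
+        apis[i].sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_KHR;
+        apis[i].pNext = NULL;
+    }
+    apiList.pLayeredApis = apis;
+
+    /* Second call fills vendorID, deviceID, layeredAPI and deviceName per entry. */
+    vkGetPhysicalDeviceProperties2(physicalDevice, &props2);
+}
+#endif
+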
+#define VK_EXT_debug_report 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT) +#define VK_EXT_DEBUG_REPORT_SPEC_VERSION 10 +#define VK_EXT_DEBUG_REPORT_EXTENSION_NAME "VK_EXT_debug_report" + +typedef enum VkDebugReportObjectTypeEXT { + VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT = 0, + VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT = 1, + VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT = 2, + VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT = 3, + VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT = 4, + VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT = 5, + VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT = 6, + VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT = 7, + VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT = 8, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT = 9, + VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT = 10, + VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT = 11, + VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT = 12, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT = 13, + VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT = 14, + VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT = 15, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT = 16, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT = 17, + VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT = 18, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT = 19, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT = 20, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT = 21, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT = 22, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT = 23, + VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT = 24, + VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT = 25, + VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT = 26, + VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT = 27, + VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT = 28, + VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT = 29, + VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT = 30, + VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT = 33, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT = 1000156000, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT = 1000085000, + VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT = 1000029000, + VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT = 1000029001, + VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT = 1000150000, + VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT = 1000165000, + VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_MODULE_NV_EXT = 1000307000, + VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_FUNCTION_NV_EXT = 1000307001, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT = 1000366000, + // VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT is a deprecated alias + VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, + // VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT is a deprecated alias + VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugReportObjectTypeEXT; + +typedef enum VkDebugReportFlagBitsEXT { + VK_DEBUG_REPORT_INFORMATION_BIT_EXT = 0x00000001, + VK_DEBUG_REPORT_WARNING_BIT_EXT = 0x00000002, + VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT = 0x00000004, + VK_DEBUG_REPORT_ERROR_BIT_EXT = 0x00000008, + VK_DEBUG_REPORT_DEBUG_BIT_EXT = 0x00000010, + 
VK_DEBUG_REPORT_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugReportFlagBitsEXT; +typedef VkFlags VkDebugReportFlagsEXT; +typedef VkBool32 (VKAPI_PTR *PFN_vkDebugReportCallbackEXT)( + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char* pLayerPrefix, + const char* pMessage, + void* pUserData); + +typedef struct VkDebugReportCallbackCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportFlagsEXT flags; + PFN_vkDebugReportCallbackEXT pfnCallback; + void* pUserData; +} VkDebugReportCallbackCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugReportCallbackEXT)(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback); +typedef void (VKAPI_PTR *PFN_vkDestroyDebugReportCallbackEXT)(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkDebugReportMessageEXT)(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT( + VkInstance instance, + const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDebugReportCallbackEXT* pCallback); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT( + VkInstance instance, + VkDebugReportCallbackEXT callback, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT( + VkInstance instance, + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char* pLayerPrefix, + const char* pMessage); +#endif + + +// VK_NV_glsl_shader is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_glsl_shader 1 +#define VK_NV_GLSL_SHADER_SPEC_VERSION 1 +#define VK_NV_GLSL_SHADER_EXTENSION_NAME "VK_NV_glsl_shader" + + +// VK_EXT_depth_range_unrestricted is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_depth_range_unrestricted 1 +#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION 1 +#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME "VK_EXT_depth_range_unrestricted" + + +// VK_IMG_filter_cubic is a preprocessor guard. Do not pass it to API calls. +#define VK_IMG_filter_cubic 1 +#define VK_IMG_FILTER_CUBIC_SPEC_VERSION 1 +#define VK_IMG_FILTER_CUBIC_EXTENSION_NAME "VK_IMG_filter_cubic" + + +// VK_AMD_rasterization_order is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_rasterization_order 1 +#define VK_AMD_RASTERIZATION_ORDER_SPEC_VERSION 1 +#define VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME "VK_AMD_rasterization_order" + +typedef enum VkRasterizationOrderAMD { + VK_RASTERIZATION_ORDER_STRICT_AMD = 0, + VK_RASTERIZATION_ORDER_RELAXED_AMD = 1, + VK_RASTERIZATION_ORDER_MAX_ENUM_AMD = 0x7FFFFFFF +} VkRasterizationOrderAMD; +typedef struct VkPipelineRasterizationStateRasterizationOrderAMD { + VkStructureType sType; + const void* pNext; + VkRasterizationOrderAMD rasterizationOrder; +} VkPipelineRasterizationStateRasterizationOrderAMD; + + + +// VK_AMD_shader_trinary_minmax is a preprocessor guard. Do not pass it to API calls. 
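+
+/*
+ * Illustrative usage sketch (example only, compiled out) for VK_EXT_debug_report:
+ * a trivial callback plus registration through vkGetInstanceProcAddr, the usual way to
+ * reach instance-extension entry points. The example* names, the chosen flag mask and the
+ * omitted cleanup are placeholders for brevity.
+ */
+#if 0
+static VKAPI_ATTR VkBool32 VKAPI_CALL exampleDebugReportCallback(
+    VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object,
+    size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage,
+    void* pUserData)
+{
+    (void)flags; (void)objectType; (void)object; (void)location;
+    (void)messageCode; (void)pLayerPrefix; (void)pMessage; (void)pUserData;
+    /* Returning VK_FALSE asks the layer not to abort the call that triggered the report. */
+    return VK_FALSE;
+}
+
+static void exampleInstallDebugReportCallback(VkInstance instance)
+{
+    VkDebugReportCallbackCreateInfoEXT createInfo = { 0 };
+    createInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT;
+    createInfo.flags = VK_DEBUG_REPORT_WARNING_BIT_EXT | VK_DEBUG_REPORT_ERROR_BIT_EXT;
+    createInfo.pfnCallback = exampleDebugReportCallback;
+
+    PFN_vkCreateDebugReportCallbackEXT pfnCreate = (PFN_vkCreateDebugReportCallbackEXT)
+        vkGetInstanceProcAddr(instance, "vkCreateDebugReportCallbackEXT");
+
+    VkDebugReportCallbackEXT callback = VK_NULL_HANDLE;
+    if (pfnCreate != NULL) {
+        pfnCreate(instance, &createInfo, NULL, &callback);
+    }
+}
+#endif
+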
+#define VK_AMD_shader_trinary_minmax 1 +#define VK_AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION 1 +#define VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME "VK_AMD_shader_trinary_minmax" + + +// VK_AMD_shader_explicit_vertex_parameter is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_explicit_vertex_parameter 1 +#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION 1 +#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME "VK_AMD_shader_explicit_vertex_parameter" + + +// VK_EXT_debug_marker is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_debug_marker 1 +#define VK_EXT_DEBUG_MARKER_SPEC_VERSION 4 +#define VK_EXT_DEBUG_MARKER_EXTENSION_NAME "VK_EXT_debug_marker" +typedef struct VkDebugMarkerObjectNameInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportObjectTypeEXT objectType; + uint64_t object; + const char* pObjectName; +} VkDebugMarkerObjectNameInfoEXT; + +typedef struct VkDebugMarkerObjectTagInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportObjectTypeEXT objectType; + uint64_t object; + uint64_t tagName; + size_t tagSize; + const void* pTag; +} VkDebugMarkerObjectTagInfoEXT; + +typedef struct VkDebugMarkerMarkerInfoEXT { + VkStructureType sType; + const void* pNext; + const char* pMarkerName; + float color[4]; +} VkDebugMarkerMarkerInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectTagEXT)(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo); +typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectNameEXT)(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerBeginEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerEndEXT)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerInsertEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectTagEXT( + VkDevice device, + const VkDebugMarkerObjectTagInfoEXT* pTagInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectNameEXT( + VkDevice device, + const VkDebugMarkerObjectNameInfoEXT* pNameInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerBeginEXT( + VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerEndEXT( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerInsertEXT( + VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); +#endif + + +// VK_AMD_gcn_shader is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_gcn_shader 1 +#define VK_AMD_GCN_SHADER_SPEC_VERSION 1 +#define VK_AMD_GCN_SHADER_EXTENSION_NAME "VK_AMD_gcn_shader" + + +// VK_NV_dedicated_allocation is a preprocessor guard. Do not pass it to API calls. 
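+
+/*
+ * Illustrative usage sketch (example only, compiled out) for VK_EXT_debug_marker:
+ * bracketing a group of commands with a named, colored marker region. In practice the
+ * vkCmdDebugMarker* entry points are fetched with vkGetDeviceProcAddr; the static
+ * prototypes are called directly here for brevity, and the example* name is a placeholder.
+ */
+#if 0
+static void exampleDebugMarkerRegion(VkCommandBuffer commandBuffer)
+{
+    VkDebugMarkerMarkerInfoEXT marker = { 0 };
+    marker.sType = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT;
+    marker.pMarkerName = "Example pass";
+    marker.color[0] = 1.0f;  /* red, fully opaque */
+    marker.color[3] = 1.0f;
+
+    vkCmdDebugMarkerBeginEXT(commandBuffer, &marker);
+    /* ... record the commands that belong to this region ... */
+    vkCmdDebugMarkerEndEXT(commandBuffer);
+}
+#endif
+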
+#define VK_NV_dedicated_allocation 1 +#define VK_NV_DEDICATED_ALLOCATION_SPEC_VERSION 1 +#define VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_NV_dedicated_allocation" +typedef struct VkDedicatedAllocationImageCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 dedicatedAllocation; +} VkDedicatedAllocationImageCreateInfoNV; + +typedef struct VkDedicatedAllocationBufferCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 dedicatedAllocation; +} VkDedicatedAllocationBufferCreateInfoNV; + +typedef struct VkDedicatedAllocationMemoryAllocateInfoNV { + VkStructureType sType; + const void* pNext; + VkImage image; + VkBuffer buffer; +} VkDedicatedAllocationMemoryAllocateInfoNV; + + + +// VK_EXT_transform_feedback is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_transform_feedback 1 +#define VK_EXT_TRANSFORM_FEEDBACK_SPEC_VERSION 1 +#define VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME "VK_EXT_transform_feedback" +typedef VkFlags VkPipelineRasterizationStateStreamCreateFlagsEXT; +typedef struct VkPhysicalDeviceTransformFeedbackFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 transformFeedback; + VkBool32 geometryStreams; +} VkPhysicalDeviceTransformFeedbackFeaturesEXT; + +typedef struct VkPhysicalDeviceTransformFeedbackPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxTransformFeedbackStreams; + uint32_t maxTransformFeedbackBuffers; + VkDeviceSize maxTransformFeedbackBufferSize; + uint32_t maxTransformFeedbackStreamDataSize; + uint32_t maxTransformFeedbackBufferDataSize; + uint32_t maxTransformFeedbackBufferDataStride; + VkBool32 transformFeedbackQueries; + VkBool32 transformFeedbackStreamsLinesTriangles; + VkBool32 transformFeedbackRasterizationStreamSelect; + VkBool32 transformFeedbackDraw; +} VkPhysicalDeviceTransformFeedbackPropertiesEXT; + +typedef struct VkPipelineRasterizationStateStreamCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationStateStreamCreateFlagsEXT flags; + uint32_t rasterizationStream; +} VkPipelineRasterizationStateStreamCreateInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdBindTransformFeedbackBuffersEXT)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes); +typedef void (VKAPI_PTR *PFN_vkCmdBeginTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdEndTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdBeginQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index); +typedef void (VKAPI_PTR *PFN_vkCmdEndQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectByteCountEXT)(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindTransformFeedbackBuffersEXT( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* 
pBuffers, + const VkDeviceSize* pOffsets, + const VkDeviceSize* pSizes); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginTransformFeedbackEXT( + VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer* pCounterBuffers, + const VkDeviceSize* pCounterBufferOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndTransformFeedbackEXT( + VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer* pCounterBuffers, + const VkDeviceSize* pCounterBufferOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginQueryIndexedEXT( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + VkQueryControlFlags flags, + uint32_t index); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndQueryIndexedEXT( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + uint32_t index); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectByteCountEXT( + VkCommandBuffer commandBuffer, + uint32_t instanceCount, + uint32_t firstInstance, + VkBuffer counterBuffer, + VkDeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride); +#endif + + +// VK_NVX_binary_import is a preprocessor guard. Do not pass it to API calls. +#define VK_NVX_binary_import 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuModuleNVX) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuFunctionNVX) +#define VK_NVX_BINARY_IMPORT_SPEC_VERSION 2 +#define VK_NVX_BINARY_IMPORT_EXTENSION_NAME "VK_NVX_binary_import" +typedef struct VkCuModuleCreateInfoNVX { + VkStructureType sType; + const void* pNext; + size_t dataSize; + const void* pData; +} VkCuModuleCreateInfoNVX; + +typedef struct VkCuModuleTexturingModeCreateInfoNVX { + VkStructureType sType; + const void* pNext; + VkBool32 use64bitTexturing; +} VkCuModuleTexturingModeCreateInfoNVX; + +typedef struct VkCuFunctionCreateInfoNVX { + VkStructureType sType; + const void* pNext; + VkCuModuleNVX module; + const char* pName; +} VkCuFunctionCreateInfoNVX; + +typedef struct VkCuLaunchInfoNVX { + VkStructureType sType; + const void* pNext; + VkCuFunctionNVX function; + uint32_t gridDimX; + uint32_t gridDimY; + uint32_t gridDimZ; + uint32_t blockDimX; + uint32_t blockDimY; + uint32_t blockDimZ; + uint32_t sharedMemBytes; + size_t paramCount; + const void* const * pParams; + size_t extraCount; + const void* const * pExtras; +} VkCuLaunchInfoNVX; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateCuModuleNVX)(VkDevice device, const VkCuModuleCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCuModuleNVX* pModule); +typedef VkResult (VKAPI_PTR *PFN_vkCreateCuFunctionNVX)(VkDevice device, const VkCuFunctionCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCuFunctionNVX* pFunction); +typedef void (VKAPI_PTR *PFN_vkDestroyCuModuleNVX)(VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkDestroyCuFunctionNVX)(VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdCuLaunchKernelNVX)(VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX* pLaunchInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCuModuleNVX( + VkDevice device, + const VkCuModuleCreateInfoNVX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCuModuleNVX* pModule); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCuFunctionNVX( + VkDevice device, + const VkCuFunctionCreateInfoNVX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCuFunctionNVX* pFunction); + 
+VKAPI_ATTR void VKAPI_CALL vkDestroyCuModuleNVX( + VkDevice device, + VkCuModuleNVX module, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCuFunctionNVX( + VkDevice device, + VkCuFunctionNVX function, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdCuLaunchKernelNVX( + VkCommandBuffer commandBuffer, + const VkCuLaunchInfoNVX* pLaunchInfo); +#endif + + +// VK_NVX_image_view_handle is a preprocessor guard. Do not pass it to API calls. +#define VK_NVX_image_view_handle 1 +#define VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION 3 +#define VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME "VK_NVX_image_view_handle" +typedef struct VkImageViewHandleInfoNVX { + VkStructureType sType; + const void* pNext; + VkImageView imageView; + VkDescriptorType descriptorType; + VkSampler sampler; +} VkImageViewHandleInfoNVX; + +typedef struct VkImageViewAddressPropertiesNVX { + VkStructureType sType; + void* pNext; + VkDeviceAddress deviceAddress; + VkDeviceSize size; +} VkImageViewAddressPropertiesNVX; + +typedef uint32_t (VKAPI_PTR *PFN_vkGetImageViewHandleNVX)(VkDevice device, const VkImageViewHandleInfoNVX* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetImageViewHandle64NVX)(VkDevice device, const VkImageViewHandleInfoNVX* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetImageViewAddressNVX)(VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR uint32_t VKAPI_CALL vkGetImageViewHandleNVX( + VkDevice device, + const VkImageViewHandleInfoNVX* pInfo); + +VKAPI_ATTR uint64_t VKAPI_CALL vkGetImageViewHandle64NVX( + VkDevice device, + const VkImageViewHandleInfoNVX* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetImageViewAddressNVX( + VkDevice device, + VkImageView imageView, + VkImageViewAddressPropertiesNVX* pProperties); +#endif + + +// VK_AMD_draw_indirect_count is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_draw_indirect_count 1 +#define VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION 2 +#define VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_AMD_draw_indirect_count" +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountAMD( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountAMD( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif + + +// VK_AMD_negative_viewport_height is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_negative_viewport_height 1 +#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION 1 +#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME "VK_AMD_negative_viewport_height" + + +// VK_AMD_gpu_shader_half_float is a preprocessor guard. Do not pass it to API calls. 
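+
+/*
+ * Illustrative usage sketch (example only, compiled out) for VK_EXT_transform_feedback:
+ * binding one capture buffer, then bracketing the draws whose vertex outputs should be
+ * captured. The buffers, the single-binding layout and the example* name are placeholders;
+ * the pSizes array may also be NULL to capture to the end of each buffer.
+ */
+#if 0
+static void exampleTransformFeedback(VkCommandBuffer commandBuffer,
+                                     VkBuffer captureBuffer, VkBuffer counterBuffer)
+{
+    const VkDeviceSize offset = 0;
+    const VkDeviceSize size = VK_WHOLE_SIZE;
+
+    vkCmdBindTransformFeedbackBuffersEXT(commandBuffer, 0u, 1u, &captureBuffer, &offset, &size);
+
+    vkCmdBeginTransformFeedbackEXT(commandBuffer, 0u, 1u, &counterBuffer, &offset);
+    /* ... draw calls recorded here have their vertex outputs written to captureBuffer ... */
+    vkCmdEndTransformFeedbackEXT(commandBuffer, 0u, 1u, &counterBuffer, &offset);
+}
+#endif
+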
+#define VK_AMD_gpu_shader_half_float 1 +#define VK_AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION 2 +#define VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME "VK_AMD_gpu_shader_half_float" + + +// VK_AMD_shader_ballot is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_ballot 1 +#define VK_AMD_SHADER_BALLOT_SPEC_VERSION 1 +#define VK_AMD_SHADER_BALLOT_EXTENSION_NAME "VK_AMD_shader_ballot" + + +// VK_AMD_texture_gather_bias_lod is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_texture_gather_bias_lod 1 +#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION 1 +#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME "VK_AMD_texture_gather_bias_lod" +typedef struct VkTextureLODGatherFormatPropertiesAMD { + VkStructureType sType; + void* pNext; + VkBool32 supportsTextureGatherLODBiasAMD; +} VkTextureLODGatherFormatPropertiesAMD; + + + +// VK_AMD_shader_info is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_info 1 +#define VK_AMD_SHADER_INFO_SPEC_VERSION 1 +#define VK_AMD_SHADER_INFO_EXTENSION_NAME "VK_AMD_shader_info" + +typedef enum VkShaderInfoTypeAMD { + VK_SHADER_INFO_TYPE_STATISTICS_AMD = 0, + VK_SHADER_INFO_TYPE_BINARY_AMD = 1, + VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD = 2, + VK_SHADER_INFO_TYPE_MAX_ENUM_AMD = 0x7FFFFFFF +} VkShaderInfoTypeAMD; +typedef struct VkShaderResourceUsageAMD { + uint32_t numUsedVgprs; + uint32_t numUsedSgprs; + uint32_t ldsSizePerLocalWorkGroup; + size_t ldsUsageSizeInBytes; + size_t scratchMemUsageInBytes; +} VkShaderResourceUsageAMD; + +typedef struct VkShaderStatisticsInfoAMD { + VkShaderStageFlags shaderStageMask; + VkShaderResourceUsageAMD resourceUsage; + uint32_t numPhysicalVgprs; + uint32_t numPhysicalSgprs; + uint32_t numAvailableVgprs; + uint32_t numAvailableSgprs; + uint32_t computeWorkGroupSize[3]; +} VkShaderStatisticsInfoAMD; + +typedef VkResult (VKAPI_PTR *PFN_vkGetShaderInfoAMD)(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetShaderInfoAMD( + VkDevice device, + VkPipeline pipeline, + VkShaderStageFlagBits shaderStage, + VkShaderInfoTypeAMD infoType, + size_t* pInfoSize, + void* pInfo); +#endif + + +// VK_AMD_shader_image_load_store_lod is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_image_load_store_lod 1 +#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_SPEC_VERSION 1 +#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME "VK_AMD_shader_image_load_store_lod" + + +// VK_NV_corner_sampled_image is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_corner_sampled_image 1 +#define VK_NV_CORNER_SAMPLED_IMAGE_SPEC_VERSION 2 +#define VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME "VK_NV_corner_sampled_image" +typedef struct VkPhysicalDeviceCornerSampledImageFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 cornerSampledImage; +} VkPhysicalDeviceCornerSampledImageFeaturesNV; + + + +// VK_IMG_format_pvrtc is a preprocessor guard. Do not pass it to API calls. +#define VK_IMG_format_pvrtc 1 +#define VK_IMG_FORMAT_PVRTC_SPEC_VERSION 1 +#define VK_IMG_FORMAT_PVRTC_EXTENSION_NAME "VK_IMG_format_pvrtc" + + +// VK_NV_external_memory_capabilities is a preprocessor guard. Do not pass it to API calls. 
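+
+/*
+ * Illustrative usage sketch (example only, compiled out) for VK_AMD_shader_info:
+ * fetching per-stage statistics for a pipeline. Statistics have a fixed-size output
+ * structure, so no size-query loop is needed; binary and disassembly queries would use
+ * the usual two-call pattern instead. The example* name is a placeholder.
+ */
+#if 0
+static void exampleGetShaderStatistics(VkDevice device, VkPipeline pipeline)
+{
+    VkShaderStatisticsInfoAMD stats;
+    size_t infoSize = sizeof(stats);
+
+    VkResult result = vkGetShaderInfoAMD(device, pipeline, VK_SHADER_STAGE_FRAGMENT_BIT,
+                                         VK_SHADER_INFO_TYPE_STATISTICS_AMD,
+                                         &infoSize, &stats);
+    if (result == VK_SUCCESS) {
+        /* stats.resourceUsage.numUsedVgprs, numUsedSgprs, etc. are now valid. */
+    }
+}
+#endif
+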
+#define VK_NV_external_memory_capabilities 1 +#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_NV_external_memory_capabilities" + +typedef enum VkExternalMemoryHandleTypeFlagBitsNV { + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV = 0x00000001, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV = 0x00000002, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV = 0x00000004, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV = 0x00000008, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkExternalMemoryHandleTypeFlagBitsNV; +typedef VkFlags VkExternalMemoryHandleTypeFlagsNV; + +typedef enum VkExternalMemoryFeatureFlagBitsNV { + VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV = 0x00000001, + VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV = 0x00000002, + VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV = 0x00000004, + VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkExternalMemoryFeatureFlagBitsNV; +typedef VkFlags VkExternalMemoryFeatureFlagsNV; +typedef struct VkExternalImageFormatPropertiesNV { + VkImageFormatProperties imageFormatProperties; + VkExternalMemoryFeatureFlagsNV externalMemoryFeatures; + VkExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes; + VkExternalMemoryHandleTypeFlagsNV compatibleHandleTypes; +} VkExternalImageFormatPropertiesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkExternalMemoryHandleTypeFlagsNV externalHandleType, + VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); +#endif + + +// VK_NV_external_memory is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_external_memory 1 +#define VK_NV_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME "VK_NV_external_memory" +typedef struct VkExternalMemoryImageCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagsNV handleTypes; +} VkExternalMemoryImageCreateInfoNV; + +typedef struct VkExportMemoryAllocateInfoNV { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagsNV handleTypes; +} VkExportMemoryAllocateInfoNV; + + + +// VK_EXT_validation_flags is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_validation_flags 1 +#define VK_EXT_VALIDATION_FLAGS_SPEC_VERSION 3 +#define VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME "VK_EXT_validation_flags" + +typedef enum VkValidationCheckEXT { + VK_VALIDATION_CHECK_ALL_EXT = 0, + VK_VALIDATION_CHECK_SHADERS_EXT = 1, + VK_VALIDATION_CHECK_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationCheckEXT; +typedef struct VkValidationFlagsEXT { + VkStructureType sType; + const void* pNext; + uint32_t disabledValidationCheckCount; + const VkValidationCheckEXT* pDisabledValidationChecks; +} VkValidationFlagsEXT; + + + +// VK_EXT_shader_subgroup_ballot is a preprocessor guard. Do not pass it to API calls. 
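+
+/*
+ * Illustrative usage sketch (example only, compiled out) for VK_NV_external_memory_capabilities:
+ * asking whether a 2D sampled RGBA8 image can be exported as an opaque Win32 handle.
+ * The chosen format, usage and handle type are arbitrary examples, and the example* name
+ * is a placeholder.
+ */
+#if 0
+static void exampleQueryExternalImageFormatNV(VkPhysicalDevice physicalDevice)
+{
+    VkExternalImageFormatPropertiesNV props;
+    VkResult result = vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
+        physicalDevice,
+        VK_FORMAT_R8G8B8A8_UNORM,
+        VK_IMAGE_TYPE_2D,
+        VK_IMAGE_TILING_OPTIMAL,
+        VK_IMAGE_USAGE_SAMPLED_BIT,
+        0u,                                                  /* VkImageCreateFlags */
+        VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,
+        &props);
+    if (result == VK_SUCCESS) {
+        /* props.externalMemoryFeatures and props.compatibleHandleTypes describe support. */
+    }
+}
+#endif
+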
+#define VK_EXT_shader_subgroup_ballot 1 +#define VK_EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION 1 +#define VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME "VK_EXT_shader_subgroup_ballot" + + +// VK_EXT_shader_subgroup_vote is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_subgroup_vote 1 +#define VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION 1 +#define VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME "VK_EXT_shader_subgroup_vote" + + +// VK_EXT_texture_compression_astc_hdr is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_texture_compression_astc_hdr 1 +#define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION 1 +#define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME "VK_EXT_texture_compression_astc_hdr" +typedef VkPhysicalDeviceTextureCompressionASTCHDRFeatures VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT; + + + +// VK_EXT_astc_decode_mode is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_astc_decode_mode 1 +#define VK_EXT_ASTC_DECODE_MODE_SPEC_VERSION 1 +#define VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME "VK_EXT_astc_decode_mode" +typedef struct VkImageViewASTCDecodeModeEXT { + VkStructureType sType; + const void* pNext; + VkFormat decodeMode; +} VkImageViewASTCDecodeModeEXT; + +typedef struct VkPhysicalDeviceASTCDecodeFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 decodeModeSharedExponent; +} VkPhysicalDeviceASTCDecodeFeaturesEXT; + + + +// VK_EXT_pipeline_robustness is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pipeline_robustness 1 +#define VK_EXT_PIPELINE_ROBUSTNESS_SPEC_VERSION 1 +#define VK_EXT_PIPELINE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_pipeline_robustness" +typedef VkPipelineRobustnessBufferBehavior VkPipelineRobustnessBufferBehaviorEXT; + +typedef VkPipelineRobustnessImageBehavior VkPipelineRobustnessImageBehaviorEXT; + +typedef VkPhysicalDevicePipelineRobustnessFeatures VkPhysicalDevicePipelineRobustnessFeaturesEXT; + +typedef VkPhysicalDevicePipelineRobustnessProperties VkPhysicalDevicePipelineRobustnessPropertiesEXT; + +typedef VkPipelineRobustnessCreateInfo VkPipelineRobustnessCreateInfoEXT; + + + +// VK_EXT_conditional_rendering is a preprocessor guard. Do not pass it to API calls. 
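+
+/*
+ * Illustrative usage sketch (example only, compiled out) for VK_EXT_astc_decode_mode:
+ * requesting 8-bit UNORM decode precision for a view of an LDR ASTC image by chaining
+ * VkImageViewASTCDecodeModeEXT into VkImageViewCreateInfo. The ASTC block format, the
+ * subresource range and the example* name are arbitrary placeholders.
+ */
+#if 0
+static void exampleAstcDecodeModeView(VkDevice device, VkImage astcImage)
+{
+    VkImageViewASTCDecodeModeEXT decodeMode = { 0 };
+    decodeMode.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT;
+    decodeMode.decodeMode = VK_FORMAT_R8G8B8A8_UNORM;  /* decode at 8-bit instead of FP16 */
+
+    VkImageViewCreateInfo viewInfo = { 0 };
+    viewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+    viewInfo.pNext = &decodeMode;
+    viewInfo.image = astcImage;
+    viewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
+    viewInfo.format = VK_FORMAT_ASTC_8x8_UNORM_BLOCK;
+    viewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+    viewInfo.subresourceRange.levelCount = 1u;
+    viewInfo.subresourceRange.layerCount = 1u;
+
+    VkImageView view = VK_NULL_HANDLE;
+    vkCreateImageView(device, &viewInfo, NULL, &view);
+}
+#endif
+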
+#define VK_EXT_conditional_rendering 1 +#define VK_EXT_CONDITIONAL_RENDERING_SPEC_VERSION 2 +#define VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME "VK_EXT_conditional_rendering" + +typedef enum VkConditionalRenderingFlagBitsEXT { + VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT = 0x00000001, + VK_CONDITIONAL_RENDERING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkConditionalRenderingFlagBitsEXT; +typedef VkFlags VkConditionalRenderingFlagsEXT; +typedef struct VkConditionalRenderingBeginInfoEXT { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; + VkDeviceSize offset; + VkConditionalRenderingFlagsEXT flags; +} VkConditionalRenderingBeginInfoEXT; + +typedef struct VkPhysicalDeviceConditionalRenderingFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 conditionalRendering; + VkBool32 inheritedConditionalRendering; +} VkPhysicalDeviceConditionalRenderingFeaturesEXT; + +typedef struct VkCommandBufferInheritanceConditionalRenderingInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 conditionalRenderingEnable; +} VkCommandBufferInheritanceConditionalRenderingInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdBeginConditionalRenderingEXT)(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin); +typedef void (VKAPI_PTR *PFN_vkCmdEndConditionalRenderingEXT)(VkCommandBuffer commandBuffer); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBeginConditionalRenderingEXT( + VkCommandBuffer commandBuffer, + const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndConditionalRenderingEXT( + VkCommandBuffer commandBuffer); +#endif + + +// VK_NV_clip_space_w_scaling is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_clip_space_w_scaling 1 +#define VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION 1 +#define VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME "VK_NV_clip_space_w_scaling" +typedef struct VkViewportWScalingNV { + float xcoeff; + float ycoeff; +} VkViewportWScalingNV; + +typedef struct VkPipelineViewportWScalingStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 viewportWScalingEnable; + uint32_t viewportCount; + const VkViewportWScalingNV* pViewportWScalings; +} VkPipelineViewportWScalingStateCreateInfoNV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingNV( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportWScalingNV* pViewportWScalings); +#endif + + +// VK_EXT_direct_mode_display is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_direct_mode_display 1 +#define VK_EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION 1 +#define VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME "VK_EXT_direct_mode_display" +typedef VkResult (VKAPI_PTR *PFN_vkReleaseDisplayEXT)(VkPhysicalDevice physicalDevice, VkDisplayKHR display); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkReleaseDisplayEXT( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display); +#endif + + +// VK_EXT_display_surface_counter is a preprocessor guard. Do not pass it to API calls. 
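+
+/*
+ * Illustrative usage sketch (example only, compiled out) for VK_EXT_conditional_rendering:
+ * predicating a group of draws on a 32-bit value stored in a buffer. The buffer, the offset
+ * and the example* name are placeholders; passing VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT
+ * in flags would instead execute the commands only when the predicate is zero.
+ */
+#if 0
+static void exampleConditionalRendering(VkCommandBuffer commandBuffer, VkBuffer predicateBuffer)
+{
+    VkConditionalRenderingBeginInfoEXT beginInfo = { 0 };
+    beginInfo.sType = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT;
+    beginInfo.buffer = predicateBuffer;  /* 32-bit predicate read at 'offset' */
+    beginInfo.offset = 0;
+    beginInfo.flags = 0;
+
+    vkCmdBeginConditionalRenderingEXT(commandBuffer, &beginInfo);
+    /* ... draws and dispatches here are discarded when the predicate is zero ... */
+    vkCmdEndConditionalRenderingEXT(commandBuffer);
+}
+#endif
+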
+#define VK_EXT_display_surface_counter 1 +#define VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION 1 +#define VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME "VK_EXT_display_surface_counter" + +typedef enum VkSurfaceCounterFlagBitsEXT { + VK_SURFACE_COUNTER_VBLANK_BIT_EXT = 0x00000001, + // VK_SURFACE_COUNTER_VBLANK_EXT is a deprecated alias + VK_SURFACE_COUNTER_VBLANK_EXT = VK_SURFACE_COUNTER_VBLANK_BIT_EXT, + VK_SURFACE_COUNTER_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkSurfaceCounterFlagBitsEXT; +typedef VkFlags VkSurfaceCounterFlagsEXT; +typedef struct VkSurfaceCapabilities2EXT { + VkStructureType sType; + void* pNext; + uint32_t minImageCount; + uint32_t maxImageCount; + VkExtent2D currentExtent; + VkExtent2D minImageExtent; + VkExtent2D maxImageExtent; + uint32_t maxImageArrayLayers; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkSurfaceTransformFlagBitsKHR currentTransform; + VkCompositeAlphaFlagsKHR supportedCompositeAlpha; + VkImageUsageFlags supportedUsageFlags; + VkSurfaceCounterFlagsEXT supportedSurfaceCounters; +} VkSurfaceCapabilities2EXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilities2EXT* pSurfaceCapabilities); +#endif + + +// VK_EXT_display_control is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_display_control 1 +#define VK_EXT_DISPLAY_CONTROL_SPEC_VERSION 1 +#define VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME "VK_EXT_display_control" + +typedef enum VkDisplayPowerStateEXT { + VK_DISPLAY_POWER_STATE_OFF_EXT = 0, + VK_DISPLAY_POWER_STATE_SUSPEND_EXT = 1, + VK_DISPLAY_POWER_STATE_ON_EXT = 2, + VK_DISPLAY_POWER_STATE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDisplayPowerStateEXT; + +typedef enum VkDeviceEventTypeEXT { + VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT = 0, + VK_DEVICE_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceEventTypeEXT; + +typedef enum VkDisplayEventTypeEXT { + VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT = 0, + VK_DISPLAY_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDisplayEventTypeEXT; +typedef struct VkDisplayPowerInfoEXT { + VkStructureType sType; + const void* pNext; + VkDisplayPowerStateEXT powerState; +} VkDisplayPowerInfoEXT; + +typedef struct VkDeviceEventInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceEventTypeEXT deviceEvent; +} VkDeviceEventInfoEXT; + +typedef struct VkDisplayEventInfoEXT { + VkStructureType sType; + const void* pNext; + VkDisplayEventTypeEXT displayEvent; +} VkDisplayEventInfoEXT; + +typedef struct VkSwapchainCounterCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkSurfaceCounterFlagsEXT surfaceCounters; +} VkSwapchainCounterCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkDisplayPowerControlEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo); +typedef VkResult (VKAPI_PTR *PFN_vkRegisterDeviceEventEXT)(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef VkResult (VKAPI_PTR *PFN_vkRegisterDisplayEventEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainCounterEXT)(VkDevice device, VkSwapchainKHR swapchain, 
VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkDisplayPowerControlEXT( + VkDevice device, + VkDisplayKHR display, + const VkDisplayPowerInfoEXT* pDisplayPowerInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDeviceEventEXT( + VkDevice device, + const VkDeviceEventInfoEXT* pDeviceEventInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDisplayEventEXT( + VkDevice device, + VkDisplayKHR display, + const VkDisplayEventInfoEXT* pDisplayEventInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainCounterEXT( + VkDevice device, + VkSwapchainKHR swapchain, + VkSurfaceCounterFlagBitsEXT counter, + uint64_t* pCounterValue); +#endif + + +// VK_GOOGLE_display_timing is a preprocessor guard. Do not pass it to API calls. +#define VK_GOOGLE_display_timing 1 +#define VK_GOOGLE_DISPLAY_TIMING_SPEC_VERSION 1 +#define VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME "VK_GOOGLE_display_timing" +typedef struct VkRefreshCycleDurationGOOGLE { + uint64_t refreshDuration; +} VkRefreshCycleDurationGOOGLE; + +typedef struct VkPastPresentationTimingGOOGLE { + uint32_t presentID; + uint64_t desiredPresentTime; + uint64_t actualPresentTime; + uint64_t earliestPresentTime; + uint64_t presentMargin; +} VkPastPresentationTimingGOOGLE; + +typedef struct VkPresentTimeGOOGLE { + uint32_t presentID; + uint64_t desiredPresentTime; +} VkPresentTimeGOOGLE; + +typedef struct VkPresentTimesInfoGOOGLE { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkPresentTimeGOOGLE* pTimes; +} VkPresentTimesInfoGOOGLE; + +typedef VkResult (VKAPI_PTR *PFN_vkGetRefreshCycleDurationGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPastPresentationTimingGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetRefreshCycleDurationGOOGLE( + VkDevice device, + VkSwapchainKHR swapchain, + VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPastPresentationTimingGOOGLE( + VkDevice device, + VkSwapchainKHR swapchain, + uint32_t* pPresentationTimingCount, + VkPastPresentationTimingGOOGLE* pPresentationTimings); +#endif + + +// VK_NV_sample_mask_override_coverage is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_sample_mask_override_coverage 1 +#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION 1 +#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME "VK_NV_sample_mask_override_coverage" + + +// VK_NV_geometry_shader_passthrough is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_geometry_shader_passthrough 1 +#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION 1 +#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME "VK_NV_geometry_shader_passthrough" + + +// VK_NV_viewport_array2 is a preprocessor guard. Do not pass it to API calls. 
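/* Usage sketch for the VK_GOOGLE_display_timing queries declared above (not from the
 * generated header): the refresh period is read once, and past presentation timings are
 * enumerated with the usual count-then-fill pattern. The helper name is illustrative and
 * error handling is minimal. */
#include <stdio.h>
#include <stdlib.h>
#include <vulkan/vulkan.h>

static void report_display_timing(VkDevice device, VkSwapchainKHR swapchain)
{
    VkRefreshCycleDurationGOOGLE refresh = {0};
    if (vkGetRefreshCycleDurationGOOGLE(device, swapchain, &refresh) == VK_SUCCESS)
        printf("refresh duration: %llu ns\n", (unsigned long long)refresh.refreshDuration);

    uint32_t count = 0;
    vkGetPastPresentationTimingGOOGLE(device, swapchain, &count, NULL);   /* query count */
    if (count == 0)
        return;
    VkPastPresentationTimingGOOGLE* timings = calloc(count, sizeof(*timings));
    vkGetPastPresentationTimingGOOGLE(device, swapchain, &count, timings); /* fill array */
    for (uint32_t i = 0; i < count; ++i)
        printf("presentID %u: margin %llu ns\n", timings[i].presentID,
               (unsigned long long)timings[i].presentMargin);
    free(timings);
}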
+#define VK_NV_viewport_array2 1 +#define VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION 1 +#define VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME "VK_NV_viewport_array2" +// VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION is a deprecated alias +#define VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION +// VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME is a deprecated alias +#define VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME + + +// VK_NVX_multiview_per_view_attributes is a preprocessor guard. Do not pass it to API calls. +#define VK_NVX_multiview_per_view_attributes 1 +#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION 1 +#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME "VK_NVX_multiview_per_view_attributes" +typedef struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX { + VkStructureType sType; + void* pNext; + VkBool32 perViewPositionAllComponents; +} VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + +typedef struct VkMultiviewPerViewAttributesInfoNVX { + VkStructureType sType; + const void* pNext; + VkBool32 perViewAttributes; + VkBool32 perViewAttributesPositionXOnly; +} VkMultiviewPerViewAttributesInfoNVX; + + + +// VK_NV_viewport_swizzle is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_viewport_swizzle 1 +#define VK_NV_VIEWPORT_SWIZZLE_SPEC_VERSION 1 +#define VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME "VK_NV_viewport_swizzle" + +typedef enum VkViewportCoordinateSwizzleNV { + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV = 0, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV = 1, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV = 2, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV = 3, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV = 4, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV = 5, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV = 6, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV = 7, + VK_VIEWPORT_COORDINATE_SWIZZLE_MAX_ENUM_NV = 0x7FFFFFFF +} VkViewportCoordinateSwizzleNV; +typedef VkFlags VkPipelineViewportSwizzleStateCreateFlagsNV; +typedef struct VkViewportSwizzleNV { + VkViewportCoordinateSwizzleNV x; + VkViewportCoordinateSwizzleNV y; + VkViewportCoordinateSwizzleNV z; + VkViewportCoordinateSwizzleNV w; +} VkViewportSwizzleNV; + +typedef struct VkPipelineViewportSwizzleStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineViewportSwizzleStateCreateFlagsNV flags; + uint32_t viewportCount; + const VkViewportSwizzleNV* pViewportSwizzles; +} VkPipelineViewportSwizzleStateCreateInfoNV; + + + +// VK_EXT_discard_rectangles is a preprocessor guard. Do not pass it to API calls. 
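/* Sketch showing how the VK_NV_viewport_swizzle state declared above is attached to a
 * graphics pipeline: the NV struct is chained into VkPipelineViewportStateCreateInfo::pNext
 * with one VkViewportSwizzleNV per viewport. Not part of the generated header; the helper
 * name is illustrative, and both structs must stay valid until vkCreateGraphicsPipelines. */
#include <vulkan/vulkan.h>

static void chain_viewport_swizzle(VkPipelineViewportStateCreateInfo* viewportState,
                                   VkPipelineViewportSwizzleStateCreateInfoNV* swizzleState,
                                   const VkViewportSwizzleNV* swizzles /* one per viewport */)
{
    swizzleState->sType             = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV;
    swizzleState->pNext             = viewportState->pNext;   /* preserve any existing chain */
    swizzleState->flags             = 0;
    swizzleState->viewportCount     = viewportState->viewportCount;
    swizzleState->pViewportSwizzles = swizzles;
    viewportState->pNext            = swizzleState;
}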
+#define VK_EXT_discard_rectangles 1 +#define VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION 2 +#define VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME "VK_EXT_discard_rectangles" + +typedef enum VkDiscardRectangleModeEXT { + VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT = 0, + VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT = 1, + VK_DISCARD_RECTANGLE_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDiscardRectangleModeEXT; +typedef VkFlags VkPipelineDiscardRectangleStateCreateFlagsEXT; +typedef struct VkPhysicalDeviceDiscardRectanglePropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxDiscardRectangles; +} VkPhysicalDeviceDiscardRectanglePropertiesEXT; + +typedef struct VkPipelineDiscardRectangleStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineDiscardRectangleStateCreateFlagsEXT flags; + VkDiscardRectangleModeEXT discardRectangleMode; + uint32_t discardRectangleCount; + const VkRect2D* pDiscardRectangles; +} VkPipelineDiscardRectangleStateCreateInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleEXT)(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles); +typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 discardRectangleEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleModeEXT)(VkCommandBuffer commandBuffer, VkDiscardRectangleModeEXT discardRectangleMode); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleEXT( + VkCommandBuffer commandBuffer, + uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VkRect2D* pDiscardRectangles); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 discardRectangleEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleModeEXT( + VkCommandBuffer commandBuffer, + VkDiscardRectangleModeEXT discardRectangleMode); +#endif + + +// VK_EXT_conservative_rasterization is a preprocessor guard. Do not pass it to API calls. 
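/* Sketch for the VK_EXT_discard_rectangles command declared above, assuming the bound
 * pipeline enables the discard-rectangle dynamic state. Not part of the generated header;
 * the helper name is illustrative. */
#include <vulkan/vulkan.h>

/* Restrict rasterization to a single inclusive discard rectangle. */
static void set_discard_rect(VkCommandBuffer cmd, int32_t x, int32_t y,
                             uint32_t width, uint32_t height)
{
    VkRect2D rect = {
        .offset = { x, y },
        .extent = { width, height },
    };
    vkCmdSetDiscardRectangleEXT(cmd, 0 /* firstDiscardRectangle */, 1, &rect);
}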
+#define VK_EXT_conservative_rasterization 1 +#define VK_EXT_CONSERVATIVE_RASTERIZATION_SPEC_VERSION 1 +#define VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME "VK_EXT_conservative_rasterization" + +typedef enum VkConservativeRasterizationModeEXT { + VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT = 0, + VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT = 1, + VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT = 2, + VK_CONSERVATIVE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkConservativeRasterizationModeEXT; +typedef VkFlags VkPipelineRasterizationConservativeStateCreateFlagsEXT; +typedef struct VkPhysicalDeviceConservativeRasterizationPropertiesEXT { + VkStructureType sType; + void* pNext; + float primitiveOverestimationSize; + float maxExtraPrimitiveOverestimationSize; + float extraPrimitiveOverestimationSizeGranularity; + VkBool32 primitiveUnderestimation; + VkBool32 conservativePointAndLineRasterization; + VkBool32 degenerateTrianglesRasterized; + VkBool32 degenerateLinesRasterized; + VkBool32 fullyCoveredFragmentShaderInputVariable; + VkBool32 conservativeRasterizationPostDepthCoverage; +} VkPhysicalDeviceConservativeRasterizationPropertiesEXT; + +typedef struct VkPipelineRasterizationConservativeStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationConservativeStateCreateFlagsEXT flags; + VkConservativeRasterizationModeEXT conservativeRasterizationMode; + float extraPrimitiveOverestimationSize; +} VkPipelineRasterizationConservativeStateCreateInfoEXT; + + + +// VK_EXT_depth_clip_enable is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_depth_clip_enable 1 +#define VK_EXT_DEPTH_CLIP_ENABLE_SPEC_VERSION 1 +#define VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME "VK_EXT_depth_clip_enable" +typedef VkFlags VkPipelineRasterizationDepthClipStateCreateFlagsEXT; +typedef struct VkPhysicalDeviceDepthClipEnableFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 depthClipEnable; +} VkPhysicalDeviceDepthClipEnableFeaturesEXT; + +typedef struct VkPipelineRasterizationDepthClipStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationDepthClipStateCreateFlagsEXT flags; + VkBool32 depthClipEnable; +} VkPipelineRasterizationDepthClipStateCreateInfoEXT; + + + +// VK_EXT_swapchain_colorspace is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_swapchain_colorspace 1 +#define VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION 5 +#define VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME "VK_EXT_swapchain_colorspace" + + +// VK_EXT_hdr_metadata is a preprocessor guard. Do not pass it to API calls. 
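/* Sketch showing how VK_EXT_conservative_rasterization is enabled at pipeline-creation
 * time by chaining the EXT struct into VkPipelineRasterizationStateCreateInfo::pNext.
 * Not part of the generated header; the helper name is illustrative, and the chained
 * struct must stay valid until vkCreateGraphicsPipelines. */
#include <vulkan/vulkan.h>

static void enable_conservative_raster(
    VkPipelineRasterizationStateCreateInfo* rasterState,
    VkPipelineRasterizationConservativeStateCreateInfoEXT* conservative)
{
    conservative->sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT;
    conservative->pNext = rasterState->pNext;   /* preserve any existing chain */
    conservative->flags = 0;
    conservative->conservativeRasterizationMode    = VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT;
    conservative->extraPrimitiveOverestimationSize = 0.0f;  /* clamped to the device granularity */
    rasterState->pNext = conservative;
}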
+#define VK_EXT_hdr_metadata 1 +#define VK_EXT_HDR_METADATA_SPEC_VERSION 3 +#define VK_EXT_HDR_METADATA_EXTENSION_NAME "VK_EXT_hdr_metadata" +typedef struct VkXYColorEXT { + float x; + float y; +} VkXYColorEXT; + +typedef struct VkHdrMetadataEXT { + VkStructureType sType; + const void* pNext; + VkXYColorEXT displayPrimaryRed; + VkXYColorEXT displayPrimaryGreen; + VkXYColorEXT displayPrimaryBlue; + VkXYColorEXT whitePoint; + float maxLuminance; + float minLuminance; + float maxContentLightLevel; + float maxFrameAverageLightLevel; +} VkHdrMetadataEXT; + +typedef void (VKAPI_PTR *PFN_vkSetHdrMetadataEXT)(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkSetHdrMetadataEXT( + VkDevice device, + uint32_t swapchainCount, + const VkSwapchainKHR* pSwapchains, + const VkHdrMetadataEXT* pMetadata); +#endif + + +// VK_IMG_relaxed_line_rasterization is a preprocessor guard. Do not pass it to API calls. +#define VK_IMG_relaxed_line_rasterization 1 +#define VK_IMG_RELAXED_LINE_RASTERIZATION_SPEC_VERSION 1 +#define VK_IMG_RELAXED_LINE_RASTERIZATION_EXTENSION_NAME "VK_IMG_relaxed_line_rasterization" +typedef struct VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG { + VkStructureType sType; + void* pNext; + VkBool32 relaxedLineRasterization; +} VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG; + + + +// VK_EXT_external_memory_dma_buf is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_external_memory_dma_buf 1 +#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME "VK_EXT_external_memory_dma_buf" + + +// VK_EXT_queue_family_foreign is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_queue_family_foreign 1 +#define VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION 1 +#define VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME "VK_EXT_queue_family_foreign" +#define VK_QUEUE_FAMILY_FOREIGN_EXT (~2U) + + +// VK_EXT_debug_utils is a preprocessor guard. Do not pass it to API calls. 
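/* Usage sketch for vkSetHdrMetadataEXT declared above. The BT.2020 primaries and the
 * 1000/0.001-nit luminance figures are common HDR10 mastering example values, not API
 * requirements. Not part of the generated header; the helper name is illustrative. */
#include <vulkan/vulkan.h>

static void set_hdr10_metadata(VkDevice device, VkSwapchainKHR swapchain)
{
    VkHdrMetadataEXT metadata = {
        .sType               = VK_STRUCTURE_TYPE_HDR_METADATA_EXT,
        .displayPrimaryRed   = { 0.708f, 0.292f },
        .displayPrimaryGreen = { 0.170f, 0.797f },
        .displayPrimaryBlue  = { 0.131f, 0.046f },
        .whitePoint          = { 0.3127f, 0.3290f },
        .maxLuminance        = 1000.0f,   /* nits */
        .minLuminance        = 0.001f,
        .maxContentLightLevel      = 1000.0f,
        .maxFrameAverageLightLevel = 400.0f,
    };
    vkSetHdrMetadataEXT(device, 1, &swapchain, &metadata);
}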
+#define VK_EXT_debug_utils 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugUtilsMessengerEXT) +#define VK_EXT_DEBUG_UTILS_SPEC_VERSION 2 +#define VK_EXT_DEBUG_UTILS_EXTENSION_NAME "VK_EXT_debug_utils" +typedef VkFlags VkDebugUtilsMessengerCallbackDataFlagsEXT; + +typedef enum VkDebugUtilsMessageSeverityFlagBitsEXT { + VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT = 0x00000001, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT = 0x00000010, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT = 0x00000100, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT = 0x00001000, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugUtilsMessageSeverityFlagBitsEXT; + +typedef enum VkDebugUtilsMessageTypeFlagBitsEXT { + VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT = 0x00000001, + VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT = 0x00000002, + VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT = 0x00000004, + VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT = 0x00000008, + VK_DEBUG_UTILS_MESSAGE_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugUtilsMessageTypeFlagBitsEXT; +typedef VkFlags VkDebugUtilsMessageTypeFlagsEXT; +typedef VkFlags VkDebugUtilsMessageSeverityFlagsEXT; +typedef VkFlags VkDebugUtilsMessengerCreateFlagsEXT; +typedef struct VkDebugUtilsLabelEXT { + VkStructureType sType; + const void* pNext; + const char* pLabelName; + float color[4]; +} VkDebugUtilsLabelEXT; + +typedef struct VkDebugUtilsObjectNameInfoEXT { + VkStructureType sType; + const void* pNext; + VkObjectType objectType; + uint64_t objectHandle; + const char* pObjectName; +} VkDebugUtilsObjectNameInfoEXT; + +typedef struct VkDebugUtilsMessengerCallbackDataEXT { + VkStructureType sType; + const void* pNext; + VkDebugUtilsMessengerCallbackDataFlagsEXT flags; + const char* pMessageIdName; + int32_t messageIdNumber; + const char* pMessage; + uint32_t queueLabelCount; + const VkDebugUtilsLabelEXT* pQueueLabels; + uint32_t cmdBufLabelCount; + const VkDebugUtilsLabelEXT* pCmdBufLabels; + uint32_t objectCount; + const VkDebugUtilsObjectNameInfoEXT* pObjects; +} VkDebugUtilsMessengerCallbackDataEXT; + +typedef VkBool32 (VKAPI_PTR *PFN_vkDebugUtilsMessengerCallbackEXT)( + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, + void* pUserData); + +typedef struct VkDebugUtilsMessengerCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugUtilsMessengerCreateFlagsEXT flags; + VkDebugUtilsMessageSeverityFlagsEXT messageSeverity; + VkDebugUtilsMessageTypeFlagsEXT messageType; + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback; + void* pUserData; +} VkDebugUtilsMessengerCreateInfoEXT; + +typedef struct VkDebugUtilsObjectTagInfoEXT { + VkStructureType sType; + const void* pNext; + VkObjectType objectType; + uint64_t objectHandle; + uint64_t tagName; + size_t tagSize; + const void* pTag; +} VkDebugUtilsObjectTagInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectNameEXT)(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo); +typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectTagEXT)(VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo); +typedef void (VKAPI_PTR *PFN_vkQueueBeginDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef void (VKAPI_PTR *PFN_vkQueueEndDebugUtilsLabelEXT)(VkQueue queue); +typedef void (VKAPI_PTR *PFN_vkQueueInsertDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef 
void (VKAPI_PTR *PFN_vkCmdBeginDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdInsertDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugUtilsMessengerEXT)(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger); +typedef void (VKAPI_PTR *PFN_vkDestroyDebugUtilsMessengerEXT)(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkSubmitDebugUtilsMessageEXT)(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectNameEXT( + VkDevice device, + const VkDebugUtilsObjectNameInfoEXT* pNameInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectTagEXT( + VkDevice device, + const VkDebugUtilsObjectTagInfoEXT* pTagInfo); + +VKAPI_ATTR void VKAPI_CALL vkQueueBeginDebugUtilsLabelEXT( + VkQueue queue, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkQueueEndDebugUtilsLabelEXT( + VkQueue queue); + +VKAPI_ATTR void VKAPI_CALL vkQueueInsertDebugUtilsLabelEXT( + VkQueue queue, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdInsertDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugUtilsMessengerEXT( + VkInstance instance, + const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDebugUtilsMessengerEXT* pMessenger); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDebugUtilsMessengerEXT( + VkInstance instance, + VkDebugUtilsMessengerEXT messenger, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkSubmitDebugUtilsMessageEXT( + VkInstance instance, + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); +#endif + + +// VK_EXT_sampler_filter_minmax is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_sampler_filter_minmax 1 +#define VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION 2 +#define VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME "VK_EXT_sampler_filter_minmax" +typedef VkSamplerReductionMode VkSamplerReductionModeEXT; + +typedef VkSamplerReductionModeCreateInfo VkSamplerReductionModeCreateInfoEXT; + +typedef VkPhysicalDeviceSamplerFilterMinmaxProperties VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT; + + + +// VK_AMD_gpu_shader_int16 is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_gpu_shader_int16 1 +#define VK_AMD_GPU_SHADER_INT16_SPEC_VERSION 2 +#define VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME "VK_AMD_gpu_shader_int16" + + +// VK_AMD_mixed_attachment_samples is a preprocessor guard. Do not pass it to API calls. 
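/* Minimal VK_EXT_debug_utils setup sketch (not part of the generated header): a callback
 * that logs the message and returns VK_FALSE, and a messenger created through
 * vkGetInstanceProcAddr, since the EXT entry points are instance-level extension
 * functions. Names are illustrative. */
#include <stdio.h>
#include <vulkan/vulkan.h>

static VKAPI_ATTR VkBool32 VKAPI_CALL debug_callback(
    VkDebugUtilsMessageSeverityFlagBitsEXT severity,
    VkDebugUtilsMessageTypeFlagsEXT types,
    const VkDebugUtilsMessengerCallbackDataEXT* data,
    void* userData)
{
    (void)severity; (void)types; (void)userData;
    fprintf(stderr, "[vulkan] %s\n", data->pMessage);
    return VK_FALSE;   /* do not abort the triggering call */
}

static VkDebugUtilsMessengerEXT create_messenger(VkInstance instance)
{
    VkDebugUtilsMessengerCreateInfoEXT info = {
        .sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
        .messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT |
                           VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT,
        .messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
                       VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT,
        .pfnUserCallback = debug_callback,
    };
    PFN_vkCreateDebugUtilsMessengerEXT create = (PFN_vkCreateDebugUtilsMessengerEXT)
        vkGetInstanceProcAddr(instance, "vkCreateDebugUtilsMessengerEXT");
    VkDebugUtilsMessengerEXT messenger = VK_NULL_HANDLE;
    if (create)
        create(instance, &info, NULL, &messenger);
    /* Teardown uses vkDestroyDebugUtilsMessengerEXT, fetched the same way. */
    return messenger;
}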
+#define VK_AMD_mixed_attachment_samples 1 +#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION 1 +#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME "VK_AMD_mixed_attachment_samples" +typedef struct VkAttachmentSampleCountInfoAMD { + VkStructureType sType; + const void* pNext; + uint32_t colorAttachmentCount; + const VkSampleCountFlagBits* pColorAttachmentSamples; + VkSampleCountFlagBits depthStencilAttachmentSamples; +} VkAttachmentSampleCountInfoAMD; + + + +// VK_AMD_shader_fragment_mask is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_fragment_mask 1 +#define VK_AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION 1 +#define VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME "VK_AMD_shader_fragment_mask" + + +// VK_EXT_inline_uniform_block is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_inline_uniform_block 1 +#define VK_EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION 1 +#define VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME "VK_EXT_inline_uniform_block" +typedef VkPhysicalDeviceInlineUniformBlockFeatures VkPhysicalDeviceInlineUniformBlockFeaturesEXT; + +typedef VkPhysicalDeviceInlineUniformBlockProperties VkPhysicalDeviceInlineUniformBlockPropertiesEXT; + +typedef VkWriteDescriptorSetInlineUniformBlock VkWriteDescriptorSetInlineUniformBlockEXT; + +typedef VkDescriptorPoolInlineUniformBlockCreateInfo VkDescriptorPoolInlineUniformBlockCreateInfoEXT; + + + +// VK_EXT_shader_stencil_export is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_stencil_export 1 +#define VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION 1 +#define VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME "VK_EXT_shader_stencil_export" + + +// VK_EXT_sample_locations is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_sample_locations 1 +#define VK_EXT_SAMPLE_LOCATIONS_SPEC_VERSION 1 +#define VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME "VK_EXT_sample_locations" +typedef struct VkSampleLocationEXT { + float x; + float y; +} VkSampleLocationEXT; + +typedef struct VkSampleLocationsInfoEXT { + VkStructureType sType; + const void* pNext; + VkSampleCountFlagBits sampleLocationsPerPixel; + VkExtent2D sampleLocationGridSize; + uint32_t sampleLocationsCount; + const VkSampleLocationEXT* pSampleLocations; +} VkSampleLocationsInfoEXT; + +typedef struct VkAttachmentSampleLocationsEXT { + uint32_t attachmentIndex; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkAttachmentSampleLocationsEXT; + +typedef struct VkSubpassSampleLocationsEXT { + uint32_t subpassIndex; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkSubpassSampleLocationsEXT; + +typedef struct VkRenderPassSampleLocationsBeginInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t attachmentInitialSampleLocationsCount; + const VkAttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations; + uint32_t postSubpassSampleLocationsCount; + const VkSubpassSampleLocationsEXT* pPostSubpassSampleLocations; +} VkRenderPassSampleLocationsBeginInfoEXT; + +typedef struct VkPipelineSampleLocationsStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 sampleLocationsEnable; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkPipelineSampleLocationsStateCreateInfoEXT; + +typedef struct VkPhysicalDeviceSampleLocationsPropertiesEXT { + VkStructureType sType; + void* pNext; + VkSampleCountFlags sampleLocationSampleCounts; + VkExtent2D maxSampleLocationGridSize; + float sampleLocationCoordinateRange[2]; + uint32_t sampleLocationSubPixelBits; + VkBool32 variableSampleLocations; +} 
VkPhysicalDeviceSampleLocationsPropertiesEXT; + +typedef struct VkMultisamplePropertiesEXT { + VkStructureType sType; + void* pNext; + VkExtent2D maxSampleLocationGridSize; +} VkMultisamplePropertiesEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetSampleLocationsEXT)(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT)(VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleLocationsEXT( + VkCommandBuffer commandBuffer, + const VkSampleLocationsInfoEXT* pSampleLocationsInfo); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMultisamplePropertiesEXT( + VkPhysicalDevice physicalDevice, + VkSampleCountFlagBits samples, + VkMultisamplePropertiesEXT* pMultisampleProperties); +#endif + + +// VK_EXT_blend_operation_advanced is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_blend_operation_advanced 1 +#define VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION 2 +#define VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME "VK_EXT_blend_operation_advanced" + +typedef enum VkBlendOverlapEXT { + VK_BLEND_OVERLAP_UNCORRELATED_EXT = 0, + VK_BLEND_OVERLAP_DISJOINT_EXT = 1, + VK_BLEND_OVERLAP_CONJOINT_EXT = 2, + VK_BLEND_OVERLAP_MAX_ENUM_EXT = 0x7FFFFFFF +} VkBlendOverlapEXT; +typedef struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 advancedBlendCoherentOperations; +} VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT; + +typedef struct VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t advancedBlendMaxColorAttachments; + VkBool32 advancedBlendIndependentBlend; + VkBool32 advancedBlendNonPremultipliedSrcColor; + VkBool32 advancedBlendNonPremultipliedDstColor; + VkBool32 advancedBlendCorrelatedOverlap; + VkBool32 advancedBlendAllOperations; +} VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT; + +typedef struct VkPipelineColorBlendAdvancedStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 srcPremultiplied; + VkBool32 dstPremultiplied; + VkBlendOverlapEXT blendOverlap; +} VkPipelineColorBlendAdvancedStateCreateInfoEXT; + + + +// VK_NV_fragment_coverage_to_color is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_fragment_coverage_to_color 1 +#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION 1 +#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME "VK_NV_fragment_coverage_to_color" +typedef VkFlags VkPipelineCoverageToColorStateCreateFlagsNV; +typedef struct VkPipelineCoverageToColorStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageToColorStateCreateFlagsNV flags; + VkBool32 coverageToColorEnable; + uint32_t coverageToColorLocation; +} VkPipelineCoverageToColorStateCreateInfoNV; + + + +// VK_NV_framebuffer_mixed_samples is a preprocessor guard. Do not pass it to API calls. 
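/* Sketch for the VK_EXT_sample_locations entry points declared above: query the supported
 * grid for 4x MSAA, then program a rotated-grid pattern on a command buffer whose pipeline
 * enables the sample-locations dynamic state. Not part of the generated header; the
 * helper name and the sample pattern are illustrative. */
#include <vulkan/vulkan.h>

static void set_custom_sample_locations(VkPhysicalDevice physicalDevice, VkCommandBuffer cmd)
{
    VkMultisamplePropertiesEXT props = {
        .sType = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT,
    };
    vkGetPhysicalDeviceMultisamplePropertiesEXT(physicalDevice, VK_SAMPLE_COUNT_4_BIT, &props);

    /* One location per sample for a 1x1 pixel grid; coordinates are in [0,1). */
    VkSampleLocationEXT locations[4] = {
        { 0.375f, 0.125f }, { 0.875f, 0.375f },
        { 0.125f, 0.625f }, { 0.625f, 0.875f },
    };
    VkSampleLocationsInfoEXT info = {
        .sType = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT,
        .sampleLocationsPerPixel = VK_SAMPLE_COUNT_4_BIT,
        .sampleLocationGridSize  = { 1, 1 },
        .sampleLocationsCount    = 4,
        .pSampleLocations        = locations,
    };
    vkCmdSetSampleLocationsEXT(cmd, &info);
}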
+#define VK_NV_framebuffer_mixed_samples 1 +#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION 1 +#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME "VK_NV_framebuffer_mixed_samples" + +typedef enum VkCoverageModulationModeNV { + VK_COVERAGE_MODULATION_MODE_NONE_NV = 0, + VK_COVERAGE_MODULATION_MODE_RGB_NV = 1, + VK_COVERAGE_MODULATION_MODE_ALPHA_NV = 2, + VK_COVERAGE_MODULATION_MODE_RGBA_NV = 3, + VK_COVERAGE_MODULATION_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkCoverageModulationModeNV; +typedef VkFlags VkPipelineCoverageModulationStateCreateFlagsNV; +typedef struct VkPipelineCoverageModulationStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageModulationStateCreateFlagsNV flags; + VkCoverageModulationModeNV coverageModulationMode; + VkBool32 coverageModulationTableEnable; + uint32_t coverageModulationTableCount; + const float* pCoverageModulationTable; +} VkPipelineCoverageModulationStateCreateInfoNV; + +typedef VkAttachmentSampleCountInfoAMD VkAttachmentSampleCountInfoNV; + + + +// VK_NV_fill_rectangle is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_fill_rectangle 1 +#define VK_NV_FILL_RECTANGLE_SPEC_VERSION 1 +#define VK_NV_FILL_RECTANGLE_EXTENSION_NAME "VK_NV_fill_rectangle" + + +// VK_NV_shader_sm_builtins is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_shader_sm_builtins 1 +#define VK_NV_SHADER_SM_BUILTINS_SPEC_VERSION 1 +#define VK_NV_SHADER_SM_BUILTINS_EXTENSION_NAME "VK_NV_shader_sm_builtins" +typedef struct VkPhysicalDeviceShaderSMBuiltinsPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t shaderSMCount; + uint32_t shaderWarpsPerSM; +} VkPhysicalDeviceShaderSMBuiltinsPropertiesNV; + +typedef struct VkPhysicalDeviceShaderSMBuiltinsFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 shaderSMBuiltins; +} VkPhysicalDeviceShaderSMBuiltinsFeaturesNV; + + + +// VK_EXT_post_depth_coverage is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_post_depth_coverage 1 +#define VK_EXT_POST_DEPTH_COVERAGE_SPEC_VERSION 1 +#define VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME "VK_EXT_post_depth_coverage" + + +// VK_EXT_image_drm_format_modifier is a preprocessor guard. Do not pass it to API calls. 
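/* Sketch showing how the VK_NV_shader_sm_builtins properties declared above are read by
 * chaining the NV struct into vkGetPhysicalDeviceProperties2 (core since Vulkan 1.1).
 * Not part of the generated header; the helper name is illustrative. */
#include <stdio.h>
#include <vulkan/vulkan.h>

static void print_sm_info(VkPhysicalDevice physicalDevice)
{
    VkPhysicalDeviceShaderSMBuiltinsPropertiesNV sm = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV,
    };
    VkPhysicalDeviceProperties2 props = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
        .pNext = &sm,
    };
    vkGetPhysicalDeviceProperties2(physicalDevice, &props);
    printf("SMs: %u, warps/SM: %u\n", sm.shaderSMCount, sm.shaderWarpsPerSM);
}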
+#define VK_EXT_image_drm_format_modifier 1 +#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_SPEC_VERSION 2 +#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME "VK_EXT_image_drm_format_modifier" +typedef struct VkDrmFormatModifierPropertiesEXT { + uint64_t drmFormatModifier; + uint32_t drmFormatModifierPlaneCount; + VkFormatFeatureFlags drmFormatModifierTilingFeatures; +} VkDrmFormatModifierPropertiesEXT; + +typedef struct VkDrmFormatModifierPropertiesListEXT { + VkStructureType sType; + void* pNext; + uint32_t drmFormatModifierCount; + VkDrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties; +} VkDrmFormatModifierPropertiesListEXT; + +typedef struct VkPhysicalDeviceImageDrmFormatModifierInfoEXT { + VkStructureType sType; + const void* pNext; + uint64_t drmFormatModifier; + VkSharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; +} VkPhysicalDeviceImageDrmFormatModifierInfoEXT; + +typedef struct VkImageDrmFormatModifierListCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t drmFormatModifierCount; + const uint64_t* pDrmFormatModifiers; +} VkImageDrmFormatModifierListCreateInfoEXT; + +typedef struct VkImageDrmFormatModifierExplicitCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint64_t drmFormatModifier; + uint32_t drmFormatModifierPlaneCount; + const VkSubresourceLayout* pPlaneLayouts; +} VkImageDrmFormatModifierExplicitCreateInfoEXT; + +typedef struct VkImageDrmFormatModifierPropertiesEXT { + VkStructureType sType; + void* pNext; + uint64_t drmFormatModifier; +} VkImageDrmFormatModifierPropertiesEXT; + +typedef struct VkDrmFormatModifierProperties2EXT { + uint64_t drmFormatModifier; + uint32_t drmFormatModifierPlaneCount; + VkFormatFeatureFlags2 drmFormatModifierTilingFeatures; +} VkDrmFormatModifierProperties2EXT; + +typedef struct VkDrmFormatModifierPropertiesList2EXT { + VkStructureType sType; + void* pNext; + uint32_t drmFormatModifierCount; + VkDrmFormatModifierProperties2EXT* pDrmFormatModifierProperties; +} VkDrmFormatModifierPropertiesList2EXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetImageDrmFormatModifierPropertiesEXT)(VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetImageDrmFormatModifierPropertiesEXT( + VkDevice device, + VkImage image, + VkImageDrmFormatModifierPropertiesEXT* pProperties); +#endif + + +// VK_EXT_validation_cache is a preprocessor guard. Do not pass it to API calls. 
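/* Sketch for VK_EXT_image_drm_format_modifier declared above: enumerate the modifiers
 * supported for a format by chaining VkDrmFormatModifierPropertiesListEXT into
 * VkFormatProperties2 and using the count-then-fill pattern. Not part of the generated
 * header; the helper name is illustrative and the caller frees the returned array. */
#include <stdlib.h>
#include <vulkan/vulkan.h>

static uint32_t list_drm_modifiers(VkPhysicalDevice physicalDevice, VkFormat format,
                                   VkDrmFormatModifierPropertiesEXT** outProps)
{
    VkDrmFormatModifierPropertiesListEXT list = {
        .sType = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
    };
    VkFormatProperties2 props = {
        .sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
        .pNext = &list,
    };
    vkGetPhysicalDeviceFormatProperties2(physicalDevice, format, &props);  /* count only */

    list.pDrmFormatModifierProperties =
        calloc(list.drmFormatModifierCount, sizeof(VkDrmFormatModifierPropertiesEXT));
    vkGetPhysicalDeviceFormatProperties2(physicalDevice, format, &props);  /* fill array */

    *outProps = list.pDrmFormatModifierProperties;
    return list.drmFormatModifierCount;
}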
+#define VK_EXT_validation_cache 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkValidationCacheEXT) +#define VK_EXT_VALIDATION_CACHE_SPEC_VERSION 1 +#define VK_EXT_VALIDATION_CACHE_EXTENSION_NAME "VK_EXT_validation_cache" + +typedef enum VkValidationCacheHeaderVersionEXT { + VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT = 1, + VK_VALIDATION_CACHE_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationCacheHeaderVersionEXT; +typedef VkFlags VkValidationCacheCreateFlagsEXT; +typedef struct VkValidationCacheCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkValidationCacheCreateFlagsEXT flags; + size_t initialDataSize; + const void* pInitialData; +} VkValidationCacheCreateInfoEXT; + +typedef struct VkShaderModuleValidationCacheCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkValidationCacheEXT validationCache; +} VkShaderModuleValidationCacheCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateValidationCacheEXT)(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache); +typedef void (VKAPI_PTR *PFN_vkDestroyValidationCacheEXT)(VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkMergeValidationCachesEXT)(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches); +typedef VkResult (VKAPI_PTR *PFN_vkGetValidationCacheDataEXT)(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateValidationCacheEXT( + VkDevice device, + const VkValidationCacheCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkValidationCacheEXT* pValidationCache); + +VKAPI_ATTR void VKAPI_CALL vkDestroyValidationCacheEXT( + VkDevice device, + VkValidationCacheEXT validationCache, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkMergeValidationCachesEXT( + VkDevice device, + VkValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VkValidationCacheEXT* pSrcCaches); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetValidationCacheDataEXT( + VkDevice device, + VkValidationCacheEXT validationCache, + size_t* pDataSize, + void* pData); +#endif + + +// VK_EXT_descriptor_indexing is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_descriptor_indexing 1 +#define VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION 2 +#define VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME "VK_EXT_descriptor_indexing" +typedef VkDescriptorBindingFlagBits VkDescriptorBindingFlagBitsEXT; + +typedef VkDescriptorBindingFlags VkDescriptorBindingFlagsEXT; + +typedef VkDescriptorSetLayoutBindingFlagsCreateInfo VkDescriptorSetLayoutBindingFlagsCreateInfoEXT; + +typedef VkPhysicalDeviceDescriptorIndexingFeatures VkPhysicalDeviceDescriptorIndexingFeaturesEXT; + +typedef VkPhysicalDeviceDescriptorIndexingProperties VkPhysicalDeviceDescriptorIndexingPropertiesEXT; + +typedef VkDescriptorSetVariableDescriptorCountAllocateInfo VkDescriptorSetVariableDescriptorCountAllocateInfoEXT; + +typedef VkDescriptorSetVariableDescriptorCountLayoutSupport VkDescriptorSetVariableDescriptorCountLayoutSupportEXT; + + + +// VK_EXT_shader_viewport_index_layer is a preprocessor guard. Do not pass it to API calls. 
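/* Sketch for the VK_EXT_validation_cache entry points declared above: create a cache,
 * optionally seeded with data saved from a previous run, then serialize it with the
 * count-then-fill pattern. In real use the cache would also be attached to shader modules
 * via VkShaderModuleValidationCacheCreateInfoEXT. Not part of the generated header;
 * names are illustrative. */
#include <stdlib.h>
#include <vulkan/vulkan.h>

static void save_validation_cache(VkDevice device,
                                  const void* seedData, size_t seedSize,
                                  void** outData, size_t* outSize)
{
    *outData = NULL;
    *outSize = 0;

    VkValidationCacheCreateInfoEXT createInfo = {
        .sType           = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT,
        .initialDataSize = seedSize,
        .pInitialData    = seedData,
    };
    VkValidationCacheEXT cache = VK_NULL_HANDLE;
    if (vkCreateValidationCacheEXT(device, &createInfo, NULL, &cache) != VK_SUCCESS)
        return;

    vkGetValidationCacheDataEXT(device, cache, outSize, NULL);      /* query size */
    *outData = malloc(*outSize);
    vkGetValidationCacheDataEXT(device, cache, outSize, *outData);  /* fetch data */

    vkDestroyValidationCacheEXT(device, cache, NULL);
}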
+#define VK_EXT_shader_viewport_index_layer 1 +#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION 1 +#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME "VK_EXT_shader_viewport_index_layer" + + +// VK_NV_shading_rate_image is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_shading_rate_image 1 +#define VK_NV_SHADING_RATE_IMAGE_SPEC_VERSION 3 +#define VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME "VK_NV_shading_rate_image" + +typedef enum VkShadingRatePaletteEntryNV { + VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV = 0, + VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV = 1, + VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV = 2, + VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV = 3, + VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV = 4, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV = 5, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV = 6, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV = 7, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV = 8, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV = 9, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV = 10, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV = 11, + VK_SHADING_RATE_PALETTE_ENTRY_MAX_ENUM_NV = 0x7FFFFFFF +} VkShadingRatePaletteEntryNV; + +typedef enum VkCoarseSampleOrderTypeNV { + VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV = 0, + VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV = 1, + VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV = 2, + VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV = 3, + VK_COARSE_SAMPLE_ORDER_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkCoarseSampleOrderTypeNV; +typedef struct VkShadingRatePaletteNV { + uint32_t shadingRatePaletteEntryCount; + const VkShadingRatePaletteEntryNV* pShadingRatePaletteEntries; +} VkShadingRatePaletteNV; + +typedef struct VkPipelineViewportShadingRateImageStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 shadingRateImageEnable; + uint32_t viewportCount; + const VkShadingRatePaletteNV* pShadingRatePalettes; +} VkPipelineViewportShadingRateImageStateCreateInfoNV; + +typedef struct VkPhysicalDeviceShadingRateImageFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 shadingRateImage; + VkBool32 shadingRateCoarseSampleOrder; +} VkPhysicalDeviceShadingRateImageFeaturesNV; + +typedef struct VkPhysicalDeviceShadingRateImagePropertiesNV { + VkStructureType sType; + void* pNext; + VkExtent2D shadingRateTexelSize; + uint32_t shadingRatePaletteSize; + uint32_t shadingRateMaxCoarseSamples; +} VkPhysicalDeviceShadingRateImagePropertiesNV; + +typedef struct VkCoarseSampleLocationNV { + uint32_t pixelX; + uint32_t pixelY; + uint32_t sample; +} VkCoarseSampleLocationNV; + +typedef struct VkCoarseSampleOrderCustomNV { + VkShadingRatePaletteEntryNV shadingRate; + uint32_t sampleCount; + uint32_t sampleLocationCount; + const VkCoarseSampleLocationNV* pSampleLocations; +} VkCoarseSampleOrderCustomNV; + +typedef struct VkPipelineViewportCoarseSampleOrderStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkCoarseSampleOrderTypeNV sampleOrderType; + uint32_t customSampleOrderCount; + const VkCoarseSampleOrderCustomNV* pCustomSampleOrders; +} VkPipelineViewportCoarseSampleOrderStateCreateInfoNV; + +typedef void (VKAPI_PTR *PFN_vkCmdBindShadingRateImageNV)(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportShadingRatePaletteNV)(VkCommandBuffer commandBuffer, 
uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoarseSampleOrderNV)(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindShadingRateImageNV( + VkCommandBuffer commandBuffer, + VkImageView imageView, + VkImageLayout imageLayout); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportShadingRatePaletteNV( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkShadingRatePaletteNV* pShadingRatePalettes); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoarseSampleOrderNV( + VkCommandBuffer commandBuffer, + VkCoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VkCoarseSampleOrderCustomNV* pCustomSampleOrders); +#endif + + +// VK_NV_ray_tracing is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_ray_tracing 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureNV) +#define VK_NV_RAY_TRACING_SPEC_VERSION 3 +#define VK_NV_RAY_TRACING_EXTENSION_NAME "VK_NV_ray_tracing" +#define VK_SHADER_UNUSED_KHR (~0U) +#define VK_SHADER_UNUSED_NV VK_SHADER_UNUSED_KHR + +typedef enum VkRayTracingShaderGroupTypeKHR { + VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR = 0, + VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR = 1, + VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR = 2, + VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkRayTracingShaderGroupTypeKHR; typedef VkRayTracingShaderGroupTypeKHR VkRayTracingShaderGroupTypeNV; -typedef enum VkGeometryTypeKHR { - VK_GEOMETRY_TYPE_TRIANGLES_KHR = 0, - VK_GEOMETRY_TYPE_AABBS_KHR = 1, - VK_GEOMETRY_TYPE_INSTANCES_KHR = 2, - VK_GEOMETRY_TYPE_TRIANGLES_NV = VK_GEOMETRY_TYPE_TRIANGLES_KHR, - VK_GEOMETRY_TYPE_AABBS_NV = VK_GEOMETRY_TYPE_AABBS_KHR, - VK_GEOMETRY_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkGeometryTypeKHR; -typedef VkGeometryTypeKHR VkGeometryTypeNV; +typedef enum VkGeometryTypeKHR { + VK_GEOMETRY_TYPE_TRIANGLES_KHR = 0, + VK_GEOMETRY_TYPE_AABBS_KHR = 1, + VK_GEOMETRY_TYPE_INSTANCES_KHR = 2, + VK_GEOMETRY_TYPE_SPHERES_NV = 1000429004, + VK_GEOMETRY_TYPE_LINEAR_SWEPT_SPHERES_NV = 1000429005, + VK_GEOMETRY_TYPE_TRIANGLES_NV = VK_GEOMETRY_TYPE_TRIANGLES_KHR, + VK_GEOMETRY_TYPE_AABBS_NV = VK_GEOMETRY_TYPE_AABBS_KHR, + VK_GEOMETRY_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryTypeKHR; +typedef VkGeometryTypeKHR VkGeometryTypeNV; + + +typedef enum VkAccelerationStructureTypeKHR { + VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR = 0, + VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR = 1, + VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR = 2, + VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, + VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR, + VK_ACCELERATION_STRUCTURE_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureTypeKHR; +typedef VkAccelerationStructureTypeKHR VkAccelerationStructureTypeNV; + + +typedef enum VkCopyAccelerationStructureModeKHR { + 
VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR = 0, + VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR = 1, + VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR = 2, + VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR = 3, + VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR, + VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR, + VK_COPY_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkCopyAccelerationStructureModeKHR; +typedef VkCopyAccelerationStructureModeKHR VkCopyAccelerationStructureModeNV; + + +typedef enum VkAccelerationStructureMemoryRequirementsTypeNV { + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV = 0, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV = 1, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV = 2, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkAccelerationStructureMemoryRequirementsTypeNV; + +typedef enum VkGeometryFlagBitsKHR { + VK_GEOMETRY_OPAQUE_BIT_KHR = 0x00000001, + VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR = 0x00000002, + VK_GEOMETRY_OPAQUE_BIT_NV = VK_GEOMETRY_OPAQUE_BIT_KHR, + VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR, + VK_GEOMETRY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryFlagBitsKHR; +typedef VkFlags VkGeometryFlagsKHR; +typedef VkGeometryFlagsKHR VkGeometryFlagsNV; + +typedef VkGeometryFlagBitsKHR VkGeometryFlagBitsNV; + + +typedef enum VkGeometryInstanceFlagBitsKHR { + VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR = 0x00000001, + VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR = 0x00000002, + VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR = 0x00000004, + VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR = 0x00000008, + VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT = 0x00000010, + VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT = 0x00000020, + VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR = VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR, + VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR, + VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR, + VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR, + VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR, + VK_GEOMETRY_INSTANCE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryInstanceFlagBitsKHR; +typedef VkFlags VkGeometryInstanceFlagsKHR; +typedef VkGeometryInstanceFlagsKHR VkGeometryInstanceFlagsNV; + +typedef VkGeometryInstanceFlagBitsKHR VkGeometryInstanceFlagBitsNV; + + +typedef enum VkBuildAccelerationStructureFlagBitsKHR { + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR = 0x00000001, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR = 0x00000002, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR = 0x00000004, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR = 0x00000008, + VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR = 0x00000010, + VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV = 0x00000020, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT = 0x00000040, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT = 0x00000080, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT = 0x00000100, +#ifdef 
VK_ENABLE_BETA_EXTENSIONS + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISPLACEMENT_MICROMAP_UPDATE_NV = 0x00000200, +#endif + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DATA_ACCESS_KHR = 0x00000800, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkBuildAccelerationStructureFlagBitsKHR; +typedef VkFlags VkBuildAccelerationStructureFlagsKHR; +typedef VkBuildAccelerationStructureFlagsKHR VkBuildAccelerationStructureFlagsNV; + +typedef VkBuildAccelerationStructureFlagBitsKHR VkBuildAccelerationStructureFlagBitsNV; + +typedef struct VkRayTracingShaderGroupCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkRayTracingShaderGroupTypeKHR type; + uint32_t generalShader; + uint32_t closestHitShader; + uint32_t anyHitShader; + uint32_t intersectionShader; +} VkRayTracingShaderGroupCreateInfoNV; + +typedef struct VkRayTracingPipelineCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + uint32_t groupCount; + const VkRayTracingShaderGroupCreateInfoNV* pGroups; + uint32_t maxRecursionDepth; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkRayTracingPipelineCreateInfoNV; + +typedef struct VkGeometryTrianglesNV { + VkStructureType sType; + const void* pNext; + VkBuffer vertexData; + VkDeviceSize vertexOffset; + uint32_t vertexCount; + VkDeviceSize vertexStride; + VkFormat vertexFormat; + VkBuffer indexData; + VkDeviceSize indexOffset; + uint32_t indexCount; + VkIndexType indexType; + VkBuffer transformData; + VkDeviceSize transformOffset; +} VkGeometryTrianglesNV; + +typedef struct VkGeometryAABBNV { + VkStructureType sType; + const void* pNext; + VkBuffer aabbData; + uint32_t numAABBs; + uint32_t stride; + VkDeviceSize offset; +} VkGeometryAABBNV; + +typedef struct VkGeometryDataNV { + VkGeometryTrianglesNV triangles; + VkGeometryAABBNV aabbs; +} VkGeometryDataNV; + +typedef struct VkGeometryNV { + VkStructureType sType; + const void* pNext; + VkGeometryTypeKHR geometryType; + VkGeometryDataNV geometry; + VkGeometryFlagsKHR flags; +} VkGeometryNV; + +typedef struct VkAccelerationStructureInfoNV { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureTypeNV type; + VkBuildAccelerationStructureFlagsNV flags; + uint32_t instanceCount; + uint32_t geometryCount; + const VkGeometryNV* pGeometries; +} VkAccelerationStructureInfoNV; + +typedef struct VkAccelerationStructureCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceSize compactedSize; + VkAccelerationStructureInfoNV info; +} VkAccelerationStructureCreateInfoNV; + +typedef struct VkBindAccelerationStructureMemoryInfoNV { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureNV accelerationStructure; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; +} 
VkBindAccelerationStructureMemoryInfoNV; + +typedef struct VkWriteDescriptorSetAccelerationStructureNV { + VkStructureType sType; + const void* pNext; + uint32_t accelerationStructureCount; + const VkAccelerationStructureNV* pAccelerationStructures; +} VkWriteDescriptorSetAccelerationStructureNV; + +typedef struct VkAccelerationStructureMemoryRequirementsInfoNV { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureMemoryRequirementsTypeNV type; + VkAccelerationStructureNV accelerationStructure; +} VkAccelerationStructureMemoryRequirementsInfoNV; + +typedef struct VkPhysicalDeviceRayTracingPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t shaderGroupHandleSize; + uint32_t maxRecursionDepth; + uint32_t maxShaderGroupStride; + uint32_t shaderGroupBaseAlignment; + uint64_t maxGeometryCount; + uint64_t maxInstanceCount; + uint64_t maxTriangleCount; + uint32_t maxDescriptorSetAccelerationStructures; +} VkPhysicalDeviceRayTracingPropertiesNV; + +typedef struct VkTransformMatrixKHR { + float matrix[3][4]; +} VkTransformMatrixKHR; + +typedef VkTransformMatrixKHR VkTransformMatrixNV; + +typedef struct VkAabbPositionsKHR { + float minX; + float minY; + float minZ; + float maxX; + float maxY; + float maxZ; +} VkAabbPositionsKHR; + +typedef VkAabbPositionsKHR VkAabbPositionsNV; + +typedef struct VkAccelerationStructureInstanceKHR { + VkTransformMatrixKHR transform; + uint32_t instanceCustomIndex:24; + uint32_t mask:8; + uint32_t instanceShaderBindingTableRecordOffset:24; + VkGeometryInstanceFlagsKHR flags:8; + uint64_t accelerationStructureReference; +} VkAccelerationStructureInstanceKHR; + +typedef VkAccelerationStructureInstanceKHR VkAccelerationStructureInstanceNV; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureNV)(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure); +typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureMemoryRequirementsNV)(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements); +typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryNV)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); +typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureNV)(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureNV)(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeKHR mode); +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysNV)(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, 
uint32_t height, uint32_t depth); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesNV)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesNV)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureHandleNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesNV)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); +typedef VkResult (VKAPI_PTR *PFN_vkCompileDeferredNV)(VkDevice device, VkPipeline pipeline, uint32_t shader); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureNV( + VkDevice device, + const VkAccelerationStructureCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkAccelerationStructureNV* pAccelerationStructure); + +VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureNV( + VkDevice device, + VkAccelerationStructureNV accelerationStructure, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureMemoryRequirementsNV( + VkDevice device, + const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, + VkMemoryRequirements2KHR* pMemoryRequirements); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindAccelerationStructureMemoryNV( + VkDevice device, + uint32_t bindInfoCount, + const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureNV( + VkCommandBuffer commandBuffer, + const VkAccelerationStructureInfoNV* pInfo, + VkBuffer instanceData, + VkDeviceSize instanceOffset, + VkBool32 update, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkBuffer scratch, + VkDeviceSize scratchOffset); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureNV( + VkCommandBuffer commandBuffer, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkCopyAccelerationStructureModeKHR mode); + +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysNV( + VkCommandBuffer commandBuffer, + VkBuffer raygenShaderBindingTableBuffer, + VkDeviceSize raygenShaderBindingOffset, + VkBuffer missShaderBindingTableBuffer, + VkDeviceSize missShaderBindingOffset, + VkDeviceSize missShaderBindingStride, + VkBuffer hitShaderBindingTableBuffer, + VkDeviceSize hitShaderBindingOffset, + VkDeviceSize hitShaderBindingStride, + VkBuffer callableShaderBindingTableBuffer, + VkDeviceSize callableShaderBindingOffset, + VkDeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesNV( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoNV* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesKHR( + VkDevice device, + VkPipeline 
pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesNV( + VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetAccelerationStructureHandleNV( + VkDevice device, + VkAccelerationStructureNV accelerationStructure, + size_t dataSize, + void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesNV( + VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureNV* pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery); + +VKAPI_ATTR VkResult VKAPI_CALL vkCompileDeferredNV( + VkDevice device, + VkPipeline pipeline, + uint32_t shader); +#endif + + +// VK_NV_representative_fragment_test is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_representative_fragment_test 1 +#define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_SPEC_VERSION 2 +#define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_EXTENSION_NAME "VK_NV_representative_fragment_test" +typedef struct VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 representativeFragmentTest; +} VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV; + +typedef struct VkPipelineRepresentativeFragmentTestStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 representativeFragmentTestEnable; +} VkPipelineRepresentativeFragmentTestStateCreateInfoNV; + + + +// VK_EXT_filter_cubic is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_filter_cubic 1 +#define VK_EXT_FILTER_CUBIC_SPEC_VERSION 3 +#define VK_EXT_FILTER_CUBIC_EXTENSION_NAME "VK_EXT_filter_cubic" +typedef struct VkPhysicalDeviceImageViewImageFormatInfoEXT { + VkStructureType sType; + void* pNext; + VkImageViewType imageViewType; +} VkPhysicalDeviceImageViewImageFormatInfoEXT; + +typedef struct VkFilterCubicImageViewImageFormatPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 filterCubic; + VkBool32 filterCubicMinmax; +} VkFilterCubicImageViewImageFormatPropertiesEXT; + + + +// VK_QCOM_render_pass_shader_resolve is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_render_pass_shader_resolve 1 +#define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_SPEC_VERSION 4 +#define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_EXTENSION_NAME "VK_QCOM_render_pass_shader_resolve" + + +// VK_EXT_global_priority is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_global_priority 1 +#define VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION 2 +#define VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME "VK_EXT_global_priority" +typedef VkQueueGlobalPriority VkQueueGlobalPriorityEXT; + +typedef VkDeviceQueueGlobalPriorityCreateInfo VkDeviceQueueGlobalPriorityCreateInfoEXT; + + + +// VK_EXT_external_memory_host is a preprocessor guard. Do not pass it to API calls. 
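/* Sketch for VK_NV_ray_tracing shader-binding-table setup using the declarations above:
 * read shaderGroupHandleSize from the NV properties struct, then fetch the group handles
 * of a ray tracing pipeline; an application would subsequently copy them into its SBT
 * buffer at its chosen stride. Not part of the generated header; names are illustrative
 * and the caller frees the returned buffer. */
#include <stdlib.h>
#include <vulkan/vulkan.h>

static void* fetch_group_handles(VkPhysicalDevice physicalDevice, VkDevice device,
                                 VkPipeline rtPipeline, uint32_t groupCount)
{
    VkPhysicalDeviceRayTracingPropertiesNV rt = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV,
    };
    VkPhysicalDeviceProperties2 props = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
        .pNext = &rt,
    };
    vkGetPhysicalDeviceProperties2(physicalDevice, &props);

    size_t dataSize = (size_t)rt.shaderGroupHandleSize * groupCount;
    void* handles = malloc(dataSize);
    vkGetRayTracingShaderGroupHandlesNV(device, rtPipeline, 0, groupCount, dataSize, handles);
    return handles;
}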
+#define VK_EXT_external_memory_host 1 +#define VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME "VK_EXT_external_memory_host" +typedef struct VkImportMemoryHostPointerInfoEXT { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + void* pHostPointer; +} VkImportMemoryHostPointerInfoEXT; + +typedef struct VkMemoryHostPointerPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryHostPointerPropertiesEXT; + +typedef struct VkPhysicalDeviceExternalMemoryHostPropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize minImportedHostPointerAlignment; +} VkPhysicalDeviceExternalMemoryHostPropertiesEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryHostPointerPropertiesEXT)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + const void* pHostPointer, + VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); +#endif + + +// VK_AMD_buffer_marker is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_buffer_marker 1 +#define VK_AMD_BUFFER_MARKER_SPEC_VERSION 1 +#define VK_AMD_BUFFER_MARKER_EXTENSION_NAME "VK_AMD_buffer_marker" +typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarkerAMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); +typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarker2AMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarkerAMD( + VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarker2AMD( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags2 stage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker); +#endif + + +// VK_AMD_pipeline_compiler_control is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_pipeline_compiler_control 1 +#define VK_AMD_PIPELINE_COMPILER_CONTROL_SPEC_VERSION 1 +#define VK_AMD_PIPELINE_COMPILER_CONTROL_EXTENSION_NAME "VK_AMD_pipeline_compiler_control" + +typedef enum VkPipelineCompilerControlFlagBitsAMD { + VK_PIPELINE_COMPILER_CONTROL_FLAG_BITS_MAX_ENUM_AMD = 0x7FFFFFFF +} VkPipelineCompilerControlFlagBitsAMD; +typedef VkFlags VkPipelineCompilerControlFlagsAMD; +typedef struct VkPipelineCompilerControlCreateInfoAMD { + VkStructureType sType; + const void* pNext; + VkPipelineCompilerControlFlagsAMD compilerControlFlags; +} VkPipelineCompilerControlCreateInfoAMD; + + + +// VK_EXT_calibrated_timestamps is a preprocessor guard. Do not pass it to API calls. 
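+/*
+   Usage sketch (illustrative): enumerate the calibrateable time domains, then
+   sample a device timestamp together with its maximum deviation. The
+   `physicalDevice` and `device` handles are assumptions; the KHR-suffixed
+   enums belong to the aliased VkTimeDomainKHR/VkCalibratedTimestampInfoKHR types.
+
+       uint32_t domainCount = 0;
+       vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(physicalDevice, &domainCount, NULL);
+
+       VkCalibratedTimestampInfoKHR info = {
+           VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR, NULL, VK_TIME_DOMAIN_DEVICE_KHR };
+       uint64_t timestamp, maxDeviation;
+       vkGetCalibratedTimestampsEXT(device, 1, &info, &timestamp, &maxDeviation);
+*/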
+#define VK_EXT_calibrated_timestamps 1 +#define VK_EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION 2 +#define VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME "VK_EXT_calibrated_timestamps" +typedef VkTimeDomainKHR VkTimeDomainEXT; + +typedef VkCalibratedTimestampInfoKHR VkCalibratedTimestampInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT)(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainKHR* pTimeDomains); +typedef VkResult (VKAPI_PTR *PFN_vkGetCalibratedTimestampsEXT)(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoKHR* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + VkPhysicalDevice physicalDevice, + uint32_t* pTimeDomainCount, + VkTimeDomainKHR* pTimeDomains); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetCalibratedTimestampsEXT( + VkDevice device, + uint32_t timestampCount, + const VkCalibratedTimestampInfoKHR* pTimestampInfos, + uint64_t* pTimestamps, + uint64_t* pMaxDeviation); +#endif + + +// VK_AMD_shader_core_properties is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_core_properties 1 +#define VK_AMD_SHADER_CORE_PROPERTIES_SPEC_VERSION 2 +#define VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME "VK_AMD_shader_core_properties" +typedef struct VkPhysicalDeviceShaderCorePropertiesAMD { + VkStructureType sType; + void* pNext; + uint32_t shaderEngineCount; + uint32_t shaderArraysPerEngineCount; + uint32_t computeUnitsPerShaderArray; + uint32_t simdPerComputeUnit; + uint32_t wavefrontsPerSimd; + uint32_t wavefrontSize; + uint32_t sgprsPerSimd; + uint32_t minSgprAllocation; + uint32_t maxSgprAllocation; + uint32_t sgprAllocationGranularity; + uint32_t vgprsPerSimd; + uint32_t minVgprAllocation; + uint32_t maxVgprAllocation; + uint32_t vgprAllocationGranularity; +} VkPhysicalDeviceShaderCorePropertiesAMD; + + + +// VK_AMD_memory_overallocation_behavior is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_memory_overallocation_behavior 1 +#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_SPEC_VERSION 1 +#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME "VK_AMD_memory_overallocation_behavior" + +typedef enum VkMemoryOverallocationBehaviorAMD { + VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD = 0, + VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD = 1, + VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD = 2, + VK_MEMORY_OVERALLOCATION_BEHAVIOR_MAX_ENUM_AMD = 0x7FFFFFFF +} VkMemoryOverallocationBehaviorAMD; +typedef struct VkDeviceMemoryOverallocationCreateInfoAMD { + VkStructureType sType; + const void* pNext; + VkMemoryOverallocationBehaviorAMD overallocationBehavior; +} VkDeviceMemoryOverallocationCreateInfoAMD; + + + +// VK_EXT_vertex_attribute_divisor is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_vertex_attribute_divisor 1 +#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION 3 +#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME "VK_EXT_vertex_attribute_divisor" +typedef struct VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxVertexAttribDivisor; +} VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT; + +typedef VkVertexInputBindingDivisorDescription VkVertexInputBindingDivisorDescriptionEXT; + +typedef VkPipelineVertexInputDivisorStateCreateInfo VkPipelineVertexInputDivisorStateCreateInfoEXT; + +typedef VkPhysicalDeviceVertexAttributeDivisorFeatures VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT; + + + +// VK_EXT_pipeline_creation_feedback is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pipeline_creation_feedback 1 +#define VK_EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION 1 +#define VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME "VK_EXT_pipeline_creation_feedback" +typedef VkPipelineCreationFeedbackFlagBits VkPipelineCreationFeedbackFlagBitsEXT; + +typedef VkPipelineCreationFeedbackFlags VkPipelineCreationFeedbackFlagsEXT; + +typedef VkPipelineCreationFeedbackCreateInfo VkPipelineCreationFeedbackCreateInfoEXT; + +typedef VkPipelineCreationFeedback VkPipelineCreationFeedbackEXT; + + + +// VK_NV_shader_subgroup_partitioned is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_shader_subgroup_partitioned 1 +#define VK_NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION 1 +#define VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME "VK_NV_shader_subgroup_partitioned" + + +// VK_NV_compute_shader_derivatives is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_compute_shader_derivatives 1 +#define VK_NV_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION 1 +#define VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME "VK_NV_compute_shader_derivatives" +typedef VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR VkPhysicalDeviceComputeShaderDerivativesFeaturesNV; + + + +// VK_NV_mesh_shader is a preprocessor guard. Do not pass it to API calls. 
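+/*
+   Usage sketch (illustrative): with a task/mesh pipeline bound, launch task
+   workgroups directly or indirectly. `commandBuffer`, `taskCount`, `buffer`,
+   `offset`, `drawCount` and `stride` are assumed to exist.
+
+       vkCmdDrawMeshTasksNV(commandBuffer, taskCount, 0);  // firstTask = 0
+
+       // The indirect variant reads VkDrawMeshTasksIndirectCommandNV records
+       // { taskCount, firstTask } from `buffer` starting at `offset`:
+       vkCmdDrawMeshTasksIndirectNV(commandBuffer, buffer, offset, drawCount, stride);
+*/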
+#define VK_NV_mesh_shader 1 +#define VK_NV_MESH_SHADER_SPEC_VERSION 1 +#define VK_NV_MESH_SHADER_EXTENSION_NAME "VK_NV_mesh_shader" +typedef struct VkPhysicalDeviceMeshShaderFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 taskShader; + VkBool32 meshShader; +} VkPhysicalDeviceMeshShaderFeaturesNV; + +typedef struct VkPhysicalDeviceMeshShaderPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t maxDrawMeshTasksCount; + uint32_t maxTaskWorkGroupInvocations; + uint32_t maxTaskWorkGroupSize[3]; + uint32_t maxTaskTotalMemorySize; + uint32_t maxTaskOutputCount; + uint32_t maxMeshWorkGroupInvocations; + uint32_t maxMeshWorkGroupSize[3]; + uint32_t maxMeshTotalMemorySize; + uint32_t maxMeshOutputVertices; + uint32_t maxMeshOutputPrimitives; + uint32_t maxMeshMultiviewViewCount; + uint32_t meshOutputPerVertexGranularity; + uint32_t meshOutputPerPrimitiveGranularity; +} VkPhysicalDeviceMeshShaderPropertiesNV; + +typedef struct VkDrawMeshTasksIndirectCommandNV { + uint32_t taskCount; + uint32_t firstTask; +} VkDrawMeshTasksIndirectCommandNV; + +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksNV)(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectCountNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksNV( + VkCommandBuffer commandBuffer, + uint32_t taskCount, + uint32_t firstTask); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectNV( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectCountNV( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif + + +// VK_NV_fragment_shader_barycentric is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_fragment_shader_barycentric 1 +#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION 1 +#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME "VK_NV_fragment_shader_barycentric" +typedef VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV; + + + +// VK_NV_shader_image_footprint is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_shader_image_footprint 1 +#define VK_NV_SHADER_IMAGE_FOOTPRINT_SPEC_VERSION 2 +#define VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME "VK_NV_shader_image_footprint" +typedef struct VkPhysicalDeviceShaderImageFootprintFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 imageFootprint; +} VkPhysicalDeviceShaderImageFootprintFeaturesNV; + + + +// VK_NV_scissor_exclusive is a preprocessor guard. Do not pass it to API calls. 
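+/*
+   Usage sketch (illustrative): fragments outside the exclusive scissor
+   rectangle(s) are discarded. `commandBuffer`, `width` and `height` are
+   assumptions; vkCmdSetExclusiveScissorEnableNV toggles the test per rectangle.
+
+       VkRect2D region = { { 0, 0 }, { width, height } };
+       vkCmdSetExclusiveScissorNV(commandBuffer, 0, 1, &region);
+*/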
+#define VK_NV_scissor_exclusive 1 +#define VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION 2 +#define VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME "VK_NV_scissor_exclusive" +typedef struct VkPipelineViewportExclusiveScissorStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t exclusiveScissorCount; + const VkRect2D* pExclusiveScissors; +} VkPipelineViewportExclusiveScissorStateCreateInfoNV; + +typedef struct VkPhysicalDeviceExclusiveScissorFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 exclusiveScissor; +} VkPhysicalDeviceExclusiveScissorFeaturesNV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetExclusiveScissorEnableNV)(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkBool32* pExclusiveScissorEnables); +typedef void (VKAPI_PTR *PFN_vkCmdSetExclusiveScissorNV)(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetExclusiveScissorEnableNV( + VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkBool32* pExclusiveScissorEnables); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetExclusiveScissorNV( + VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkRect2D* pExclusiveScissors); +#endif + + +// VK_NV_device_diagnostic_checkpoints is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_device_diagnostic_checkpoints 1 +#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_SPEC_VERSION 2 +#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME "VK_NV_device_diagnostic_checkpoints" +typedef struct VkQueueFamilyCheckpointPropertiesNV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags checkpointExecutionStageMask; +} VkQueueFamilyCheckpointPropertiesNV; + +typedef struct VkCheckpointDataNV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlagBits stage; + void* pCheckpointMarker; +} VkCheckpointDataNV; + +typedef struct VkQueueFamilyCheckpointProperties2NV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags2 checkpointExecutionStageMask; +} VkQueueFamilyCheckpointProperties2NV; + +typedef struct VkCheckpointData2NV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags2 stage; + void* pCheckpointMarker; +} VkCheckpointData2NV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetCheckpointNV)(VkCommandBuffer commandBuffer, const void* pCheckpointMarker); +typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointDataNV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData); +typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointData2NV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointData2NV* pCheckpointData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetCheckpointNV( + VkCommandBuffer commandBuffer, + const void* pCheckpointMarker); + +VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointDataNV( + VkQueue queue, + uint32_t* pCheckpointDataCount, + VkCheckpointDataNV* pCheckpointData); + +VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointData2NV( + VkQueue queue, + uint32_t* pCheckpointDataCount, + VkCheckpointData2NV* pCheckpointData); +#endif + + +// VK_INTEL_shader_integer_functions2 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_INTEL_shader_integer_functions2 1 +#define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_SPEC_VERSION 1 +#define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_EXTENSION_NAME "VK_INTEL_shader_integer_functions2" +typedef struct VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL { + VkStructureType sType; + void* pNext; + VkBool32 shaderIntegerFunctions2; +} VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + + + +// VK_INTEL_performance_query is a preprocessor guard. Do not pass it to API calls. +#define VK_INTEL_performance_query 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPerformanceConfigurationINTEL) +#define VK_INTEL_PERFORMANCE_QUERY_SPEC_VERSION 2 +#define VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME "VK_INTEL_performance_query" + +typedef enum VkPerformanceConfigurationTypeINTEL { + VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL = 0, + VK_PERFORMANCE_CONFIGURATION_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceConfigurationTypeINTEL; + +typedef enum VkQueryPoolSamplingModeINTEL { + VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL = 0, + VK_QUERY_POOL_SAMPLING_MODE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkQueryPoolSamplingModeINTEL; + +typedef enum VkPerformanceOverrideTypeINTEL { + VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL = 0, + VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL = 1, + VK_PERFORMANCE_OVERRIDE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceOverrideTypeINTEL; + +typedef enum VkPerformanceParameterTypeINTEL { + VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL = 0, + VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL = 1, + VK_PERFORMANCE_PARAMETER_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceParameterTypeINTEL; + +typedef enum VkPerformanceValueTypeINTEL { + VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL = 0, + VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL = 1, + VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL = 2, + VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL = 3, + VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL = 4, + VK_PERFORMANCE_VALUE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceValueTypeINTEL; +typedef union VkPerformanceValueDataINTEL { + uint32_t value32; + uint64_t value64; + float valueFloat; + VkBool32 valueBool; + const char* valueString; +} VkPerformanceValueDataINTEL; + +typedef struct VkPerformanceValueINTEL { + VkPerformanceValueTypeINTEL type; + VkPerformanceValueDataINTEL data; +} VkPerformanceValueINTEL; + +typedef struct VkInitializePerformanceApiInfoINTEL { + VkStructureType sType; + const void* pNext; + void* pUserData; +} VkInitializePerformanceApiInfoINTEL; + +typedef struct VkQueryPoolPerformanceQueryCreateInfoINTEL { + VkStructureType sType; + const void* pNext; + VkQueryPoolSamplingModeINTEL performanceCountersSampling; +} VkQueryPoolPerformanceQueryCreateInfoINTEL; + +typedef VkQueryPoolPerformanceQueryCreateInfoINTEL VkQueryPoolCreateInfoINTEL; + +typedef struct VkPerformanceMarkerInfoINTEL { + VkStructureType sType; + const void* pNext; + uint64_t marker; +} VkPerformanceMarkerInfoINTEL; + +typedef struct VkPerformanceStreamMarkerInfoINTEL { + VkStructureType sType; + const void* pNext; + uint32_t marker; +} VkPerformanceStreamMarkerInfoINTEL; + +typedef struct VkPerformanceOverrideInfoINTEL { + VkStructureType sType; + const void* pNext; + VkPerformanceOverrideTypeINTEL type; + VkBool32 enable; + uint64_t parameter; +} VkPerformanceOverrideInfoINTEL; + +typedef struct VkPerformanceConfigurationAcquireInfoINTEL { + VkStructureType sType; + const void* pNext; + VkPerformanceConfigurationTypeINTEL type; +} 
VkPerformanceConfigurationAcquireInfoINTEL; + +typedef VkResult (VKAPI_PTR *PFN_vkInitializePerformanceApiINTEL)(VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo); +typedef void (VKAPI_PTR *PFN_vkUninitializePerformanceApiINTEL)(VkDevice device); +typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceMarkerINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceStreamMarkerINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceOverrideINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo); +typedef VkResult (VKAPI_PTR *PFN_vkAcquirePerformanceConfigurationINTEL)(VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration); +typedef VkResult (VKAPI_PTR *PFN_vkReleasePerformanceConfigurationINTEL)(VkDevice device, VkPerformanceConfigurationINTEL configuration); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSetPerformanceConfigurationINTEL)(VkQueue queue, VkPerformanceConfigurationINTEL configuration); +typedef VkResult (VKAPI_PTR *PFN_vkGetPerformanceParameterINTEL)(VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkInitializePerformanceApiINTEL( + VkDevice device, + const VkInitializePerformanceApiInfoINTEL* pInitializeInfo); + +VKAPI_ATTR void VKAPI_CALL vkUninitializePerformanceApiINTEL( + VkDevice device); + +VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceMarkerINTEL( + VkCommandBuffer commandBuffer, + const VkPerformanceMarkerInfoINTEL* pMarkerInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceStreamMarkerINTEL( + VkCommandBuffer commandBuffer, + const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceOverrideINTEL( + VkCommandBuffer commandBuffer, + const VkPerformanceOverrideInfoINTEL* pOverrideInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquirePerformanceConfigurationINTEL( + VkDevice device, + const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, + VkPerformanceConfigurationINTEL* pConfiguration); + +VKAPI_ATTR VkResult VKAPI_CALL vkReleasePerformanceConfigurationINTEL( + VkDevice device, + VkPerformanceConfigurationINTEL configuration); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSetPerformanceConfigurationINTEL( + VkQueue queue, + VkPerformanceConfigurationINTEL configuration); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPerformanceParameterINTEL( + VkDevice device, + VkPerformanceParameterTypeINTEL parameter, + VkPerformanceValueINTEL* pValue); +#endif + + +// VK_EXT_pci_bus_info is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pci_bus_info 1 +#define VK_EXT_PCI_BUS_INFO_SPEC_VERSION 2 +#define VK_EXT_PCI_BUS_INFO_EXTENSION_NAME "VK_EXT_pci_bus_info" +typedef struct VkPhysicalDevicePCIBusInfoPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t pciDomain; + uint32_t pciBus; + uint32_t pciDevice; + uint32_t pciFunction; +} VkPhysicalDevicePCIBusInfoPropertiesEXT; + + + +// VK_AMD_display_native_hdr is a preprocessor guard. Do not pass it to API calls. 
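+/*
+   Usage sketch (illustrative): local dimming can be requested at swapchain
+   creation via VkSwapchainDisplayNativeHdrCreateInfoAMD or toggled later when
+   VkDisplayNativeHdrSurfaceCapabilitiesAMD::localDimmingSupport is VK_TRUE.
+   `device` and `swapChain` are assumed handles.
+
+       vkSetLocalDimmingAMD(device, swapChain, VK_TRUE);
+*/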
+#define VK_AMD_display_native_hdr 1 +#define VK_AMD_DISPLAY_NATIVE_HDR_SPEC_VERSION 1 +#define VK_AMD_DISPLAY_NATIVE_HDR_EXTENSION_NAME "VK_AMD_display_native_hdr" +typedef struct VkDisplayNativeHdrSurfaceCapabilitiesAMD { + VkStructureType sType; + void* pNext; + VkBool32 localDimmingSupport; +} VkDisplayNativeHdrSurfaceCapabilitiesAMD; + +typedef struct VkSwapchainDisplayNativeHdrCreateInfoAMD { + VkStructureType sType; + const void* pNext; + VkBool32 localDimmingEnable; +} VkSwapchainDisplayNativeHdrCreateInfoAMD; + +typedef void (VKAPI_PTR *PFN_vkSetLocalDimmingAMD)(VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkSetLocalDimmingAMD( + VkDevice device, + VkSwapchainKHR swapChain, + VkBool32 localDimmingEnable); +#endif + + +// VK_EXT_fragment_density_map is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_fragment_density_map 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_SPEC_VERSION 2 +#define VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME "VK_EXT_fragment_density_map" +typedef struct VkPhysicalDeviceFragmentDensityMapFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 fragmentDensityMap; + VkBool32 fragmentDensityMapDynamic; + VkBool32 fragmentDensityMapNonSubsampledImages; +} VkPhysicalDeviceFragmentDensityMapFeaturesEXT; + +typedef struct VkPhysicalDeviceFragmentDensityMapPropertiesEXT { + VkStructureType sType; + void* pNext; + VkExtent2D minFragmentDensityTexelSize; + VkExtent2D maxFragmentDensityTexelSize; + VkBool32 fragmentDensityInvocations; +} VkPhysicalDeviceFragmentDensityMapPropertiesEXT; + +typedef struct VkRenderPassFragmentDensityMapCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkAttachmentReference fragmentDensityMapAttachment; +} VkRenderPassFragmentDensityMapCreateInfoEXT; + +typedef struct VkRenderingFragmentDensityMapAttachmentInfoEXT { + VkStructureType sType; + const void* pNext; + VkImageView imageView; + VkImageLayout imageLayout; +} VkRenderingFragmentDensityMapAttachmentInfoEXT; + + + +// VK_EXT_scalar_block_layout is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_scalar_block_layout 1 +#define VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION 1 +#define VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME "VK_EXT_scalar_block_layout" +typedef VkPhysicalDeviceScalarBlockLayoutFeatures VkPhysicalDeviceScalarBlockLayoutFeaturesEXT; + + + +// VK_GOOGLE_hlsl_functionality1 is a preprocessor guard. Do not pass it to API calls. +#define VK_GOOGLE_hlsl_functionality1 1 +#define VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION 1 +#define VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME "VK_GOOGLE_hlsl_functionality1" +// VK_GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION is a deprecated alias +#define VK_GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION +// VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME is a deprecated alias +#define VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME + + +// VK_GOOGLE_decorate_string is a preprocessor guard. Do not pass it to API calls. +#define VK_GOOGLE_decorate_string 1 +#define VK_GOOGLE_DECORATE_STRING_SPEC_VERSION 1 +#define VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME "VK_GOOGLE_decorate_string" + + +// VK_EXT_subgroup_size_control is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_subgroup_size_control 1 +#define VK_EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION 2 +#define VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME "VK_EXT_subgroup_size_control" +typedef VkPhysicalDeviceSubgroupSizeControlFeatures VkPhysicalDeviceSubgroupSizeControlFeaturesEXT; + +typedef VkPhysicalDeviceSubgroupSizeControlProperties VkPhysicalDeviceSubgroupSizeControlPropertiesEXT; + +typedef VkPipelineShaderStageRequiredSubgroupSizeCreateInfo VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; + + + +// VK_AMD_shader_core_properties2 is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_core_properties2 1 +#define VK_AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION 1 +#define VK_AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME "VK_AMD_shader_core_properties2" + +typedef enum VkShaderCorePropertiesFlagBitsAMD { + VK_SHADER_CORE_PROPERTIES_FLAG_BITS_MAX_ENUM_AMD = 0x7FFFFFFF +} VkShaderCorePropertiesFlagBitsAMD; +typedef VkFlags VkShaderCorePropertiesFlagsAMD; +typedef struct VkPhysicalDeviceShaderCoreProperties2AMD { + VkStructureType sType; + void* pNext; + VkShaderCorePropertiesFlagsAMD shaderCoreFeatures; + uint32_t activeComputeUnitCount; +} VkPhysicalDeviceShaderCoreProperties2AMD; + + + +// VK_AMD_device_coherent_memory is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_device_coherent_memory 1 +#define VK_AMD_DEVICE_COHERENT_MEMORY_SPEC_VERSION 1 +#define VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME "VK_AMD_device_coherent_memory" +typedef struct VkPhysicalDeviceCoherentMemoryFeaturesAMD { + VkStructureType sType; + void* pNext; + VkBool32 deviceCoherentMemory; +} VkPhysicalDeviceCoherentMemoryFeaturesAMD; + + + +// VK_EXT_shader_image_atomic_int64 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_image_atomic_int64 1 +#define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_SPEC_VERSION 1 +#define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_EXTENSION_NAME "VK_EXT_shader_image_atomic_int64" +typedef struct VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderImageInt64Atomics; + VkBool32 sparseImageInt64Atomics; +} VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + + + +// VK_EXT_memory_budget is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_memory_budget 1 +#define VK_EXT_MEMORY_BUDGET_SPEC_VERSION 1 +#define VK_EXT_MEMORY_BUDGET_EXTENSION_NAME "VK_EXT_memory_budget" +typedef struct VkPhysicalDeviceMemoryBudgetPropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize heapBudget[VK_MAX_MEMORY_HEAPS]; + VkDeviceSize heapUsage[VK_MAX_MEMORY_HEAPS]; +} VkPhysicalDeviceMemoryBudgetPropertiesEXT; + + + +// VK_EXT_memory_priority is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_memory_priority 1 +#define VK_EXT_MEMORY_PRIORITY_SPEC_VERSION 1 +#define VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME "VK_EXT_memory_priority" +typedef struct VkPhysicalDeviceMemoryPriorityFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 memoryPriority; +} VkPhysicalDeviceMemoryPriorityFeaturesEXT; + +typedef struct VkMemoryPriorityAllocateInfoEXT { + VkStructureType sType; + const void* pNext; + float priority; +} VkMemoryPriorityAllocateInfoEXT; + + + +// VK_NV_dedicated_allocation_image_aliasing is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_dedicated_allocation_image_aliasing 1 +#define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_SPEC_VERSION 1 +#define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_EXTENSION_NAME "VK_NV_dedicated_allocation_image_aliasing" +typedef struct VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 dedicatedAllocationImageAliasing; +} VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + + + +// VK_EXT_buffer_device_address is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_buffer_device_address 1 +#define VK_EXT_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 2 +#define VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_EXT_buffer_device_address" +typedef struct VkPhysicalDeviceBufferDeviceAddressFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 bufferDeviceAddress; + VkBool32 bufferDeviceAddressCaptureReplay; + VkBool32 bufferDeviceAddressMultiDevice; +} VkPhysicalDeviceBufferDeviceAddressFeaturesEXT; + +typedef VkPhysicalDeviceBufferDeviceAddressFeaturesEXT VkPhysicalDeviceBufferAddressFeaturesEXT; + +typedef VkBufferDeviceAddressInfo VkBufferDeviceAddressInfoEXT; + +typedef struct VkBufferDeviceAddressCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceAddress deviceAddress; +} VkBufferDeviceAddressCreateInfoEXT; + +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressEXT)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressEXT( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); +#endif + + +// VK_EXT_tooling_info is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_tooling_info 1 +#define VK_EXT_TOOLING_INFO_SPEC_VERSION 1 +#define VK_EXT_TOOLING_INFO_EXTENSION_NAME "VK_EXT_tooling_info" +typedef VkToolPurposeFlagBits VkToolPurposeFlagBitsEXT; + +typedef VkToolPurposeFlags VkToolPurposeFlagsEXT; + +typedef VkPhysicalDeviceToolProperties VkPhysicalDeviceToolPropertiesEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceToolPropertiesEXT)(VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolProperties* pToolProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceToolPropertiesEXT( + VkPhysicalDevice physicalDevice, + uint32_t* pToolCount, + VkPhysicalDeviceToolProperties* pToolProperties); +#endif + + +// VK_EXT_separate_stencil_usage is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_separate_stencil_usage 1 +#define VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION 1 +#define VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME "VK_EXT_separate_stencil_usage" +typedef VkImageStencilUsageCreateInfo VkImageStencilUsageCreateInfoEXT; + + + +// VK_EXT_validation_features is a preprocessor guard. Do not pass it to API calls. 
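+/*
+   Usage sketch (illustrative): chain VkValidationFeaturesEXT into
+   VkInstanceCreateInfo::pNext to enable or disable individual validation
+   features. `createInfo` is an assumed VkInstanceCreateInfo.
+
+       VkValidationFeatureEnableEXT enables[] = {
+           VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT };
+       VkValidationFeaturesEXT features = { VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT };
+       features.enabledValidationFeatureCount = 1;
+       features.pEnabledValidationFeatures    = enables;
+       createInfo.pNext = &features;  // then pass &createInfo to vkCreateInstance
+*/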
+#define VK_EXT_validation_features 1 +#define VK_EXT_VALIDATION_FEATURES_SPEC_VERSION 6 +#define VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME "VK_EXT_validation_features" + +typedef enum VkValidationFeatureEnableEXT { + VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT = 0, + VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT = 1, + VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT = 2, + VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT = 3, + VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT = 4, + VK_VALIDATION_FEATURE_ENABLE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationFeatureEnableEXT; + +typedef enum VkValidationFeatureDisableEXT { + VK_VALIDATION_FEATURE_DISABLE_ALL_EXT = 0, + VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT = 1, + VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT = 2, + VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT = 3, + VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT = 4, + VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT = 5, + VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT = 6, + VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT = 7, + VK_VALIDATION_FEATURE_DISABLE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationFeatureDisableEXT; +typedef struct VkValidationFeaturesEXT { + VkStructureType sType; + const void* pNext; + uint32_t enabledValidationFeatureCount; + const VkValidationFeatureEnableEXT* pEnabledValidationFeatures; + uint32_t disabledValidationFeatureCount; + const VkValidationFeatureDisableEXT* pDisabledValidationFeatures; +} VkValidationFeaturesEXT; + + + +// VK_NV_cooperative_matrix is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_cooperative_matrix 1 +#define VK_NV_COOPERATIVE_MATRIX_SPEC_VERSION 1 +#define VK_NV_COOPERATIVE_MATRIX_EXTENSION_NAME "VK_NV_cooperative_matrix" +typedef VkComponentTypeKHR VkComponentTypeNV; + +typedef VkScopeKHR VkScopeNV; + +typedef struct VkCooperativeMatrixPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t MSize; + uint32_t NSize; + uint32_t KSize; + VkComponentTypeNV AType; + VkComponentTypeNV BType; + VkComponentTypeNV CType; + VkComponentTypeNV DType; + VkScopeNV scope; +} VkCooperativeMatrixPropertiesNV; + +typedef struct VkPhysicalDeviceCooperativeMatrixFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 cooperativeMatrix; + VkBool32 cooperativeMatrixRobustBufferAccess; +} VkPhysicalDeviceCooperativeMatrixFeaturesNV; + +typedef struct VkPhysicalDeviceCooperativeMatrixPropertiesNV { + VkStructureType sType; + void* pNext; + VkShaderStageFlags cooperativeMatrixSupportedStages; +} VkPhysicalDeviceCooperativeMatrixPropertiesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkCooperativeMatrixPropertiesNV* pProperties); +#endif + + +// VK_NV_coverage_reduction_mode is a preprocessor guard. Do not pass it to API calls. 
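+/*
+   Usage sketch (illustrative): the supported mixed-sample combinations follow
+   the usual two-call enumeration pattern. `physicalDevice` is an assumed handle
+   and each element's sType must be initialized before the second call.
+
+       uint32_t count = 0;
+       vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+           physicalDevice, &count, NULL);
+       // Allocate `count` VkFramebufferMixedSamplesCombinationNV entries with
+       // sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV,
+       // then call again with the array to fill them in.
+*/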
+#define VK_NV_coverage_reduction_mode 1 +#define VK_NV_COVERAGE_REDUCTION_MODE_SPEC_VERSION 1 +#define VK_NV_COVERAGE_REDUCTION_MODE_EXTENSION_NAME "VK_NV_coverage_reduction_mode" + +typedef enum VkCoverageReductionModeNV { + VK_COVERAGE_REDUCTION_MODE_MERGE_NV = 0, + VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV = 1, + VK_COVERAGE_REDUCTION_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkCoverageReductionModeNV; +typedef VkFlags VkPipelineCoverageReductionStateCreateFlagsNV; +typedef struct VkPhysicalDeviceCoverageReductionModeFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 coverageReductionMode; +} VkPhysicalDeviceCoverageReductionModeFeaturesNV; + +typedef struct VkPipelineCoverageReductionStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageReductionStateCreateFlagsNV flags; + VkCoverageReductionModeNV coverageReductionMode; +} VkPipelineCoverageReductionStateCreateInfoNV; + +typedef struct VkFramebufferMixedSamplesCombinationNV { + VkStructureType sType; + void* pNext; + VkCoverageReductionModeNV coverageReductionMode; + VkSampleCountFlagBits rasterizationSamples; + VkSampleCountFlags depthStencilSamples; + VkSampleCountFlags colorSamples; +} VkFramebufferMixedSamplesCombinationNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV)(VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + VkPhysicalDevice physicalDevice, + uint32_t* pCombinationCount, + VkFramebufferMixedSamplesCombinationNV* pCombinations); +#endif + + +// VK_EXT_fragment_shader_interlock is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_fragment_shader_interlock 1 +#define VK_EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION 1 +#define VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME "VK_EXT_fragment_shader_interlock" +typedef struct VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 fragmentShaderSampleInterlock; + VkBool32 fragmentShaderPixelInterlock; + VkBool32 fragmentShaderShadingRateInterlock; +} VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT; + + + +// VK_EXT_ycbcr_image_arrays is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_ycbcr_image_arrays 1 +#define VK_EXT_YCBCR_IMAGE_ARRAYS_SPEC_VERSION 1 +#define VK_EXT_YCBCR_IMAGE_ARRAYS_EXTENSION_NAME "VK_EXT_ycbcr_image_arrays" +typedef struct VkPhysicalDeviceYcbcrImageArraysFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 ycbcrImageArrays; +} VkPhysicalDeviceYcbcrImageArraysFeaturesEXT; + + + +// VK_EXT_provoking_vertex is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_provoking_vertex 1 +#define VK_EXT_PROVOKING_VERTEX_SPEC_VERSION 1 +#define VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME "VK_EXT_provoking_vertex" + +typedef enum VkProvokingVertexModeEXT { + VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT = 0, + VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT = 1, + VK_PROVOKING_VERTEX_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkProvokingVertexModeEXT; +typedef struct VkPhysicalDeviceProvokingVertexFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 provokingVertexLast; + VkBool32 transformFeedbackPreservesProvokingVertex; +} VkPhysicalDeviceProvokingVertexFeaturesEXT; + +typedef struct VkPhysicalDeviceProvokingVertexPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 provokingVertexModePerPipeline; + VkBool32 transformFeedbackPreservesTriangleFanProvokingVertex; +} VkPhysicalDeviceProvokingVertexPropertiesEXT; + +typedef struct VkPipelineRasterizationProvokingVertexStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkProvokingVertexModeEXT provokingVertexMode; +} VkPipelineRasterizationProvokingVertexStateCreateInfoEXT; + + + +// VK_EXT_headless_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_headless_surface 1 +#define VK_EXT_HEADLESS_SURFACE_SPEC_VERSION 1 +#define VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME "VK_EXT_headless_surface" +typedef VkFlags VkHeadlessSurfaceCreateFlagsEXT; +typedef struct VkHeadlessSurfaceCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkHeadlessSurfaceCreateFlagsEXT flags; +} VkHeadlessSurfaceCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateHeadlessSurfaceEXT)(VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateHeadlessSurfaceEXT( + VkInstance instance, + const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +// VK_EXT_line_rasterization is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_line_rasterization 1 +#define VK_EXT_LINE_RASTERIZATION_SPEC_VERSION 1 +#define VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME "VK_EXT_line_rasterization" +typedef VkLineRasterizationMode VkLineRasterizationModeEXT; + +typedef VkPhysicalDeviceLineRasterizationFeatures VkPhysicalDeviceLineRasterizationFeaturesEXT; + +typedef VkPhysicalDeviceLineRasterizationProperties VkPhysicalDeviceLineRasterizationPropertiesEXT; + +typedef VkPipelineRasterizationLineStateCreateInfo VkPipelineRasterizationLineStateCreateInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleEXT)(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEXT( + VkCommandBuffer commandBuffer, + uint32_t lineStippleFactor, + uint16_t lineStipplePattern); +#endif + + +// VK_EXT_shader_atomic_float is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_shader_atomic_float 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME "VK_EXT_shader_atomic_float" +typedef struct VkPhysicalDeviceShaderAtomicFloatFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderBufferFloat32Atomics; + VkBool32 shaderBufferFloat32AtomicAdd; + VkBool32 shaderBufferFloat64Atomics; + VkBool32 shaderBufferFloat64AtomicAdd; + VkBool32 shaderSharedFloat32Atomics; + VkBool32 shaderSharedFloat32AtomicAdd; + VkBool32 shaderSharedFloat64Atomics; + VkBool32 shaderSharedFloat64AtomicAdd; + VkBool32 shaderImageFloat32Atomics; + VkBool32 shaderImageFloat32AtomicAdd; + VkBool32 sparseImageFloat32Atomics; + VkBool32 sparseImageFloat32AtomicAdd; +} VkPhysicalDeviceShaderAtomicFloatFeaturesEXT; + + + +// VK_EXT_host_query_reset is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_host_query_reset 1 +#define VK_EXT_HOST_QUERY_RESET_SPEC_VERSION 1 +#define VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME "VK_EXT_host_query_reset" +typedef VkPhysicalDeviceHostQueryResetFeatures VkPhysicalDeviceHostQueryResetFeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkResetQueryPoolEXT)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkResetQueryPoolEXT( + VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount); +#endif + + +// VK_EXT_index_type_uint8 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_index_type_uint8 1 +#define VK_EXT_INDEX_TYPE_UINT8_SPEC_VERSION 1 +#define VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME "VK_EXT_index_type_uint8" +typedef VkPhysicalDeviceIndexTypeUint8Features VkPhysicalDeviceIndexTypeUint8FeaturesEXT; + + + +// VK_EXT_extended_dynamic_state is a preprocessor guard. Do not pass it to API calls. 
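+/*
+   Usage sketch (illustrative): state the bound pipeline declared as dynamic
+   (VK_DYNAMIC_STATE_CULL_MODE_EXT, VK_DYNAMIC_STATE_FRONT_FACE_EXT, ...) is
+   set on the command buffer instead. `commandBuffer` is an assumed handle.
+
+       vkCmdSetCullModeEXT(commandBuffer, VK_CULL_MODE_BACK_BIT);
+       vkCmdSetFrontFaceEXT(commandBuffer, VK_FRONT_FACE_COUNTER_CLOCKWISE);
+       vkCmdSetPrimitiveTopologyEXT(commandBuffer, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST);
+*/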
+#define VK_EXT_extended_dynamic_state 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_SPEC_VERSION 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_extended_dynamic_state" +typedef struct VkPhysicalDeviceExtendedDynamicStateFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 extendedDynamicState; +} VkPhysicalDeviceExtendedDynamicStateFeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetCullModeEXT)(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetFrontFaceEXT)(VkCommandBuffer commandBuffer, VkFrontFace frontFace); +typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveTopologyEXT)(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWithCountEXT)(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports); +typedef void (VKAPI_PTR *PFN_vkCmdSetScissorWithCountEXT)(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors); +typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers2EXT)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthWriteEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthCompareOpEXT)(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBoundsTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilOpEXT)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetCullModeEXT( + VkCommandBuffer commandBuffer, + VkCullModeFlags cullMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetFrontFaceEXT( + VkCommandBuffer commandBuffer, + VkFrontFace frontFace); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveTopologyEXT( + VkCommandBuffer commandBuffer, + VkPrimitiveTopology primitiveTopology); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWithCountEXT( + VkCommandBuffer commandBuffer, + uint32_t viewportCount, + const VkViewport* pViewports); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetScissorWithCountEXT( + VkCommandBuffer commandBuffer, + uint32_t scissorCount, + const VkRect2D* pScissors); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers2EXT( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets, + const VkDeviceSize* pSizes, + const VkDeviceSize* pStrides); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthWriteEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthWriteEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthCompareOpEXT( + VkCommandBuffer commandBuffer, + VkCompareOp depthCompareOp); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBoundsTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthBoundsTestEnable); + +VKAPI_ATTR void VKAPI_CALL 
vkCmdSetStencilTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 stencilTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOpEXT( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp); +#endif + + +// VK_EXT_host_image_copy is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_host_image_copy 1 +#define VK_EXT_HOST_IMAGE_COPY_SPEC_VERSION 1 +#define VK_EXT_HOST_IMAGE_COPY_EXTENSION_NAME "VK_EXT_host_image_copy" +typedef VkHostImageCopyFlagBits VkHostImageCopyFlagBitsEXT; + +typedef VkHostImageCopyFlags VkHostImageCopyFlagsEXT; + +typedef VkPhysicalDeviceHostImageCopyFeatures VkPhysicalDeviceHostImageCopyFeaturesEXT; + +typedef VkPhysicalDeviceHostImageCopyProperties VkPhysicalDeviceHostImageCopyPropertiesEXT; + +typedef VkMemoryToImageCopy VkMemoryToImageCopyEXT; + +typedef VkImageToMemoryCopy VkImageToMemoryCopyEXT; + +typedef VkCopyMemoryToImageInfo VkCopyMemoryToImageInfoEXT; + +typedef VkCopyImageToMemoryInfo VkCopyImageToMemoryInfoEXT; + +typedef VkCopyImageToImageInfo VkCopyImageToImageInfoEXT; + +typedef VkHostImageLayoutTransitionInfo VkHostImageLayoutTransitionInfoEXT; + +typedef VkSubresourceHostMemcpySize VkSubresourceHostMemcpySizeEXT; + +typedef VkHostImageCopyDevicePerformanceQuery VkHostImageCopyDevicePerformanceQueryEXT; + +typedef VkSubresourceLayout2 VkSubresourceLayout2EXT; + +typedef VkImageSubresource2 VkImageSubresource2EXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToImageEXT)(VkDevice device, const VkCopyMemoryToImageInfo* pCopyMemoryToImageInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToMemoryEXT)(VkDevice device, const VkCopyImageToMemoryInfo* pCopyImageToMemoryInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToImageEXT)(VkDevice device, const VkCopyImageToImageInfo* pCopyImageToImageInfo); +typedef VkResult (VKAPI_PTR *PFN_vkTransitionImageLayoutEXT)(VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfo* pTransitions); +typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2EXT)(VkDevice device, VkImage image, const VkImageSubresource2* pSubresource, VkSubresourceLayout2* pLayout); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToImageEXT( + VkDevice device, + const VkCopyMemoryToImageInfo* pCopyMemoryToImageInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToMemoryEXT( + VkDevice device, + const VkCopyImageToMemoryInfo* pCopyImageToMemoryInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToImageEXT( + VkDevice device, + const VkCopyImageToImageInfo* pCopyImageToImageInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkTransitionImageLayoutEXT( + VkDevice device, + uint32_t transitionCount, + const VkHostImageLayoutTransitionInfo* pTransitions); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2EXT( + VkDevice device, + VkImage image, + const VkImageSubresource2* pSubresource, + VkSubresourceLayout2* pLayout); +#endif + + +// VK_EXT_map_memory_placed is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_map_memory_placed 1 +#define VK_EXT_MAP_MEMORY_PLACED_SPEC_VERSION 1 +#define VK_EXT_MAP_MEMORY_PLACED_EXTENSION_NAME "VK_EXT_map_memory_placed" +typedef struct VkPhysicalDeviceMapMemoryPlacedFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 memoryMapPlaced; + VkBool32 memoryMapRangePlaced; + VkBool32 memoryUnmapReserve; +} VkPhysicalDeviceMapMemoryPlacedFeaturesEXT; + +typedef struct VkPhysicalDeviceMapMemoryPlacedPropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize minPlacedMemoryMapAlignment; +} VkPhysicalDeviceMapMemoryPlacedPropertiesEXT; + +typedef struct VkMemoryMapPlacedInfoEXT { + VkStructureType sType; + const void* pNext; + void* pPlacedAddress; +} VkMemoryMapPlacedInfoEXT; + + + +// VK_EXT_shader_atomic_float2 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_atomic_float2 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_2_SPEC_VERSION 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_2_EXTENSION_NAME "VK_EXT_shader_atomic_float2" +typedef struct VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderBufferFloat16Atomics; + VkBool32 shaderBufferFloat16AtomicAdd; + VkBool32 shaderBufferFloat16AtomicMinMax; + VkBool32 shaderBufferFloat32AtomicMinMax; + VkBool32 shaderBufferFloat64AtomicMinMax; + VkBool32 shaderSharedFloat16Atomics; + VkBool32 shaderSharedFloat16AtomicAdd; + VkBool32 shaderSharedFloat16AtomicMinMax; + VkBool32 shaderSharedFloat32AtomicMinMax; + VkBool32 shaderSharedFloat64AtomicMinMax; + VkBool32 shaderImageFloat32AtomicMinMax; + VkBool32 sparseImageFloat32AtomicMinMax; +} VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT; + + + +// VK_EXT_surface_maintenance1 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_surface_maintenance1 1 +#define VK_EXT_SURFACE_MAINTENANCE_1_SPEC_VERSION 1 +#define VK_EXT_SURFACE_MAINTENANCE_1_EXTENSION_NAME "VK_EXT_surface_maintenance1" + +typedef enum VkPresentScalingFlagBitsEXT { + VK_PRESENT_SCALING_ONE_TO_ONE_BIT_EXT = 0x00000001, + VK_PRESENT_SCALING_ASPECT_RATIO_STRETCH_BIT_EXT = 0x00000002, + VK_PRESENT_SCALING_STRETCH_BIT_EXT = 0x00000004, + VK_PRESENT_SCALING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkPresentScalingFlagBitsEXT; +typedef VkFlags VkPresentScalingFlagsEXT; + +typedef enum VkPresentGravityFlagBitsEXT { + VK_PRESENT_GRAVITY_MIN_BIT_EXT = 0x00000001, + VK_PRESENT_GRAVITY_MAX_BIT_EXT = 0x00000002, + VK_PRESENT_GRAVITY_CENTERED_BIT_EXT = 0x00000004, + VK_PRESENT_GRAVITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkPresentGravityFlagBitsEXT; +typedef VkFlags VkPresentGravityFlagsEXT; +typedef struct VkSurfacePresentModeEXT { + VkStructureType sType; + void* pNext; + VkPresentModeKHR presentMode; +} VkSurfacePresentModeEXT; + +typedef struct VkSurfacePresentScalingCapabilitiesEXT { + VkStructureType sType; + void* pNext; + VkPresentScalingFlagsEXT supportedPresentScaling; + VkPresentGravityFlagsEXT supportedPresentGravityX; + VkPresentGravityFlagsEXT supportedPresentGravityY; + VkExtent2D minScaledImageExtent; + VkExtent2D maxScaledImageExtent; +} VkSurfacePresentScalingCapabilitiesEXT; + +typedef struct VkSurfacePresentModeCompatibilityEXT { + VkStructureType sType; + void* pNext; + uint32_t presentModeCount; + VkPresentModeKHR* pPresentModes; +} VkSurfacePresentModeCompatibilityEXT; + + + +// VK_EXT_swapchain_maintenance1 is a preprocessor guard. Do not pass it to API calls. 
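+/*
+   Usage sketch (illustrative): images that were acquired but will not be
+   presented can be returned to the swapchain. `device`, `swapchain` and
+   `imageIndex` are assumptions.
+
+       VkReleaseSwapchainImagesInfoEXT releaseInfo = {
+           VK_STRUCTURE_TYPE_RELEASE_SWAPCHAIN_IMAGES_INFO_EXT };
+       releaseInfo.swapchain       = swapchain;
+       releaseInfo.imageIndexCount = 1;
+       releaseInfo.pImageIndices   = &imageIndex;
+       vkReleaseSwapchainImagesEXT(device, &releaseInfo);
+*/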
+#define VK_EXT_swapchain_maintenance1 1 +#define VK_EXT_SWAPCHAIN_MAINTENANCE_1_SPEC_VERSION 1 +#define VK_EXT_SWAPCHAIN_MAINTENANCE_1_EXTENSION_NAME "VK_EXT_swapchain_maintenance1" +typedef struct VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 swapchainMaintenance1; +} VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT; + +typedef struct VkSwapchainPresentFenceInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkFence* pFences; +} VkSwapchainPresentFenceInfoEXT; + +typedef struct VkSwapchainPresentModesCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t presentModeCount; + const VkPresentModeKHR* pPresentModes; +} VkSwapchainPresentModesCreateInfoEXT; + +typedef struct VkSwapchainPresentModeInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkPresentModeKHR* pPresentModes; +} VkSwapchainPresentModeInfoEXT; + +typedef struct VkSwapchainPresentScalingCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPresentScalingFlagsEXT scalingBehavior; + VkPresentGravityFlagsEXT presentGravityX; + VkPresentGravityFlagsEXT presentGravityY; +} VkSwapchainPresentScalingCreateInfoEXT; + +typedef struct VkReleaseSwapchainImagesInfoEXT { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; + uint32_t imageIndexCount; + const uint32_t* pImageIndices; +} VkReleaseSwapchainImagesInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkReleaseSwapchainImagesEXT)(VkDevice device, const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkReleaseSwapchainImagesEXT( + VkDevice device, + const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo); +#endif + + +// VK_EXT_shader_demote_to_helper_invocation is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_demote_to_helper_invocation 1 +#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION 1 +#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME "VK_EXT_shader_demote_to_helper_invocation" +typedef VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; + + + +// VK_NV_device_generated_commands is a preprocessor guard. Do not pass it to API calls. 
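+/*
+   Usage sketch (illustrative) of the typical flow with this extension:
+   (1) create a VkIndirectCommandsLayoutNV with vkCreateIndirectCommandsLayoutNV,
+   (2) size the preprocess buffer with vkGetGeneratedCommandsMemoryRequirementsNV,
+   (3) optionally record vkCmdPreprocessGeneratedCommandsNV (required when the
+       layout uses VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV), and
+   (4) record vkCmdExecuteGeneratedCommandsNV with the same
+       VkGeneratedCommandsInfoNV, passing isPreprocessed accordingly.
+*/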
+#define VK_NV_device_generated_commands 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNV) +#define VK_NV_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 3 +#define VK_NV_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NV_device_generated_commands" + +typedef enum VkIndirectCommandsTokenTypeNV { + VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV = 0, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV = 1, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV = 2, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV = 3, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV = 4, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV = 5, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV = 6, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV = 7, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV = 1000328000, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NV = 1000428003, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NV = 1000428004, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkIndirectCommandsTokenTypeNV; + +typedef enum VkIndirectStateFlagBitsNV { + VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV = 0x00000001, + VK_INDIRECT_STATE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkIndirectStateFlagBitsNV; +typedef VkFlags VkIndirectStateFlagsNV; + +typedef enum VkIndirectCommandsLayoutUsageFlagBitsNV { + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV = 0x00000001, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV = 0x00000002, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV = 0x00000004, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkIndirectCommandsLayoutUsageFlagBitsNV; +typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNV; +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t maxGraphicsShaderGroupCount; + uint32_t maxIndirectSequenceCount; + uint32_t maxIndirectCommandsTokenCount; + uint32_t maxIndirectCommandsStreamCount; + uint32_t maxIndirectCommandsTokenOffset; + uint32_t maxIndirectCommandsStreamStride; + uint32_t minSequencesCountBufferOffsetAlignment; + uint32_t minSequencesIndexBufferOffsetAlignment; + uint32_t minIndirectCommandsBufferOffsetAlignment; +} VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 deviceGeneratedCommands; +} VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + +typedef struct VkGraphicsShaderGroupCreateInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + const VkPipelineVertexInputStateCreateInfo* pVertexInputState; + const VkPipelineTessellationStateCreateInfo* pTessellationState; +} VkGraphicsShaderGroupCreateInfoNV; + +typedef struct VkGraphicsPipelineShaderGroupsCreateInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t groupCount; + const VkGraphicsShaderGroupCreateInfoNV* pGroups; + uint32_t pipelineCount; + const VkPipeline* pPipelines; +} VkGraphicsPipelineShaderGroupsCreateInfoNV; + +typedef struct VkBindShaderGroupIndirectCommandNV { + uint32_t groupIndex; +} VkBindShaderGroupIndirectCommandNV; + +typedef struct VkBindIndexBufferIndirectCommandNV { + VkDeviceAddress bufferAddress; + uint32_t size; + VkIndexType indexType; +} VkBindIndexBufferIndirectCommandNV; + +typedef struct VkBindVertexBufferIndirectCommandNV { + VkDeviceAddress bufferAddress; + uint32_t size; + uint32_t stride; +} VkBindVertexBufferIndirectCommandNV; + 
+typedef struct VkSetStateFlagsIndirectCommandNV { + uint32_t data; +} VkSetStateFlagsIndirectCommandNV; + +typedef struct VkIndirectCommandsStreamNV { + VkBuffer buffer; + VkDeviceSize offset; +} VkIndirectCommandsStreamNV; + +typedef struct VkIndirectCommandsLayoutTokenNV { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsTokenTypeNV tokenType; + uint32_t stream; + uint32_t offset; + uint32_t vertexBindingUnit; + VkBool32 vertexDynamicStride; + VkPipelineLayout pushconstantPipelineLayout; + VkShaderStageFlags pushconstantShaderStageFlags; + uint32_t pushconstantOffset; + uint32_t pushconstantSize; + VkIndirectStateFlagsNV indirectStateFlags; + uint32_t indexTypeCount; + const VkIndexType* pIndexTypes; + const uint32_t* pIndexTypeValues; +} VkIndirectCommandsLayoutTokenNV; + +typedef struct VkIndirectCommandsLayoutCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsLayoutUsageFlagsNV flags; + VkPipelineBindPoint pipelineBindPoint; + uint32_t tokenCount; + const VkIndirectCommandsLayoutTokenNV* pTokens; + uint32_t streamCount; + const uint32_t* pStreamStrides; +} VkIndirectCommandsLayoutCreateInfoNV; + +typedef struct VkGeneratedCommandsInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineBindPoint pipelineBindPoint; + VkPipeline pipeline; + VkIndirectCommandsLayoutNV indirectCommandsLayout; + uint32_t streamCount; + const VkIndirectCommandsStreamNV* pStreams; + uint32_t sequencesCount; + VkBuffer preprocessBuffer; + VkDeviceSize preprocessOffset; + VkDeviceSize preprocessSize; + VkBuffer sequencesCountBuffer; + VkDeviceSize sequencesCountOffset; + VkBuffer sequencesIndexBuffer; + VkDeviceSize sequencesIndexOffset; +} VkGeneratedCommandsInfoNV; + +typedef struct VkGeneratedCommandsMemoryRequirementsInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineBindPoint pipelineBindPoint; + VkPipeline pipeline; + VkIndirectCommandsLayoutNV indirectCommandsLayout; + uint32_t maxSequencesCount; +} VkGeneratedCommandsMemoryRequirementsInfoNV; + +typedef void (VKAPI_PTR *PFN_vkGetGeneratedCommandsMemoryRequirementsNV)(VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkCmdPreprocessGeneratedCommandsNV)(VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdExecuteGeneratedCommandsNV)(VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBindPipelineShaderGroupNV)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex); +typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNV)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNV* pIndirectCommandsLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNV)(VkDevice device, VkIndirectCommandsLayoutNV indirectCommandsLayout, const VkAllocationCallbacks* pAllocator); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetGeneratedCommandsMemoryRequirementsNV( + VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkCmdPreprocessGeneratedCommandsNV( + VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); + +VKAPI_ATTR 
void VKAPI_CALL vkCmdExecuteGeneratedCommandsNV( + VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindPipelineShaderGroupNV( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline, + uint32_t groupIndex); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNV( + VkDevice device, + const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkIndirectCommandsLayoutNV* pIndirectCommandsLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNV( + VkDevice device, + VkIndirectCommandsLayoutNV indirectCommandsLayout, + const VkAllocationCallbacks* pAllocator); +#endif + + +// VK_NV_inherited_viewport_scissor is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_inherited_viewport_scissor 1 +#define VK_NV_INHERITED_VIEWPORT_SCISSOR_SPEC_VERSION 1 +#define VK_NV_INHERITED_VIEWPORT_SCISSOR_EXTENSION_NAME "VK_NV_inherited_viewport_scissor" +typedef struct VkPhysicalDeviceInheritedViewportScissorFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 inheritedViewportScissor2D; +} VkPhysicalDeviceInheritedViewportScissorFeaturesNV; + +typedef struct VkCommandBufferInheritanceViewportScissorInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 viewportScissor2D; + uint32_t viewportDepthCount; + const VkViewport* pViewportDepths; +} VkCommandBufferInheritanceViewportScissorInfoNV; + + + +// VK_EXT_texel_buffer_alignment is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_texel_buffer_alignment 1 +#define VK_EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION 1 +#define VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME "VK_EXT_texel_buffer_alignment" +typedef struct VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 texelBufferAlignment; +} VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT; + +typedef VkPhysicalDeviceTexelBufferAlignmentProperties VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT; + + + +// VK_QCOM_render_pass_transform is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_render_pass_transform 1 +#define VK_QCOM_RENDER_PASS_TRANSFORM_SPEC_VERSION 4 +#define VK_QCOM_RENDER_PASS_TRANSFORM_EXTENSION_NAME "VK_QCOM_render_pass_transform" +typedef struct VkRenderPassTransformBeginInfoQCOM { + VkStructureType sType; + void* pNext; + VkSurfaceTransformFlagBitsKHR transform; +} VkRenderPassTransformBeginInfoQCOM; + +typedef struct VkCommandBufferInheritanceRenderPassTransformInfoQCOM { + VkStructureType sType; + void* pNext; + VkSurfaceTransformFlagBitsKHR transform; + VkRect2D renderArea; +} VkCommandBufferInheritanceRenderPassTransformInfoQCOM; + + + +// VK_EXT_depth_bias_control is a preprocessor guard. Do not pass it to API calls. 
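Putting the device-generated-commands entry points above together, a rough sketch of recording an execution; memory allocation for the preprocess buffer and the optional explicit vkCmdPreprocessGeneratedCommandsNV step are elided, and all handles are assumed to have been created earlier with the NV entry points loaded via vkGetDeviceProcAddr:

#include <vulkan/vulkan.h>

static void run_generated(VkDevice device, VkCommandBuffer cmd,
                          VkPipeline pipeline, VkIndirectCommandsLayoutNV layout,
                          VkIndirectCommandsStreamNV stream, uint32_t maxSequences,
                          VkBuffer preprocessBuf, VkDeviceSize preprocessSize) {
    /* Query how much scratch ("preprocess") memory the implementation needs. */
    VkGeneratedCommandsMemoryRequirementsInfoNV reqInfo = {
        .sType = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV,
        .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
        .pipeline = pipeline,
        .indirectCommandsLayout = layout,
        .maxSequencesCount = maxSequences,
    };
    VkMemoryRequirements2 memReq = { .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2 };
    vkGetGeneratedCommandsMemoryRequirementsNV(device, &reqInfo, &memReq);
    /* ... preprocessBuf is assumed to be at least memReq.memoryRequirements.size ... */

    VkGeneratedCommandsInfoNV genInfo = {
        .sType = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV,
        .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
        .pipeline = pipeline,
        .indirectCommandsLayout = layout,
        .streamCount = 1,
        .pStreams = &stream,
        .sequencesCount = maxSequences,
        .preprocessBuffer = preprocessBuf,
        .preprocessSize = preprocessSize,
    };
    /* VK_FALSE: no separate preprocess pass was recorded for this info. */
    vkCmdExecuteGeneratedCommandsNV(cmd, VK_FALSE, &genInfo);
}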
+#define VK_EXT_depth_bias_control 1 +#define VK_EXT_DEPTH_BIAS_CONTROL_SPEC_VERSION 1 +#define VK_EXT_DEPTH_BIAS_CONTROL_EXTENSION_NAME "VK_EXT_depth_bias_control" + +typedef enum VkDepthBiasRepresentationEXT { + VK_DEPTH_BIAS_REPRESENTATION_LEAST_REPRESENTABLE_VALUE_FORMAT_EXT = 0, + VK_DEPTH_BIAS_REPRESENTATION_LEAST_REPRESENTABLE_VALUE_FORCE_UNORM_EXT = 1, + VK_DEPTH_BIAS_REPRESENTATION_FLOAT_EXT = 2, + VK_DEPTH_BIAS_REPRESENTATION_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDepthBiasRepresentationEXT; +typedef struct VkPhysicalDeviceDepthBiasControlFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 depthBiasControl; + VkBool32 leastRepresentableValueForceUnormRepresentation; + VkBool32 floatRepresentation; + VkBool32 depthBiasExact; +} VkPhysicalDeviceDepthBiasControlFeaturesEXT; + +typedef struct VkDepthBiasInfoEXT { + VkStructureType sType; + const void* pNext; + float depthBiasConstantFactor; + float depthBiasClamp; + float depthBiasSlopeFactor; +} VkDepthBiasInfoEXT; + +typedef struct VkDepthBiasRepresentationInfoEXT { + VkStructureType sType; + const void* pNext; + VkDepthBiasRepresentationEXT depthBiasRepresentation; + VkBool32 depthBiasExact; +} VkDepthBiasRepresentationInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBias2EXT)(VkCommandBuffer commandBuffer, const VkDepthBiasInfoEXT* pDepthBiasInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias2EXT( + VkCommandBuffer commandBuffer, + const VkDepthBiasInfoEXT* pDepthBiasInfo); +#endif + + +// VK_EXT_device_memory_report is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_device_memory_report 1 +#define VK_EXT_DEVICE_MEMORY_REPORT_SPEC_VERSION 2 +#define VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME "VK_EXT_device_memory_report" + +typedef enum VkDeviceMemoryReportEventTypeEXT { + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT = 0, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT = 1, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT = 2, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT = 3, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT = 4, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceMemoryReportEventTypeEXT; +typedef VkFlags VkDeviceMemoryReportFlagsEXT; +typedef struct VkPhysicalDeviceDeviceMemoryReportFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 deviceMemoryReport; +} VkPhysicalDeviceDeviceMemoryReportFeaturesEXT; + +typedef struct VkDeviceMemoryReportCallbackDataEXT { + VkStructureType sType; + void* pNext; + VkDeviceMemoryReportFlagsEXT flags; + VkDeviceMemoryReportEventTypeEXT type; + uint64_t memoryObjectId; + VkDeviceSize size; + VkObjectType objectType; + uint64_t objectHandle; + uint32_t heapIndex; +} VkDeviceMemoryReportCallbackDataEXT; + +typedef void (VKAPI_PTR *PFN_vkDeviceMemoryReportCallbackEXT)( + const VkDeviceMemoryReportCallbackDataEXT* pCallbackData, + void* pUserData); + +typedef struct VkDeviceDeviceMemoryReportCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceMemoryReportFlagsEXT flags; + PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback; + void* pUserData; +} VkDeviceDeviceMemoryReportCreateInfoEXT; -typedef enum VkAccelerationStructureTypeKHR { - VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR = 0, - VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR = 1, - VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR = 2, - VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, - VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV = 
VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR, - VK_ACCELERATION_STRUCTURE_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkAccelerationStructureTypeKHR; -typedef VkAccelerationStructureTypeKHR VkAccelerationStructureTypeNV; +// VK_EXT_acquire_drm_display is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_acquire_drm_display 1 +#define VK_EXT_ACQUIRE_DRM_DISPLAY_SPEC_VERSION 1 +#define VK_EXT_ACQUIRE_DRM_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_drm_display" +typedef VkResult (VKAPI_PTR *PFN_vkAcquireDrmDisplayEXT)(VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display); +typedef VkResult (VKAPI_PTR *PFN_vkGetDrmDisplayEXT)(VkPhysicalDevice physicalDevice, int32_t drmFd, uint32_t connectorId, VkDisplayKHR* display); -typedef enum VkCopyAccelerationStructureModeKHR { - VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR = 0, - VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR = 1, - VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR = 2, - VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR = 3, - VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR, - VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR, - VK_COPY_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkCopyAccelerationStructureModeKHR; -typedef VkCopyAccelerationStructureModeKHR VkCopyAccelerationStructureModeNV; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireDrmDisplayEXT( + VkPhysicalDevice physicalDevice, + int32_t drmFd, + VkDisplayKHR display); +VKAPI_ATTR VkResult VKAPI_CALL vkGetDrmDisplayEXT( + VkPhysicalDevice physicalDevice, + int32_t drmFd, + uint32_t connectorId, + VkDisplayKHR* display); +#endif -typedef enum VkAccelerationStructureMemoryRequirementsTypeNV { - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV = 0, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV = 1, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV = 2, - VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkAccelerationStructureMemoryRequirementsTypeNV; -typedef enum VkGeometryFlagBitsKHR { - VK_GEOMETRY_OPAQUE_BIT_KHR = 0x00000001, - VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR = 0x00000002, - VK_GEOMETRY_OPAQUE_BIT_NV = VK_GEOMETRY_OPAQUE_BIT_KHR, - VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR, - VK_GEOMETRY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkGeometryFlagBitsKHR; -typedef VkFlags VkGeometryFlagsKHR; -typedef VkGeometryFlagsKHR VkGeometryFlagsNV; +// VK_EXT_robustness2 is a preprocessor guard. Do not pass it to API calls. 
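The two VK_EXT_acquire_drm_display entry points above are typically used back to back: translate a DRM connector into a VkDisplayKHR, then take exclusive control of it through the DRM master fd. A minimal sketch, assuming the fd and connector id come from libdrm:

#include <vulkan/vulkan.h>

static VkDisplayKHR acquire_display(VkPhysicalDevice physicalDevice,
                                    int32_t drmFd, uint32_t connectorId) {
    VkDisplayKHR display = VK_NULL_HANDLE;
    if (vkGetDrmDisplayEXT(physicalDevice, drmFd, connectorId, &display) != VK_SUCCESS)
        return VK_NULL_HANDLE;
    if (vkAcquireDrmDisplayEXT(physicalDevice, drmFd, display) != VK_SUCCESS)
        return VK_NULL_HANDLE;
    return display;  /* usable with VK_KHR_display / direct-to-display surfaces */
}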
+#define VK_EXT_robustness2 1 +#define VK_EXT_ROBUSTNESS_2_SPEC_VERSION 1 +#define VK_EXT_ROBUSTNESS_2_EXTENSION_NAME "VK_EXT_robustness2" +typedef struct VkPhysicalDeviceRobustness2FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 robustBufferAccess2; + VkBool32 robustImageAccess2; + VkBool32 nullDescriptor; +} VkPhysicalDeviceRobustness2FeaturesEXT; -typedef VkGeometryFlagBitsKHR VkGeometryFlagBitsNV; +typedef struct VkPhysicalDeviceRobustness2PropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize robustStorageBufferAccessSizeAlignment; + VkDeviceSize robustUniformBufferAccessSizeAlignment; +} VkPhysicalDeviceRobustness2PropertiesEXT; -typedef enum VkGeometryInstanceFlagBitsKHR { - VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR = 0x00000001, - VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR = 0x00000002, - VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR = 0x00000004, - VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR = 0x00000008, - VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR = VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR, - VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR, - VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR, - VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR, - VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR, - VK_GEOMETRY_INSTANCE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkGeometryInstanceFlagBitsKHR; -typedef VkFlags VkGeometryInstanceFlagsKHR; -typedef VkGeometryInstanceFlagsKHR VkGeometryInstanceFlagsNV; -typedef VkGeometryInstanceFlagBitsKHR VkGeometryInstanceFlagBitsNV; +// VK_EXT_custom_border_color is a preprocessor guard. Do not pass it to API calls. 
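The robustness2 feature struct above is queried like any other extension feature struct, by chaining it into vkGetPhysicalDeviceFeatures2 before deciding what to enable at device creation. A short sketch:

#include <vulkan/vulkan.h>

static VkBool32 has_null_descriptor(VkPhysicalDevice physicalDevice) {
    VkPhysicalDeviceRobustness2FeaturesEXT robustness2 = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT,
    };
    VkPhysicalDeviceFeatures2 features2 = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
        .pNext = &robustness2,
    };
    vkGetPhysicalDeviceFeatures2(physicalDevice, &features2);
    /* robustBufferAccess2 / robustImageAccess2 are reported the same way. */
    return robustness2.nullDescriptor;
}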
+#define VK_EXT_custom_border_color 1 +#define VK_EXT_CUSTOM_BORDER_COLOR_SPEC_VERSION 12 +#define VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME "VK_EXT_custom_border_color" +typedef struct VkSamplerCustomBorderColorCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkClearColorValue customBorderColor; + VkFormat format; +} VkSamplerCustomBorderColorCreateInfoEXT; +typedef struct VkPhysicalDeviceCustomBorderColorPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxCustomBorderColorSamplers; +} VkPhysicalDeviceCustomBorderColorPropertiesEXT; -typedef enum VkBuildAccelerationStructureFlagBitsKHR { - VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR = 0x00000001, - VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR = 0x00000002, - VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR = 0x00000004, - VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR = 0x00000008, - VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR = 0x00000010, - VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV = 0x00000020, - VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR, - VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR, - VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR, - VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR, - VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR, - VK_BUILD_ACCELERATION_STRUCTURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkBuildAccelerationStructureFlagBitsKHR; -typedef VkFlags VkBuildAccelerationStructureFlagsKHR; -typedef VkBuildAccelerationStructureFlagsKHR VkBuildAccelerationStructureFlagsNV; +typedef struct VkPhysicalDeviceCustomBorderColorFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 customBorderColors; + VkBool32 customBorderColorWithoutFormat; +} VkPhysicalDeviceCustomBorderColorFeaturesEXT; -typedef VkBuildAccelerationStructureFlagBitsKHR VkBuildAccelerationStructureFlagBitsNV; -typedef struct VkRayTracingShaderGroupCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkRayTracingShaderGroupTypeKHR type; - uint32_t generalShader; - uint32_t closestHitShader; - uint32_t anyHitShader; - uint32_t intersectionShader; -} VkRayTracingShaderGroupCreateInfoNV; -typedef struct VkRayTracingPipelineCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkPipelineCreateFlags flags; - uint32_t stageCount; - const VkPipelineShaderStageCreateInfo* pStages; - uint32_t groupCount; - const VkRayTracingShaderGroupCreateInfoNV* pGroups; - uint32_t maxRecursionDepth; - VkPipelineLayout layout; - VkPipeline basePipelineHandle; - int32_t basePipelineIndex; -} VkRayTracingPipelineCreateInfoNV; +// VK_GOOGLE_user_type is a preprocessor guard. Do not pass it to API calls. +#define VK_GOOGLE_user_type 1 +#define VK_GOOGLE_USER_TYPE_SPEC_VERSION 1 +#define VK_GOOGLE_USER_TYPE_EXTENSION_NAME "VK_GOOGLE_user_type" -typedef struct VkGeometryTrianglesNV { + +// VK_NV_present_barrier is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_present_barrier 1 +#define VK_NV_PRESENT_BARRIER_SPEC_VERSION 1 +#define VK_NV_PRESENT_BARRIER_EXTENSION_NAME "VK_NV_present_barrier" +typedef struct VkPhysicalDevicePresentBarrierFeaturesNV { VkStructureType sType; - const void* pNext; - VkBuffer vertexData; - VkDeviceSize vertexOffset; - uint32_t vertexCount; - VkDeviceSize vertexStride; - VkFormat vertexFormat; - VkBuffer indexData; - VkDeviceSize indexOffset; - uint32_t indexCount; - VkIndexType indexType; - VkBuffer transformData; - VkDeviceSize transformOffset; -} VkGeometryTrianglesNV; + void* pNext; + VkBool32 presentBarrier; +} VkPhysicalDevicePresentBarrierFeaturesNV; -typedef struct VkGeometryAABBNV { +typedef struct VkSurfaceCapabilitiesPresentBarrierNV { VkStructureType sType; - const void* pNext; - VkBuffer aabbData; - uint32_t numAABBs; - uint32_t stride; - VkDeviceSize offset; -} VkGeometryAABBNV; + void* pNext; + VkBool32 presentBarrierSupported; +} VkSurfaceCapabilitiesPresentBarrierNV; -typedef struct VkGeometryDataNV { - VkGeometryTrianglesNV triangles; - VkGeometryAABBNV aabbs; -} VkGeometryDataNV; +typedef struct VkSwapchainPresentBarrierCreateInfoNV { + VkStructureType sType; + void* pNext; + VkBool32 presentBarrierEnable; +} VkSwapchainPresentBarrierCreateInfoNV; -typedef struct VkGeometryNV { - VkStructureType sType; - const void* pNext; - VkGeometryTypeKHR geometryType; - VkGeometryDataNV geometry; - VkGeometryFlagsKHR flags; -} VkGeometryNV; -typedef struct VkAccelerationStructureInfoNV { - VkStructureType sType; - const void* pNext; - VkAccelerationStructureTypeNV type; - VkBuildAccelerationStructureFlagsNV flags; - uint32_t instanceCount; - uint32_t geometryCount; - const VkGeometryNV* pGeometries; -} VkAccelerationStructureInfoNV; -typedef struct VkAccelerationStructureCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkDeviceSize compactedSize; - VkAccelerationStructureInfoNV info; -} VkAccelerationStructureCreateInfoNV; +// VK_EXT_private_data is a preprocessor guard. Do not pass it to API calls. 
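VkSamplerCustomBorderColorCreateInfoEXT above is consumed by chaining it into VkSamplerCreateInfo together with one of the custom border color enum values. A sketch, assuming the customBorderColors feature was enabled:

#include <vulkan/vulkan.h>

static VkSampler make_custom_border_sampler(VkDevice device) {
    VkSamplerCustomBorderColorCreateInfoEXT borderColor = {
        .sType = VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT,
        .customBorderColor = { .float32 = { 1.0f, 0.5f, 0.0f, 1.0f } },
        .format = VK_FORMAT_R8G8B8A8_UNORM,  /* may be UNDEFINED if customBorderColorWithoutFormat is enabled */
    };
    VkSamplerCreateInfo samplerInfo = {
        .sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
        .pNext = &borderColor,
        .magFilter = VK_FILTER_LINEAR,
        .minFilter = VK_FILTER_LINEAR,
        .addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
        .addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
        .addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
        .borderColor = VK_BORDER_COLOR_FLOAT_CUSTOM_EXT,
    };
    VkSampler sampler = VK_NULL_HANDLE;
    vkCreateSampler(device, &samplerInfo, NULL, &sampler);
    return sampler;
}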
+#define VK_EXT_private_data 1 +typedef VkPrivateDataSlot VkPrivateDataSlotEXT; + +#define VK_EXT_PRIVATE_DATA_SPEC_VERSION 1 +#define VK_EXT_PRIVATE_DATA_EXTENSION_NAME "VK_EXT_private_data" +typedef VkPrivateDataSlotCreateFlags VkPrivateDataSlotCreateFlagsEXT; + +typedef VkPhysicalDevicePrivateDataFeatures VkPhysicalDevicePrivateDataFeaturesEXT; + +typedef VkDevicePrivateDataCreateInfo VkDevicePrivateDataCreateInfoEXT; + +typedef VkPrivateDataSlotCreateInfo VkPrivateDataSlotCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreatePrivateDataSlotEXT)(VkDevice device, const VkPrivateDataSlotCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlot* pPrivateDataSlot); +typedef void (VKAPI_PTR *PFN_vkDestroyPrivateDataSlotEXT)(VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkSetPrivateDataEXT)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data); +typedef void (VKAPI_PTR *PFN_vkGetPrivateDataEXT)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePrivateDataSlotEXT( + VkDevice device, + const VkPrivateDataSlotCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPrivateDataSlot* pPrivateDataSlot); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPrivateDataSlotEXT( + VkDevice device, + VkPrivateDataSlot privateDataSlot, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetPrivateDataEXT( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlot privateDataSlot, + uint64_t data); + +VKAPI_ATTR void VKAPI_CALL vkGetPrivateDataEXT( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlot privateDataSlot, + uint64_t* pData); +#endif + + +// VK_EXT_pipeline_creation_cache_control is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pipeline_creation_cache_control 1 +#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION 3 +#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME "VK_EXT_pipeline_creation_cache_control" +typedef VkPhysicalDevicePipelineCreationCacheControlFeatures VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT; + -typedef struct VkBindAccelerationStructureMemoryInfoNV { - VkStructureType sType; - const void* pNext; - VkAccelerationStructureNV accelerationStructure; - VkDeviceMemory memory; - VkDeviceSize memoryOffset; - uint32_t deviceIndexCount; - const uint32_t* pDeviceIndices; -} VkBindAccelerationStructureMemoryInfoNV; -typedef struct VkWriteDescriptorSetAccelerationStructureNV { +// VK_NV_device_diagnostics_config is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_device_diagnostics_config 1 +#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_SPEC_VERSION 2 +#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_EXTENSION_NAME "VK_NV_device_diagnostics_config" + +typedef enum VkDeviceDiagnosticsConfigFlagBitsNV { + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV = 0x00000001, + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV = 0x00000002, + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV = 0x00000004, + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_ERROR_REPORTING_BIT_NV = 0x00000008, + VK_DEVICE_DIAGNOSTICS_CONFIG_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkDeviceDiagnosticsConfigFlagBitsNV; +typedef VkFlags VkDeviceDiagnosticsConfigFlagsNV; +typedef struct VkPhysicalDeviceDiagnosticsConfigFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 diagnosticsConfig; +} VkPhysicalDeviceDiagnosticsConfigFeaturesNV; + +typedef struct VkDeviceDiagnosticsConfigCreateInfoNV { VkStructureType sType; const void* pNext; - uint32_t accelerationStructureCount; - const VkAccelerationStructureNV* pAccelerationStructures; -} VkWriteDescriptorSetAccelerationStructureNV; + VkDeviceDiagnosticsConfigFlagsNV flags; +} VkDeviceDiagnosticsConfigCreateInfoNV; -typedef struct VkAccelerationStructureMemoryRequirementsInfoNV { - VkStructureType sType; - const void* pNext; - VkAccelerationStructureMemoryRequirementsTypeNV type; - VkAccelerationStructureNV accelerationStructure; -} VkAccelerationStructureMemoryRequirementsInfoNV; -typedef struct VkPhysicalDeviceRayTracingPropertiesNV { - VkStructureType sType; - void* pNext; - uint32_t shaderGroupHandleSize; - uint32_t maxRecursionDepth; - uint32_t maxShaderGroupStride; - uint32_t shaderGroupBaseAlignment; - uint64_t maxGeometryCount; - uint64_t maxInstanceCount; - uint64_t maxTriangleCount; - uint32_t maxDescriptorSetAccelerationStructures; -} VkPhysicalDeviceRayTracingPropertiesNV; -typedef struct VkTransformMatrixKHR { - float matrix[3][4]; -} VkTransformMatrixKHR; +// VK_QCOM_render_pass_store_ops is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_render_pass_store_ops 1 +#define VK_QCOM_RENDER_PASS_STORE_OPS_SPEC_VERSION 2 +#define VK_QCOM_RENDER_PASS_STORE_OPS_EXTENSION_NAME "VK_QCOM_render_pass_store_ops" -typedef VkTransformMatrixKHR VkTransformMatrixNV; -typedef struct VkAabbPositionsKHR { - float minX; - float minY; - float minZ; - float maxX; - float maxY; - float maxZ; -} VkAabbPositionsKHR; +// VK_NV_cuda_kernel_launch is a preprocessor guard. Do not pass it to API calls. 
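The private-data aliases above behave exactly like the core Vulkan 1.3 entry points they alias: create a slot, then attach and read back a 64-bit payload on any Vulkan object. A small sketch using an image handle:

#include <vulkan/vulkan.h>

static uint64_t tag_image(VkDevice device, VkImage image, uint64_t tag) {
    VkPrivateDataSlotCreateInfo slotInfo = {
        .sType = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO,
    };
    VkPrivateDataSlot slot = VK_NULL_HANDLE;
    vkCreatePrivateDataSlotEXT(device, &slotInfo, NULL, &slot);

    /* Non-dispatchable handles are passed as their 64-bit value. */
    vkSetPrivateDataEXT(device, VK_OBJECT_TYPE_IMAGE, (uint64_t)image, slot, tag);

    uint64_t stored = 0;
    vkGetPrivateDataEXT(device, VK_OBJECT_TYPE_IMAGE, (uint64_t)image, slot, &stored);

    vkDestroyPrivateDataSlotEXT(device, slot, NULL);
    return stored;  /* == tag */
}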
+#define VK_NV_cuda_kernel_launch 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCudaModuleNV) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCudaFunctionNV) +#define VK_NV_CUDA_KERNEL_LAUNCH_SPEC_VERSION 2 +#define VK_NV_CUDA_KERNEL_LAUNCH_EXTENSION_NAME "VK_NV_cuda_kernel_launch" +typedef struct VkCudaModuleCreateInfoNV { + VkStructureType sType; + const void* pNext; + size_t dataSize; + const void* pData; +} VkCudaModuleCreateInfoNV; -typedef VkAabbPositionsKHR VkAabbPositionsNV; +typedef struct VkCudaFunctionCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkCudaModuleNV module; + const char* pName; +} VkCudaFunctionCreateInfoNV; -typedef struct VkAccelerationStructureInstanceKHR { - VkTransformMatrixKHR transform; - uint32_t instanceCustomIndex:24; - uint32_t mask:8; - uint32_t instanceShaderBindingTableRecordOffset:24; - VkGeometryInstanceFlagsKHR flags:8; - uint64_t accelerationStructureReference; -} VkAccelerationStructureInstanceKHR; +typedef struct VkCudaLaunchInfoNV { + VkStructureType sType; + const void* pNext; + VkCudaFunctionNV function; + uint32_t gridDimX; + uint32_t gridDimY; + uint32_t gridDimZ; + uint32_t blockDimX; + uint32_t blockDimY; + uint32_t blockDimZ; + uint32_t sharedMemBytes; + size_t paramCount; + const void* const * pParams; + size_t extraCount; + const void* const * pExtras; +} VkCudaLaunchInfoNV; -typedef VkAccelerationStructureInstanceKHR VkAccelerationStructureInstanceNV; +typedef struct VkPhysicalDeviceCudaKernelLaunchFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 cudaKernelLaunchFeatures; +} VkPhysicalDeviceCudaKernelLaunchFeaturesNV; -typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureNV)(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure); -typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator); -typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureMemoryRequirementsNV)(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements); -typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryNV)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); -typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureNV)(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset); -typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureNV)(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeKHR mode); -typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysNV)(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth); -typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesNV)(VkDevice 
device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); -typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); -typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesNV)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); -typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureHandleNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData); -typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesNV)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); -typedef VkResult (VKAPI_PTR *PFN_vkCompileDeferredNV)(VkDevice device, VkPipeline pipeline, uint32_t shader); +typedef struct VkPhysicalDeviceCudaKernelLaunchPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t computeCapabilityMinor; + uint32_t computeCapabilityMajor; +} VkPhysicalDeviceCudaKernelLaunchPropertiesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateCudaModuleNV)(VkDevice device, const VkCudaModuleCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCudaModuleNV* pModule); +typedef VkResult (VKAPI_PTR *PFN_vkGetCudaModuleCacheNV)(VkDevice device, VkCudaModuleNV module, size_t* pCacheSize, void* pCacheData); +typedef VkResult (VKAPI_PTR *PFN_vkCreateCudaFunctionNV)(VkDevice device, const VkCudaFunctionCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCudaFunctionNV* pFunction); +typedef void (VKAPI_PTR *PFN_vkDestroyCudaModuleNV)(VkDevice device, VkCudaModuleNV module, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkDestroyCudaFunctionNV)(VkDevice device, VkCudaFunctionNV function, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdCudaLaunchKernelNV)(VkCommandBuffer commandBuffer, const VkCudaLaunchInfoNV* pLaunchInfo); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureNV( +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCudaModuleNV( VkDevice device, - const VkAccelerationStructureCreateInfoNV* pCreateInfo, + const VkCudaModuleCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkAccelerationStructureNV* pAccelerationStructure); + VkCudaModuleNV* pModule); -VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureNV( +VKAPI_ATTR VkResult VKAPI_CALL vkGetCudaModuleCacheNV( VkDevice device, - VkAccelerationStructureNV accelerationStructure, - const VkAllocationCallbacks* pAllocator); + VkCudaModuleNV module, + size_t* pCacheSize, + void* pCacheData); -VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureMemoryRequirementsNV( +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCudaFunctionNV( VkDevice device, - const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, - VkMemoryRequirements2KHR* pMemoryRequirements); + const VkCudaFunctionCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCudaFunctionNV* pFunction); -VKAPI_ATTR VkResult VKAPI_CALL vkBindAccelerationStructureMemoryNV( +VKAPI_ATTR void VKAPI_CALL vkDestroyCudaModuleNV( VkDevice device, - uint32_t bindInfoCount, - const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); + VkCudaModuleNV 
module, + const VkAllocationCallbacks* pAllocator); -VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureNV( - VkCommandBuffer commandBuffer, - const VkAccelerationStructureInfoNV* pInfo, - VkBuffer instanceData, - VkDeviceSize instanceOffset, - VkBool32 update, - VkAccelerationStructureNV dst, - VkAccelerationStructureNV src, - VkBuffer scratch, - VkDeviceSize scratchOffset); +VKAPI_ATTR void VKAPI_CALL vkDestroyCudaFunctionNV( + VkDevice device, + VkCudaFunctionNV function, + const VkAllocationCallbacks* pAllocator); -VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureNV( +VKAPI_ATTR void VKAPI_CALL vkCmdCudaLaunchKernelNV( VkCommandBuffer commandBuffer, - VkAccelerationStructureNV dst, - VkAccelerationStructureNV src, - VkCopyAccelerationStructureModeKHR mode); + const VkCudaLaunchInfoNV* pLaunchInfo); +#endif -VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysNV( - VkCommandBuffer commandBuffer, - VkBuffer raygenShaderBindingTableBuffer, - VkDeviceSize raygenShaderBindingOffset, - VkBuffer missShaderBindingTableBuffer, - VkDeviceSize missShaderBindingOffset, - VkDeviceSize missShaderBindingStride, - VkBuffer hitShaderBindingTableBuffer, - VkDeviceSize hitShaderBindingOffset, - VkDeviceSize hitShaderBindingStride, - VkBuffer callableShaderBindingTableBuffer, - VkDeviceSize callableShaderBindingOffset, - VkDeviceSize callableShaderBindingStride, - uint32_t width, - uint32_t height, - uint32_t depth); -VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesNV( - VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkRayTracingPipelineCreateInfoNV* pCreateInfos, - const VkAllocationCallbacks* pAllocator, - VkPipeline* pPipelines); +// VK_NV_low_latency is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_low_latency 1 +#define VK_NV_LOW_LATENCY_SPEC_VERSION 1 +#define VK_NV_LOW_LATENCY_EXTENSION_NAME "VK_NV_low_latency" +typedef struct VkQueryLowLatencySupportNV { + VkStructureType sType; + const void* pNext; + void* pQueriedLowLatencyData; +} VkQueryLowLatencySupportNV; -VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesKHR( - VkDevice device, - VkPipeline pipeline, - uint32_t firstGroup, - uint32_t groupCount, - size_t dataSize, - void* pData); -VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesNV( - VkDevice device, - VkPipeline pipeline, - uint32_t firstGroup, - uint32_t groupCount, - size_t dataSize, - void* pData); -VKAPI_ATTR VkResult VKAPI_CALL vkGetAccelerationStructureHandleNV( - VkDevice device, - VkAccelerationStructureNV accelerationStructure, - size_t dataSize, - void* pData); +// VK_EXT_descriptor_buffer is a preprocessor guard. Do not pass it to API calls. 
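The VK_NV_cuda_kernel_launch declarations above follow a create-module, create-function, record-launch pattern. A hedged sketch, assuming cubinData/cubinSize hold a precompiled module blob, "myKernel" is a hypothetical entry point inside it, and the NV functions were loaded via vkGetDeviceProcAddr:

#include <vulkan/vulkan.h>

static void launch_cuda_kernel(VkDevice device, VkCommandBuffer cmd,
                               const void* cubinData, size_t cubinSize,
                               const void* const* kernelParams, size_t paramCount) {
    VkCudaModuleCreateInfoNV moduleInfo = {
        .sType = VK_STRUCTURE_TYPE_CUDA_MODULE_CREATE_INFO_NV,
        .dataSize = cubinSize,
        .pData = cubinData,
    };
    VkCudaModuleNV module = VK_NULL_HANDLE;
    vkCreateCudaModuleNV(device, &moduleInfo, NULL, &module);

    VkCudaFunctionCreateInfoNV functionInfo = {
        .sType = VK_STRUCTURE_TYPE_CUDA_FUNCTION_CREATE_INFO_NV,
        .module = module,
        .pName = "myKernel",            /* hypothetical kernel name */
    };
    VkCudaFunctionNV function = VK_NULL_HANDLE;
    vkCreateCudaFunctionNV(device, &functionInfo, NULL, &function);

    VkCudaLaunchInfoNV launch = {
        .sType = VK_STRUCTURE_TYPE_CUDA_LAUNCH_INFO_NV,
        .function = function,
        .gridDimX = 64, .gridDimY = 1, .gridDimZ = 1,
        .blockDimX = 256, .blockDimY = 1, .blockDimZ = 1,
        .paramCount = paramCount,
        .pParams = kernelParams,
    };
    vkCmdCudaLaunchKernelNV(cmd, &launch);
}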
+#define VK_EXT_descriptor_buffer 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureKHR) +#define VK_EXT_DESCRIPTOR_BUFFER_SPEC_VERSION 1 +#define VK_EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME "VK_EXT_descriptor_buffer" +typedef struct VkPhysicalDeviceDescriptorBufferPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 combinedImageSamplerDescriptorSingleArray; + VkBool32 bufferlessPushDescriptors; + VkBool32 allowSamplerImageViewPostSubmitCreation; + VkDeviceSize descriptorBufferOffsetAlignment; + uint32_t maxDescriptorBufferBindings; + uint32_t maxResourceDescriptorBufferBindings; + uint32_t maxSamplerDescriptorBufferBindings; + uint32_t maxEmbeddedImmutableSamplerBindings; + uint32_t maxEmbeddedImmutableSamplers; + size_t bufferCaptureReplayDescriptorDataSize; + size_t imageCaptureReplayDescriptorDataSize; + size_t imageViewCaptureReplayDescriptorDataSize; + size_t samplerCaptureReplayDescriptorDataSize; + size_t accelerationStructureCaptureReplayDescriptorDataSize; + size_t samplerDescriptorSize; + size_t combinedImageSamplerDescriptorSize; + size_t sampledImageDescriptorSize; + size_t storageImageDescriptorSize; + size_t uniformTexelBufferDescriptorSize; + size_t robustUniformTexelBufferDescriptorSize; + size_t storageTexelBufferDescriptorSize; + size_t robustStorageTexelBufferDescriptorSize; + size_t uniformBufferDescriptorSize; + size_t robustUniformBufferDescriptorSize; + size_t storageBufferDescriptorSize; + size_t robustStorageBufferDescriptorSize; + size_t inputAttachmentDescriptorSize; + size_t accelerationStructureDescriptorSize; + VkDeviceSize maxSamplerDescriptorBufferRange; + VkDeviceSize maxResourceDescriptorBufferRange; + VkDeviceSize samplerDescriptorBufferAddressSpaceSize; + VkDeviceSize resourceDescriptorBufferAddressSpaceSize; + VkDeviceSize descriptorBufferAddressSpaceSize; +} VkPhysicalDeviceDescriptorBufferPropertiesEXT; + +typedef struct VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT { + VkStructureType sType; + void* pNext; + size_t combinedImageSamplerDensityMapDescriptorSize; +} VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; -VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesNV( - VkCommandBuffer commandBuffer, - uint32_t accelerationStructureCount, - const VkAccelerationStructureNV* pAccelerationStructures, - VkQueryType queryType, - VkQueryPool queryPool, - uint32_t firstQuery); +typedef struct VkPhysicalDeviceDescriptorBufferFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 descriptorBuffer; + VkBool32 descriptorBufferCaptureReplay; + VkBool32 descriptorBufferImageLayoutIgnored; + VkBool32 descriptorBufferPushDescriptors; +} VkPhysicalDeviceDescriptorBufferFeaturesEXT; -VKAPI_ATTR VkResult VKAPI_CALL vkCompileDeferredNV( - VkDevice device, - VkPipeline pipeline, - uint32_t shader); -#endif +typedef struct VkDescriptorAddressInfoEXT { + VkStructureType sType; + void* pNext; + VkDeviceAddress address; + VkDeviceSize range; + VkFormat format; +} VkDescriptorAddressInfoEXT; +typedef struct VkDescriptorBufferBindingInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceAddress address; + VkBufferUsageFlags usage; +} VkDescriptorBufferBindingInfoEXT; -#define VK_NV_representative_fragment_test 1 -#define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_SPEC_VERSION 2 -#define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_EXTENSION_NAME "VK_NV_representative_fragment_test" -typedef struct VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV { +typedef struct 
VkDescriptorBufferBindingPushDescriptorBufferHandleEXT { VkStructureType sType; - void* pNext; - VkBool32 representativeFragmentTest; -} VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV; + const void* pNext; + VkBuffer buffer; +} VkDescriptorBufferBindingPushDescriptorBufferHandleEXT; + +typedef union VkDescriptorDataEXT { + const VkSampler* pSampler; + const VkDescriptorImageInfo* pCombinedImageSampler; + const VkDescriptorImageInfo* pInputAttachmentImage; + const VkDescriptorImageInfo* pSampledImage; + const VkDescriptorImageInfo* pStorageImage; + const VkDescriptorAddressInfoEXT* pUniformTexelBuffer; + const VkDescriptorAddressInfoEXT* pStorageTexelBuffer; + const VkDescriptorAddressInfoEXT* pUniformBuffer; + const VkDescriptorAddressInfoEXT* pStorageBuffer; + VkDeviceAddress accelerationStructure; +} VkDescriptorDataEXT; + +typedef struct VkDescriptorGetInfoEXT { + VkStructureType sType; + const void* pNext; + VkDescriptorType type; + VkDescriptorDataEXT data; +} VkDescriptorGetInfoEXT; -typedef struct VkPipelineRepresentativeFragmentTestStateCreateInfoNV { +typedef struct VkBufferCaptureDescriptorDataInfoEXT { VkStructureType sType; const void* pNext; - VkBool32 representativeFragmentTestEnable; -} VkPipelineRepresentativeFragmentTestStateCreateInfoNV; + VkBuffer buffer; +} VkBufferCaptureDescriptorDataInfoEXT; +typedef struct VkImageCaptureDescriptorDataInfoEXT { + VkStructureType sType; + const void* pNext; + VkImage image; +} VkImageCaptureDescriptorDataInfoEXT; +typedef struct VkImageViewCaptureDescriptorDataInfoEXT { + VkStructureType sType; + const void* pNext; + VkImageView imageView; +} VkImageViewCaptureDescriptorDataInfoEXT; -#define VK_EXT_filter_cubic 1 -#define VK_EXT_FILTER_CUBIC_SPEC_VERSION 3 -#define VK_EXT_FILTER_CUBIC_EXTENSION_NAME "VK_EXT_filter_cubic" -typedef struct VkPhysicalDeviceImageViewImageFormatInfoEXT { +typedef struct VkSamplerCaptureDescriptorDataInfoEXT { VkStructureType sType; - void* pNext; - VkImageViewType imageViewType; -} VkPhysicalDeviceImageViewImageFormatInfoEXT; + const void* pNext; + VkSampler sampler; +} VkSamplerCaptureDescriptorDataInfoEXT; -typedef struct VkFilterCubicImageViewImageFormatPropertiesEXT { +typedef struct VkOpaqueCaptureDescriptorDataCreateInfoEXT { VkStructureType sType; - void* pNext; - VkBool32 filterCubic; - VkBool32 filterCubicMinmax; -} VkFilterCubicImageViewImageFormatPropertiesEXT; + const void* pNext; + const void* opaqueCaptureDescriptorData; +} VkOpaqueCaptureDescriptorDataCreateInfoEXT; +typedef struct VkAccelerationStructureCaptureDescriptorDataInfoEXT { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR accelerationStructure; + VkAccelerationStructureNV accelerationStructureNV; +} VkAccelerationStructureCaptureDescriptorDataInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSizeEXT)(VkDevice device, VkDescriptorSetLayout layout, VkDeviceSize* pLayoutSizeInBytes); +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutBindingOffsetEXT)(VkDevice device, VkDescriptorSetLayout layout, uint32_t binding, VkDeviceSize* pOffset); +typedef void (VKAPI_PTR *PFN_vkGetDescriptorEXT)(VkDevice device, const VkDescriptorGetInfoEXT* pDescriptorInfo, size_t dataSize, void* pDescriptor); +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorBuffersEXT)(VkCommandBuffer commandBuffer, uint32_t bufferCount, const VkDescriptorBufferBindingInfoEXT* pBindingInfos); +typedef void (VKAPI_PTR *PFN_vkCmdSetDescriptorBufferOffsetsEXT)(VkCommandBuffer commandBuffer, VkPipelineBindPoint 
pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const uint32_t* pBufferIndices, const VkDeviceSize* pOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set); +typedef VkResult (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkBufferCaptureDescriptorDataInfoEXT* pInfo, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetImageOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkImageCaptureDescriptorDataInfoEXT* pInfo, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkImageViewCaptureDescriptorDataInfoEXT* pInfo, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkSamplerCaptureDescriptorDataInfoEXT* pInfo, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkAccelerationStructureCaptureDescriptorDataInfoEXT* pInfo, void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSizeEXT( + VkDevice device, + VkDescriptorSetLayout layout, + VkDeviceSize* pLayoutSizeInBytes); + +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutBindingOffsetEXT( + VkDevice device, + VkDescriptorSetLayout layout, + uint32_t binding, + VkDeviceSize* pOffset); + +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorEXT( + VkDevice device, + const VkDescriptorGetInfoEXT* pDescriptorInfo, + size_t dataSize, + void* pDescriptor); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorBuffersEXT( + VkCommandBuffer commandBuffer, + uint32_t bufferCount, + const VkDescriptorBufferBindingInfoEXT* pBindingInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDescriptorBufferOffsetsEXT( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t setCount, + const uint32_t* pBufferIndices, + const VkDeviceSize* pOffsets); +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorBufferEmbeddedSamplersEXT( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set); -#define VK_QCOM_render_pass_shader_resolve 1 -#define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_SPEC_VERSION 4 -#define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_EXTENSION_NAME "VK_QCOM_render_pass_shader_resolve" +VKAPI_ATTR VkResult VKAPI_CALL vkGetBufferOpaqueCaptureDescriptorDataEXT( + VkDevice device, + const VkBufferCaptureDescriptorDataInfoEXT* pInfo, + void* pData); +VKAPI_ATTR VkResult VKAPI_CALL vkGetImageOpaqueCaptureDescriptorDataEXT( + VkDevice device, + const VkImageCaptureDescriptorDataInfoEXT* pInfo, + void* pData); -#define VK_EXT_global_priority 1 -#define VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION 2 -#define VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME "VK_EXT_global_priority" -typedef VkQueueGlobalPriorityKHR VkQueueGlobalPriorityEXT; +VKAPI_ATTR VkResult VKAPI_CALL vkGetImageViewOpaqueCaptureDescriptorDataEXT( + VkDevice device, + const VkImageViewCaptureDescriptorDataInfoEXT* pInfo, + void* pData); -typedef VkDeviceQueueGlobalPriorityCreateInfoKHR VkDeviceQueueGlobalPriorityCreateInfoEXT; +VKAPI_ATTR VkResult VKAPI_CALL vkGetSamplerOpaqueCaptureDescriptorDataEXT( + VkDevice device, + const VkSamplerCaptureDescriptorDataInfoEXT* pInfo, + void* pData); +VKAPI_ATTR VkResult VKAPI_CALL 
vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + VkDevice device, + const VkAccelerationStructureCaptureDescriptorDataInfoEXT* pInfo, + void* pData); +#endif -#define VK_EXT_external_memory_host 1 -#define VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION 1 -#define VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME "VK_EXT_external_memory_host" -typedef struct VkImportMemoryHostPointerInfoEXT { - VkStructureType sType; - const void* pNext; - VkExternalMemoryHandleTypeFlagBits handleType; - void* pHostPointer; -} VkImportMemoryHostPointerInfoEXT; +// VK_EXT_graphics_pipeline_library is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_graphics_pipeline_library 1 +#define VK_EXT_GRAPHICS_PIPELINE_LIBRARY_SPEC_VERSION 1 +#define VK_EXT_GRAPHICS_PIPELINE_LIBRARY_EXTENSION_NAME "VK_EXT_graphics_pipeline_library" -typedef struct VkMemoryHostPointerPropertiesEXT { +typedef enum VkGraphicsPipelineLibraryFlagBitsEXT { + VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT = 0x00000001, + VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT = 0x00000002, + VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT = 0x00000004, + VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT = 0x00000008, + VK_GRAPHICS_PIPELINE_LIBRARY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkGraphicsPipelineLibraryFlagBitsEXT; +typedef VkFlags VkGraphicsPipelineLibraryFlagsEXT; +typedef struct VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT { VkStructureType sType; void* pNext; - uint32_t memoryTypeBits; -} VkMemoryHostPointerPropertiesEXT; + VkBool32 graphicsPipelineLibrary; +} VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; -typedef struct VkPhysicalDeviceExternalMemoryHostPropertiesEXT { +typedef struct VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT { VkStructureType sType; void* pNext; - VkDeviceSize minImportedHostPointerAlignment; -} VkPhysicalDeviceExternalMemoryHostPropertiesEXT; - -typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryHostPointerPropertiesEXT)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); + VkBool32 graphicsPipelineLibraryFastLinking; + VkBool32 graphicsPipelineLibraryIndependentInterpolationDecoration; +} VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT( - VkDevice device, - VkExternalMemoryHandleTypeFlagBits handleType, - const void* pHostPointer, - VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); -#endif +typedef struct VkGraphicsPipelineLibraryCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkGraphicsPipelineLibraryFlagsEXT flags; +} VkGraphicsPipelineLibraryCreateInfoEXT; -#define VK_AMD_buffer_marker 1 -#define VK_AMD_BUFFER_MARKER_SPEC_VERSION 1 -#define VK_AMD_BUFFER_MARKER_EXTENSION_NAME "VK_AMD_buffer_marker" -typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarkerAMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarkerAMD( - VkCommandBuffer commandBuffer, - VkPipelineStageFlagBits pipelineStage, - VkBuffer dstBuffer, - VkDeviceSize dstOffset, - uint32_t marker); -#endif +// VK_AMD_shader_early_and_late_fragment_tests is a preprocessor guard. Do not pass it to API calls. 
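The descriptor-buffer entry points above replace descriptor sets with plain buffer memory: size the set layout, write each descriptor into mapped memory with vkGetDescriptorEXT, then bind the buffer and point a set at an offset. A condensed sketch, assuming descriptorProps was queried earlier, the buffer was created with the resource-descriptor-buffer and device-address usage flags, and the pipeline was built with the descriptor-buffer pipeline flag:

#include <vulkan/vulkan.h>

static void bind_one_uniform_descriptor(
        VkDevice device, VkCommandBuffer cmd, VkPipelineLayout pipelineLayout,
        VkDescriptorSetLayout setLayout, void* mapped, VkDeviceAddress bufferAddr,
        VkDeviceAddress uboAddr, VkDeviceSize uboRange,
        const VkPhysicalDeviceDescriptorBufferPropertiesEXT* descriptorProps) {
    VkDeviceSize layoutSize = 0, bindingOffset = 0;
    vkGetDescriptorSetLayoutSizeEXT(device, setLayout, &layoutSize);   /* sizes the buffer allocation */
    vkGetDescriptorSetLayoutBindingOffsetEXT(device, setLayout, 0, &bindingOffset);

    /* Write a uniform-buffer descriptor directly into the mapped descriptor buffer. */
    VkDescriptorAddressInfoEXT addrInfo = {
        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_ADDRESS_INFO_EXT,
        .address = uboAddr,
        .range = uboRange,
    };
    VkDescriptorGetInfoEXT getInfo = {
        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_GET_INFO_EXT,
        .type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
        .data.pUniformBuffer = &addrInfo,
    };
    vkGetDescriptorEXT(device, &getInfo,
                       descriptorProps->uniformBufferDescriptorSize,
                       (char*)mapped + bindingOffset);

    /* Bind the descriptor buffer and map set 0 to offset 0 inside it. */
    VkDescriptorBufferBindingInfoEXT binding = {
        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_INFO_EXT,
        .address = bufferAddr,
        .usage = VK_BUFFER_USAGE_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT,
    };
    vkCmdBindDescriptorBuffersEXT(cmd, 1, &binding);

    uint32_t bufferIndex = 0;
    VkDeviceSize offset = 0;
    vkCmdSetDescriptorBufferOffsetsEXT(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
                                       pipelineLayout, 0, 1, &bufferIndex, &offset);
}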
+#define VK_AMD_shader_early_and_late_fragment_tests 1 +#define VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_SPEC_VERSION 1 +#define VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_EXTENSION_NAME "VK_AMD_shader_early_and_late_fragment_tests" +typedef struct VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD { + VkStructureType sType; + void* pNext; + VkBool32 shaderEarlyAndLateFragmentTests; +} VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; -#define VK_AMD_pipeline_compiler_control 1 -#define VK_AMD_PIPELINE_COMPILER_CONTROL_SPEC_VERSION 1 -#define VK_AMD_PIPELINE_COMPILER_CONTROL_EXTENSION_NAME "VK_AMD_pipeline_compiler_control" -typedef enum VkPipelineCompilerControlFlagBitsAMD { - VK_PIPELINE_COMPILER_CONTROL_FLAG_BITS_MAX_ENUM_AMD = 0x7FFFFFFF -} VkPipelineCompilerControlFlagBitsAMD; -typedef VkFlags VkPipelineCompilerControlFlagsAMD; -typedef struct VkPipelineCompilerControlCreateInfoAMD { - VkStructureType sType; - const void* pNext; - VkPipelineCompilerControlFlagsAMD compilerControlFlags; -} VkPipelineCompilerControlCreateInfoAMD; +// VK_NV_fragment_shading_rate_enums is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_fragment_shading_rate_enums 1 +#define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_SPEC_VERSION 1 +#define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_EXTENSION_NAME "VK_NV_fragment_shading_rate_enums" +typedef enum VkFragmentShadingRateTypeNV { + VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV = 0, + VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV = 1, + VK_FRAGMENT_SHADING_RATE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkFragmentShadingRateTypeNV; +typedef enum VkFragmentShadingRateNV { + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV = 0, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV = 1, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV = 4, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV = 5, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV = 6, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV = 9, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV = 10, + VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV = 11, + VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV = 12, + VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV = 13, + VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV = 14, + VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV = 15, + VK_FRAGMENT_SHADING_RATE_MAX_ENUM_NV = 0x7FFFFFFF +} VkFragmentShadingRateNV; +typedef struct VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 fragmentShadingRateEnums; + VkBool32 supersampleFragmentShadingRates; + VkBool32 noInvocationFragmentShadingRates; +} VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV; -#define VK_EXT_calibrated_timestamps 1 -#define VK_EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION 2 -#define VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME "VK_EXT_calibrated_timestamps" +typedef struct VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV { + VkStructureType sType; + void* pNext; + VkSampleCountFlagBits maxFragmentShadingRateInvocationCount; +} VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV; -typedef enum VkTimeDomainEXT { - VK_TIME_DOMAIN_DEVICE_EXT = 0, - VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT = 1, - VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT = 2, - VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT = 3, - VK_TIME_DOMAIN_MAX_ENUM_EXT = 0x7FFFFFFF -} VkTimeDomainEXT; -typedef struct VkCalibratedTimestampInfoEXT { - VkStructureType sType; - const void* pNext; - VkTimeDomainEXT timeDomain; -} 
VkCalibratedTimestampInfoEXT; +typedef struct VkPipelineFragmentShadingRateEnumStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkFragmentShadingRateTypeNV shadingRateType; + VkFragmentShadingRateNV shadingRate; + VkFragmentShadingRateCombinerOpKHR combinerOps[2]; +} VkPipelineFragmentShadingRateEnumStateCreateInfoNV; -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT)(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains); -typedef VkResult (VKAPI_PTR *PFN_vkGetCalibratedTimestampsEXT)(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation); +typedef void (VKAPI_PTR *PFN_vkCmdSetFragmentShadingRateEnumNV)(VkCommandBuffer commandBuffer, VkFragmentShadingRateNV shadingRate, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( - VkPhysicalDevice physicalDevice, - uint32_t* pTimeDomainCount, - VkTimeDomainEXT* pTimeDomains); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetCalibratedTimestampsEXT( - VkDevice device, - uint32_t timestampCount, - const VkCalibratedTimestampInfoEXT* pTimestampInfos, - uint64_t* pTimestamps, - uint64_t* pMaxDeviation); +VKAPI_ATTR void VKAPI_CALL vkCmdSetFragmentShadingRateEnumNV( + VkCommandBuffer commandBuffer, + VkFragmentShadingRateNV shadingRate, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); #endif -#define VK_AMD_shader_core_properties 1 -#define VK_AMD_SHADER_CORE_PROPERTIES_SPEC_VERSION 2 -#define VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME "VK_AMD_shader_core_properties" -typedef struct VkPhysicalDeviceShaderCorePropertiesAMD { - VkStructureType sType; - void* pNext; - uint32_t shaderEngineCount; - uint32_t shaderArraysPerEngineCount; - uint32_t computeUnitsPerShaderArray; - uint32_t simdPerComputeUnit; - uint32_t wavefrontsPerSimd; - uint32_t wavefrontSize; - uint32_t sgprsPerSimd; - uint32_t minSgprAllocation; - uint32_t maxSgprAllocation; - uint32_t sgprAllocationGranularity; - uint32_t vgprsPerSimd; - uint32_t minVgprAllocation; - uint32_t maxVgprAllocation; - uint32_t vgprAllocationGranularity; -} VkPhysicalDeviceShaderCorePropertiesAMD; +// VK_NV_ray_tracing_motion_blur is a preprocessor guard. Do not pass it to API calls. 
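vkCmdSetFragmentShadingRateEnumNV above sets the rate dynamically from the NV enum instead of a fragment-size pair. A short sketch, assuming the fragmentShadingRateEnums feature is enabled and the pipeline uses dynamic fragment shading rate state:

#include <vulkan/vulkan.h>

static void set_coarse_rate(VkCommandBuffer cmd) {
    /* Keep the pipeline rate; ignore primitive and attachment rates. */
    const VkFragmentShadingRateCombinerOpKHR ops[2] = {
        VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR,
        VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR,
    };
    vkCmdSetFragmentShadingRateEnumNV(
        cmd, VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV, ops);
}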
+#define VK_NV_ray_tracing_motion_blur 1 +#define VK_NV_RAY_TRACING_MOTION_BLUR_SPEC_VERSION 1 +#define VK_NV_RAY_TRACING_MOTION_BLUR_EXTENSION_NAME "VK_NV_ray_tracing_motion_blur" + +typedef enum VkAccelerationStructureMotionInstanceTypeNV { + VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_STATIC_NV = 0, + VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MATRIX_MOTION_NV = 1, + VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_SRT_MOTION_NV = 2, + VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkAccelerationStructureMotionInstanceTypeNV; +typedef VkFlags VkAccelerationStructureMotionInfoFlagsNV; +typedef VkFlags VkAccelerationStructureMotionInstanceFlagsNV; +typedef union VkDeviceOrHostAddressConstKHR { + VkDeviceAddress deviceAddress; + const void* hostAddress; +} VkDeviceOrHostAddressConstKHR; +typedef struct VkAccelerationStructureGeometryMotionTrianglesDataNV { + VkStructureType sType; + const void* pNext; + VkDeviceOrHostAddressConstKHR vertexData; +} VkAccelerationStructureGeometryMotionTrianglesDataNV; +typedef struct VkAccelerationStructureMotionInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t maxInstances; + VkAccelerationStructureMotionInfoFlagsNV flags; +} VkAccelerationStructureMotionInfoNV; -#define VK_AMD_memory_overallocation_behavior 1 -#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_SPEC_VERSION 1 -#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME "VK_AMD_memory_overallocation_behavior" +typedef struct VkAccelerationStructureMatrixMotionInstanceNV { + VkTransformMatrixKHR transformT0; + VkTransformMatrixKHR transformT1; + uint32_t instanceCustomIndex:24; + uint32_t mask:8; + uint32_t instanceShaderBindingTableRecordOffset:24; + VkGeometryInstanceFlagsKHR flags:8; + uint64_t accelerationStructureReference; +} VkAccelerationStructureMatrixMotionInstanceNV; -typedef enum VkMemoryOverallocationBehaviorAMD { - VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD = 0, - VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD = 1, - VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD = 2, - VK_MEMORY_OVERALLOCATION_BEHAVIOR_MAX_ENUM_AMD = 0x7FFFFFFF -} VkMemoryOverallocationBehaviorAMD; -typedef struct VkDeviceMemoryOverallocationCreateInfoAMD { - VkStructureType sType; - const void* pNext; - VkMemoryOverallocationBehaviorAMD overallocationBehavior; -} VkDeviceMemoryOverallocationCreateInfoAMD; +typedef struct VkSRTDataNV { + float sx; + float a; + float b; + float pvx; + float sy; + float c; + float pvy; + float sz; + float pvz; + float qx; + float qy; + float qz; + float qw; + float tx; + float ty; + float tz; +} VkSRTDataNV; + +typedef struct VkAccelerationStructureSRTMotionInstanceNV { + VkSRTDataNV transformT0; + VkSRTDataNV transformT1; + uint32_t instanceCustomIndex:24; + uint32_t mask:8; + uint32_t instanceShaderBindingTableRecordOffset:24; + VkGeometryInstanceFlagsKHR flags:8; + uint64_t accelerationStructureReference; +} VkAccelerationStructureSRTMotionInstanceNV; +typedef union VkAccelerationStructureMotionInstanceDataNV { + VkAccelerationStructureInstanceKHR staticInstance; + VkAccelerationStructureMatrixMotionInstanceNV matrixMotionInstance; + VkAccelerationStructureSRTMotionInstanceNV srtMotionInstance; +} VkAccelerationStructureMotionInstanceDataNV; +typedef struct VkAccelerationStructureMotionInstanceNV { + VkAccelerationStructureMotionInstanceTypeNV type; + VkAccelerationStructureMotionInstanceFlagsNV flags; + VkAccelerationStructureMotionInstanceDataNV data; +} VkAccelerationStructureMotionInstanceNV; -#define 
VK_EXT_vertex_attribute_divisor 1 -#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION 3 -#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME "VK_EXT_vertex_attribute_divisor" -typedef struct VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT { +typedef struct VkPhysicalDeviceRayTracingMotionBlurFeaturesNV { VkStructureType sType; void* pNext; - uint32_t maxVertexAttribDivisor; -} VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT; + VkBool32 rayTracingMotionBlur; + VkBool32 rayTracingMotionBlurPipelineTraceRaysIndirect; +} VkPhysicalDeviceRayTracingMotionBlurFeaturesNV; -typedef struct VkVertexInputBindingDivisorDescriptionEXT { - uint32_t binding; - uint32_t divisor; -} VkVertexInputBindingDivisorDescriptionEXT; -typedef struct VkPipelineVertexInputDivisorStateCreateInfoEXT { - VkStructureType sType; - const void* pNext; - uint32_t vertexBindingDivisorCount; - const VkVertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors; -} VkPipelineVertexInputDivisorStateCreateInfoEXT; -typedef struct VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT { +// VK_EXT_ycbcr_2plane_444_formats is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_ycbcr_2plane_444_formats 1 +#define VK_EXT_YCBCR_2PLANE_444_FORMATS_SPEC_VERSION 1 +#define VK_EXT_YCBCR_2PLANE_444_FORMATS_EXTENSION_NAME "VK_EXT_ycbcr_2plane_444_formats" +typedef struct VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 vertexAttributeInstanceRateDivisor; - VkBool32 vertexAttributeInstanceRateZeroDivisor; -} VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT; + VkBool32 ycbcr2plane444Formats; +} VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; -#define VK_EXT_pipeline_creation_feedback 1 -#define VK_EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION 1 -#define VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME "VK_EXT_pipeline_creation_feedback" -typedef VkPipelineCreationFeedbackFlagBits VkPipelineCreationFeedbackFlagBitsEXT; +// VK_EXT_fragment_density_map2 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_fragment_density_map2 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_2_SPEC_VERSION 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_2_EXTENSION_NAME "VK_EXT_fragment_density_map2" +typedef struct VkPhysicalDeviceFragmentDensityMap2FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 fragmentDensityMapDeferred; +} VkPhysicalDeviceFragmentDensityMap2FeaturesEXT; -typedef VkPipelineCreationFeedbackFlags VkPipelineCreationFeedbackFlagsEXT; +typedef struct VkPhysicalDeviceFragmentDensityMap2PropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 subsampledLoads; + VkBool32 subsampledCoarseReconstructionEarlyAccess; + uint32_t maxSubsampledArrayLayers; + uint32_t maxDescriptorSetSubsampledSamplers; +} VkPhysicalDeviceFragmentDensityMap2PropertiesEXT; -typedef VkPipelineCreationFeedbackCreateInfo VkPipelineCreationFeedbackCreateInfoEXT; -typedef VkPipelineCreationFeedback VkPipelineCreationFeedbackEXT; +// VK_QCOM_rotated_copy_commands is a preprocessor guard. Do not pass it to API calls. 
+#define VK_QCOM_rotated_copy_commands 1 +#define VK_QCOM_ROTATED_COPY_COMMANDS_SPEC_VERSION 2 +#define VK_QCOM_ROTATED_COPY_COMMANDS_EXTENSION_NAME "VK_QCOM_rotated_copy_commands" +typedef struct VkCopyCommandTransformInfoQCOM { + VkStructureType sType; + const void* pNext; + VkSurfaceTransformFlagBitsKHR transform; +} VkCopyCommandTransformInfoQCOM; -#define VK_NV_shader_subgroup_partitioned 1 -#define VK_NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION 1 -#define VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME "VK_NV_shader_subgroup_partitioned" +// VK_EXT_image_robustness is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_image_robustness 1 +#define VK_EXT_IMAGE_ROBUSTNESS_SPEC_VERSION 1 +#define VK_EXT_IMAGE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_image_robustness" +typedef VkPhysicalDeviceImageRobustnessFeatures VkPhysicalDeviceImageRobustnessFeaturesEXT; -#define VK_NV_compute_shader_derivatives 1 -#define VK_NV_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION 1 -#define VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME "VK_NV_compute_shader_derivatives" -typedef struct VkPhysicalDeviceComputeShaderDerivativesFeaturesNV { - VkStructureType sType; - void* pNext; - VkBool32 computeDerivativeGroupQuads; - VkBool32 computeDerivativeGroupLinear; -} VkPhysicalDeviceComputeShaderDerivativesFeaturesNV; +// VK_EXT_image_compression_control is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_image_compression_control 1 +#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SPEC_VERSION 1 +#define VK_EXT_IMAGE_COMPRESSION_CONTROL_EXTENSION_NAME "VK_EXT_image_compression_control" -#define VK_NV_mesh_shader 1 -#define VK_NV_MESH_SHADER_SPEC_VERSION 1 -#define VK_NV_MESH_SHADER_EXTENSION_NAME "VK_NV_mesh_shader" -typedef struct VkPhysicalDeviceMeshShaderFeaturesNV { - VkStructureType sType; - void* pNext; - VkBool32 taskShader; - VkBool32 meshShader; -} VkPhysicalDeviceMeshShaderFeaturesNV; +typedef enum VkImageCompressionFlagBitsEXT { + VK_IMAGE_COMPRESSION_DEFAULT_EXT = 0, + VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT = 0x00000001, + VK_IMAGE_COMPRESSION_FIXED_RATE_EXPLICIT_EXT = 0x00000002, + VK_IMAGE_COMPRESSION_DISABLED_EXT = 0x00000004, + VK_IMAGE_COMPRESSION_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkImageCompressionFlagBitsEXT; +typedef VkFlags VkImageCompressionFlagsEXT; -typedef struct VkPhysicalDeviceMeshShaderPropertiesNV { +typedef enum VkImageCompressionFixedRateFlagBitsEXT { + VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT = 0, + VK_IMAGE_COMPRESSION_FIXED_RATE_1BPC_BIT_EXT = 0x00000001, + VK_IMAGE_COMPRESSION_FIXED_RATE_2BPC_BIT_EXT = 0x00000002, + VK_IMAGE_COMPRESSION_FIXED_RATE_3BPC_BIT_EXT = 0x00000004, + VK_IMAGE_COMPRESSION_FIXED_RATE_4BPC_BIT_EXT = 0x00000008, + VK_IMAGE_COMPRESSION_FIXED_RATE_5BPC_BIT_EXT = 0x00000010, + VK_IMAGE_COMPRESSION_FIXED_RATE_6BPC_BIT_EXT = 0x00000020, + VK_IMAGE_COMPRESSION_FIXED_RATE_7BPC_BIT_EXT = 0x00000040, + VK_IMAGE_COMPRESSION_FIXED_RATE_8BPC_BIT_EXT = 0x00000080, + VK_IMAGE_COMPRESSION_FIXED_RATE_9BPC_BIT_EXT = 0x00000100, + VK_IMAGE_COMPRESSION_FIXED_RATE_10BPC_BIT_EXT = 0x00000200, + VK_IMAGE_COMPRESSION_FIXED_RATE_11BPC_BIT_EXT = 0x00000400, + VK_IMAGE_COMPRESSION_FIXED_RATE_12BPC_BIT_EXT = 0x00000800, + VK_IMAGE_COMPRESSION_FIXED_RATE_13BPC_BIT_EXT = 0x00001000, + VK_IMAGE_COMPRESSION_FIXED_RATE_14BPC_BIT_EXT = 0x00002000, + VK_IMAGE_COMPRESSION_FIXED_RATE_15BPC_BIT_EXT = 0x00004000, + VK_IMAGE_COMPRESSION_FIXED_RATE_16BPC_BIT_EXT = 0x00008000, + VK_IMAGE_COMPRESSION_FIXED_RATE_17BPC_BIT_EXT = 0x00010000, + 
VK_IMAGE_COMPRESSION_FIXED_RATE_18BPC_BIT_EXT = 0x00020000, + VK_IMAGE_COMPRESSION_FIXED_RATE_19BPC_BIT_EXT = 0x00040000, + VK_IMAGE_COMPRESSION_FIXED_RATE_20BPC_BIT_EXT = 0x00080000, + VK_IMAGE_COMPRESSION_FIXED_RATE_21BPC_BIT_EXT = 0x00100000, + VK_IMAGE_COMPRESSION_FIXED_RATE_22BPC_BIT_EXT = 0x00200000, + VK_IMAGE_COMPRESSION_FIXED_RATE_23BPC_BIT_EXT = 0x00400000, + VK_IMAGE_COMPRESSION_FIXED_RATE_24BPC_BIT_EXT = 0x00800000, + VK_IMAGE_COMPRESSION_FIXED_RATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkImageCompressionFixedRateFlagBitsEXT; +typedef VkFlags VkImageCompressionFixedRateFlagsEXT; +typedef struct VkPhysicalDeviceImageCompressionControlFeaturesEXT { VkStructureType sType; void* pNext; - uint32_t maxDrawMeshTasksCount; - uint32_t maxTaskWorkGroupInvocations; - uint32_t maxTaskWorkGroupSize[3]; - uint32_t maxTaskTotalMemorySize; - uint32_t maxTaskOutputCount; - uint32_t maxMeshWorkGroupInvocations; - uint32_t maxMeshWorkGroupSize[3]; - uint32_t maxMeshTotalMemorySize; - uint32_t maxMeshOutputVertices; - uint32_t maxMeshOutputPrimitives; - uint32_t maxMeshMultiviewViewCount; - uint32_t meshOutputPerVertexGranularity; - uint32_t meshOutputPerPrimitiveGranularity; -} VkPhysicalDeviceMeshShaderPropertiesNV; - -typedef struct VkDrawMeshTasksIndirectCommandNV { - uint32_t taskCount; - uint32_t firstTask; -} VkDrawMeshTasksIndirectCommandNV; - -typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksNV)(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask); -typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); -typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectCountNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); + VkBool32 imageCompressionControl; +} VkPhysicalDeviceImageCompressionControlFeaturesEXT; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksNV( - VkCommandBuffer commandBuffer, - uint32_t taskCount, - uint32_t firstTask); +typedef struct VkImageCompressionControlEXT { + VkStructureType sType; + const void* pNext; + VkImageCompressionFlagsEXT flags; + uint32_t compressionControlPlaneCount; + VkImageCompressionFixedRateFlagsEXT* pFixedRateFlags; +} VkImageCompressionControlEXT; -VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectNV( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - uint32_t drawCount, - uint32_t stride); +typedef struct VkImageCompressionPropertiesEXT { + VkStructureType sType; + void* pNext; + VkImageCompressionFlagsEXT imageCompressionFlags; + VkImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags; +} VkImageCompressionPropertiesEXT; -VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectCountNV( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride); -#endif -#define VK_NV_fragment_shader_barycentric 1 -#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION 1 -#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME "VK_NV_fragment_shader_barycentric" -typedef VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV; +// VK_EXT_attachment_feedback_loop_layout is a preprocessor guard. Do not pass it to API calls. 
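/* Editor's note - illustrative usage sketch, not part of the generated header.
 * Demonstrates chaining VkImageCompressionControlEXT (declared above) into
 * VkImageCreateInfo::pNext to request default fixed-rate compression. Assumes
 * VK_EXT_image_compression_control is enabled; the helper name is hypothetical. */
static void request_fixed_rate_compression(VkImageCreateInfo* image_info,
                                           VkImageCompressionControlEXT* compression)
{
    compression->sType = VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT;
    compression->pNext = NULL;
    /* Let the implementation choose a fixed-rate setting; no per-plane list. */
    compression->flags = VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT;
    compression->compressionControlPlaneCount = 0;
    compression->pFixedRateFlags = NULL;

    image_info->pNext = compression;
}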
+#define VK_EXT_attachment_feedback_loop_layout 1 +#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_SPEC_VERSION 2 +#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_EXTENSION_NAME "VK_EXT_attachment_feedback_loop_layout" +typedef struct VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 attachmentFeedbackLoopLayout; +} VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; -#define VK_NV_shader_image_footprint 1 -#define VK_NV_SHADER_IMAGE_FOOTPRINT_SPEC_VERSION 2 -#define VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME "VK_NV_shader_image_footprint" -typedef struct VkPhysicalDeviceShaderImageFootprintFeaturesNV { +// VK_EXT_4444_formats is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_4444_formats 1 +#define VK_EXT_4444_FORMATS_SPEC_VERSION 1 +#define VK_EXT_4444_FORMATS_EXTENSION_NAME "VK_EXT_4444_formats" +typedef struct VkPhysicalDevice4444FormatsFeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 imageFootprint; -} VkPhysicalDeviceShaderImageFootprintFeaturesNV; + VkBool32 formatA4R4G4B4; + VkBool32 formatA4B4G4R4; +} VkPhysicalDevice4444FormatsFeaturesEXT; -#define VK_NV_scissor_exclusive 1 -#define VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION 1 -#define VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME "VK_NV_scissor_exclusive" -typedef struct VkPipelineViewportExclusiveScissorStateCreateInfoNV { +// VK_EXT_device_fault is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_device_fault 1 +#define VK_EXT_DEVICE_FAULT_SPEC_VERSION 2 +#define VK_EXT_DEVICE_FAULT_EXTENSION_NAME "VK_EXT_device_fault" + +typedef enum VkDeviceFaultAddressTypeEXT { + VK_DEVICE_FAULT_ADDRESS_TYPE_NONE_EXT = 0, + VK_DEVICE_FAULT_ADDRESS_TYPE_READ_INVALID_EXT = 1, + VK_DEVICE_FAULT_ADDRESS_TYPE_WRITE_INVALID_EXT = 2, + VK_DEVICE_FAULT_ADDRESS_TYPE_EXECUTE_INVALID_EXT = 3, + VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_UNKNOWN_EXT = 4, + VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_INVALID_EXT = 5, + VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_FAULT_EXT = 6, + VK_DEVICE_FAULT_ADDRESS_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceFaultAddressTypeEXT; + +typedef enum VkDeviceFaultVendorBinaryHeaderVersionEXT { + VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_ONE_EXT = 1, + VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceFaultVendorBinaryHeaderVersionEXT; +typedef struct VkPhysicalDeviceFaultFeaturesEXT { VkStructureType sType; - const void* pNext; - uint32_t exclusiveScissorCount; - const VkRect2D* pExclusiveScissors; -} VkPipelineViewportExclusiveScissorStateCreateInfoNV; + void* pNext; + VkBool32 deviceFault; + VkBool32 deviceFaultVendorBinary; +} VkPhysicalDeviceFaultFeaturesEXT; -typedef struct VkPhysicalDeviceExclusiveScissorFeaturesNV { +typedef struct VkDeviceFaultCountsEXT { VkStructureType sType; void* pNext; - VkBool32 exclusiveScissor; -} VkPhysicalDeviceExclusiveScissorFeaturesNV; + uint32_t addressInfoCount; + uint32_t vendorInfoCount; + VkDeviceSize vendorBinarySize; +} VkDeviceFaultCountsEXT; + +typedef struct VkDeviceFaultAddressInfoEXT { + VkDeviceFaultAddressTypeEXT addressType; + VkDeviceAddress reportedAddress; + VkDeviceSize addressPrecision; +} VkDeviceFaultAddressInfoEXT; + +typedef struct VkDeviceFaultVendorInfoEXT { + char description[VK_MAX_DESCRIPTION_SIZE]; + uint64_t vendorFaultCode; + uint64_t vendorFaultData; +} VkDeviceFaultVendorInfoEXT; -typedef void (VKAPI_PTR *PFN_vkCmdSetExclusiveScissorNV)(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, 
uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors); +typedef struct VkDeviceFaultInfoEXT { + VkStructureType sType; + void* pNext; + char description[VK_MAX_DESCRIPTION_SIZE]; + VkDeviceFaultAddressInfoEXT* pAddressInfos; + VkDeviceFaultVendorInfoEXT* pVendorInfos; + void* pVendorBinaryData; +} VkDeviceFaultInfoEXT; + +typedef struct VkDeviceFaultVendorBinaryHeaderVersionOneEXT { + uint32_t headerSize; + VkDeviceFaultVendorBinaryHeaderVersionEXT headerVersion; + uint32_t vendorID; + uint32_t deviceID; + uint32_t driverVersion; + uint8_t pipelineCacheUUID[VK_UUID_SIZE]; + uint32_t applicationNameOffset; + uint32_t applicationVersion; + uint32_t engineNameOffset; + uint32_t engineVersion; + uint32_t apiVersion; +} VkDeviceFaultVendorBinaryHeaderVersionOneEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceFaultInfoEXT)(VkDevice device, VkDeviceFaultCountsEXT* pFaultCounts, VkDeviceFaultInfoEXT* pFaultInfo); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetExclusiveScissorNV( - VkCommandBuffer commandBuffer, - uint32_t firstExclusiveScissor, - uint32_t exclusiveScissorCount, - const VkRect2D* pExclusiveScissors); +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceFaultInfoEXT( + VkDevice device, + VkDeviceFaultCountsEXT* pFaultCounts, + VkDeviceFaultInfoEXT* pFaultInfo); #endif -#define VK_NV_device_diagnostic_checkpoints 1 -#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_SPEC_VERSION 2 -#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME "VK_NV_device_diagnostic_checkpoints" -typedef struct VkQueueFamilyCheckpointPropertiesNV { - VkStructureType sType; - void* pNext; - VkPipelineStageFlags checkpointExecutionStageMask; -} VkQueueFamilyCheckpointPropertiesNV; +// VK_ARM_rasterization_order_attachment_access is a preprocessor guard. Do not pass it to API calls. +#define VK_ARM_rasterization_order_attachment_access 1 +#define VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION 1 +#define VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME "VK_ARM_rasterization_order_attachment_access" +typedef struct VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 rasterizationOrderColorAttachmentAccess; + VkBool32 rasterizationOrderDepthAttachmentAccess; + VkBool32 rasterizationOrderStencilAttachmentAccess; +} VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; -typedef struct VkCheckpointDataNV { - VkStructureType sType; - void* pNext; - VkPipelineStageFlagBits stage; - void* pCheckpointMarker; -} VkCheckpointDataNV; +typedef VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM; -typedef void (VKAPI_PTR *PFN_vkCmdSetCheckpointNV)(VkCommandBuffer commandBuffer, const void* pCheckpointMarker); -typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointDataNV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetCheckpointNV( - VkCommandBuffer commandBuffer, - const void* pCheckpointMarker); -VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointDataNV( - VkQueue queue, - uint32_t* pCheckpointDataCount, - VkCheckpointDataNV* pCheckpointData); -#endif +// VK_EXT_rgba10x6_formats is a preprocessor guard. Do not pass it to API calls. 
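/* Editor's note - illustrative usage sketch, not part of the generated header.
 * The usual two-call pattern for vkGetDeviceFaultInfoEXT (declared above) after a
 * VK_ERROR_DEVICE_LOST: query counts first, then allocate and fetch the records.
 * Assumes <stdio.h>/<stdlib.h>; error handling and the vendor binary are omitted;
 * the helper name is hypothetical. */
static void dump_device_fault(VkDevice device)
{
    VkDeviceFaultCountsEXT counts = { VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT };
    vkGetDeviceFaultInfoEXT(device, &counts, NULL);          /* fills the counts */

    VkDeviceFaultInfoEXT info = { VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT };
    info.pAddressInfos = calloc(counts.addressInfoCount, sizeof(VkDeviceFaultAddressInfoEXT));
    info.pVendorInfos  = calloc(counts.vendorInfoCount,  sizeof(VkDeviceFaultVendorInfoEXT));
    vkGetDeviceFaultInfoEXT(device, &counts, &info);          /* fills the records */

    printf("device fault: %s\n", info.description);
    free(info.pAddressInfos);
    free(info.pVendorInfos);
}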
+#define VK_EXT_rgba10x6_formats 1 +#define VK_EXT_RGBA10X6_FORMATS_SPEC_VERSION 1 +#define VK_EXT_RGBA10X6_FORMATS_EXTENSION_NAME "VK_EXT_rgba10x6_formats" +typedef struct VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 formatRgba10x6WithoutYCbCrSampler; +} VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT; -#define VK_INTEL_shader_integer_functions2 1 -#define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_SPEC_VERSION 1 -#define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_EXTENSION_NAME "VK_INTEL_shader_integer_functions2" -typedef struct VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL { + +// VK_VALVE_mutable_descriptor_type is a preprocessor guard. Do not pass it to API calls. +#define VK_VALVE_mutable_descriptor_type 1 +#define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION 1 +#define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME "VK_VALVE_mutable_descriptor_type" +typedef struct VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 shaderIntegerFunctions2; -} VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + VkBool32 mutableDescriptorType; +} VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT; +typedef VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE; +typedef struct VkMutableDescriptorTypeListEXT { + uint32_t descriptorTypeCount; + const VkDescriptorType* pDescriptorTypes; +} VkMutableDescriptorTypeListEXT; -#define VK_INTEL_performance_query 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPerformanceConfigurationINTEL) -#define VK_INTEL_PERFORMANCE_QUERY_SPEC_VERSION 2 -#define VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME "VK_INTEL_performance_query" +typedef VkMutableDescriptorTypeListEXT VkMutableDescriptorTypeListVALVE; -typedef enum VkPerformanceConfigurationTypeINTEL { - VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL = 0, - VK_PERFORMANCE_CONFIGURATION_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF -} VkPerformanceConfigurationTypeINTEL; +typedef struct VkMutableDescriptorTypeCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t mutableDescriptorTypeListCount; + const VkMutableDescriptorTypeListEXT* pMutableDescriptorTypeLists; +} VkMutableDescriptorTypeCreateInfoEXT; -typedef enum VkQueryPoolSamplingModeINTEL { - VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL = 0, - VK_QUERY_POOL_SAMPLING_MODE_MAX_ENUM_INTEL = 0x7FFFFFFF -} VkQueryPoolSamplingModeINTEL; +typedef VkMutableDescriptorTypeCreateInfoEXT VkMutableDescriptorTypeCreateInfoVALVE; -typedef enum VkPerformanceOverrideTypeINTEL { - VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL = 0, - VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL = 1, - VK_PERFORMANCE_OVERRIDE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF -} VkPerformanceOverrideTypeINTEL; -typedef enum VkPerformanceParameterTypeINTEL { - VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL = 0, - VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL = 1, - VK_PERFORMANCE_PARAMETER_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF -} VkPerformanceParameterTypeINTEL; -typedef enum VkPerformanceValueTypeINTEL { - VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL = 0, - VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL = 1, - VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL = 2, - VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL = 3, - VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL = 4, - VK_PERFORMANCE_VALUE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF -} VkPerformanceValueTypeINTEL; -typedef union VkPerformanceValueDataINTEL { - uint32_t value32; - uint64_t value64; - float valueFloat; - VkBool32 
valueBool; - const char* valueString; -} VkPerformanceValueDataINTEL; +// VK_EXT_vertex_input_dynamic_state is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_vertex_input_dynamic_state 1 +#define VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_SPEC_VERSION 2 +#define VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_vertex_input_dynamic_state" +typedef struct VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 vertexInputDynamicState; +} VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT; -typedef struct VkPerformanceValueINTEL { - VkPerformanceValueTypeINTEL type; - VkPerformanceValueDataINTEL data; -} VkPerformanceValueINTEL; +typedef struct VkVertexInputBindingDescription2EXT { + VkStructureType sType; + void* pNext; + uint32_t binding; + uint32_t stride; + VkVertexInputRate inputRate; + uint32_t divisor; +} VkVertexInputBindingDescription2EXT; -typedef struct VkInitializePerformanceApiInfoINTEL { +typedef struct VkVertexInputAttributeDescription2EXT { VkStructureType sType; - const void* pNext; - void* pUserData; -} VkInitializePerformanceApiInfoINTEL; + void* pNext; + uint32_t location; + uint32_t binding; + VkFormat format; + uint32_t offset; +} VkVertexInputAttributeDescription2EXT; -typedef struct VkQueryPoolPerformanceQueryCreateInfoINTEL { - VkStructureType sType; - const void* pNext; - VkQueryPoolSamplingModeINTEL performanceCountersSampling; -} VkQueryPoolPerformanceQueryCreateInfoINTEL; +typedef void (VKAPI_PTR *PFN_vkCmdSetVertexInputEXT)(VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount, const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount, const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions); -typedef VkQueryPoolPerformanceQueryCreateInfoINTEL VkQueryPoolCreateInfoINTEL; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetVertexInputEXT( + VkCommandBuffer commandBuffer, + uint32_t vertexBindingDescriptionCount, + const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions); +#endif -typedef struct VkPerformanceMarkerInfoINTEL { - VkStructureType sType; - const void* pNext; - uint64_t marker; -} VkPerformanceMarkerInfoINTEL; -typedef struct VkPerformanceStreamMarkerInfoINTEL { +// VK_EXT_physical_device_drm is a preprocessor guard. Do not pass it to API calls. 
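/* Editor's note - illustrative usage sketch, not part of the generated header.
 * Shows vkCmdSetVertexInputEXT (declared above) supplying a single interleaved
 * position/color layout at record time. Assumes the vertexInputDynamicState
 * feature and VK_DYNAMIC_STATE_VERTEX_INPUT_EXT are in use; the helper name is
 * hypothetical. */
static void set_vertex_layout(VkCommandBuffer cmd)
{
    VkVertexInputBindingDescription2EXT binding = {
        .sType = VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT,
        .binding = 0,
        .stride = 6 * sizeof(float),            /* vec3 position + vec3 color */
        .inputRate = VK_VERTEX_INPUT_RATE_VERTEX,
        .divisor = 1,                           /* must be 1 for per-vertex data */
    };
    VkVertexInputAttributeDescription2EXT attrs[2] = {
        { .sType = VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT,
          .location = 0, .binding = 0, .format = VK_FORMAT_R32G32B32_SFLOAT, .offset = 0 },
        { .sType = VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT,
          .location = 1, .binding = 0, .format = VK_FORMAT_R32G32B32_SFLOAT,
          .offset = 3 * sizeof(float) },
    };
    vkCmdSetVertexInputEXT(cmd, 1, &binding, 2, attrs);
}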
+#define VK_EXT_physical_device_drm 1 +#define VK_EXT_PHYSICAL_DEVICE_DRM_SPEC_VERSION 1 +#define VK_EXT_PHYSICAL_DEVICE_DRM_EXTENSION_NAME "VK_EXT_physical_device_drm" +typedef struct VkPhysicalDeviceDrmPropertiesEXT { VkStructureType sType; - const void* pNext; - uint32_t marker; -} VkPerformanceStreamMarkerInfoINTEL; + void* pNext; + VkBool32 hasPrimary; + VkBool32 hasRender; + int64_t primaryMajor; + int64_t primaryMinor; + int64_t renderMajor; + int64_t renderMinor; +} VkPhysicalDeviceDrmPropertiesEXT; -typedef struct VkPerformanceOverrideInfoINTEL { - VkStructureType sType; - const void* pNext; - VkPerformanceOverrideTypeINTEL type; - VkBool32 enable; - uint64_t parameter; -} VkPerformanceOverrideInfoINTEL; -typedef struct VkPerformanceConfigurationAcquireInfoINTEL { - VkStructureType sType; - const void* pNext; - VkPerformanceConfigurationTypeINTEL type; -} VkPerformanceConfigurationAcquireInfoINTEL; -typedef VkResult (VKAPI_PTR *PFN_vkInitializePerformanceApiINTEL)(VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo); -typedef void (VKAPI_PTR *PFN_vkUninitializePerformanceApiINTEL)(VkDevice device); -typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceMarkerINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo); -typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceStreamMarkerINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo); -typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceOverrideINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo); -typedef VkResult (VKAPI_PTR *PFN_vkAcquirePerformanceConfigurationINTEL)(VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration); -typedef VkResult (VKAPI_PTR *PFN_vkReleasePerformanceConfigurationINTEL)(VkDevice device, VkPerformanceConfigurationINTEL configuration); -typedef VkResult (VKAPI_PTR *PFN_vkQueueSetPerformanceConfigurationINTEL)(VkQueue queue, VkPerformanceConfigurationINTEL configuration); -typedef VkResult (VKAPI_PTR *PFN_vkGetPerformanceParameterINTEL)(VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue); +// VK_EXT_device_address_binding_report is a preprocessor guard. Do not pass it to API calls. 
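/* Editor's note - illustrative usage sketch, not part of the generated header.
 * Reads the DRM node identifiers by chaining VkPhysicalDeviceDrmPropertiesEXT
 * (declared above) into vkGetPhysicalDeviceProperties2. Assumes <stdio.h> and
 * that VK_EXT_physical_device_drm is supported; the helper name is hypothetical. */
static void query_drm_nodes(VkPhysicalDevice gpu)
{
    VkPhysicalDeviceDrmPropertiesEXT drm = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT };
    VkPhysicalDeviceProperties2 props = {
        .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
        .pNext = &drm };
    vkGetPhysicalDeviceProperties2(gpu, &props);

    if (drm.hasRender) {
        /* e.g. maps to /dev/dri/renderD<minor> on Linux */
        printf("render node: %lld:%lld\n",
               (long long)drm.renderMajor, (long long)drm.renderMinor);
    }
}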
+#define VK_EXT_device_address_binding_report 1 +#define VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_SPEC_VERSION 1 +#define VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_EXTENSION_NAME "VK_EXT_device_address_binding_report" -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkInitializePerformanceApiINTEL( - VkDevice device, - const VkInitializePerformanceApiInfoINTEL* pInitializeInfo); +typedef enum VkDeviceAddressBindingTypeEXT { + VK_DEVICE_ADDRESS_BINDING_TYPE_BIND_EXT = 0, + VK_DEVICE_ADDRESS_BINDING_TYPE_UNBIND_EXT = 1, + VK_DEVICE_ADDRESS_BINDING_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceAddressBindingTypeEXT; -VKAPI_ATTR void VKAPI_CALL vkUninitializePerformanceApiINTEL( - VkDevice device); +typedef enum VkDeviceAddressBindingFlagBitsEXT { + VK_DEVICE_ADDRESS_BINDING_INTERNAL_OBJECT_BIT_EXT = 0x00000001, + VK_DEVICE_ADDRESS_BINDING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceAddressBindingFlagBitsEXT; +typedef VkFlags VkDeviceAddressBindingFlagsEXT; +typedef struct VkPhysicalDeviceAddressBindingReportFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 reportAddressBinding; +} VkPhysicalDeviceAddressBindingReportFeaturesEXT; -VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceMarkerINTEL( - VkCommandBuffer commandBuffer, - const VkPerformanceMarkerInfoINTEL* pMarkerInfo); +typedef struct VkDeviceAddressBindingCallbackDataEXT { + VkStructureType sType; + void* pNext; + VkDeviceAddressBindingFlagsEXT flags; + VkDeviceAddress baseAddress; + VkDeviceSize size; + VkDeviceAddressBindingTypeEXT bindingType; +} VkDeviceAddressBindingCallbackDataEXT; -VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceStreamMarkerINTEL( - VkCommandBuffer commandBuffer, - const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo); -VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceOverrideINTEL( - VkCommandBuffer commandBuffer, - const VkPerformanceOverrideInfoINTEL* pOverrideInfo); -VKAPI_ATTR VkResult VKAPI_CALL vkAcquirePerformanceConfigurationINTEL( - VkDevice device, - const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, - VkPerformanceConfigurationINTEL* pConfiguration); +// VK_EXT_depth_clip_control is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_depth_clip_control 1 +#define VK_EXT_DEPTH_CLIP_CONTROL_SPEC_VERSION 1 +#define VK_EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME "VK_EXT_depth_clip_control" +typedef struct VkPhysicalDeviceDepthClipControlFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 depthClipControl; +} VkPhysicalDeviceDepthClipControlFeaturesEXT; -VKAPI_ATTR VkResult VKAPI_CALL vkReleasePerformanceConfigurationINTEL( - VkDevice device, - VkPerformanceConfigurationINTEL configuration); +typedef struct VkPipelineViewportDepthClipControlCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 negativeOneToOne; +} VkPipelineViewportDepthClipControlCreateInfoEXT; -VKAPI_ATTR VkResult VKAPI_CALL vkQueueSetPerformanceConfigurationINTEL( - VkQueue queue, - VkPerformanceConfigurationINTEL configuration); -VKAPI_ATTR VkResult VKAPI_CALL vkGetPerformanceParameterINTEL( - VkDevice device, - VkPerformanceParameterTypeINTEL parameter, - VkPerformanceValueINTEL* pValue); -#endif +// VK_EXT_primitive_topology_list_restart is a preprocessor guard. Do not pass it to API calls. 
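/* Editor's note - illustrative usage sketch, not part of the generated header.
 * Opts a graphics pipeline into an OpenGL-style [-1, 1] clip-space depth range
 * by chaining VkPipelineViewportDepthClipControlCreateInfoEXT (declared above)
 * into the viewport state. Assumes the depthClipControl feature is enabled; the
 * helper name is hypothetical. */
static void use_negative_one_to_one_depth(
    VkPipelineViewportStateCreateInfo*             viewport_state,
    VkPipelineViewportDepthClipControlCreateInfoEXT* clip)
{
    clip->sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT;
    clip->pNext = NULL;
    clip->negativeOneToOne = VK_TRUE;   /* depth clip range becomes [-1, 1] */
    viewport_state->pNext = clip;
}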
+#define VK_EXT_primitive_topology_list_restart 1 +#define VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_SPEC_VERSION 1 +#define VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_EXTENSION_NAME "VK_EXT_primitive_topology_list_restart" +typedef struct VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 primitiveTopologyListRestart; + VkBool32 primitiveTopologyPatchListRestart; +} VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; -#define VK_EXT_pci_bus_info 1 -#define VK_EXT_PCI_BUS_INFO_SPEC_VERSION 2 -#define VK_EXT_PCI_BUS_INFO_EXTENSION_NAME "VK_EXT_pci_bus_info" -typedef struct VkPhysicalDevicePCIBusInfoPropertiesEXT { + + +// VK_EXT_present_mode_fifo_latest_ready is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_present_mode_fifo_latest_ready 1 +#define VK_EXT_PRESENT_MODE_FIFO_LATEST_READY_SPEC_VERSION 1 +#define VK_EXT_PRESENT_MODE_FIFO_LATEST_READY_EXTENSION_NAME "VK_EXT_present_mode_fifo_latest_ready" +typedef struct VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT { VkStructureType sType; void* pNext; - uint32_t pciDomain; - uint32_t pciBus; - uint32_t pciDevice; - uint32_t pciFunction; -} VkPhysicalDevicePCIBusInfoPropertiesEXT; + VkBool32 presentModeFifoLatestReady; +} VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT; -#define VK_AMD_display_native_hdr 1 -#define VK_AMD_DISPLAY_NATIVE_HDR_SPEC_VERSION 1 -#define VK_AMD_DISPLAY_NATIVE_HDR_EXTENSION_NAME "VK_AMD_display_native_hdr" -typedef struct VkDisplayNativeHdrSurfaceCapabilitiesAMD { +// VK_HUAWEI_subpass_shading is a preprocessor guard. Do not pass it to API calls. +#define VK_HUAWEI_subpass_shading 1 +#define VK_HUAWEI_SUBPASS_SHADING_SPEC_VERSION 3 +#define VK_HUAWEI_SUBPASS_SHADING_EXTENSION_NAME "VK_HUAWEI_subpass_shading" +typedef struct VkSubpassShadingPipelineCreateInfoHUAWEI { + VkStructureType sType; + void* pNext; + VkRenderPass renderPass; + uint32_t subpass; +} VkSubpassShadingPipelineCreateInfoHUAWEI; + +typedef struct VkPhysicalDeviceSubpassShadingFeaturesHUAWEI { VkStructureType sType; void* pNext; - VkBool32 localDimmingSupport; -} VkDisplayNativeHdrSurfaceCapabilitiesAMD; + VkBool32 subpassShading; +} VkPhysicalDeviceSubpassShadingFeaturesHUAWEI; -typedef struct VkSwapchainDisplayNativeHdrCreateInfoAMD { +typedef struct VkPhysicalDeviceSubpassShadingPropertiesHUAWEI { VkStructureType sType; - const void* pNext; - VkBool32 localDimmingEnable; -} VkSwapchainDisplayNativeHdrCreateInfoAMD; + void* pNext; + uint32_t maxSubpassShadingWorkgroupSizeAspectRatio; +} VkPhysicalDeviceSubpassShadingPropertiesHUAWEI; -typedef void (VKAPI_PTR *PFN_vkSetLocalDimmingAMD)(VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI)(VkDevice device, VkRenderPass renderpass, VkExtent2D* pMaxWorkgroupSize); +typedef void (VKAPI_PTR *PFN_vkCmdSubpassShadingHUAWEI)(VkCommandBuffer commandBuffer); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkSetLocalDimmingAMD( +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( VkDevice device, - VkSwapchainKHR swapChain, - VkBool32 localDimmingEnable); + VkRenderPass renderpass, + VkExtent2D* pMaxWorkgroupSize); + +VKAPI_ATTR void VKAPI_CALL vkCmdSubpassShadingHUAWEI( + VkCommandBuffer commandBuffer); #endif -#define VK_EXT_fragment_density_map 1 -#define VK_EXT_FRAGMENT_DENSITY_MAP_SPEC_VERSION 2 -#define VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME "VK_EXT_fragment_density_map" -typedef 
struct VkPhysicalDeviceFragmentDensityMapFeaturesEXT { +// VK_HUAWEI_invocation_mask is a preprocessor guard. Do not pass it to API calls. +#define VK_HUAWEI_invocation_mask 1 +#define VK_HUAWEI_INVOCATION_MASK_SPEC_VERSION 1 +#define VK_HUAWEI_INVOCATION_MASK_EXTENSION_NAME "VK_HUAWEI_invocation_mask" +typedef struct VkPhysicalDeviceInvocationMaskFeaturesHUAWEI { VkStructureType sType; void* pNext; - VkBool32 fragmentDensityMap; - VkBool32 fragmentDensityMapDynamic; - VkBool32 fragmentDensityMapNonSubsampledImages; -} VkPhysicalDeviceFragmentDensityMapFeaturesEXT; + VkBool32 invocationMask; +} VkPhysicalDeviceInvocationMaskFeaturesHUAWEI; -typedef struct VkPhysicalDeviceFragmentDensityMapPropertiesEXT { - VkStructureType sType; - void* pNext; - VkExtent2D minFragmentDensityTexelSize; - VkExtent2D maxFragmentDensityTexelSize; - VkBool32 fragmentDensityInvocations; -} VkPhysicalDeviceFragmentDensityMapPropertiesEXT; +typedef void (VKAPI_PTR *PFN_vkCmdBindInvocationMaskHUAWEI)(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout); -typedef struct VkRenderPassFragmentDensityMapCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkAttachmentReference fragmentDensityMapAttachment; -} VkRenderPassFragmentDensityMapCreateInfoEXT; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindInvocationMaskHUAWEI( + VkCommandBuffer commandBuffer, + VkImageView imageView, + VkImageLayout imageLayout); +#endif +// VK_NV_external_memory_rdma is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_external_memory_rdma 1 +typedef void* VkRemoteAddressNV; +#define VK_NV_EXTERNAL_MEMORY_RDMA_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_RDMA_EXTENSION_NAME "VK_NV_external_memory_rdma" +typedef struct VkMemoryGetRemoteAddressInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetRemoteAddressInfoNV; -#define VK_EXT_scalar_block_layout 1 -#define VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION 1 -#define VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME "VK_EXT_scalar_block_layout" -typedef VkPhysicalDeviceScalarBlockLayoutFeatures VkPhysicalDeviceScalarBlockLayoutFeaturesEXT; +typedef struct VkPhysicalDeviceExternalMemoryRDMAFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 externalMemoryRDMA; +} VkPhysicalDeviceExternalMemoryRDMAFeaturesNV; +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryRemoteAddressNV)(VkDevice device, const VkMemoryGetRemoteAddressInfoNV* pMemoryGetRemoteAddressInfo, VkRemoteAddressNV* pAddress); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryRemoteAddressNV( + VkDevice device, + const VkMemoryGetRemoteAddressInfoNV* pMemoryGetRemoteAddressInfo, + VkRemoteAddressNV* pAddress); +#endif -#define VK_GOOGLE_hlsl_functionality1 1 -#define VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION 1 -#define VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME "VK_GOOGLE_hlsl_functionality1" -#define VK_GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION -#define VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME +// VK_EXT_pipeline_properties is a preprocessor guard. Do not pass it to API calls. 
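/* Editor's note - illustrative usage sketch, not part of the generated header.
 * Exports a remote (RDMA-capable) address for a device allocation with
 * vkGetMemoryRemoteAddressNV (declared above). Assumes the memory was allocated
 * exportable with VK_EXTERNAL_MEMORY_HANDLE_TYPE_RDMA_ADDRESS_BIT_NV and that the
 * externalMemoryRDMA feature is enabled; the helper name is hypothetical. */
static VkResult export_rdma_address(VkDevice device, VkDeviceMemory memory,
                                    VkRemoteAddressNV* out_address)
{
    VkMemoryGetRemoteAddressInfoNV info = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_GET_REMOTE_ADDRESS_INFO_NV,
        .memory = memory,
        .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_RDMA_ADDRESS_BIT_NV,
    };
    return vkGetMemoryRemoteAddressNV(device, &info, out_address);
}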
+#define VK_EXT_pipeline_properties 1 +#define VK_EXT_PIPELINE_PROPERTIES_SPEC_VERSION 1 +#define VK_EXT_PIPELINE_PROPERTIES_EXTENSION_NAME "VK_EXT_pipeline_properties" +typedef VkPipelineInfoKHR VkPipelineInfoEXT; -#define VK_GOOGLE_decorate_string 1 -#define VK_GOOGLE_DECORATE_STRING_SPEC_VERSION 1 -#define VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME "VK_GOOGLE_decorate_string" +typedef struct VkPipelinePropertiesIdentifierEXT { + VkStructureType sType; + void* pNext; + uint8_t pipelineIdentifier[VK_UUID_SIZE]; +} VkPipelinePropertiesIdentifierEXT; +typedef struct VkPhysicalDevicePipelinePropertiesFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 pipelinePropertiesIdentifier; +} VkPhysicalDevicePipelinePropertiesFeaturesEXT; -#define VK_EXT_subgroup_size_control 1 -#define VK_EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION 2 -#define VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME "VK_EXT_subgroup_size_control" -typedef VkPhysicalDeviceSubgroupSizeControlFeatures VkPhysicalDeviceSubgroupSizeControlFeaturesEXT; +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelinePropertiesEXT)(VkDevice device, const VkPipelineInfoEXT* pPipelineInfo, VkBaseOutStructure* pPipelineProperties); -typedef VkPhysicalDeviceSubgroupSizeControlProperties VkPhysicalDeviceSubgroupSizeControlPropertiesEXT; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelinePropertiesEXT( + VkDevice device, + const VkPipelineInfoEXT* pPipelineInfo, + VkBaseOutStructure* pPipelineProperties); +#endif -typedef VkPipelineShaderStageRequiredSubgroupSizeCreateInfo VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; +// VK_EXT_frame_boundary is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_frame_boundary 1 +#define VK_EXT_FRAME_BOUNDARY_SPEC_VERSION 1 +#define VK_EXT_FRAME_BOUNDARY_EXTENSION_NAME "VK_EXT_frame_boundary" +typedef enum VkFrameBoundaryFlagBitsEXT { + VK_FRAME_BOUNDARY_FRAME_END_BIT_EXT = 0x00000001, + VK_FRAME_BOUNDARY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkFrameBoundaryFlagBitsEXT; +typedef VkFlags VkFrameBoundaryFlagsEXT; +typedef struct VkPhysicalDeviceFrameBoundaryFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 frameBoundary; +} VkPhysicalDeviceFrameBoundaryFeaturesEXT; -#define VK_AMD_shader_core_properties2 1 -#define VK_AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION 1 -#define VK_AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME "VK_AMD_shader_core_properties2" +typedef struct VkFrameBoundaryEXT { + VkStructureType sType; + const void* pNext; + VkFrameBoundaryFlagsEXT flags; + uint64_t frameID; + uint32_t imageCount; + const VkImage* pImages; + uint32_t bufferCount; + const VkBuffer* pBuffers; + uint64_t tagName; + size_t tagSize; + const void* pTag; +} VkFrameBoundaryEXT; -typedef enum VkShaderCorePropertiesFlagBitsAMD { - VK_SHADER_CORE_PROPERTIES_FLAG_BITS_MAX_ENUM_AMD = 0x7FFFFFFF -} VkShaderCorePropertiesFlagBitsAMD; -typedef VkFlags VkShaderCorePropertiesFlagsAMD; -typedef struct VkPhysicalDeviceShaderCoreProperties2AMD { - VkStructureType sType; - void* pNext; - VkShaderCorePropertiesFlagsAMD shaderCoreFeatures; - uint32_t activeComputeUnitCount; -} VkPhysicalDeviceShaderCoreProperties2AMD; +// VK_EXT_multisampled_render_to_single_sampled is a preprocessor guard. Do not pass it to API calls. 
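/* Editor's note - illustrative usage sketch, not part of the generated header.
 * Tags the last submission of a frame for capture tools by chaining
 * VkFrameBoundaryEXT (declared above) into VkSubmitInfo::pNext. Assumes the
 * frameBoundary feature is enabled and swapchain_image is the image presented
 * this frame; the helper name is hypothetical. */
static void submit_with_frame_boundary(VkQueue queue, VkCommandBuffer cmd,
                                       VkImage swapchain_image, uint64_t frame_id)
{
    VkFrameBoundaryEXT boundary = {
        .sType = VK_STRUCTURE_TYPE_FRAME_BOUNDARY_EXT,
        .flags = VK_FRAME_BOUNDARY_FRAME_END_BIT_EXT,  /* last submission of the frame */
        .frameID = frame_id,
        .imageCount = 1,
        .pImages = &swapchain_image,
    };
    VkSubmitInfo submit = {
        .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
        .pNext = &boundary,
        .commandBufferCount = 1,
        .pCommandBuffers = &cmd,
    };
    vkQueueSubmit(queue, 1, &submit, VK_NULL_HANDLE);
}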
+#define VK_EXT_multisampled_render_to_single_sampled 1 +#define VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_SPEC_VERSION 1 +#define VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXTENSION_NAME "VK_EXT_multisampled_render_to_single_sampled" +typedef struct VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 multisampledRenderToSingleSampled; +} VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; -#define VK_AMD_device_coherent_memory 1 -#define VK_AMD_DEVICE_COHERENT_MEMORY_SPEC_VERSION 1 -#define VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME "VK_AMD_device_coherent_memory" -typedef struct VkPhysicalDeviceCoherentMemoryFeaturesAMD { +typedef struct VkSubpassResolvePerformanceQueryEXT { VkStructureType sType; void* pNext; - VkBool32 deviceCoherentMemory; -} VkPhysicalDeviceCoherentMemoryFeaturesAMD; + VkBool32 optimal; +} VkSubpassResolvePerformanceQueryEXT; + +typedef struct VkMultisampledRenderToSingleSampledInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 multisampledRenderToSingleSampledEnable; + VkSampleCountFlagBits rasterizationSamples; +} VkMultisampledRenderToSingleSampledInfoEXT; -#define VK_EXT_shader_image_atomic_int64 1 -#define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_SPEC_VERSION 1 -#define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_EXTENSION_NAME "VK_EXT_shader_image_atomic_int64" -typedef struct VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT { +// VK_EXT_extended_dynamic_state2 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_extended_dynamic_state2 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_2_SPEC_VERSION 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME "VK_EXT_extended_dynamic_state2" +typedef struct VkPhysicalDeviceExtendedDynamicState2FeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 shaderImageInt64Atomics; - VkBool32 sparseImageInt64Atomics; -} VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + VkBool32 extendedDynamicState2; + VkBool32 extendedDynamicState2LogicOp; + VkBool32 extendedDynamicState2PatchControlPoints; +} VkPhysicalDeviceExtendedDynamicState2FeaturesEXT; +typedef void (VKAPI_PTR *PFN_vkCmdSetPatchControlPointsEXT)(VkCommandBuffer commandBuffer, uint32_t patchControlPoints); +typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizerDiscardEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBiasEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetLogicOpEXT)(VkCommandBuffer commandBuffer, VkLogicOp logicOp); +typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveRestartEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetPatchControlPointsEXT( + VkCommandBuffer commandBuffer, + uint32_t patchControlPoints); -#define VK_EXT_memory_budget 1 -#define VK_EXT_MEMORY_BUDGET_SPEC_VERSION 1 -#define VK_EXT_MEMORY_BUDGET_EXTENSION_NAME "VK_EXT_memory_budget" -typedef struct VkPhysicalDeviceMemoryBudgetPropertiesEXT { - VkStructureType sType; - void* pNext; - VkDeviceSize heapBudget[VK_MAX_MEMORY_HEAPS]; - VkDeviceSize heapUsage[VK_MAX_MEMORY_HEAPS]; -} VkPhysicalDeviceMemoryBudgetPropertiesEXT; +VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizerDiscardEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 rasterizerDiscardEnable); +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBiasEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthBiasEnable); +VKAPI_ATTR void 
VKAPI_CALL vkCmdSetLogicOpEXT( + VkCommandBuffer commandBuffer, + VkLogicOp logicOp); -#define VK_EXT_memory_priority 1 -#define VK_EXT_MEMORY_PRIORITY_SPEC_VERSION 1 -#define VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME "VK_EXT_memory_priority" -typedef struct VkPhysicalDeviceMemoryPriorityFeaturesEXT { +VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveRestartEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 primitiveRestartEnable); +#endif + + +// VK_EXT_color_write_enable is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_color_write_enable 1 +#define VK_EXT_COLOR_WRITE_ENABLE_SPEC_VERSION 1 +#define VK_EXT_COLOR_WRITE_ENABLE_EXTENSION_NAME "VK_EXT_color_write_enable" +typedef struct VkPhysicalDeviceColorWriteEnableFeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 memoryPriority; -} VkPhysicalDeviceMemoryPriorityFeaturesEXT; + VkBool32 colorWriteEnable; +} VkPhysicalDeviceColorWriteEnableFeaturesEXT; -typedef struct VkMemoryPriorityAllocateInfoEXT { +typedef struct VkPipelineColorWriteCreateInfoEXT { VkStructureType sType; const void* pNext; - float priority; -} VkMemoryPriorityAllocateInfoEXT; + uint32_t attachmentCount; + const VkBool32* pColorWriteEnables; +} VkPipelineColorWriteCreateInfoEXT; +typedef void (VKAPI_PTR *PFN_vkCmdSetColorWriteEnableEXT)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32* pColorWriteEnables); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetColorWriteEnableEXT( + VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkBool32* pColorWriteEnables); +#endif -#define VK_NV_dedicated_allocation_image_aliasing 1 -#define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_SPEC_VERSION 1 -#define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_EXTENSION_NAME "VK_NV_dedicated_allocation_image_aliasing" -typedef struct VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV { +// VK_EXT_primitives_generated_query is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_primitives_generated_query 1 +#define VK_EXT_PRIMITIVES_GENERATED_QUERY_SPEC_VERSION 1 +#define VK_EXT_PRIMITIVES_GENERATED_QUERY_EXTENSION_NAME "VK_EXT_primitives_generated_query" +typedef struct VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 dedicatedAllocationImageAliasing; -} VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + VkBool32 primitivesGeneratedQuery; + VkBool32 primitivesGeneratedQueryWithRasterizerDiscard; + VkBool32 primitivesGeneratedQueryWithNonZeroStreams; +} VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; + + + +// VK_EXT_global_priority_query is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_global_priority_query 1 +#define VK_EXT_GLOBAL_PRIORITY_QUERY_SPEC_VERSION 1 +#define VK_EXT_GLOBAL_PRIORITY_QUERY_EXTENSION_NAME "VK_EXT_global_priority_query" +#define VK_MAX_GLOBAL_PRIORITY_SIZE_EXT VK_MAX_GLOBAL_PRIORITY_SIZE +typedef VkPhysicalDeviceGlobalPriorityQueryFeatures VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT; +typedef VkQueueFamilyGlobalPriorityProperties VkQueueFamilyGlobalPriorityPropertiesEXT; -#define VK_EXT_buffer_device_address 1 -#define VK_EXT_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 2 -#define VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_EXT_buffer_device_address" -typedef struct VkPhysicalDeviceBufferDeviceAddressFeaturesEXT { + +// VK_EXT_image_view_min_lod is a preprocessor guard. Do not pass it to API calls. 
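/* Editor's note - illustrative usage sketch, not part of the generated header.
 * Drives the VK_EXT_extended_dynamic_state2 and VK_EXT_color_write_enable
 * commands declared above at record time. Assumes the corresponding features are
 * enabled and the pipeline declares the matching VK_DYNAMIC_STATE_* values; the
 * helper name is hypothetical. */
static void set_dynamic_raster_state(VkCommandBuffer cmd)
{
    const VkBool32 color_writes[1] = { VK_TRUE };   /* one color attachment */

    vkCmdSetRasterizerDiscardEnableEXT(cmd, VK_FALSE);
    vkCmdSetDepthBiasEnableEXT(cmd, VK_FALSE);
    vkCmdSetPrimitiveRestartEnableEXT(cmd, VK_FALSE);
    vkCmdSetColorWriteEnableEXT(cmd, 1, color_writes);
}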
+#define VK_EXT_image_view_min_lod 1 +#define VK_EXT_IMAGE_VIEW_MIN_LOD_SPEC_VERSION 1 +#define VK_EXT_IMAGE_VIEW_MIN_LOD_EXTENSION_NAME "VK_EXT_image_view_min_lod" +typedef struct VkPhysicalDeviceImageViewMinLodFeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 bufferDeviceAddress; - VkBool32 bufferDeviceAddressCaptureReplay; - VkBool32 bufferDeviceAddressMultiDevice; -} VkPhysicalDeviceBufferDeviceAddressFeaturesEXT; - -typedef VkPhysicalDeviceBufferDeviceAddressFeaturesEXT VkPhysicalDeviceBufferAddressFeaturesEXT; - -typedef VkBufferDeviceAddressInfo VkBufferDeviceAddressInfoEXT; + VkBool32 minLod; +} VkPhysicalDeviceImageViewMinLodFeaturesEXT; -typedef struct VkBufferDeviceAddressCreateInfoEXT { +typedef struct VkImageViewMinLodCreateInfoEXT { VkStructureType sType; const void* pNext; - VkDeviceAddress deviceAddress; -} VkBufferDeviceAddressCreateInfoEXT; + float minLod; +} VkImageViewMinLodCreateInfoEXT; -typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressEXT)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressEXT( - VkDevice device, - const VkBufferDeviceAddressInfo* pInfo); -#endif +// VK_EXT_multi_draw is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_multi_draw 1 +#define VK_EXT_MULTI_DRAW_SPEC_VERSION 1 +#define VK_EXT_MULTI_DRAW_EXTENSION_NAME "VK_EXT_multi_draw" +typedef struct VkPhysicalDeviceMultiDrawFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 multiDraw; +} VkPhysicalDeviceMultiDrawFeaturesEXT; -#define VK_EXT_tooling_info 1 -#define VK_EXT_TOOLING_INFO_SPEC_VERSION 1 -#define VK_EXT_TOOLING_INFO_EXTENSION_NAME "VK_EXT_tooling_info" -typedef VkToolPurposeFlagBits VkToolPurposeFlagBitsEXT; +typedef struct VkPhysicalDeviceMultiDrawPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxMultiDrawCount; +} VkPhysicalDeviceMultiDrawPropertiesEXT; -typedef VkToolPurposeFlags VkToolPurposeFlagsEXT; +typedef struct VkMultiDrawInfoEXT { + uint32_t firstVertex; + uint32_t vertexCount; +} VkMultiDrawInfoEXT; -typedef VkPhysicalDeviceToolProperties VkPhysicalDeviceToolPropertiesEXT; +typedef struct VkMultiDrawIndexedInfoEXT { + uint32_t firstIndex; + uint32_t indexCount; + int32_t vertexOffset; +} VkMultiDrawIndexedInfoEXT; -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceToolPropertiesEXT)(VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolProperties* pToolProperties); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMultiEXT)(VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawInfoEXT* pVertexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMultiIndexedEXT)(VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawIndexedInfoEXT* pIndexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, const int32_t* pVertexOffset); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceToolPropertiesEXT( - VkPhysicalDevice physicalDevice, - uint32_t* pToolCount, - VkPhysicalDeviceToolProperties* pToolProperties); -#endif - +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMultiEXT( + VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawInfoEXT* pVertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride); -#define VK_EXT_separate_stencil_usage 1 -#define VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION 1 -#define VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME 
"VK_EXT_separate_stencil_usage" -typedef VkImageStencilUsageCreateInfo VkImageStencilUsageCreateInfoEXT; +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMultiIndexedEXT( + VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawIndexedInfoEXT* pIndexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + const int32_t* pVertexOffset); +#endif +// VK_EXT_image_2d_view_of_3d is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_image_2d_view_of_3d 1 +#define VK_EXT_IMAGE_2D_VIEW_OF_3D_SPEC_VERSION 1 +#define VK_EXT_IMAGE_2D_VIEW_OF_3D_EXTENSION_NAME "VK_EXT_image_2d_view_of_3d" +typedef struct VkPhysicalDeviceImage2DViewOf3DFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 image2DViewOf3D; + VkBool32 sampler2DViewOf3D; +} VkPhysicalDeviceImage2DViewOf3DFeaturesEXT; -#define VK_EXT_validation_features 1 -#define VK_EXT_VALIDATION_FEATURES_SPEC_VERSION 5 -#define VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME "VK_EXT_validation_features" -typedef enum VkValidationFeatureEnableEXT { - VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT = 0, - VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT = 1, - VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT = 2, - VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT = 3, - VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT = 4, - VK_VALIDATION_FEATURE_ENABLE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkValidationFeatureEnableEXT; -typedef enum VkValidationFeatureDisableEXT { - VK_VALIDATION_FEATURE_DISABLE_ALL_EXT = 0, - VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT = 1, - VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT = 2, - VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT = 3, - VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT = 4, - VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT = 5, - VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT = 6, - VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT = 7, - VK_VALIDATION_FEATURE_DISABLE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkValidationFeatureDisableEXT; -typedef struct VkValidationFeaturesEXT { - VkStructureType sType; - const void* pNext; - uint32_t enabledValidationFeatureCount; - const VkValidationFeatureEnableEXT* pEnabledValidationFeatures; - uint32_t disabledValidationFeatureCount; - const VkValidationFeatureDisableEXT* pDisabledValidationFeatures; -} VkValidationFeaturesEXT; +// VK_EXT_shader_tile_image is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_shader_tile_image 1 +#define VK_EXT_SHADER_TILE_IMAGE_SPEC_VERSION 1 +#define VK_EXT_SHADER_TILE_IMAGE_EXTENSION_NAME "VK_EXT_shader_tile_image" +typedef struct VkPhysicalDeviceShaderTileImageFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderTileImageColorReadAccess; + VkBool32 shaderTileImageDepthReadAccess; + VkBool32 shaderTileImageStencilReadAccess; +} VkPhysicalDeviceShaderTileImageFeaturesEXT; +typedef struct VkPhysicalDeviceShaderTileImagePropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderTileImageCoherentReadAccelerated; + VkBool32 shaderTileImageReadSampleFromPixelRateInvocation; + VkBool32 shaderTileImageReadFromHelperInvocation; +} VkPhysicalDeviceShaderTileImagePropertiesEXT; -#define VK_NV_cooperative_matrix 1 -#define VK_NV_COOPERATIVE_MATRIX_SPEC_VERSION 1 -#define VK_NV_COOPERATIVE_MATRIX_EXTENSION_NAME "VK_NV_cooperative_matrix" -typedef enum VkComponentTypeNV { - VK_COMPONENT_TYPE_FLOAT16_NV = 0, - VK_COMPONENT_TYPE_FLOAT32_NV = 1, - VK_COMPONENT_TYPE_FLOAT64_NV = 2, - VK_COMPONENT_TYPE_SINT8_NV = 3, - VK_COMPONENT_TYPE_SINT16_NV = 4, - VK_COMPONENT_TYPE_SINT32_NV = 5, - VK_COMPONENT_TYPE_SINT64_NV = 6, - VK_COMPONENT_TYPE_UINT8_NV = 7, - VK_COMPONENT_TYPE_UINT16_NV = 8, - VK_COMPONENT_TYPE_UINT32_NV = 9, - VK_COMPONENT_TYPE_UINT64_NV = 10, - VK_COMPONENT_TYPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkComponentTypeNV; - -typedef enum VkScopeNV { - VK_SCOPE_DEVICE_NV = 1, - VK_SCOPE_WORKGROUP_NV = 2, - VK_SCOPE_SUBGROUP_NV = 3, - VK_SCOPE_QUEUE_FAMILY_NV = 5, - VK_SCOPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkScopeNV; -typedef struct VkCooperativeMatrixPropertiesNV { - VkStructureType sType; - void* pNext; - uint32_t MSize; - uint32_t NSize; - uint32_t KSize; - VkComponentTypeNV AType; - VkComponentTypeNV BType; - VkComponentTypeNV CType; - VkComponentTypeNV DType; - VkScopeNV scope; -} VkCooperativeMatrixPropertiesNV; +// VK_EXT_opacity_micromap is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_opacity_micromap 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkMicromapEXT) +#define VK_EXT_OPACITY_MICROMAP_SPEC_VERSION 2 +#define VK_EXT_OPACITY_MICROMAP_EXTENSION_NAME "VK_EXT_opacity_micromap" -typedef struct VkPhysicalDeviceCooperativeMatrixFeaturesNV { - VkStructureType sType; - void* pNext; - VkBool32 cooperativeMatrix; - VkBool32 cooperativeMatrixRobustBufferAccess; -} VkPhysicalDeviceCooperativeMatrixFeaturesNV; +typedef enum VkMicromapTypeEXT { + VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT = 0, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_MICROMAP_TYPE_DISPLACEMENT_MICROMAP_NV = 1000397000, +#endif + VK_MICROMAP_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkMicromapTypeEXT; + +typedef enum VkBuildMicromapModeEXT { + VK_BUILD_MICROMAP_MODE_BUILD_EXT = 0, + VK_BUILD_MICROMAP_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkBuildMicromapModeEXT; + +typedef enum VkCopyMicromapModeEXT { + VK_COPY_MICROMAP_MODE_CLONE_EXT = 0, + VK_COPY_MICROMAP_MODE_SERIALIZE_EXT = 1, + VK_COPY_MICROMAP_MODE_DESERIALIZE_EXT = 2, + VK_COPY_MICROMAP_MODE_COMPACT_EXT = 3, + VK_COPY_MICROMAP_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkCopyMicromapModeEXT; + +typedef enum VkOpacityMicromapFormatEXT { + VK_OPACITY_MICROMAP_FORMAT_2_STATE_EXT = 1, + VK_OPACITY_MICROMAP_FORMAT_4_STATE_EXT = 2, + VK_OPACITY_MICROMAP_FORMAT_MAX_ENUM_EXT = 0x7FFFFFFF +} VkOpacityMicromapFormatEXT; + +typedef enum VkOpacityMicromapSpecialIndexEXT { + VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_TRANSPARENT_EXT = -1, + VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_OPAQUE_EXT = -2, + VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_TRANSPARENT_EXT = -3, + VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_OPAQUE_EXT = -4, + VK_OPACITY_MICROMAP_SPECIAL_INDEX_CLUSTER_GEOMETRY_DISABLE_OPACITY_MICROMAP_NV = -5, + VK_OPACITY_MICROMAP_SPECIAL_INDEX_MAX_ENUM_EXT = 0x7FFFFFFF +} VkOpacityMicromapSpecialIndexEXT; -typedef struct VkPhysicalDeviceCooperativeMatrixPropertiesNV { - VkStructureType sType; - void* pNext; - VkShaderStageFlags cooperativeMatrixSupportedStages; -} VkPhysicalDeviceCooperativeMatrixPropertiesNV; +typedef enum VkAccelerationStructureCompatibilityKHR { + VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR = 0, + VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR = 1, + VK_ACCELERATION_STRUCTURE_COMPATIBILITY_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureCompatibilityKHR; -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties); +typedef enum VkAccelerationStructureBuildTypeKHR { + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR = 0, + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR = 1, + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR = 2, + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureBuildTypeKHR; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( - VkPhysicalDevice physicalDevice, - uint32_t* pPropertyCount, - VkCooperativeMatrixPropertiesNV* pProperties); -#endif +typedef enum VkBuildMicromapFlagBitsEXT { + VK_BUILD_MICROMAP_PREFER_FAST_TRACE_BIT_EXT = 0x00000001, + VK_BUILD_MICROMAP_PREFER_FAST_BUILD_BIT_EXT = 0x00000002, + VK_BUILD_MICROMAP_ALLOW_COMPACTION_BIT_EXT = 0x00000004, + VK_BUILD_MICROMAP_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkBuildMicromapFlagBitsEXT; +typedef VkFlags VkBuildMicromapFlagsEXT; + +typedef enum VkMicromapCreateFlagBitsEXT { + VK_MICROMAP_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT = 
0x00000001, + VK_MICROMAP_CREATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkMicromapCreateFlagBitsEXT; +typedef VkFlags VkMicromapCreateFlagsEXT; +typedef struct VkMicromapUsageEXT { + uint32_t count; + uint32_t subdivisionLevel; + uint32_t format; +} VkMicromapUsageEXT; +typedef union VkDeviceOrHostAddressKHR { + VkDeviceAddress deviceAddress; + void* hostAddress; +} VkDeviceOrHostAddressKHR; -#define VK_NV_coverage_reduction_mode 1 -#define VK_NV_COVERAGE_REDUCTION_MODE_SPEC_VERSION 1 -#define VK_NV_COVERAGE_REDUCTION_MODE_EXTENSION_NAME "VK_NV_coverage_reduction_mode" +typedef struct VkMicromapBuildInfoEXT { + VkStructureType sType; + const void* pNext; + VkMicromapTypeEXT type; + VkBuildMicromapFlagsEXT flags; + VkBuildMicromapModeEXT mode; + VkMicromapEXT dstMicromap; + uint32_t usageCountsCount; + const VkMicromapUsageEXT* pUsageCounts; + const VkMicromapUsageEXT* const* ppUsageCounts; + VkDeviceOrHostAddressConstKHR data; + VkDeviceOrHostAddressKHR scratchData; + VkDeviceOrHostAddressConstKHR triangleArray; + VkDeviceSize triangleArrayStride; +} VkMicromapBuildInfoEXT; + +typedef struct VkMicromapCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkMicromapCreateFlagsEXT createFlags; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; + VkMicromapTypeEXT type; + VkDeviceAddress deviceAddress; +} VkMicromapCreateInfoEXT; -typedef enum VkCoverageReductionModeNV { - VK_COVERAGE_REDUCTION_MODE_MERGE_NV = 0, - VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV = 1, - VK_COVERAGE_REDUCTION_MODE_MAX_ENUM_NV = 0x7FFFFFFF -} VkCoverageReductionModeNV; -typedef VkFlags VkPipelineCoverageReductionStateCreateFlagsNV; -typedef struct VkPhysicalDeviceCoverageReductionModeFeaturesNV { +typedef struct VkPhysicalDeviceOpacityMicromapFeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 coverageReductionMode; -} VkPhysicalDeviceCoverageReductionModeFeaturesNV; + VkBool32 micromap; + VkBool32 micromapCaptureReplay; + VkBool32 micromapHostCommands; +} VkPhysicalDeviceOpacityMicromapFeaturesEXT; -typedef struct VkPipelineCoverageReductionStateCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkPipelineCoverageReductionStateCreateFlagsNV flags; - VkCoverageReductionModeNV coverageReductionMode; -} VkPipelineCoverageReductionStateCreateInfoNV; +typedef struct VkPhysicalDeviceOpacityMicromapPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxOpacity2StateSubdivisionLevel; + uint32_t maxOpacity4StateSubdivisionLevel; +} VkPhysicalDeviceOpacityMicromapPropertiesEXT; -typedef struct VkFramebufferMixedSamplesCombinationNV { - VkStructureType sType; - void* pNext; - VkCoverageReductionModeNV coverageReductionMode; - VkSampleCountFlagBits rasterizationSamples; - VkSampleCountFlags depthStencilSamples; - VkSampleCountFlags colorSamples; -} VkFramebufferMixedSamplesCombinationNV; +typedef struct VkMicromapVersionInfoEXT { + VkStructureType sType; + const void* pNext; + const uint8_t* pVersionData; +} VkMicromapVersionInfoEXT; -typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV)(VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations); +typedef struct VkCopyMicromapToMemoryInfoEXT { + VkStructureType sType; + const void* pNext; + VkMicromapEXT src; + VkDeviceOrHostAddressKHR dst; + VkCopyMicromapModeEXT mode; +} VkCopyMicromapToMemoryInfoEXT; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL 
vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( - VkPhysicalDevice physicalDevice, - uint32_t* pCombinationCount, - VkFramebufferMixedSamplesCombinationNV* pCombinations); -#endif +typedef struct VkCopyMemoryToMicromapInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceOrHostAddressConstKHR src; + VkMicromapEXT dst; + VkCopyMicromapModeEXT mode; +} VkCopyMemoryToMicromapInfoEXT; +typedef struct VkCopyMicromapInfoEXT { + VkStructureType sType; + const void* pNext; + VkMicromapEXT src; + VkMicromapEXT dst; + VkCopyMicromapModeEXT mode; +} VkCopyMicromapInfoEXT; -#define VK_EXT_fragment_shader_interlock 1 -#define VK_EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION 1 -#define VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME "VK_EXT_fragment_shader_interlock" -typedef struct VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT { +typedef struct VkMicromapBuildSizesInfoEXT { VkStructureType sType; - void* pNext; - VkBool32 fragmentShaderSampleInterlock; - VkBool32 fragmentShaderPixelInterlock; - VkBool32 fragmentShaderShadingRateInterlock; -} VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT; + const void* pNext; + VkDeviceSize micromapSize; + VkDeviceSize buildScratchSize; + VkBool32 discardable; +} VkMicromapBuildSizesInfoEXT; + +typedef struct VkAccelerationStructureTrianglesOpacityMicromapEXT { + VkStructureType sType; + void* pNext; + VkIndexType indexType; + VkDeviceOrHostAddressConstKHR indexBuffer; + VkDeviceSize indexStride; + uint32_t baseTriangle; + uint32_t usageCountsCount; + const VkMicromapUsageEXT* pUsageCounts; + const VkMicromapUsageEXT* const* ppUsageCounts; + VkMicromapEXT micromap; +} VkAccelerationStructureTrianglesOpacityMicromapEXT; + +typedef struct VkMicromapTriangleEXT { + uint32_t dataOffset; + uint16_t subdivisionLevel; + uint16_t format; +} VkMicromapTriangleEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateMicromapEXT)(VkDevice device, const VkMicromapCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkMicromapEXT* pMicromap); +typedef void (VKAPI_PTR *PFN_vkDestroyMicromapEXT)(VkDevice device, VkMicromapEXT micromap, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdBuildMicromapsEXT)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT* pInfos); +typedef VkResult (VKAPI_PTR *PFN_vkBuildMicromapsEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkMicromapBuildInfoEXT* pInfos); +typedef VkResult (VKAPI_PTR *PFN_vkCopyMicromapEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyMicromapToMemoryEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapToMemoryInfoEXT* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToMicromapEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToMicromapInfoEXT* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkWriteMicromapsPropertiesEXT)(VkDevice device, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType queryType, size_t dataSize, void* pData, size_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdCopyMicromapEXT)(VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyMicromapToMemoryEXT)(VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToMicromapEXT)(VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT* 
pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdWriteMicromapsPropertiesEXT)(VkCommandBuffer commandBuffer, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); +typedef void (VKAPI_PTR *PFN_vkGetDeviceMicromapCompatibilityEXT)(VkDevice device, const VkMicromapVersionInfoEXT* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility); +typedef void (VKAPI_PTR *PFN_vkGetMicromapBuildSizesEXT)(VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkMicromapBuildInfoEXT* pBuildInfo, VkMicromapBuildSizesInfoEXT* pSizeInfo); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateMicromapEXT( + VkDevice device, + const VkMicromapCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkMicromapEXT* pMicromap); +VKAPI_ATTR void VKAPI_CALL vkDestroyMicromapEXT( + VkDevice device, + VkMicromapEXT micromap, + const VkAllocationCallbacks* pAllocator); -#define VK_EXT_ycbcr_image_arrays 1 -#define VK_EXT_YCBCR_IMAGE_ARRAYS_SPEC_VERSION 1 -#define VK_EXT_YCBCR_IMAGE_ARRAYS_EXTENSION_NAME "VK_EXT_ycbcr_image_arrays" -typedef struct VkPhysicalDeviceYcbcrImageArraysFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 ycbcrImageArrays; -} VkPhysicalDeviceYcbcrImageArraysFeaturesEXT; +VKAPI_ATTR void VKAPI_CALL vkCmdBuildMicromapsEXT( + VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkMicromapBuildInfoEXT* pInfos); +VKAPI_ATTR VkResult VKAPI_CALL vkBuildMicromapsEXT( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkMicromapBuildInfoEXT* pInfos); +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMicromapEXT( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMicromapInfoEXT* pInfo); -#define VK_EXT_provoking_vertex 1 -#define VK_EXT_PROVOKING_VERTEX_SPEC_VERSION 1 -#define VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME "VK_EXT_provoking_vertex" +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMicromapToMemoryEXT( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMicromapToMemoryInfoEXT* pInfo); -typedef enum VkProvokingVertexModeEXT { - VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT = 0, - VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT = 1, - VK_PROVOKING_VERTEX_MODE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkProvokingVertexModeEXT; -typedef struct VkPhysicalDeviceProvokingVertexFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 provokingVertexLast; - VkBool32 transformFeedbackPreservesProvokingVertex; -} VkPhysicalDeviceProvokingVertexFeaturesEXT; +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToMicromapEXT( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToMicromapInfoEXT* pInfo); -typedef struct VkPhysicalDeviceProvokingVertexPropertiesEXT { - VkStructureType sType; - void* pNext; - VkBool32 provokingVertexModePerPipeline; - VkBool32 transformFeedbackPreservesTriangleFanProvokingVertex; -} VkPhysicalDeviceProvokingVertexPropertiesEXT; +VKAPI_ATTR VkResult VKAPI_CALL vkWriteMicromapsPropertiesEXT( + VkDevice device, + uint32_t micromapCount, + const VkMicromapEXT* pMicromaps, + VkQueryType queryType, + size_t dataSize, + void* pData, + size_t stride); -typedef struct VkPipelineRasterizationProvokingVertexStateCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkProvokingVertexModeEXT provokingVertexMode; -} VkPipelineRasterizationProvokingVertexStateCreateInfoEXT; +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMicromapEXT( + VkCommandBuffer commandBuffer, + const 
VkCopyMicromapInfoEXT* pInfo); +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMicromapToMemoryEXT( + VkCommandBuffer commandBuffer, + const VkCopyMicromapToMemoryInfoEXT* pInfo); +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToMicromapEXT( + VkCommandBuffer commandBuffer, + const VkCopyMemoryToMicromapInfoEXT* pInfo); -#define VK_EXT_headless_surface 1 -#define VK_EXT_HEADLESS_SURFACE_SPEC_VERSION 1 -#define VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME "VK_EXT_headless_surface" -typedef VkFlags VkHeadlessSurfaceCreateFlagsEXT; -typedef struct VkHeadlessSurfaceCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkHeadlessSurfaceCreateFlagsEXT flags; -} VkHeadlessSurfaceCreateInfoEXT; +VKAPI_ATTR void VKAPI_CALL vkCmdWriteMicromapsPropertiesEXT( + VkCommandBuffer commandBuffer, + uint32_t micromapCount, + const VkMicromapEXT* pMicromaps, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery); -typedef VkResult (VKAPI_PTR *PFN_vkCreateHeadlessSurfaceEXT)(VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +VKAPI_ATTR void VKAPI_CALL vkGetDeviceMicromapCompatibilityEXT( + VkDevice device, + const VkMicromapVersionInfoEXT* pVersionInfo, + VkAccelerationStructureCompatibilityKHR* pCompatibility); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateHeadlessSurfaceEXT( - VkInstance instance, - const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); +VKAPI_ATTR void VKAPI_CALL vkGetMicromapBuildSizesEXT( + VkDevice device, + VkAccelerationStructureBuildTypeKHR buildType, + const VkMicromapBuildInfoEXT* pBuildInfo, + VkMicromapBuildSizesInfoEXT* pSizeInfo); #endif -#define VK_EXT_line_rasterization 1 -#define VK_EXT_LINE_RASTERIZATION_SPEC_VERSION 1 -#define VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME "VK_EXT_line_rasterization" +// VK_EXT_load_store_op_none is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_load_store_op_none 1 +#define VK_EXT_LOAD_STORE_OP_NONE_SPEC_VERSION 1 +#define VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME "VK_EXT_load_store_op_none" + -typedef enum VkLineRasterizationModeEXT { - VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT = 0, - VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT = 1, - VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT = 2, - VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT = 3, - VK_LINE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkLineRasterizationModeEXT; -typedef struct VkPhysicalDeviceLineRasterizationFeaturesEXT { +// VK_HUAWEI_cluster_culling_shader is a preprocessor guard. Do not pass it to API calls. 
+#define VK_HUAWEI_cluster_culling_shader 1 +#define VK_HUAWEI_CLUSTER_CULLING_SHADER_SPEC_VERSION 3 +#define VK_HUAWEI_CLUSTER_CULLING_SHADER_EXTENSION_NAME "VK_HUAWEI_cluster_culling_shader" +typedef struct VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI { VkStructureType sType; void* pNext; - VkBool32 rectangularLines; - VkBool32 bresenhamLines; - VkBool32 smoothLines; - VkBool32 stippledRectangularLines; - VkBool32 stippledBresenhamLines; - VkBool32 stippledSmoothLines; -} VkPhysicalDeviceLineRasterizationFeaturesEXT; + VkBool32 clustercullingShader; + VkBool32 multiviewClusterCullingShader; +} VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI; -typedef struct VkPhysicalDeviceLineRasterizationPropertiesEXT { +typedef struct VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI { VkStructureType sType; void* pNext; - uint32_t lineSubPixelPrecisionBits; -} VkPhysicalDeviceLineRasterizationPropertiesEXT; + uint32_t maxWorkGroupCount[3]; + uint32_t maxWorkGroupSize[3]; + uint32_t maxOutputClusterCount; + VkDeviceSize indirectBufferOffsetAlignment; +} VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI; -typedef struct VkPipelineRasterizationLineStateCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkLineRasterizationModeEXT lineRasterizationMode; - VkBool32 stippledLineEnable; - uint32_t lineStippleFactor; - uint16_t lineStipplePattern; -} VkPipelineRasterizationLineStateCreateInfoEXT; +typedef struct VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI { + VkStructureType sType; + void* pNext; + VkBool32 clusterShadingRate; +} VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI; -typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleEXT)(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern); +typedef void (VKAPI_PTR *PFN_vkCmdDrawClusterHUAWEI)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); +typedef void (VKAPI_PTR *PFN_vkCmdDrawClusterIndirectHUAWEI)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEXT( +VKAPI_ATTR void VKAPI_CALL vkCmdDrawClusterHUAWEI( VkCommandBuffer commandBuffer, - uint32_t lineStippleFactor, - uint16_t lineStipplePattern); + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawClusterIndirectHUAWEI( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset); #endif -#define VK_EXT_shader_atomic_float 1 -#define VK_EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION 1 -#define VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME "VK_EXT_shader_atomic_float" -typedef struct VkPhysicalDeviceShaderAtomicFloatFeaturesEXT { +// VK_EXT_border_color_swizzle is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_border_color_swizzle 1 +#define VK_EXT_BORDER_COLOR_SWIZZLE_SPEC_VERSION 1 +#define VK_EXT_BORDER_COLOR_SWIZZLE_EXTENSION_NAME "VK_EXT_border_color_swizzle" +typedef struct VkPhysicalDeviceBorderColorSwizzleFeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 shaderBufferFloat32Atomics; - VkBool32 shaderBufferFloat32AtomicAdd; - VkBool32 shaderBufferFloat64Atomics; - VkBool32 shaderBufferFloat64AtomicAdd; - VkBool32 shaderSharedFloat32Atomics; - VkBool32 shaderSharedFloat32AtomicAdd; - VkBool32 shaderSharedFloat64Atomics; - VkBool32 shaderSharedFloat64AtomicAdd; - VkBool32 shaderImageFloat32Atomics; - VkBool32 shaderImageFloat32AtomicAdd; - VkBool32 sparseImageFloat32Atomics; - VkBool32 sparseImageFloat32AtomicAdd; -} VkPhysicalDeviceShaderAtomicFloatFeaturesEXT; + VkBool32 borderColorSwizzle; + VkBool32 borderColorSwizzleFromImage; +} VkPhysicalDeviceBorderColorSwizzleFeaturesEXT; +typedef struct VkSamplerBorderColorComponentMappingCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkComponentMapping components; + VkBool32 srgb; +} VkSamplerBorderColorComponentMappingCreateInfoEXT; -#define VK_EXT_host_query_reset 1 -#define VK_EXT_HOST_QUERY_RESET_SPEC_VERSION 1 -#define VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME "VK_EXT_host_query_reset" -typedef VkPhysicalDeviceHostQueryResetFeatures VkPhysicalDeviceHostQueryResetFeaturesEXT; -typedef void (VKAPI_PTR *PFN_vkResetQueryPoolEXT)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); +// VK_EXT_pageable_device_local_memory is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pageable_device_local_memory 1 +#define VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_SPEC_VERSION 1 +#define VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_EXTENSION_NAME "VK_EXT_pageable_device_local_memory" +typedef struct VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 pageableDeviceLocalMemory; +} VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkSetDeviceMemoryPriorityEXT)(VkDevice device, VkDeviceMemory memory, float priority); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkResetQueryPoolEXT( +VKAPI_ATTR void VKAPI_CALL vkSetDeviceMemoryPriorityEXT( VkDevice device, - VkQueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount); + VkDeviceMemory memory, + float priority); #endif -#define VK_EXT_index_type_uint8 1 -#define VK_EXT_INDEX_TYPE_UINT8_SPEC_VERSION 1 -#define VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME "VK_EXT_index_type_uint8" -typedef struct VkPhysicalDeviceIndexTypeUint8FeaturesEXT { +// VK_ARM_shader_core_properties is a preprocessor guard. Do not pass it to API calls. +#define VK_ARM_shader_core_properties 1 +#define VK_ARM_SHADER_CORE_PROPERTIES_SPEC_VERSION 1 +#define VK_ARM_SHADER_CORE_PROPERTIES_EXTENSION_NAME "VK_ARM_shader_core_properties" +typedef struct VkPhysicalDeviceShaderCorePropertiesARM { VkStructureType sType; void* pNext; - VkBool32 indexTypeUint8; -} VkPhysicalDeviceIndexTypeUint8FeaturesEXT; + uint32_t pixelRate; + uint32_t texelRate; + uint32_t fmaRate; +} VkPhysicalDeviceShaderCorePropertiesARM; -#define VK_EXT_extended_dynamic_state 1 -#define VK_EXT_EXTENDED_DYNAMIC_STATE_SPEC_VERSION 1 -#define VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_extended_dynamic_state" -typedef struct VkPhysicalDeviceExtendedDynamicStateFeaturesEXT { +// VK_ARM_scheduling_controls is a preprocessor guard. Do not pass it to API calls. 
+#define VK_ARM_scheduling_controls 1 +#define VK_ARM_SCHEDULING_CONTROLS_SPEC_VERSION 1 +#define VK_ARM_SCHEDULING_CONTROLS_EXTENSION_NAME "VK_ARM_scheduling_controls" +typedef VkFlags64 VkPhysicalDeviceSchedulingControlsFlagsARM; + +// Flag bits for VkPhysicalDeviceSchedulingControlsFlagBitsARM +typedef VkFlags64 VkPhysicalDeviceSchedulingControlsFlagBitsARM; +static const VkPhysicalDeviceSchedulingControlsFlagBitsARM VK_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_SHADER_CORE_COUNT_ARM = 0x00000001ULL; + +typedef struct VkDeviceQueueShaderCoreControlCreateInfoARM { VkStructureType sType; void* pNext; - VkBool32 extendedDynamicState; -} VkPhysicalDeviceExtendedDynamicStateFeaturesEXT; + uint32_t shaderCoreCount; +} VkDeviceQueueShaderCoreControlCreateInfoARM; -typedef void (VKAPI_PTR *PFN_vkCmdSetCullModeEXT)(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode); -typedef void (VKAPI_PTR *PFN_vkCmdSetFrontFaceEXT)(VkCommandBuffer commandBuffer, VkFrontFace frontFace); -typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveTopologyEXT)(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology); -typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWithCountEXT)(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports); -typedef void (VKAPI_PTR *PFN_vkCmdSetScissorWithCountEXT)(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors); -typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers2EXT)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides); -typedef void (VKAPI_PTR *PFN_vkCmdSetDepthTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable); -typedef void (VKAPI_PTR *PFN_vkCmdSetDepthWriteEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable); -typedef void (VKAPI_PTR *PFN_vkCmdSetDepthCompareOpEXT)(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp); -typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBoundsTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable); -typedef void (VKAPI_PTR *PFN_vkCmdSetStencilTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable); -typedef void (VKAPI_PTR *PFN_vkCmdSetStencilOpEXT)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp); +typedef struct VkPhysicalDeviceSchedulingControlsFeaturesARM { + VkStructureType sType; + void* pNext; + VkBool32 schedulingControls; +} VkPhysicalDeviceSchedulingControlsFeaturesARM; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetCullModeEXT( - VkCommandBuffer commandBuffer, - VkCullModeFlags cullMode); +typedef struct VkPhysicalDeviceSchedulingControlsPropertiesARM { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceSchedulingControlsFlagsARM schedulingControlsFlags; +} VkPhysicalDeviceSchedulingControlsPropertiesARM; -VKAPI_ATTR void VKAPI_CALL vkCmdSetFrontFaceEXT( - VkCommandBuffer commandBuffer, - VkFrontFace frontFace); -VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveTopologyEXT( - VkCommandBuffer commandBuffer, - VkPrimitiveTopology primitiveTopology); -VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWithCountEXT( - VkCommandBuffer commandBuffer, - uint32_t viewportCount, - const VkViewport* pViewports); +// VK_EXT_image_sliced_view_of_3d is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_image_sliced_view_of_3d 1 +#define VK_EXT_IMAGE_SLICED_VIEW_OF_3D_SPEC_VERSION 1 +#define VK_EXT_IMAGE_SLICED_VIEW_OF_3D_EXTENSION_NAME "VK_EXT_image_sliced_view_of_3d" +#define VK_REMAINING_3D_SLICES_EXT (~0U) +typedef struct VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 imageSlicedViewOf3D; +} VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT; -VKAPI_ATTR void VKAPI_CALL vkCmdSetScissorWithCountEXT( - VkCommandBuffer commandBuffer, - uint32_t scissorCount, - const VkRect2D* pScissors); +typedef struct VkImageViewSlicedCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t sliceOffset; + uint32_t sliceCount; +} VkImageViewSlicedCreateInfoEXT; -VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers2EXT( - VkCommandBuffer commandBuffer, - uint32_t firstBinding, - uint32_t bindingCount, - const VkBuffer* pBuffers, - const VkDeviceSize* pOffsets, - const VkDeviceSize* pSizes, - const VkDeviceSize* pStrides); -VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthTestEnableEXT( - VkCommandBuffer commandBuffer, - VkBool32 depthTestEnable); -VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthWriteEnableEXT( - VkCommandBuffer commandBuffer, - VkBool32 depthWriteEnable); +// VK_VALVE_descriptor_set_host_mapping is a preprocessor guard. Do not pass it to API calls. +#define VK_VALVE_descriptor_set_host_mapping 1 +#define VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_SPEC_VERSION 1 +#define VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_EXTENSION_NAME "VK_VALVE_descriptor_set_host_mapping" +typedef struct VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE { + VkStructureType sType; + void* pNext; + VkBool32 descriptorSetHostMapping; +} VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; -VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthCompareOpEXT( - VkCommandBuffer commandBuffer, - VkCompareOp depthCompareOp); +typedef struct VkDescriptorSetBindingReferenceVALVE { + VkStructureType sType; + const void* pNext; + VkDescriptorSetLayout descriptorSetLayout; + uint32_t binding; +} VkDescriptorSetBindingReferenceVALVE; -VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBoundsTestEnableEXT( - VkCommandBuffer commandBuffer, - VkBool32 depthBoundsTestEnable); +typedef struct VkDescriptorSetLayoutHostMappingInfoVALVE { + VkStructureType sType; + void* pNext; + size_t descriptorOffset; + uint32_t descriptorSize; +} VkDescriptorSetLayoutHostMappingInfoVALVE; -VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilTestEnableEXT( - VkCommandBuffer commandBuffer, - VkBool32 stencilTestEnable); +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE)(VkDevice device, const VkDescriptorSetBindingReferenceVALVE* pBindingReference, VkDescriptorSetLayoutHostMappingInfoVALVE* pHostMapping); +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetHostMappingVALVE)(VkDevice device, VkDescriptorSet descriptorSet, void** ppData); -VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOpEXT( - VkCommandBuffer commandBuffer, - VkStencilFaceFlags faceMask, - VkStencilOp failOp, - VkStencilOp passOp, - VkStencilOp depthFailOp, - VkCompareOp compareOp); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutHostMappingInfoVALVE( + VkDevice device, + const VkDescriptorSetBindingReferenceVALVE* pBindingReference, + VkDescriptorSetLayoutHostMappingInfoVALVE* pHostMapping); + +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetHostMappingVALVE( + VkDevice device, + VkDescriptorSet descriptorSet, + void** ppData); #endif -#define VK_EXT_shader_atomic_float2 1 -#define 
VK_EXT_SHADER_ATOMIC_FLOAT_2_SPEC_VERSION 1 -#define VK_EXT_SHADER_ATOMIC_FLOAT_2_EXTENSION_NAME "VK_EXT_shader_atomic_float2" -typedef struct VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT { +// VK_EXT_depth_clamp_zero_one is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_depth_clamp_zero_one 1 +#define VK_EXT_DEPTH_CLAMP_ZERO_ONE_SPEC_VERSION 1 +#define VK_EXT_DEPTH_CLAMP_ZERO_ONE_EXTENSION_NAME "VK_EXT_depth_clamp_zero_one" +typedef VkPhysicalDeviceDepthClampZeroOneFeaturesKHR VkPhysicalDeviceDepthClampZeroOneFeaturesEXT; + + + +// VK_EXT_non_seamless_cube_map is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_non_seamless_cube_map 1 +#define VK_EXT_NON_SEAMLESS_CUBE_MAP_SPEC_VERSION 1 +#define VK_EXT_NON_SEAMLESS_CUBE_MAP_EXTENSION_NAME "VK_EXT_non_seamless_cube_map" +typedef struct VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 shaderBufferFloat16Atomics; - VkBool32 shaderBufferFloat16AtomicAdd; - VkBool32 shaderBufferFloat16AtomicMinMax; - VkBool32 shaderBufferFloat32AtomicMinMax; - VkBool32 shaderBufferFloat64AtomicMinMax; - VkBool32 shaderSharedFloat16Atomics; - VkBool32 shaderSharedFloat16AtomicAdd; - VkBool32 shaderSharedFloat16AtomicMinMax; - VkBool32 shaderSharedFloat32AtomicMinMax; - VkBool32 shaderSharedFloat64AtomicMinMax; - VkBool32 shaderImageFloat32AtomicMinMax; - VkBool32 sparseImageFloat32AtomicMinMax; -} VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT; + VkBool32 nonSeamlessCubeMap; +} VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT; -#define VK_EXT_shader_demote_to_helper_invocation 1 -#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION 1 -#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME "VK_EXT_shader_demote_to_helper_invocation" -typedef VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; +// VK_ARM_render_pass_striped is a preprocessor guard. Do not pass it to API calls. 
+#define VK_ARM_render_pass_striped 1 +#define VK_ARM_RENDER_PASS_STRIPED_SPEC_VERSION 1 +#define VK_ARM_RENDER_PASS_STRIPED_EXTENSION_NAME "VK_ARM_render_pass_striped" +typedef struct VkPhysicalDeviceRenderPassStripedFeaturesARM { + VkStructureType sType; + void* pNext; + VkBool32 renderPassStriped; +} VkPhysicalDeviceRenderPassStripedFeaturesARM; +typedef struct VkPhysicalDeviceRenderPassStripedPropertiesARM { + VkStructureType sType; + void* pNext; + VkExtent2D renderPassStripeGranularity; + uint32_t maxRenderPassStripes; +} VkPhysicalDeviceRenderPassStripedPropertiesARM; +typedef struct VkRenderPassStripeInfoARM { + VkStructureType sType; + const void* pNext; + VkRect2D stripeArea; +} VkRenderPassStripeInfoARM; -#define VK_NV_device_generated_commands 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNV) -#define VK_NV_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 3 -#define VK_NV_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NV_device_generated_commands" +typedef struct VkRenderPassStripeBeginInfoARM { + VkStructureType sType; + const void* pNext; + uint32_t stripeInfoCount; + const VkRenderPassStripeInfoARM* pStripeInfos; +} VkRenderPassStripeBeginInfoARM; -typedef enum VkIndirectCommandsTokenTypeNV { - VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV = 0, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV = 1, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV = 2, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV = 3, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV = 4, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV = 5, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV = 6, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV = 7, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV = 1000328000, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkIndirectCommandsTokenTypeNV; +typedef struct VkRenderPassStripeSubmitInfoARM { + VkStructureType sType; + const void* pNext; + uint32_t stripeSemaphoreInfoCount; + const VkSemaphoreSubmitInfo* pStripeSemaphoreInfos; +} VkRenderPassStripeSubmitInfoARM; -typedef enum VkIndirectStateFlagBitsNV { - VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV = 0x00000001, - VK_INDIRECT_STATE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF -} VkIndirectStateFlagBitsNV; -typedef VkFlags VkIndirectStateFlagsNV; -typedef enum VkIndirectCommandsLayoutUsageFlagBitsNV { - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV = 0x00000001, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV = 0x00000002, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV = 0x00000004, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF -} VkIndirectCommandsLayoutUsageFlagBitsNV; -typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNV; -typedef struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV { + +// VK_QCOM_fragment_density_map_offset is a preprocessor guard. Do not pass it to API calls. 
+#define VK_QCOM_fragment_density_map_offset 1 +#define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_SPEC_VERSION 2 +#define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_EXTENSION_NAME "VK_QCOM_fragment_density_map_offset" +typedef struct VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM { VkStructureType sType; void* pNext; - uint32_t maxGraphicsShaderGroupCount; - uint32_t maxIndirectSequenceCount; - uint32_t maxIndirectCommandsTokenCount; - uint32_t maxIndirectCommandsStreamCount; - uint32_t maxIndirectCommandsTokenOffset; - uint32_t maxIndirectCommandsStreamStride; - uint32_t minSequencesCountBufferOffsetAlignment; - uint32_t minSequencesIndexBufferOffsetAlignment; - uint32_t minIndirectCommandsBufferOffsetAlignment; -} VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + VkBool32 fragmentDensityMapOffset; +} VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; -typedef struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV { +typedef struct VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM { VkStructureType sType; void* pNext; - VkBool32 deviceGeneratedCommands; -} VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV; - -typedef struct VkGraphicsShaderGroupCreateInfoNV { - VkStructureType sType; - const void* pNext; - uint32_t stageCount; - const VkPipelineShaderStageCreateInfo* pStages; - const VkPipelineVertexInputStateCreateInfo* pVertexInputState; - const VkPipelineTessellationStateCreateInfo* pTessellationState; -} VkGraphicsShaderGroupCreateInfoNV; - -typedef struct VkGraphicsPipelineShaderGroupsCreateInfoNV { - VkStructureType sType; - const void* pNext; - uint32_t groupCount; - const VkGraphicsShaderGroupCreateInfoNV* pGroups; - uint32_t pipelineCount; - const VkPipeline* pPipelines; -} VkGraphicsPipelineShaderGroupsCreateInfoNV; - -typedef struct VkBindShaderGroupIndirectCommandNV { - uint32_t groupIndex; -} VkBindShaderGroupIndirectCommandNV; - -typedef struct VkBindIndexBufferIndirectCommandNV { - VkDeviceAddress bufferAddress; - uint32_t size; - VkIndexType indexType; -} VkBindIndexBufferIndirectCommandNV; + VkExtent2D fragmentDensityOffsetGranularity; +} VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; -typedef struct VkBindVertexBufferIndirectCommandNV { - VkDeviceAddress bufferAddress; - uint32_t size; - uint32_t stride; -} VkBindVertexBufferIndirectCommandNV; +typedef struct VkSubpassFragmentDensityMapOffsetEndInfoQCOM { + VkStructureType sType; + const void* pNext; + uint32_t fragmentDensityOffsetCount; + const VkOffset2D* pFragmentDensityOffsets; +} VkSubpassFragmentDensityMapOffsetEndInfoQCOM; -typedef struct VkSetStateFlagsIndirectCommandNV { - uint32_t data; -} VkSetStateFlagsIndirectCommandNV; -typedef struct VkIndirectCommandsStreamNV { - VkBuffer buffer; - VkDeviceSize offset; -} VkIndirectCommandsStreamNV; -typedef struct VkIndirectCommandsLayoutTokenNV { - VkStructureType sType; - const void* pNext; - VkIndirectCommandsTokenTypeNV tokenType; - uint32_t stream; - uint32_t offset; - uint32_t vertexBindingUnit; - VkBool32 vertexDynamicStride; - VkPipelineLayout pushconstantPipelineLayout; - VkShaderStageFlags pushconstantShaderStageFlags; - uint32_t pushconstantOffset; - uint32_t pushconstantSize; - VkIndirectStateFlagsNV indirectStateFlags; - uint32_t indexTypeCount; - const VkIndexType* pIndexTypes; - const uint32_t* pIndexTypeValues; -} VkIndirectCommandsLayoutTokenNV; +// VK_NV_copy_memory_indirect is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_copy_memory_indirect 1 +#define VK_NV_COPY_MEMORY_INDIRECT_SPEC_VERSION 1 +#define VK_NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME "VK_NV_copy_memory_indirect" +typedef struct VkCopyMemoryIndirectCommandNV { + VkDeviceAddress srcAddress; + VkDeviceAddress dstAddress; + VkDeviceSize size; +} VkCopyMemoryIndirectCommandNV; -typedef struct VkIndirectCommandsLayoutCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkIndirectCommandsLayoutUsageFlagsNV flags; - VkPipelineBindPoint pipelineBindPoint; - uint32_t tokenCount; - const VkIndirectCommandsLayoutTokenNV* pTokens; - uint32_t streamCount; - const uint32_t* pStreamStrides; -} VkIndirectCommandsLayoutCreateInfoNV; +typedef struct VkCopyMemoryToImageIndirectCommandNV { + VkDeviceAddress srcAddress; + uint32_t bufferRowLength; + uint32_t bufferImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkCopyMemoryToImageIndirectCommandNV; -typedef struct VkGeneratedCommandsInfoNV { - VkStructureType sType; - const void* pNext; - VkPipelineBindPoint pipelineBindPoint; - VkPipeline pipeline; - VkIndirectCommandsLayoutNV indirectCommandsLayout; - uint32_t streamCount; - const VkIndirectCommandsStreamNV* pStreams; - uint32_t sequencesCount; - VkBuffer preprocessBuffer; - VkDeviceSize preprocessOffset; - VkDeviceSize preprocessSize; - VkBuffer sequencesCountBuffer; - VkDeviceSize sequencesCountOffset; - VkBuffer sequencesIndexBuffer; - VkDeviceSize sequencesIndexOffset; -} VkGeneratedCommandsInfoNV; +typedef struct VkPhysicalDeviceCopyMemoryIndirectFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 indirectCopy; +} VkPhysicalDeviceCopyMemoryIndirectFeaturesNV; -typedef struct VkGeneratedCommandsMemoryRequirementsInfoNV { - VkStructureType sType; - const void* pNext; - VkPipelineBindPoint pipelineBindPoint; - VkPipeline pipeline; - VkIndirectCommandsLayoutNV indirectCommandsLayout; - uint32_t maxSequencesCount; -} VkGeneratedCommandsMemoryRequirementsInfoNV; +typedef struct VkPhysicalDeviceCopyMemoryIndirectPropertiesNV { + VkStructureType sType; + void* pNext; + VkQueueFlags supportedQueues; +} VkPhysicalDeviceCopyMemoryIndirectPropertiesNV; -typedef void (VKAPI_PTR *PFN_vkGetGeneratedCommandsMemoryRequirementsNV)(VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2* pMemoryRequirements); -typedef void (VKAPI_PTR *PFN_vkCmdPreprocessGeneratedCommandsNV)(VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); -typedef void (VKAPI_PTR *PFN_vkCmdExecuteGeneratedCommandsNV)(VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); -typedef void (VKAPI_PTR *PFN_vkCmdBindPipelineShaderGroupNV)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex); -typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNV)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNV* pIndirectCommandsLayout); -typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNV)(VkDevice device, VkIndirectCommandsLayoutNV indirectCommandsLayout, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryIndirectNV)(VkCommandBuffer commandBuffer, VkDeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride); +typedef void (VKAPI_PTR 
*PFN_vkCmdCopyMemoryToImageIndirectNV)(VkCommandBuffer commandBuffer, VkDeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride, VkImage dstImage, VkImageLayout dstImageLayout, const VkImageSubresourceLayers* pImageSubresources); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetGeneratedCommandsMemoryRequirementsNV( - VkDevice device, - const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, - VkMemoryRequirements2* pMemoryRequirements); - -VKAPI_ATTR void VKAPI_CALL vkCmdPreprocessGeneratedCommandsNV( +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryIndirectNV( VkCommandBuffer commandBuffer, - const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); + VkDeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride); -VKAPI_ATTR void VKAPI_CALL vkCmdExecuteGeneratedCommandsNV( +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToImageIndirectNV( VkCommandBuffer commandBuffer, - VkBool32 isPreprocessed, - const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); + VkDeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + VkImage dstImage, + VkImageLayout dstImageLayout, + const VkImageSubresourceLayers* pImageSubresources); +#endif -VKAPI_ATTR void VKAPI_CALL vkCmdBindPipelineShaderGroupNV( - VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipeline pipeline, - uint32_t groupIndex); -VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNV( - VkDevice device, - const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkIndirectCommandsLayoutNV* pIndirectCommandsLayout); +// VK_NV_memory_decompression is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_memory_decompression 1 +#define VK_NV_MEMORY_DECOMPRESSION_SPEC_VERSION 1 +#define VK_NV_MEMORY_DECOMPRESSION_EXTENSION_NAME "VK_NV_memory_decompression" -VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNV( - VkDevice device, - VkIndirectCommandsLayoutNV indirectCommandsLayout, - const VkAllocationCallbacks* pAllocator); -#endif +// Flag bits for VkMemoryDecompressionMethodFlagBitsNV +typedef VkFlags64 VkMemoryDecompressionMethodFlagBitsNV; +static const VkMemoryDecompressionMethodFlagBitsNV VK_MEMORY_DECOMPRESSION_METHOD_GDEFLATE_1_0_BIT_NV = 0x00000001ULL; +typedef VkFlags64 VkMemoryDecompressionMethodFlagsNV; +typedef struct VkDecompressMemoryRegionNV { + VkDeviceAddress srcAddress; + VkDeviceAddress dstAddress; + VkDeviceSize compressedSize; + VkDeviceSize decompressedSize; + VkMemoryDecompressionMethodFlagsNV decompressionMethod; +} VkDecompressMemoryRegionNV; -#define VK_NV_inherited_viewport_scissor 1 -#define VK_NV_INHERITED_VIEWPORT_SCISSOR_SPEC_VERSION 1 -#define VK_NV_INHERITED_VIEWPORT_SCISSOR_EXTENSION_NAME "VK_NV_inherited_viewport_scissor" -typedef struct VkPhysicalDeviceInheritedViewportScissorFeaturesNV { +typedef struct VkPhysicalDeviceMemoryDecompressionFeaturesNV { VkStructureType sType; void* pNext; - VkBool32 inheritedViewportScissor2D; -} VkPhysicalDeviceInheritedViewportScissorFeaturesNV; + VkBool32 memoryDecompression; +} VkPhysicalDeviceMemoryDecompressionFeaturesNV; -typedef struct VkCommandBufferInheritanceViewportScissorInfoNV { - VkStructureType sType; - const void* pNext; - VkBool32 viewportScissor2D; - uint32_t viewportDepthCount; - const VkViewport* pViewportDepths; -} VkCommandBufferInheritanceViewportScissorInfoNV; +typedef struct VkPhysicalDeviceMemoryDecompressionPropertiesNV { + VkStructureType sType; + void* pNext; + VkMemoryDecompressionMethodFlagsNV 
decompressionMethods; + uint64_t maxDecompressionIndirectCount; +} VkPhysicalDeviceMemoryDecompressionPropertiesNV; +typedef void (VKAPI_PTR *PFN_vkCmdDecompressMemoryNV)(VkCommandBuffer commandBuffer, uint32_t decompressRegionCount, const VkDecompressMemoryRegionNV* pDecompressMemoryRegions); +typedef void (VKAPI_PTR *PFN_vkCmdDecompressMemoryIndirectCountNV)(VkCommandBuffer commandBuffer, VkDeviceAddress indirectCommandsAddress, VkDeviceAddress indirectCommandsCountAddress, uint32_t stride); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDecompressMemoryNV( + VkCommandBuffer commandBuffer, + uint32_t decompressRegionCount, + const VkDecompressMemoryRegionNV* pDecompressMemoryRegions); -#define VK_EXT_texel_buffer_alignment 1 -#define VK_EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION 1 -#define VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME "VK_EXT_texel_buffer_alignment" -typedef struct VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT { +VKAPI_ATTR void VKAPI_CALL vkCmdDecompressMemoryIndirectCountNV( + VkCommandBuffer commandBuffer, + VkDeviceAddress indirectCommandsAddress, + VkDeviceAddress indirectCommandsCountAddress, + uint32_t stride); +#endif + + +// VK_NV_device_generated_commands_compute is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_device_generated_commands_compute 1 +#define VK_NV_DEVICE_GENERATED_COMMANDS_COMPUTE_SPEC_VERSION 2 +#define VK_NV_DEVICE_GENERATED_COMMANDS_COMPUTE_EXTENSION_NAME "VK_NV_device_generated_commands_compute" +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV { VkStructureType sType; void* pNext; - VkBool32 texelBufferAlignment; -} VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT; + VkBool32 deviceGeneratedCompute; + VkBool32 deviceGeneratedComputePipelines; + VkBool32 deviceGeneratedComputeCaptureReplay; +} VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; -typedef VkPhysicalDeviceTexelBufferAlignmentProperties VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT; +typedef struct VkComputePipelineIndirectBufferInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceAddress deviceAddress; + VkDeviceSize size; + VkDeviceAddress pipelineDeviceAddressCaptureReplay; +} VkComputePipelineIndirectBufferInfoNV; +typedef struct VkPipelineIndirectDeviceAddressInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineBindPoint pipelineBindPoint; + VkPipeline pipeline; +} VkPipelineIndirectDeviceAddressInfoNV; +typedef struct VkBindPipelineIndirectCommandNV { + VkDeviceAddress pipelineAddress; +} VkBindPipelineIndirectCommandNV; -#define VK_QCOM_render_pass_transform 1 -#define VK_QCOM_RENDER_PASS_TRANSFORM_SPEC_VERSION 3 -#define VK_QCOM_RENDER_PASS_TRANSFORM_EXTENSION_NAME "VK_QCOM_render_pass_transform" -typedef struct VkRenderPassTransformBeginInfoQCOM { - VkStructureType sType; - void* pNext; - VkSurfaceTransformFlagBitsKHR transform; -} VkRenderPassTransformBeginInfoQCOM; +typedef void (VKAPI_PTR *PFN_vkGetPipelineIndirectMemoryRequirementsNV)(VkDevice device, const VkComputePipelineCreateInfo* pCreateInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkCmdUpdatePipelineIndirectBufferNV)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline); +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetPipelineIndirectDeviceAddressNV)(VkDevice device, const VkPipelineIndirectDeviceAddressInfoNV* pInfo); -typedef struct VkCommandBufferInheritanceRenderPassTransformInfoQCOM { - VkStructureType sType; - void* pNext; - 
VkSurfaceTransformFlagBitsKHR transform; - VkRect2D renderArea; -} VkCommandBufferInheritanceRenderPassTransformInfoQCOM; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPipelineIndirectMemoryRequirementsNV( + VkDevice device, + const VkComputePipelineCreateInfo* pCreateInfo, + VkMemoryRequirements2* pMemoryRequirements); +VKAPI_ATTR void VKAPI_CALL vkCmdUpdatePipelineIndirectBufferNV( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline); +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetPipelineIndirectDeviceAddressNV( + VkDevice device, + const VkPipelineIndirectDeviceAddressInfoNV* pInfo); +#endif -#define VK_EXT_device_memory_report 1 -#define VK_EXT_DEVICE_MEMORY_REPORT_SPEC_VERSION 2 -#define VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME "VK_EXT_device_memory_report" -typedef enum VkDeviceMemoryReportEventTypeEXT { - VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT = 0, - VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT = 1, - VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT = 2, - VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT = 3, - VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT = 4, - VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDeviceMemoryReportEventTypeEXT; -typedef VkFlags VkDeviceMemoryReportFlagsEXT; -typedef struct VkPhysicalDeviceDeviceMemoryReportFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 deviceMemoryReport; -} VkPhysicalDeviceDeviceMemoryReportFeaturesEXT; +// VK_NV_ray_tracing_linear_swept_spheres is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_ray_tracing_linear_swept_spheres 1 +#define VK_NV_RAY_TRACING_LINEAR_SWEPT_SPHERES_SPEC_VERSION 1 +#define VK_NV_RAY_TRACING_LINEAR_SWEPT_SPHERES_EXTENSION_NAME "VK_NV_ray_tracing_linear_swept_spheres" -typedef struct VkDeviceMemoryReportCallbackDataEXT { - VkStructureType sType; - void* pNext; - VkDeviceMemoryReportFlagsEXT flags; - VkDeviceMemoryReportEventTypeEXT type; - uint64_t memoryObjectId; - VkDeviceSize size; - VkObjectType objectType; - uint64_t objectHandle; - uint32_t heapIndex; -} VkDeviceMemoryReportCallbackDataEXT; +typedef enum VkRayTracingLssIndexingModeNV { + VK_RAY_TRACING_LSS_INDEXING_MODE_LIST_NV = 0, + VK_RAY_TRACING_LSS_INDEXING_MODE_SUCCESSIVE_NV = 1, + VK_RAY_TRACING_LSS_INDEXING_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkRayTracingLssIndexingModeNV; -typedef void (VKAPI_PTR *PFN_vkDeviceMemoryReportCallbackEXT)( - const VkDeviceMemoryReportCallbackDataEXT* pCallbackData, - void* pUserData); +typedef enum VkRayTracingLssPrimitiveEndCapsModeNV { + VK_RAY_TRACING_LSS_PRIMITIVE_END_CAPS_MODE_NONE_NV = 0, + VK_RAY_TRACING_LSS_PRIMITIVE_END_CAPS_MODE_CHAINED_NV = 1, + VK_RAY_TRACING_LSS_PRIMITIVE_END_CAPS_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkRayTracingLssPrimitiveEndCapsModeNV; +typedef struct VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 spheres; + VkBool32 linearSweptSpheres; +} VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV; -typedef struct VkDeviceDeviceMemoryReportCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkDeviceMemoryReportFlagsEXT flags; - PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback; - void* pUserData; -} VkDeviceDeviceMemoryReportCreateInfoEXT; +typedef struct VkAccelerationStructureGeometryLinearSweptSpheresDataNV { + VkStructureType sType; + const void* pNext; + VkFormat vertexFormat; + VkDeviceOrHostAddressConstKHR vertexData; + VkDeviceSize vertexStride; + VkFormat radiusFormat; + 
VkDeviceOrHostAddressConstKHR radiusData; + VkDeviceSize radiusStride; + VkIndexType indexType; + VkDeviceOrHostAddressConstKHR indexData; + VkDeviceSize indexStride; + VkRayTracingLssIndexingModeNV indexingMode; + VkRayTracingLssPrimitiveEndCapsModeNV endCapsMode; +} VkAccelerationStructureGeometryLinearSweptSpheresDataNV; + +typedef struct VkAccelerationStructureGeometrySpheresDataNV { + VkStructureType sType; + const void* pNext; + VkFormat vertexFormat; + VkDeviceOrHostAddressConstKHR vertexData; + VkDeviceSize vertexStride; + VkFormat radiusFormat; + VkDeviceOrHostAddressConstKHR radiusData; + VkDeviceSize radiusStride; + VkIndexType indexType; + VkDeviceOrHostAddressConstKHR indexData; + VkDeviceSize indexStride; +} VkAccelerationStructureGeometrySpheresDataNV; -#define VK_EXT_acquire_drm_display 1 -#define VK_EXT_ACQUIRE_DRM_DISPLAY_SPEC_VERSION 1 -#define VK_EXT_ACQUIRE_DRM_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_drm_display" -typedef VkResult (VKAPI_PTR *PFN_vkAcquireDrmDisplayEXT)(VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display); -typedef VkResult (VKAPI_PTR *PFN_vkGetDrmDisplayEXT)(VkPhysicalDevice physicalDevice, int32_t drmFd, uint32_t connectorId, VkDisplayKHR* display); +// VK_NV_linear_color_attachment is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_linear_color_attachment 1 +#define VK_NV_LINEAR_COLOR_ATTACHMENT_SPEC_VERSION 1 +#define VK_NV_LINEAR_COLOR_ATTACHMENT_EXTENSION_NAME "VK_NV_linear_color_attachment" +typedef struct VkPhysicalDeviceLinearColorAttachmentFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 linearColorAttachment; +} VkPhysicalDeviceLinearColorAttachmentFeaturesNV; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkAcquireDrmDisplayEXT( - VkPhysicalDevice physicalDevice, - int32_t drmFd, - VkDisplayKHR display); -VKAPI_ATTR VkResult VKAPI_CALL vkGetDrmDisplayEXT( - VkPhysicalDevice physicalDevice, - int32_t drmFd, - uint32_t connectorId, - VkDisplayKHR* display); -#endif +// VK_GOOGLE_surfaceless_query is a preprocessor guard. Do not pass it to API calls. +#define VK_GOOGLE_surfaceless_query 1 +#define VK_GOOGLE_SURFACELESS_QUERY_SPEC_VERSION 2 +#define VK_GOOGLE_SURFACELESS_QUERY_EXTENSION_NAME "VK_GOOGLE_surfaceless_query" -#define VK_EXT_robustness2 1 -#define VK_EXT_ROBUSTNESS_2_SPEC_VERSION 1 -#define VK_EXT_ROBUSTNESS_2_EXTENSION_NAME "VK_EXT_robustness2" -typedef struct VkPhysicalDeviceRobustness2FeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 robustBufferAccess2; - VkBool32 robustImageAccess2; - VkBool32 nullDescriptor; -} VkPhysicalDeviceRobustness2FeaturesEXT; -typedef struct VkPhysicalDeviceRobustness2PropertiesEXT { +// VK_EXT_image_compression_control_swapchain is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_image_compression_control_swapchain 1 +#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_SPEC_VERSION 1 +#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_EXTENSION_NAME "VK_EXT_image_compression_control_swapchain" +typedef struct VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT { VkStructureType sType; void* pNext; - VkDeviceSize robustStorageBufferAccessSizeAlignment; - VkDeviceSize robustUniformBufferAccessSizeAlignment; -} VkPhysicalDeviceRobustness2PropertiesEXT; + VkBool32 imageCompressionControlSwapchain; +} VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; -#define VK_EXT_custom_border_color 1 -#define VK_EXT_CUSTOM_BORDER_COLOR_SPEC_VERSION 12 -#define VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME "VK_EXT_custom_border_color" -typedef struct VkSamplerCustomBorderColorCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkClearColorValue customBorderColor; - VkFormat format; -} VkSamplerCustomBorderColorCreateInfoEXT; +// VK_QCOM_image_processing is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_image_processing 1 +#define VK_QCOM_IMAGE_PROCESSING_SPEC_VERSION 1 +#define VK_QCOM_IMAGE_PROCESSING_EXTENSION_NAME "VK_QCOM_image_processing" +typedef struct VkImageViewSampleWeightCreateInfoQCOM { + VkStructureType sType; + const void* pNext; + VkOffset2D filterCenter; + VkExtent2D filterSize; + uint32_t numPhases; +} VkImageViewSampleWeightCreateInfoQCOM; -typedef struct VkPhysicalDeviceCustomBorderColorPropertiesEXT { +typedef struct VkPhysicalDeviceImageProcessingFeaturesQCOM { VkStructureType sType; void* pNext; - uint32_t maxCustomBorderColorSamplers; -} VkPhysicalDeviceCustomBorderColorPropertiesEXT; + VkBool32 textureSampleWeighted; + VkBool32 textureBoxFilter; + VkBool32 textureBlockMatch; +} VkPhysicalDeviceImageProcessingFeaturesQCOM; -typedef struct VkPhysicalDeviceCustomBorderColorFeaturesEXT { +typedef struct VkPhysicalDeviceImageProcessingPropertiesQCOM { VkStructureType sType; void* pNext; - VkBool32 customBorderColors; - VkBool32 customBorderColorWithoutFormat; -} VkPhysicalDeviceCustomBorderColorFeaturesEXT; + uint32_t maxWeightFilterPhases; + VkExtent2D maxWeightFilterDimension; + VkExtent2D maxBlockMatchRegion; + VkExtent2D maxBoxFilterBlockSize; +} VkPhysicalDeviceImageProcessingPropertiesQCOM; -#define VK_GOOGLE_user_type 1 -#define VK_GOOGLE_USER_TYPE_SPEC_VERSION 1 -#define VK_GOOGLE_USER_TYPE_EXTENSION_NAME "VK_GOOGLE_user_type" +// VK_EXT_nested_command_buffer is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_nested_command_buffer 1 +#define VK_EXT_NESTED_COMMAND_BUFFER_SPEC_VERSION 1 +#define VK_EXT_NESTED_COMMAND_BUFFER_EXTENSION_NAME "VK_EXT_nested_command_buffer" +typedef struct VkPhysicalDeviceNestedCommandBufferFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 nestedCommandBuffer; + VkBool32 nestedCommandBufferRendering; + VkBool32 nestedCommandBufferSimultaneousUse; +} VkPhysicalDeviceNestedCommandBufferFeaturesEXT; +typedef struct VkPhysicalDeviceNestedCommandBufferPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxCommandBufferNestingLevel; +} VkPhysicalDeviceNestedCommandBufferPropertiesEXT; -#define VK_EXT_private_data 1 -typedef VkPrivateDataSlot VkPrivateDataSlotEXT; -#define VK_EXT_PRIVATE_DATA_SPEC_VERSION 1 -#define VK_EXT_PRIVATE_DATA_EXTENSION_NAME "VK_EXT_private_data" -typedef VkPrivateDataSlotCreateFlags VkPrivateDataSlotCreateFlagsEXT; -typedef VkPhysicalDevicePrivateDataFeatures VkPhysicalDevicePrivateDataFeaturesEXT; +// VK_EXT_external_memory_acquire_unmodified is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_external_memory_acquire_unmodified 1 +#define VK_EXT_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXTENSION_NAME "VK_EXT_external_memory_acquire_unmodified" +typedef struct VkExternalMemoryAcquireUnmodifiedEXT { + VkStructureType sType; + const void* pNext; + VkBool32 acquireUnmodifiedMemory; +} VkExternalMemoryAcquireUnmodifiedEXT; -typedef VkDevicePrivateDataCreateInfo VkDevicePrivateDataCreateInfoEXT; -typedef VkPrivateDataSlotCreateInfo VkPrivateDataSlotCreateInfoEXT; -typedef VkResult (VKAPI_PTR *PFN_vkCreatePrivateDataSlotEXT)(VkDevice device, const VkPrivateDataSlotCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlot* pPrivateDataSlot); -typedef void (VKAPI_PTR *PFN_vkDestroyPrivateDataSlotEXT)(VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkSetPrivateDataEXT)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data); -typedef void (VKAPI_PTR *PFN_vkGetPrivateDataEXT)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t* pData); +// VK_EXT_extended_dynamic_state3 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_extended_dynamic_state3 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_3_SPEC_VERSION 2 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_3_EXTENSION_NAME "VK_EXT_extended_dynamic_state3" +typedef struct VkPhysicalDeviceExtendedDynamicState3FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 extendedDynamicState3TessellationDomainOrigin; + VkBool32 extendedDynamicState3DepthClampEnable; + VkBool32 extendedDynamicState3PolygonMode; + VkBool32 extendedDynamicState3RasterizationSamples; + VkBool32 extendedDynamicState3SampleMask; + VkBool32 extendedDynamicState3AlphaToCoverageEnable; + VkBool32 extendedDynamicState3AlphaToOneEnable; + VkBool32 extendedDynamicState3LogicOpEnable; + VkBool32 extendedDynamicState3ColorBlendEnable; + VkBool32 extendedDynamicState3ColorBlendEquation; + VkBool32 extendedDynamicState3ColorWriteMask; + VkBool32 extendedDynamicState3RasterizationStream; + VkBool32 extendedDynamicState3ConservativeRasterizationMode; + VkBool32 extendedDynamicState3ExtraPrimitiveOverestimationSize; + VkBool32 extendedDynamicState3DepthClipEnable; + VkBool32 extendedDynamicState3SampleLocationsEnable; + VkBool32 extendedDynamicState3ColorBlendAdvanced; + VkBool32 extendedDynamicState3ProvokingVertexMode; + VkBool32 extendedDynamicState3LineRasterizationMode; + VkBool32 extendedDynamicState3LineStippleEnable; + VkBool32 extendedDynamicState3DepthClipNegativeOneToOne; + VkBool32 extendedDynamicState3ViewportWScalingEnable; + VkBool32 extendedDynamicState3ViewportSwizzle; + VkBool32 extendedDynamicState3CoverageToColorEnable; + VkBool32 extendedDynamicState3CoverageToColorLocation; + VkBool32 extendedDynamicState3CoverageModulationMode; + VkBool32 extendedDynamicState3CoverageModulationTableEnable; + VkBool32 extendedDynamicState3CoverageModulationTable; + VkBool32 extendedDynamicState3CoverageReductionMode; + VkBool32 extendedDynamicState3RepresentativeFragmentTestEnable; + VkBool32 extendedDynamicState3ShadingRateImageEnable; +} VkPhysicalDeviceExtendedDynamicState3FeaturesEXT; + +typedef struct VkPhysicalDeviceExtendedDynamicState3PropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 dynamicPrimitiveTopologyUnrestricted; +} VkPhysicalDeviceExtendedDynamicState3PropertiesEXT; + +typedef struct VkColorBlendEquationEXT { + VkBlendFactor srcColorBlendFactor; + VkBlendFactor dstColorBlendFactor; + VkBlendOp colorBlendOp; + VkBlendFactor srcAlphaBlendFactor; + VkBlendFactor dstAlphaBlendFactor; + VkBlendOp alphaBlendOp; +} VkColorBlendEquationEXT; + +typedef struct VkColorBlendAdvancedEXT { + VkBlendOp advancedBlendOp; + VkBool32 srcPremultiplied; + VkBool32 dstPremultiplied; + VkBlendOverlapEXT blendOverlap; + VkBool32 clampResults; +} VkColorBlendAdvancedEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClampEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthClampEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetPolygonModeEXT)(VkCommandBuffer commandBuffer, VkPolygonMode polygonMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizationSamplesEXT)(VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples); +typedef void (VKAPI_PTR *PFN_vkCmdSetSampleMaskEXT)(VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask* pSampleMask); +typedef void (VKAPI_PTR *PFN_vkCmdSetAlphaToCoverageEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetAlphaToOneEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable); +typedef void (VKAPI_PTR 
*PFN_vkCmdSetLogicOpEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 logicOpEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendEnableEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkBool32* pColorBlendEnables); +typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendEquationEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendEquationEXT* pColorBlendEquations); +typedef void (VKAPI_PTR *PFN_vkCmdSetColorWriteMaskEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorComponentFlags* pColorWriteMasks); +typedef void (VKAPI_PTR *PFN_vkCmdSetTessellationDomainOriginEXT)(VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin); +typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizationStreamEXT)(VkCommandBuffer commandBuffer, uint32_t rasterizationStream); +typedef void (VKAPI_PTR *PFN_vkCmdSetConservativeRasterizationModeEXT)(VkCommandBuffer commandBuffer, VkConservativeRasterizationModeEXT conservativeRasterizationMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT)(VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClipEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthClipEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetSampleLocationsEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendAdvancedEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendAdvancedEXT* pColorBlendAdvanced); +typedef void (VKAPI_PTR *PFN_vkCmdSetProvokingVertexModeEXT)(VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetLineRasterizationModeEXT)(VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClipNegativeOneToOneEXT)(VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingEnableNV)(VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportSwizzleNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportSwizzleNV* pViewportSwizzles); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageToColorEnableNV)(VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageToColorLocationNV)(VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationModeNV)(VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationTableEnableNV)(VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationTableNV)(VkCommandBuffer commandBuffer, uint32_t coverageModulationTableCount, const float* pCoverageModulationTable); +typedef void (VKAPI_PTR *PFN_vkCmdSetShadingRateImageEnableNV)(VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetRepresentativeFragmentTestEnableNV)(VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable); +typedef void (VKAPI_PTR 
*PFN_vkCmdSetCoverageReductionModeNV)(VkCommandBuffer commandBuffer, VkCoverageReductionModeNV coverageReductionMode); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreatePrivateDataSlotEXT( - VkDevice device, - const VkPrivateDataSlotCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkPrivateDataSlot* pPrivateDataSlot); +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClampEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthClampEnable); -VKAPI_ATTR void VKAPI_CALL vkDestroyPrivateDataSlotEXT( - VkDevice device, - VkPrivateDataSlot privateDataSlot, - const VkAllocationCallbacks* pAllocator); +VKAPI_ATTR void VKAPI_CALL vkCmdSetPolygonModeEXT( + VkCommandBuffer commandBuffer, + VkPolygonMode polygonMode); -VKAPI_ATTR VkResult VKAPI_CALL vkSetPrivateDataEXT( - VkDevice device, - VkObjectType objectType, - uint64_t objectHandle, - VkPrivateDataSlot privateDataSlot, - uint64_t data); +VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizationSamplesEXT( + VkCommandBuffer commandBuffer, + VkSampleCountFlagBits rasterizationSamples); -VKAPI_ATTR void VKAPI_CALL vkGetPrivateDataEXT( - VkDevice device, - VkObjectType objectType, - uint64_t objectHandle, - VkPrivateDataSlot privateDataSlot, - uint64_t* pData); -#endif +VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleMaskEXT( + VkCommandBuffer commandBuffer, + VkSampleCountFlagBits samples, + const VkSampleMask* pSampleMask); +VKAPI_ATTR void VKAPI_CALL vkCmdSetAlphaToCoverageEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 alphaToCoverageEnable); -#define VK_EXT_pipeline_creation_cache_control 1 -#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION 3 -#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME "VK_EXT_pipeline_creation_cache_control" -typedef VkPhysicalDevicePipelineCreationCacheControlFeatures VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT; +VKAPI_ATTR void VKAPI_CALL vkCmdSetAlphaToOneEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 alphaToOneEnable); +VKAPI_ATTR void VKAPI_CALL vkCmdSetLogicOpEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 logicOpEnable); +VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendEnableEXT( + VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkBool32* pColorBlendEnables); -#define VK_NV_device_diagnostics_config 1 -#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_SPEC_VERSION 2 -#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_EXTENSION_NAME "VK_NV_device_diagnostics_config" +VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendEquationEXT( + VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorBlendEquationEXT* pColorBlendEquations); -typedef enum VkDeviceDiagnosticsConfigFlagBitsNV { - VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV = 0x00000001, - VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV = 0x00000002, - VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV = 0x00000004, - VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_ERROR_REPORTING_BIT_NV = 0x00000008, - VK_DEVICE_DIAGNOSTICS_CONFIG_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF -} VkDeviceDiagnosticsConfigFlagBitsNV; -typedef VkFlags VkDeviceDiagnosticsConfigFlagsNV; -typedef struct VkPhysicalDeviceDiagnosticsConfigFeaturesNV { - VkStructureType sType; - void* pNext; - VkBool32 diagnosticsConfig; -} VkPhysicalDeviceDiagnosticsConfigFeaturesNV; +VKAPI_ATTR void VKAPI_CALL vkCmdSetColorWriteMaskEXT( + VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const 
VkColorComponentFlags* pColorWriteMasks); -typedef struct VkDeviceDiagnosticsConfigCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkDeviceDiagnosticsConfigFlagsNV flags; -} VkDeviceDiagnosticsConfigCreateInfoNV; +VKAPI_ATTR void VKAPI_CALL vkCmdSetTessellationDomainOriginEXT( + VkCommandBuffer commandBuffer, + VkTessellationDomainOrigin domainOrigin); +VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizationStreamEXT( + VkCommandBuffer commandBuffer, + uint32_t rasterizationStream); +VKAPI_ATTR void VKAPI_CALL vkCmdSetConservativeRasterizationModeEXT( + VkCommandBuffer commandBuffer, + VkConservativeRasterizationModeEXT conservativeRasterizationMode); -#define VK_QCOM_render_pass_store_ops 1 -#define VK_QCOM_RENDER_PASS_STORE_OPS_SPEC_VERSION 2 -#define VK_QCOM_RENDER_PASS_STORE_OPS_EXTENSION_NAME "VK_QCOM_render_pass_store_ops" +VKAPI_ATTR void VKAPI_CALL vkCmdSetExtraPrimitiveOverestimationSizeEXT( + VkCommandBuffer commandBuffer, + float extraPrimitiveOverestimationSize); +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClipEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthClipEnable); -#define VK_EXT_graphics_pipeline_library 1 -#define VK_EXT_GRAPHICS_PIPELINE_LIBRARY_SPEC_VERSION 1 -#define VK_EXT_GRAPHICS_PIPELINE_LIBRARY_EXTENSION_NAME "VK_EXT_graphics_pipeline_library" +VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleLocationsEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 sampleLocationsEnable); -typedef enum VkGraphicsPipelineLibraryFlagBitsEXT { - VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT = 0x00000001, - VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT = 0x00000002, - VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT = 0x00000004, - VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT = 0x00000008, - VK_GRAPHICS_PIPELINE_LIBRARY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkGraphicsPipelineLibraryFlagBitsEXT; -typedef VkFlags VkGraphicsPipelineLibraryFlagsEXT; -typedef struct VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 graphicsPipelineLibrary; -} VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; +VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendAdvancedEXT( + VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorBlendAdvancedEXT* pColorBlendAdvanced); -typedef struct VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT { - VkStructureType sType; - void* pNext; - VkBool32 graphicsPipelineLibraryFastLinking; - VkBool32 graphicsPipelineLibraryIndependentInterpolationDecoration; -} VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; +VKAPI_ATTR void VKAPI_CALL vkCmdSetProvokingVertexModeEXT( + VkCommandBuffer commandBuffer, + VkProvokingVertexModeEXT provokingVertexMode); -typedef struct VkGraphicsPipelineLibraryCreateInfoEXT { - VkStructureType sType; - void* pNext; - VkGraphicsPipelineLibraryFlagsEXT flags; -} VkGraphicsPipelineLibraryCreateInfoEXT; +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineRasterizationModeEXT( + VkCommandBuffer commandBuffer, + VkLineRasterizationModeEXT lineRasterizationMode); +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 stippledLineEnable); +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClipNegativeOneToOneEXT( + VkCommandBuffer commandBuffer, + VkBool32 negativeOneToOne); -#define VK_AMD_shader_early_and_late_fragment_tests 1 -#define VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_SPEC_VERSION 1 -#define 
VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_EXTENSION_NAME "VK_AMD_shader_early_and_late_fragment_tests" -typedef struct VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD { - VkStructureType sType; - void* pNext; - VkBool32 shaderEarlyAndLateFragmentTests; -} VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingEnableNV( + VkCommandBuffer commandBuffer, + VkBool32 viewportWScalingEnable); +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportSwizzleNV( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportSwizzleNV* pViewportSwizzles); +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageToColorEnableNV( + VkCommandBuffer commandBuffer, + VkBool32 coverageToColorEnable); -#define VK_NV_fragment_shading_rate_enums 1 -#define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_SPEC_VERSION 1 -#define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_EXTENSION_NAME "VK_NV_fragment_shading_rate_enums" +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageToColorLocationNV( + VkCommandBuffer commandBuffer, + uint32_t coverageToColorLocation); -typedef enum VkFragmentShadingRateTypeNV { - VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV = 0, - VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV = 1, - VK_FRAGMENT_SHADING_RATE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkFragmentShadingRateTypeNV; +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationModeNV( + VkCommandBuffer commandBuffer, + VkCoverageModulationModeNV coverageModulationMode); -typedef enum VkFragmentShadingRateNV { - VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV = 0, - VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV = 1, - VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV = 4, - VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV = 5, - VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV = 6, - VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV = 9, - VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV = 10, - VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV = 11, - VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV = 12, - VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV = 13, - VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV = 14, - VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV = 15, - VK_FRAGMENT_SHADING_RATE_MAX_ENUM_NV = 0x7FFFFFFF -} VkFragmentShadingRateNV; -typedef struct VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV { - VkStructureType sType; - void* pNext; - VkBool32 fragmentShadingRateEnums; - VkBool32 supersampleFragmentShadingRates; - VkBool32 noInvocationFragmentShadingRates; -} VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV; +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationTableEnableNV( + VkCommandBuffer commandBuffer, + VkBool32 coverageModulationTableEnable); -typedef struct VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV { - VkStructureType sType; - void* pNext; - VkSampleCountFlagBits maxFragmentShadingRateInvocationCount; -} VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV; +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationTableNV( + VkCommandBuffer commandBuffer, + uint32_t coverageModulationTableCount, + const float* pCoverageModulationTable); -typedef struct VkPipelineFragmentShadingRateEnumStateCreateInfoNV { - VkStructureType sType; - const void* pNext; - VkFragmentShadingRateTypeNV shadingRateType; - VkFragmentShadingRateNV shadingRate; - VkFragmentShadingRateCombinerOpKHR combinerOps[2]; -} VkPipelineFragmentShadingRateEnumStateCreateInfoNV; +VKAPI_ATTR void VKAPI_CALL vkCmdSetShadingRateImageEnableNV( 
+ VkCommandBuffer commandBuffer, + VkBool32 shadingRateImageEnable); -typedef void (VKAPI_PTR *PFN_vkCmdSetFragmentShadingRateEnumNV)(VkCommandBuffer commandBuffer, VkFragmentShadingRateNV shadingRate, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); +VKAPI_ATTR void VKAPI_CALL vkCmdSetRepresentativeFragmentTestEnableNV( + VkCommandBuffer commandBuffer, + VkBool32 representativeFragmentTestEnable); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetFragmentShadingRateEnumNV( +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageReductionModeNV( VkCommandBuffer commandBuffer, - VkFragmentShadingRateNV shadingRate, - const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); + VkCoverageReductionModeNV coverageReductionMode); #endif -#define VK_NV_ray_tracing_motion_blur 1 -#define VK_NV_RAY_TRACING_MOTION_BLUR_SPEC_VERSION 1 -#define VK_NV_RAY_TRACING_MOTION_BLUR_EXTENSION_NAME "VK_NV_ray_tracing_motion_blur" +// VK_EXT_subpass_merge_feedback is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_subpass_merge_feedback 1 +#define VK_EXT_SUBPASS_MERGE_FEEDBACK_SPEC_VERSION 2 +#define VK_EXT_SUBPASS_MERGE_FEEDBACK_EXTENSION_NAME "VK_EXT_subpass_merge_feedback" -typedef enum VkAccelerationStructureMotionInstanceTypeNV { - VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_STATIC_NV = 0, - VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MATRIX_MOTION_NV = 1, - VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_SRT_MOTION_NV = 2, - VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkAccelerationStructureMotionInstanceTypeNV; -typedef VkFlags VkAccelerationStructureMotionInfoFlagsNV; -typedef VkFlags VkAccelerationStructureMotionInstanceFlagsNV; -typedef union VkDeviceOrHostAddressConstKHR { - VkDeviceAddress deviceAddress; - const void* hostAddress; -} VkDeviceOrHostAddressConstKHR; +typedef enum VkSubpassMergeStatusEXT { + VK_SUBPASS_MERGE_STATUS_MERGED_EXT = 0, + VK_SUBPASS_MERGE_STATUS_DISALLOWED_EXT = 1, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SIDE_EFFECTS_EXT = 2, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SAMPLES_MISMATCH_EXT = 3, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_VIEWS_MISMATCH_EXT = 4, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_ALIASING_EXT = 5, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPENDENCIES_EXT = 6, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT_EXT = 7, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_TOO_MANY_ATTACHMENTS_EXT = 8, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INSUFFICIENT_STORAGE_EXT = 9, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPTH_STENCIL_COUNT_EXT = 10, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_RESOLVE_ATTACHMENT_REUSE_EXT = 11, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SINGLE_SUBPASS_EXT = 12, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_UNSPECIFIED_EXT = 13, + VK_SUBPASS_MERGE_STATUS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkSubpassMergeStatusEXT; +typedef struct VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 subpassMergeFeedback; +} VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT; -typedef struct VkAccelerationStructureGeometryMotionTrianglesDataNV { - VkStructureType sType; - const void* pNext; - VkDeviceOrHostAddressConstKHR vertexData; -} VkAccelerationStructureGeometryMotionTrianglesDataNV; +typedef struct VkRenderPassCreationControlEXT { + VkStructureType sType; + const void* pNext; + VkBool32 disallowMerging; +} VkRenderPassCreationControlEXT; -typedef struct VkAccelerationStructureMotionInfoNV { - VkStructureType sType; - const void* pNext; - uint32_t maxInstances; - 
VkAccelerationStructureMotionInfoFlagsNV flags; -} VkAccelerationStructureMotionInfoNV; +typedef struct VkRenderPassCreationFeedbackInfoEXT { + uint32_t postMergeSubpassCount; +} VkRenderPassCreationFeedbackInfoEXT; -typedef struct VkAccelerationStructureMatrixMotionInstanceNV { - VkTransformMatrixKHR transformT0; - VkTransformMatrixKHR transformT1; - uint32_t instanceCustomIndex:24; - uint32_t mask:8; - uint32_t instanceShaderBindingTableRecordOffset:24; - VkGeometryInstanceFlagsKHR flags:8; - uint64_t accelerationStructureReference; -} VkAccelerationStructureMatrixMotionInstanceNV; +typedef struct VkRenderPassCreationFeedbackCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkRenderPassCreationFeedbackInfoEXT* pRenderPassFeedback; +} VkRenderPassCreationFeedbackCreateInfoEXT; -typedef struct VkSRTDataNV { - float sx; - float a; - float b; - float pvx; - float sy; - float c; - float pvy; - float sz; - float pvz; - float qx; - float qy; - float qz; - float qw; - float tx; - float ty; - float tz; -} VkSRTDataNV; +typedef struct VkRenderPassSubpassFeedbackInfoEXT { + VkSubpassMergeStatusEXT subpassMergeStatus; + char description[VK_MAX_DESCRIPTION_SIZE]; + uint32_t postMergeIndex; +} VkRenderPassSubpassFeedbackInfoEXT; -typedef struct VkAccelerationStructureSRTMotionInstanceNV { - VkSRTDataNV transformT0; - VkSRTDataNV transformT1; - uint32_t instanceCustomIndex:24; - uint32_t mask:8; - uint32_t instanceShaderBindingTableRecordOffset:24; - VkGeometryInstanceFlagsKHR flags:8; - uint64_t accelerationStructureReference; -} VkAccelerationStructureSRTMotionInstanceNV; +typedef struct VkRenderPassSubpassFeedbackCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkRenderPassSubpassFeedbackInfoEXT* pSubpassFeedback; +} VkRenderPassSubpassFeedbackCreateInfoEXT; -typedef union VkAccelerationStructureMotionInstanceDataNV { - VkAccelerationStructureInstanceKHR staticInstance; - VkAccelerationStructureMatrixMotionInstanceNV matrixMotionInstance; - VkAccelerationStructureSRTMotionInstanceNV srtMotionInstance; -} VkAccelerationStructureMotionInstanceDataNV; -typedef struct VkAccelerationStructureMotionInstanceNV { - VkAccelerationStructureMotionInstanceTypeNV type; - VkAccelerationStructureMotionInstanceFlagsNV flags; - VkAccelerationStructureMotionInstanceDataNV data; -} VkAccelerationStructureMotionInstanceNV; -typedef struct VkPhysicalDeviceRayTracingMotionBlurFeaturesNV { - VkStructureType sType; - void* pNext; - VkBool32 rayTracingMotionBlur; - VkBool32 rayTracingMotionBlurPipelineTraceRaysIndirect; -} VkPhysicalDeviceRayTracingMotionBlurFeaturesNV; +// VK_LUNARG_direct_driver_loading is a preprocessor guard. Do not pass it to API calls. 
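/*
 * Illustrative usage sketch (editorial note; not part of the generated header).
 * VK_LUNARG_direct_driver_loading lets an application hand the loader one or more
 * driver entry points at instance creation. A minimal sketch, assuming a
 * driver-provided entry point myDriverGetInstanceProcAddr (hypothetical name) and
 * the VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_* sType values defined elsewhere in
 * this header:
 *
 *     VkDirectDriverLoadingInfoLUNARG driverInfo = {
 *         .sType = VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_INFO_LUNARG,
 *         .pfnGetInstanceProcAddr = myDriverGetInstanceProcAddr,  // hypothetical driver entry point
 *     };
 *     VkDirectDriverLoadingListLUNARG driverList = {
 *         .sType = VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG,
 *         .mode  = VK_DIRECT_DRIVER_LOADING_MODE_INCLUSIVE_LUNARG,  // add to, rather than replace, normally discovered drivers
 *         .driverCount = 1,
 *         .pDrivers = &driverInfo,
 *     };
 *     // Chain driverList into VkInstanceCreateInfo::pNext before calling vkCreateInstance.
 */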
+#define VK_LUNARG_direct_driver_loading 1 +#define VK_LUNARG_DIRECT_DRIVER_LOADING_SPEC_VERSION 1 +#define VK_LUNARG_DIRECT_DRIVER_LOADING_EXTENSION_NAME "VK_LUNARG_direct_driver_loading" +typedef enum VkDirectDriverLoadingModeLUNARG { + VK_DIRECT_DRIVER_LOADING_MODE_EXCLUSIVE_LUNARG = 0, + VK_DIRECT_DRIVER_LOADING_MODE_INCLUSIVE_LUNARG = 1, + VK_DIRECT_DRIVER_LOADING_MODE_MAX_ENUM_LUNARG = 0x7FFFFFFF +} VkDirectDriverLoadingModeLUNARG; +typedef VkFlags VkDirectDriverLoadingFlagsLUNARG; +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetInstanceProcAddrLUNARG)( + VkInstance instance, const char* pName); +typedef struct VkDirectDriverLoadingInfoLUNARG { + VkStructureType sType; + void* pNext; + VkDirectDriverLoadingFlagsLUNARG flags; + PFN_vkGetInstanceProcAddrLUNARG pfnGetInstanceProcAddr; +} VkDirectDriverLoadingInfoLUNARG; -#define VK_EXT_ycbcr_2plane_444_formats 1 -#define VK_EXT_YCBCR_2PLANE_444_FORMATS_SPEC_VERSION 1 -#define VK_EXT_YCBCR_2PLANE_444_FORMATS_EXTENSION_NAME "VK_EXT_ycbcr_2plane_444_formats" -typedef struct VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT { +typedef struct VkDirectDriverLoadingListLUNARG { + VkStructureType sType; + const void* pNext; + VkDirectDriverLoadingModeLUNARG mode; + uint32_t driverCount; + const VkDirectDriverLoadingInfoLUNARG* pDrivers; +} VkDirectDriverLoadingListLUNARG; + + + +// VK_EXT_shader_module_identifier is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_module_identifier 1 +#define VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT 32U +#define VK_EXT_SHADER_MODULE_IDENTIFIER_SPEC_VERSION 1 +#define VK_EXT_SHADER_MODULE_IDENTIFIER_EXTENSION_NAME "VK_EXT_shader_module_identifier" +typedef struct VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 ycbcr2plane444Formats; -} VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; - - + VkBool32 shaderModuleIdentifier; +} VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT; -#define VK_EXT_fragment_density_map2 1 -#define VK_EXT_FRAGMENT_DENSITY_MAP_2_SPEC_VERSION 1 -#define VK_EXT_FRAGMENT_DENSITY_MAP_2_EXTENSION_NAME "VK_EXT_fragment_density_map2" -typedef struct VkPhysicalDeviceFragmentDensityMap2FeaturesEXT { +typedef struct VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT { VkStructureType sType; void* pNext; - VkBool32 fragmentDensityMapDeferred; -} VkPhysicalDeviceFragmentDensityMap2FeaturesEXT; + uint8_t shaderModuleIdentifierAlgorithmUUID[VK_UUID_SIZE]; +} VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT; -typedef struct VkPhysicalDeviceFragmentDensityMap2PropertiesEXT { +typedef struct VkPipelineShaderStageModuleIdentifierCreateInfoEXT { VkStructureType sType; - void* pNext; - VkBool32 subsampledLoads; - VkBool32 subsampledCoarseReconstructionEarlyAccess; - uint32_t maxSubsampledArrayLayers; - uint32_t maxDescriptorSetSubsampledSamplers; -} VkPhysicalDeviceFragmentDensityMap2PropertiesEXT; + const void* pNext; + uint32_t identifierSize; + const uint8_t* pIdentifier; +} VkPipelineShaderStageModuleIdentifierCreateInfoEXT; +typedef struct VkShaderModuleIdentifierEXT { + VkStructureType sType; + void* pNext; + uint32_t identifierSize; + uint8_t identifier[VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT]; +} VkShaderModuleIdentifierEXT; +typedef void (VKAPI_PTR *PFN_vkGetShaderModuleIdentifierEXT)(VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT* pIdentifier); +typedef void (VKAPI_PTR *PFN_vkGetShaderModuleCreateInfoIdentifierEXT)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, 
VkShaderModuleIdentifierEXT* pIdentifier); -#define VK_QCOM_rotated_copy_commands 1 -#define VK_QCOM_ROTATED_COPY_COMMANDS_SPEC_VERSION 1 -#define VK_QCOM_ROTATED_COPY_COMMANDS_EXTENSION_NAME "VK_QCOM_rotated_copy_commands" -typedef struct VkCopyCommandTransformInfoQCOM { - VkStructureType sType; - const void* pNext; - VkSurfaceTransformFlagBitsKHR transform; -} VkCopyCommandTransformInfoQCOM; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetShaderModuleIdentifierEXT( + VkDevice device, + VkShaderModule shaderModule, + VkShaderModuleIdentifierEXT* pIdentifier); +VKAPI_ATTR void VKAPI_CALL vkGetShaderModuleCreateInfoIdentifierEXT( + VkDevice device, + const VkShaderModuleCreateInfo* pCreateInfo, + VkShaderModuleIdentifierEXT* pIdentifier); +#endif -#define VK_EXT_image_robustness 1 -#define VK_EXT_IMAGE_ROBUSTNESS_SPEC_VERSION 1 -#define VK_EXT_IMAGE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_image_robustness" -typedef VkPhysicalDeviceImageRobustnessFeatures VkPhysicalDeviceImageRobustnessFeaturesEXT; +// VK_EXT_rasterization_order_attachment_access is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_rasterization_order_attachment_access 1 +#define VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION 1 +#define VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME "VK_EXT_rasterization_order_attachment_access" +// VK_NV_optical_flow is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_optical_flow 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkOpticalFlowSessionNV) +#define VK_NV_OPTICAL_FLOW_SPEC_VERSION 1 +#define VK_NV_OPTICAL_FLOW_EXTENSION_NAME "VK_NV_optical_flow" + +typedef enum VkOpticalFlowPerformanceLevelNV { + VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_UNKNOWN_NV = 0, + VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_SLOW_NV = 1, + VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MEDIUM_NV = 2, + VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_FAST_NV = 3, + VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowPerformanceLevelNV; + +typedef enum VkOpticalFlowSessionBindingPointNV { + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_UNKNOWN_NV = 0, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_INPUT_NV = 1, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_REFERENCE_NV = 2, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_HINT_NV = 3, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_FLOW_VECTOR_NV = 4, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_FLOW_VECTOR_NV = 5, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_COST_NV = 6, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_COST_NV = 7, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_GLOBAL_FLOW_NV = 8, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowSessionBindingPointNV; + +typedef enum VkOpticalFlowGridSizeFlagBitsNV { + VK_OPTICAL_FLOW_GRID_SIZE_UNKNOWN_NV = 0, + VK_OPTICAL_FLOW_GRID_SIZE_1X1_BIT_NV = 0x00000001, + VK_OPTICAL_FLOW_GRID_SIZE_2X2_BIT_NV = 0x00000002, + VK_OPTICAL_FLOW_GRID_SIZE_4X4_BIT_NV = 0x00000004, + VK_OPTICAL_FLOW_GRID_SIZE_8X8_BIT_NV = 0x00000008, + VK_OPTICAL_FLOW_GRID_SIZE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowGridSizeFlagBitsNV; +typedef VkFlags VkOpticalFlowGridSizeFlagsNV; + +typedef enum VkOpticalFlowUsageFlagBitsNV { + VK_OPTICAL_FLOW_USAGE_UNKNOWN_NV = 0, + VK_OPTICAL_FLOW_USAGE_INPUT_BIT_NV = 0x00000001, + VK_OPTICAL_FLOW_USAGE_OUTPUT_BIT_NV = 0x00000002, + VK_OPTICAL_FLOW_USAGE_HINT_BIT_NV = 0x00000004, + VK_OPTICAL_FLOW_USAGE_COST_BIT_NV = 0x00000008, + VK_OPTICAL_FLOW_USAGE_GLOBAL_FLOW_BIT_NV = 0x00000010, + VK_OPTICAL_FLOW_USAGE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowUsageFlagBitsNV; 
+typedef VkFlags VkOpticalFlowUsageFlagsNV; + +typedef enum VkOpticalFlowSessionCreateFlagBitsNV { + VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_HINT_BIT_NV = 0x00000001, + VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_COST_BIT_NV = 0x00000002, + VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_GLOBAL_FLOW_BIT_NV = 0x00000004, + VK_OPTICAL_FLOW_SESSION_CREATE_ALLOW_REGIONS_BIT_NV = 0x00000008, + VK_OPTICAL_FLOW_SESSION_CREATE_BOTH_DIRECTIONS_BIT_NV = 0x00000010, + VK_OPTICAL_FLOW_SESSION_CREATE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowSessionCreateFlagBitsNV; +typedef VkFlags VkOpticalFlowSessionCreateFlagsNV; + +typedef enum VkOpticalFlowExecuteFlagBitsNV { + VK_OPTICAL_FLOW_EXECUTE_DISABLE_TEMPORAL_HINTS_BIT_NV = 0x00000001, + VK_OPTICAL_FLOW_EXECUTE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowExecuteFlagBitsNV; +typedef VkFlags VkOpticalFlowExecuteFlagsNV; +typedef struct VkPhysicalDeviceOpticalFlowFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 opticalFlow; +} VkPhysicalDeviceOpticalFlowFeaturesNV; -#define VK_EXT_image_compression_control 1 -#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SPEC_VERSION 1 -#define VK_EXT_IMAGE_COMPRESSION_CONTROL_EXTENSION_NAME "VK_EXT_image_compression_control" +typedef struct VkPhysicalDeviceOpticalFlowPropertiesNV { + VkStructureType sType; + void* pNext; + VkOpticalFlowGridSizeFlagsNV supportedOutputGridSizes; + VkOpticalFlowGridSizeFlagsNV supportedHintGridSizes; + VkBool32 hintSupported; + VkBool32 costSupported; + VkBool32 bidirectionalFlowSupported; + VkBool32 globalFlowSupported; + uint32_t minWidth; + uint32_t minHeight; + uint32_t maxWidth; + uint32_t maxHeight; + uint32_t maxNumRegionsOfInterest; +} VkPhysicalDeviceOpticalFlowPropertiesNV; + +typedef struct VkOpticalFlowImageFormatInfoNV { + VkStructureType sType; + const void* pNext; + VkOpticalFlowUsageFlagsNV usage; +} VkOpticalFlowImageFormatInfoNV; -typedef enum VkImageCompressionFlagBitsEXT { - VK_IMAGE_COMPRESSION_DEFAULT_EXT = 0, - VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT = 0x00000001, - VK_IMAGE_COMPRESSION_FIXED_RATE_EXPLICIT_EXT = 0x00000002, - VK_IMAGE_COMPRESSION_DISABLED_EXT = 0x00000004, - VK_IMAGE_COMPRESSION_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkImageCompressionFlagBitsEXT; -typedef VkFlags VkImageCompressionFlagsEXT; +typedef struct VkOpticalFlowImageFormatPropertiesNV { + VkStructureType sType; + const void* pNext; + VkFormat format; +} VkOpticalFlowImageFormatPropertiesNV; -typedef enum VkImageCompressionFixedRateFlagBitsEXT { - VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT = 0, - VK_IMAGE_COMPRESSION_FIXED_RATE_1BPC_BIT_EXT = 0x00000001, - VK_IMAGE_COMPRESSION_FIXED_RATE_2BPC_BIT_EXT = 0x00000002, - VK_IMAGE_COMPRESSION_FIXED_RATE_3BPC_BIT_EXT = 0x00000004, - VK_IMAGE_COMPRESSION_FIXED_RATE_4BPC_BIT_EXT = 0x00000008, - VK_IMAGE_COMPRESSION_FIXED_RATE_5BPC_BIT_EXT = 0x00000010, - VK_IMAGE_COMPRESSION_FIXED_RATE_6BPC_BIT_EXT = 0x00000020, - VK_IMAGE_COMPRESSION_FIXED_RATE_7BPC_BIT_EXT = 0x00000040, - VK_IMAGE_COMPRESSION_FIXED_RATE_8BPC_BIT_EXT = 0x00000080, - VK_IMAGE_COMPRESSION_FIXED_RATE_9BPC_BIT_EXT = 0x00000100, - VK_IMAGE_COMPRESSION_FIXED_RATE_10BPC_BIT_EXT = 0x00000200, - VK_IMAGE_COMPRESSION_FIXED_RATE_11BPC_BIT_EXT = 0x00000400, - VK_IMAGE_COMPRESSION_FIXED_RATE_12BPC_BIT_EXT = 0x00000800, - VK_IMAGE_COMPRESSION_FIXED_RATE_13BPC_BIT_EXT = 0x00001000, - VK_IMAGE_COMPRESSION_FIXED_RATE_14BPC_BIT_EXT = 0x00002000, - VK_IMAGE_COMPRESSION_FIXED_RATE_15BPC_BIT_EXT = 0x00004000, - VK_IMAGE_COMPRESSION_FIXED_RATE_16BPC_BIT_EXT = 0x00008000, - 
VK_IMAGE_COMPRESSION_FIXED_RATE_17BPC_BIT_EXT = 0x00010000, - VK_IMAGE_COMPRESSION_FIXED_RATE_18BPC_BIT_EXT = 0x00020000, - VK_IMAGE_COMPRESSION_FIXED_RATE_19BPC_BIT_EXT = 0x00040000, - VK_IMAGE_COMPRESSION_FIXED_RATE_20BPC_BIT_EXT = 0x00080000, - VK_IMAGE_COMPRESSION_FIXED_RATE_21BPC_BIT_EXT = 0x00100000, - VK_IMAGE_COMPRESSION_FIXED_RATE_22BPC_BIT_EXT = 0x00200000, - VK_IMAGE_COMPRESSION_FIXED_RATE_23BPC_BIT_EXT = 0x00400000, - VK_IMAGE_COMPRESSION_FIXED_RATE_24BPC_BIT_EXT = 0x00800000, - VK_IMAGE_COMPRESSION_FIXED_RATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkImageCompressionFixedRateFlagBitsEXT; -typedef VkFlags VkImageCompressionFixedRateFlagsEXT; -typedef struct VkPhysicalDeviceImageCompressionControlFeaturesEXT { +typedef struct VkOpticalFlowSessionCreateInfoNV { + VkStructureType sType; + void* pNext; + uint32_t width; + uint32_t height; + VkFormat imageFormat; + VkFormat flowVectorFormat; + VkFormat costFormat; + VkOpticalFlowGridSizeFlagsNV outputGridSize; + VkOpticalFlowGridSizeFlagsNV hintGridSize; + VkOpticalFlowPerformanceLevelNV performanceLevel; + VkOpticalFlowSessionCreateFlagsNV flags; +} VkOpticalFlowSessionCreateInfoNV; + +typedef struct VkOpticalFlowSessionCreatePrivateDataInfoNV { VkStructureType sType; void* pNext; - VkBool32 imageCompressionControl; -} VkPhysicalDeviceImageCompressionControlFeaturesEXT; + uint32_t id; + uint32_t size; + const void* pPrivateData; +} VkOpticalFlowSessionCreatePrivateDataInfoNV; -typedef struct VkImageCompressionControlEXT { - VkStructureType sType; - const void* pNext; - VkImageCompressionFlagsEXT flags; - uint32_t compressionControlPlaneCount; - VkImageCompressionFixedRateFlagsEXT* pFixedRateFlags; -} VkImageCompressionControlEXT; +typedef struct VkOpticalFlowExecuteInfoNV { + VkStructureType sType; + void* pNext; + VkOpticalFlowExecuteFlagsNV flags; + uint32_t regionCount; + const VkRect2D* pRegions; +} VkOpticalFlowExecuteInfoNV; -typedef struct VkSubresourceLayout2EXT { - VkStructureType sType; - void* pNext; - VkSubresourceLayout subresourceLayout; -} VkSubresourceLayout2EXT; +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV)(VkPhysicalDevice physicalDevice, const VkOpticalFlowImageFormatInfoNV* pOpticalFlowImageFormatInfo, uint32_t* pFormatCount, VkOpticalFlowImageFormatPropertiesNV* pImageFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreateOpticalFlowSessionNV)(VkDevice device, const VkOpticalFlowSessionCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkOpticalFlowSessionNV* pSession); +typedef void (VKAPI_PTR *PFN_vkDestroyOpticalFlowSessionNV)(VkDevice device, VkOpticalFlowSessionNV session, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkBindOpticalFlowSessionImageNV)(VkDevice device, VkOpticalFlowSessionNV session, VkOpticalFlowSessionBindingPointNV bindingPoint, VkImageView view, VkImageLayout layout); +typedef void (VKAPI_PTR *PFN_vkCmdOpticalFlowExecuteNV)(VkCommandBuffer commandBuffer, VkOpticalFlowSessionNV session, const VkOpticalFlowExecuteInfoNV* pExecuteInfo); -typedef struct VkImageSubresource2EXT { - VkStructureType sType; - void* pNext; - VkImageSubresource imageSubresource; -} VkImageSubresource2EXT; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceOpticalFlowImageFormatsNV( + VkPhysicalDevice physicalDevice, + const VkOpticalFlowImageFormatInfoNV* pOpticalFlowImageFormatInfo, + uint32_t* pFormatCount, + VkOpticalFlowImageFormatPropertiesNV* pImageFormatProperties); -typedef struct 
VkImageCompressionPropertiesEXT { - VkStructureType sType; - void* pNext; - VkImageCompressionFlagsEXT imageCompressionFlags; - VkImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags; -} VkImageCompressionPropertiesEXT; +VKAPI_ATTR VkResult VKAPI_CALL vkCreateOpticalFlowSessionNV( + VkDevice device, + const VkOpticalFlowSessionCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkOpticalFlowSessionNV* pSession); -typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2EXT)(VkDevice device, VkImage image, const VkImageSubresource2EXT* pSubresource, VkSubresourceLayout2EXT* pLayout); +VKAPI_ATTR void VKAPI_CALL vkDestroyOpticalFlowSessionNV( + VkDevice device, + VkOpticalFlowSessionNV session, + const VkAllocationCallbacks* pAllocator); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2EXT( +VKAPI_ATTR VkResult VKAPI_CALL vkBindOpticalFlowSessionImageNV( VkDevice device, - VkImage image, - const VkImageSubresource2EXT* pSubresource, - VkSubresourceLayout2EXT* pLayout); + VkOpticalFlowSessionNV session, + VkOpticalFlowSessionBindingPointNV bindingPoint, + VkImageView view, + VkImageLayout layout); + +VKAPI_ATTR void VKAPI_CALL vkCmdOpticalFlowExecuteNV( + VkCommandBuffer commandBuffer, + VkOpticalFlowSessionNV session, + const VkOpticalFlowExecuteInfoNV* pExecuteInfo); #endif -#define VK_EXT_attachment_feedback_loop_layout 1 -#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_SPEC_VERSION 2 -#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_EXTENSION_NAME "VK_EXT_attachment_feedback_loop_layout" -typedef struct VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT { +// VK_EXT_legacy_dithering is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_legacy_dithering 1 +#define VK_EXT_LEGACY_DITHERING_SPEC_VERSION 2 +#define VK_EXT_LEGACY_DITHERING_EXTENSION_NAME "VK_EXT_legacy_dithering" +typedef struct VkPhysicalDeviceLegacyDitheringFeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 attachmentFeedbackLoopLayout; -} VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; - - + VkBool32 legacyDithering; +} VkPhysicalDeviceLegacyDitheringFeaturesEXT; -#define VK_EXT_4444_formats 1 -#define VK_EXT_4444_FORMATS_SPEC_VERSION 1 -#define VK_EXT_4444_FORMATS_EXTENSION_NAME "VK_EXT_4444_formats" -typedef struct VkPhysicalDevice4444FormatsFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 formatA4R4G4B4; - VkBool32 formatA4B4G4R4; -} VkPhysicalDevice4444FormatsFeaturesEXT; +// VK_EXT_pipeline_protected_access is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_pipeline_protected_access 1 +#define VK_EXT_PIPELINE_PROTECTED_ACCESS_SPEC_VERSION 1 +#define VK_EXT_PIPELINE_PROTECTED_ACCESS_EXTENSION_NAME "VK_EXT_pipeline_protected_access" +typedef VkPhysicalDevicePipelineProtectedAccessFeatures VkPhysicalDevicePipelineProtectedAccessFeaturesEXT; -#define VK_ARM_rasterization_order_attachment_access 1 -#define VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION 1 -#define VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME "VK_ARM_rasterization_order_attachment_access" -typedef struct VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 rasterizationOrderColorAttachmentAccess; - VkBool32 rasterizationOrderDepthAttachmentAccess; - VkBool32 rasterizationOrderStencilAttachmentAccess; -} VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; -typedef VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM; +// VK_AMD_anti_lag is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_anti_lag 1 +#define VK_AMD_ANTI_LAG_SPEC_VERSION 1 +#define VK_AMD_ANTI_LAG_EXTENSION_NAME "VK_AMD_anti_lag" +typedef enum VkAntiLagModeAMD { + VK_ANTI_LAG_MODE_DRIVER_CONTROL_AMD = 0, + VK_ANTI_LAG_MODE_ON_AMD = 1, + VK_ANTI_LAG_MODE_OFF_AMD = 2, + VK_ANTI_LAG_MODE_MAX_ENUM_AMD = 0x7FFFFFFF +} VkAntiLagModeAMD; -#define VK_EXT_rgba10x6_formats 1 -#define VK_EXT_RGBA10X6_FORMATS_SPEC_VERSION 1 -#define VK_EXT_RGBA10X6_FORMATS_EXTENSION_NAME "VK_EXT_rgba10x6_formats" -typedef struct VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT { +typedef enum VkAntiLagStageAMD { + VK_ANTI_LAG_STAGE_INPUT_AMD = 0, + VK_ANTI_LAG_STAGE_PRESENT_AMD = 1, + VK_ANTI_LAG_STAGE_MAX_ENUM_AMD = 0x7FFFFFFF +} VkAntiLagStageAMD; +typedef struct VkPhysicalDeviceAntiLagFeaturesAMD { VkStructureType sType; void* pNext; - VkBool32 formatRgba10x6WithoutYCbCrSampler; -} VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT; + VkBool32 antiLag; +} VkPhysicalDeviceAntiLagFeaturesAMD; +typedef struct VkAntiLagPresentationInfoAMD { + VkStructureType sType; + void* pNext; + VkAntiLagStageAMD stage; + uint64_t frameIndex; +} VkAntiLagPresentationInfoAMD; +typedef struct VkAntiLagDataAMD { + VkStructureType sType; + const void* pNext; + VkAntiLagModeAMD mode; + uint32_t maxFPS; + const VkAntiLagPresentationInfoAMD* pPresentationInfo; +} VkAntiLagDataAMD; -#define VK_NV_acquire_winrt_display 1 -#define VK_NV_ACQUIRE_WINRT_DISPLAY_SPEC_VERSION 1 -#define VK_NV_ACQUIRE_WINRT_DISPLAY_EXTENSION_NAME "VK_NV_acquire_winrt_display" -typedef VkResult (VKAPI_PTR *PFN_vkAcquireWinrtDisplayNV)(VkPhysicalDevice physicalDevice, VkDisplayKHR display); -typedef VkResult (VKAPI_PTR *PFN_vkGetWinrtDisplayNV)(VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR* pDisplay); +typedef void (VKAPI_PTR *PFN_vkAntiLagUpdateAMD)(VkDevice device, const VkAntiLagDataAMD* pData); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkAcquireWinrtDisplayNV( - VkPhysicalDevice physicalDevice, - VkDisplayKHR display); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetWinrtDisplayNV( - VkPhysicalDevice physicalDevice, - uint32_t deviceRelativeId, - VkDisplayKHR* pDisplay); +VKAPI_ATTR void VKAPI_CALL vkAntiLagUpdateAMD( + VkDevice device, + const VkAntiLagDataAMD* pData); #endif -#define VK_VALVE_mutable_descriptor_type 1 -#define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION 1 -#define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME "VK_VALVE_mutable_descriptor_type" 
-typedef struct VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT { +// VK_EXT_shader_object is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_object 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderEXT) +#define VK_EXT_SHADER_OBJECT_SPEC_VERSION 1 +#define VK_EXT_SHADER_OBJECT_EXTENSION_NAME "VK_EXT_shader_object" + +typedef enum VkShaderCodeTypeEXT { + VK_SHADER_CODE_TYPE_BINARY_EXT = 0, + VK_SHADER_CODE_TYPE_SPIRV_EXT = 1, + VK_SHADER_CODE_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkShaderCodeTypeEXT; + +typedef enum VkDepthClampModeEXT { + VK_DEPTH_CLAMP_MODE_VIEWPORT_RANGE_EXT = 0, + VK_DEPTH_CLAMP_MODE_USER_DEFINED_RANGE_EXT = 1, + VK_DEPTH_CLAMP_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDepthClampModeEXT; + +typedef enum VkShaderCreateFlagBitsEXT { + VK_SHADER_CREATE_LINK_STAGE_BIT_EXT = 0x00000001, + VK_SHADER_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT = 0x00000002, + VK_SHADER_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT = 0x00000004, + VK_SHADER_CREATE_NO_TASK_SHADER_BIT_EXT = 0x00000008, + VK_SHADER_CREATE_DISPATCH_BASE_BIT_EXT = 0x00000010, + VK_SHADER_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_EXT = 0x00000020, + VK_SHADER_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00000040, + VK_SHADER_CREATE_INDIRECT_BINDABLE_BIT_EXT = 0x00000080, + VK_SHADER_CREATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkShaderCreateFlagBitsEXT; +typedef VkFlags VkShaderCreateFlagsEXT; +typedef struct VkPhysicalDeviceShaderObjectFeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 mutableDescriptorType; -} VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT; + VkBool32 shaderObject; +} VkPhysicalDeviceShaderObjectFeaturesEXT; -typedef VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE; - -typedef struct VkMutableDescriptorTypeListEXT { - uint32_t descriptorTypeCount; - const VkDescriptorType* pDescriptorTypes; -} VkMutableDescriptorTypeListEXT; - -typedef VkMutableDescriptorTypeListEXT VkMutableDescriptorTypeListVALVE; +typedef struct VkPhysicalDeviceShaderObjectPropertiesEXT { + VkStructureType sType; + void* pNext; + uint8_t shaderBinaryUUID[VK_UUID_SIZE]; + uint32_t shaderBinaryVersion; +} VkPhysicalDeviceShaderObjectPropertiesEXT; -typedef struct VkMutableDescriptorTypeCreateInfoEXT { - VkStructureType sType; - const void* pNext; - uint32_t mutableDescriptorTypeListCount; - const VkMutableDescriptorTypeListEXT* pMutableDescriptorTypeLists; -} VkMutableDescriptorTypeCreateInfoEXT; +typedef struct VkShaderCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkShaderCreateFlagsEXT flags; + VkShaderStageFlagBits stage; + VkShaderStageFlags nextStage; + VkShaderCodeTypeEXT codeType; + size_t codeSize; + const void* pCode; + const char* pName; + uint32_t setLayoutCount; + const VkDescriptorSetLayout* pSetLayouts; + uint32_t pushConstantRangeCount; + const VkPushConstantRange* pPushConstantRanges; + const VkSpecializationInfo* pSpecializationInfo; +} VkShaderCreateInfoEXT; -typedef VkMutableDescriptorTypeCreateInfoEXT VkMutableDescriptorTypeCreateInfoVALVE; +typedef VkPipelineShaderStageRequiredSubgroupSizeCreateInfo VkShaderRequiredSubgroupSizeCreateInfoEXT; +typedef struct VkDepthClampRangeEXT { + float minDepthClamp; + float maxDepthClamp; +} VkDepthClampRangeEXT; +typedef VkResult (VKAPI_PTR *PFN_vkCreateShadersEXT)(VkDevice device, uint32_t createInfoCount, const VkShaderCreateInfoEXT* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkShaderEXT* pShaders); +typedef void (VKAPI_PTR *PFN_vkDestroyShaderEXT)(VkDevice device, 
VkShaderEXT shader, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetShaderBinaryDataEXT)(VkDevice device, VkShaderEXT shader, size_t* pDataSize, void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdBindShadersEXT)(VkCommandBuffer commandBuffer, uint32_t stageCount, const VkShaderStageFlagBits* pStages, const VkShaderEXT* pShaders); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClampRangeEXT)(VkCommandBuffer commandBuffer, VkDepthClampModeEXT depthClampMode, const VkDepthClampRangeEXT* pDepthClampRange); -#define VK_EXT_vertex_input_dynamic_state 1 -#define VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_SPEC_VERSION 2 -#define VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_vertex_input_dynamic_state" -typedef struct VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 vertexInputDynamicState; -} VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateShadersEXT( + VkDevice device, + uint32_t createInfoCount, + const VkShaderCreateInfoEXT* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkShaderEXT* pShaders); -typedef struct VkVertexInputBindingDescription2EXT { - VkStructureType sType; - void* pNext; - uint32_t binding; - uint32_t stride; - VkVertexInputRate inputRate; - uint32_t divisor; -} VkVertexInputBindingDescription2EXT; +VKAPI_ATTR void VKAPI_CALL vkDestroyShaderEXT( + VkDevice device, + VkShaderEXT shader, + const VkAllocationCallbacks* pAllocator); -typedef struct VkVertexInputAttributeDescription2EXT { - VkStructureType sType; - void* pNext; - uint32_t location; - uint32_t binding; - VkFormat format; - uint32_t offset; -} VkVertexInputAttributeDescription2EXT; +VKAPI_ATTR VkResult VKAPI_CALL vkGetShaderBinaryDataEXT( + VkDevice device, + VkShaderEXT shader, + size_t* pDataSize, + void* pData); -typedef void (VKAPI_PTR *PFN_vkCmdSetVertexInputEXT)(VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount, const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount, const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions); +VKAPI_ATTR void VKAPI_CALL vkCmdBindShadersEXT( + VkCommandBuffer commandBuffer, + uint32_t stageCount, + const VkShaderStageFlagBits* pStages, + const VkShaderEXT* pShaders); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetVertexInputEXT( +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClampRangeEXT( VkCommandBuffer commandBuffer, - uint32_t vertexBindingDescriptionCount, - const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, - uint32_t vertexAttributeDescriptionCount, - const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions); + VkDepthClampModeEXT depthClampMode, + const VkDepthClampRangeEXT* pDepthClampRange); #endif -#define VK_EXT_physical_device_drm 1 -#define VK_EXT_PHYSICAL_DEVICE_DRM_SPEC_VERSION 1 -#define VK_EXT_PHYSICAL_DEVICE_DRM_EXTENSION_NAME "VK_EXT_physical_device_drm" -typedef struct VkPhysicalDeviceDrmPropertiesEXT { +// VK_QCOM_tile_properties is a preprocessor guard. Do not pass it to API calls. 
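/*
 * Illustrative usage sketch (editorial note; not part of the generated header).
 * The tile-properties queries below follow the usual count-then-fill pattern; a
 * minimal sketch, assuming the extension is enabled and that
 * VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM is the matching sType value defined
 * elsewhere in this header:
 *
 *     PFN_vkGetFramebufferTilePropertiesQCOM pfnGetTileProps =
 *         (PFN_vkGetFramebufferTilePropertiesQCOM)vkGetDeviceProcAddr(device, "vkGetFramebufferTilePropertiesQCOM");
 *     uint32_t count = 0;
 *     pfnGetTileProps(device, framebuffer, &count, NULL);        // query how many tiles the framebuffer uses
 *     VkTilePropertiesQCOM props = { .sType = VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM };
 *     count = 1;
 *     pfnGetTileProps(device, framebuffer, &count, &props);      // fetch the first tile's size, apron and origin
 */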
+#define VK_QCOM_tile_properties 1 +#define VK_QCOM_TILE_PROPERTIES_SPEC_VERSION 1 +#define VK_QCOM_TILE_PROPERTIES_EXTENSION_NAME "VK_QCOM_tile_properties" +typedef struct VkPhysicalDeviceTilePropertiesFeaturesQCOM { VkStructureType sType; void* pNext; - VkBool32 hasPrimary; - VkBool32 hasRender; - int64_t primaryMajor; - int64_t primaryMinor; - int64_t renderMajor; - int64_t renderMinor; -} VkPhysicalDeviceDrmPropertiesEXT; + VkBool32 tileProperties; +} VkPhysicalDeviceTilePropertiesFeaturesQCOM; + +typedef struct VkTilePropertiesQCOM { + VkStructureType sType; + void* pNext; + VkExtent3D tileSize; + VkExtent2D apronSize; + VkOffset2D origin; +} VkTilePropertiesQCOM; +typedef VkResult (VKAPI_PTR *PFN_vkGetFramebufferTilePropertiesQCOM)(VkDevice device, VkFramebuffer framebuffer, uint32_t* pPropertiesCount, VkTilePropertiesQCOM* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDynamicRenderingTilePropertiesQCOM)(VkDevice device, const VkRenderingInfo* pRenderingInfo, VkTilePropertiesQCOM* pProperties); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetFramebufferTilePropertiesQCOM( + VkDevice device, + VkFramebuffer framebuffer, + uint32_t* pPropertiesCount, + VkTilePropertiesQCOM* pProperties); -#define VK_EXT_depth_clip_control 1 -#define VK_EXT_DEPTH_CLIP_CONTROL_SPEC_VERSION 1 -#define VK_EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME "VK_EXT_depth_clip_control" -typedef struct VkPhysicalDeviceDepthClipControlFeaturesEXT { +VKAPI_ATTR VkResult VKAPI_CALL vkGetDynamicRenderingTilePropertiesQCOM( + VkDevice device, + const VkRenderingInfo* pRenderingInfo, + VkTilePropertiesQCOM* pProperties); +#endif + + +// VK_SEC_amigo_profiling is a preprocessor guard. Do not pass it to API calls. +#define VK_SEC_amigo_profiling 1 +#define VK_SEC_AMIGO_PROFILING_SPEC_VERSION 1 +#define VK_SEC_AMIGO_PROFILING_EXTENSION_NAME "VK_SEC_amigo_profiling" +typedef struct VkPhysicalDeviceAmigoProfilingFeaturesSEC { VkStructureType sType; void* pNext; - VkBool32 depthClipControl; -} VkPhysicalDeviceDepthClipControlFeaturesEXT; + VkBool32 amigoProfiling; +} VkPhysicalDeviceAmigoProfilingFeaturesSEC; -typedef struct VkPipelineViewportDepthClipControlCreateInfoEXT { +typedef struct VkAmigoProfilingSubmitInfoSEC { VkStructureType sType; const void* pNext; - VkBool32 negativeOneToOne; -} VkPipelineViewportDepthClipControlCreateInfoEXT; + uint64_t firstDrawTimestamp; + uint64_t swapBufferTimestamp; +} VkAmigoProfilingSubmitInfoSEC; -#define VK_EXT_primitive_topology_list_restart 1 -#define VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_SPEC_VERSION 1 -#define VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_EXTENSION_NAME "VK_EXT_primitive_topology_list_restart" -typedef struct VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT { +// VK_QCOM_multiview_per_view_viewports is a preprocessor guard. Do not pass it to API calls. 
+#define VK_QCOM_multiview_per_view_viewports 1 +#define VK_QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_SPEC_VERSION 1 +#define VK_QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_EXTENSION_NAME "VK_QCOM_multiview_per_view_viewports" +typedef struct VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM { VkStructureType sType; void* pNext; - VkBool32 primitiveTopologyListRestart; - VkBool32 primitiveTopologyPatchListRestart; -} VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + VkBool32 multiviewPerViewViewports; +} VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; -#define VK_HUAWEI_subpass_shading 1 -#define VK_HUAWEI_SUBPASS_SHADING_SPEC_VERSION 2 -#define VK_HUAWEI_SUBPASS_SHADING_EXTENSION_NAME "VK_HUAWEI_subpass_shading" -typedef struct VkSubpassShadingPipelineCreateInfoHUAWEI { - VkStructureType sType; - void* pNext; - VkRenderPass renderPass; - uint32_t subpass; -} VkSubpassShadingPipelineCreateInfoHUAWEI; +// VK_NV_ray_tracing_invocation_reorder is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_ray_tracing_invocation_reorder 1 +#define VK_NV_RAY_TRACING_INVOCATION_REORDER_SPEC_VERSION 1 +#define VK_NV_RAY_TRACING_INVOCATION_REORDER_EXTENSION_NAME "VK_NV_ray_tracing_invocation_reorder" -typedef struct VkPhysicalDeviceSubpassShadingFeaturesHUAWEI { +typedef enum VkRayTracingInvocationReorderModeNV { + VK_RAY_TRACING_INVOCATION_REORDER_MODE_NONE_NV = 0, + VK_RAY_TRACING_INVOCATION_REORDER_MODE_REORDER_NV = 1, + VK_RAY_TRACING_INVOCATION_REORDER_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkRayTracingInvocationReorderModeNV; +typedef struct VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV { + VkStructureType sType; + void* pNext; + VkRayTracingInvocationReorderModeNV rayTracingInvocationReorderReorderingHint; +} VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV; + +typedef struct VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV { VkStructureType sType; void* pNext; - VkBool32 subpassShading; -} VkPhysicalDeviceSubpassShadingFeaturesHUAWEI; + VkBool32 rayTracingInvocationReorder; +} VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV; -typedef struct VkPhysicalDeviceSubpassShadingPropertiesHUAWEI { + + +// VK_NV_cooperative_vector is a preprocessor guard. Do not pass it to API calls. 
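/*
 * Illustrative usage sketch (editorial note; not part of the generated header).
 * Supported cooperative-vector type combinations are enumerated with the usual
 * two-call pattern; a minimal sketch, assuming the sType name
 * VK_STRUCTURE_TYPE_COOPERATIVE_VECTOR_PROPERTIES_NV defined elsewhere in this header:
 *
 *     PFN_vkGetPhysicalDeviceCooperativeVectorPropertiesNV pfnGetProps =
 *         (PFN_vkGetPhysicalDeviceCooperativeVectorPropertiesNV)
 *             vkGetInstanceProcAddr(instance, "vkGetPhysicalDeviceCooperativeVectorPropertiesNV");
 *     uint32_t count = 0;
 *     pfnGetProps(physicalDevice, &count, NULL);                 // first call: get the number of entries
 *     // Allocate 'count' VkCooperativeVectorPropertiesNV structs, set each sType to
 *     // VK_STRUCTURE_TYPE_COOPERATIVE_VECTOR_PROPERTIES_NV, then call pfnGetProps again to fill them.
 */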
+#define VK_NV_cooperative_vector 1 +#define VK_NV_COOPERATIVE_VECTOR_SPEC_VERSION 4 +#define VK_NV_COOPERATIVE_VECTOR_EXTENSION_NAME "VK_NV_cooperative_vector" + +typedef enum VkCooperativeVectorMatrixLayoutNV { + VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_ROW_MAJOR_NV = 0, + VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_COLUMN_MAJOR_NV = 1, + VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_INFERENCING_OPTIMAL_NV = 2, + VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_TRAINING_OPTIMAL_NV = 3, + VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_MAX_ENUM_NV = 0x7FFFFFFF +} VkCooperativeVectorMatrixLayoutNV; +typedef struct VkPhysicalDeviceCooperativeVectorPropertiesNV { + VkStructureType sType; + void* pNext; + VkShaderStageFlags cooperativeVectorSupportedStages; + VkBool32 cooperativeVectorTrainingFloat16Accumulation; + VkBool32 cooperativeVectorTrainingFloat32Accumulation; + uint32_t maxCooperativeVectorComponents; +} VkPhysicalDeviceCooperativeVectorPropertiesNV; + +typedef struct VkPhysicalDeviceCooperativeVectorFeaturesNV { VkStructureType sType; void* pNext; - uint32_t maxSubpassShadingWorkgroupSizeAspectRatio; -} VkPhysicalDeviceSubpassShadingPropertiesHUAWEI; + VkBool32 cooperativeVector; + VkBool32 cooperativeVectorTraining; +} VkPhysicalDeviceCooperativeVectorFeaturesNV; -typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI)(VkDevice device, VkRenderPass renderpass, VkExtent2D* pMaxWorkgroupSize); -typedef void (VKAPI_PTR *PFN_vkCmdSubpassShadingHUAWEI)(VkCommandBuffer commandBuffer); +typedef struct VkCooperativeVectorPropertiesNV { + VkStructureType sType; + void* pNext; + VkComponentTypeKHR inputType; + VkComponentTypeKHR inputInterpretation; + VkComponentTypeKHR matrixInterpretation; + VkComponentTypeKHR biasInterpretation; + VkComponentTypeKHR resultType; + VkBool32 transpose; +} VkCooperativeVectorPropertiesNV; + +typedef struct VkConvertCooperativeVectorMatrixInfoNV { + VkStructureType sType; + const void* pNext; + size_t srcSize; + VkDeviceOrHostAddressConstKHR srcData; + size_t* pDstSize; + VkDeviceOrHostAddressKHR dstData; + VkComponentTypeKHR srcComponentType; + VkComponentTypeKHR dstComponentType; + uint32_t numRows; + uint32_t numColumns; + VkCooperativeVectorMatrixLayoutNV srcLayout; + size_t srcStride; + VkCooperativeVectorMatrixLayoutNV dstLayout; + size_t dstStride; +} VkConvertCooperativeVectorMatrixInfoNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCooperativeVectorPropertiesNV)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeVectorPropertiesNV* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkConvertCooperativeVectorMatrixNV)(VkDevice device, const VkConvertCooperativeVectorMatrixInfoNV* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdConvertCooperativeVectorMatrixNV)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkConvertCooperativeVectorMatrixInfoNV* pInfos); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeVectorPropertiesNV( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkCooperativeVectorPropertiesNV* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkConvertCooperativeVectorMatrixNV( VkDevice device, - VkRenderPass renderpass, - VkExtent2D* pMaxWorkgroupSize); + const VkConvertCooperativeVectorMatrixInfoNV* pInfo); -VKAPI_ATTR void VKAPI_CALL vkCmdSubpassShadingHUAWEI( - VkCommandBuffer commandBuffer); +VKAPI_ATTR void VKAPI_CALL vkCmdConvertCooperativeVectorMatrixNV( + VkCommandBuffer commandBuffer, + 
uint32_t infoCount, + const VkConvertCooperativeVectorMatrixInfoNV* pInfos); #endif -#define VK_HUAWEI_invocation_mask 1 -#define VK_HUAWEI_INVOCATION_MASK_SPEC_VERSION 1 -#define VK_HUAWEI_INVOCATION_MASK_EXTENSION_NAME "VK_HUAWEI_invocation_mask" -typedef struct VkPhysicalDeviceInvocationMaskFeaturesHUAWEI { +// VK_NV_extended_sparse_address_space is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_extended_sparse_address_space 1 +#define VK_NV_EXTENDED_SPARSE_ADDRESS_SPACE_SPEC_VERSION 1 +#define VK_NV_EXTENDED_SPARSE_ADDRESS_SPACE_EXTENSION_NAME "VK_NV_extended_sparse_address_space" +typedef struct VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV { VkStructureType sType; void* pNext; - VkBool32 invocationMask; -} VkPhysicalDeviceInvocationMaskFeaturesHUAWEI; + VkBool32 extendedSparseAddressSpace; +} VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV; -typedef void (VKAPI_PTR *PFN_vkCmdBindInvocationMaskHUAWEI)(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout); +typedef struct VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV { + VkStructureType sType; + void* pNext; + VkDeviceSize extendedSparseAddressSpaceSize; + VkImageUsageFlags extendedSparseImageUsageFlags; + VkBufferUsageFlags extendedSparseBufferUsageFlags; +} VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdBindInvocationMaskHUAWEI( - VkCommandBuffer commandBuffer, - VkImageView imageView, - VkImageLayout imageLayout); -#endif -#define VK_NV_external_memory_rdma 1 -typedef void* VkRemoteAddressNV; -#define VK_NV_EXTERNAL_MEMORY_RDMA_SPEC_VERSION 1 -#define VK_NV_EXTERNAL_MEMORY_RDMA_EXTENSION_NAME "VK_NV_external_memory_rdma" -typedef struct VkMemoryGetRemoteAddressInfoNV { - VkStructureType sType; - const void* pNext; - VkDeviceMemory memory; - VkExternalMemoryHandleTypeFlagBits handleType; -} VkMemoryGetRemoteAddressInfoNV; +// VK_EXT_mutable_descriptor_type is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_mutable_descriptor_type 1 +#define VK_EXT_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION 1 +#define VK_EXT_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME "VK_EXT_mutable_descriptor_type" -typedef struct VkPhysicalDeviceExternalMemoryRDMAFeaturesNV { + +// VK_EXT_legacy_vertex_attributes is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_legacy_vertex_attributes 1 +#define VK_EXT_LEGACY_VERTEX_ATTRIBUTES_SPEC_VERSION 1 +#define VK_EXT_LEGACY_VERTEX_ATTRIBUTES_EXTENSION_NAME "VK_EXT_legacy_vertex_attributes" +typedef struct VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 externalMemoryRDMA; -} VkPhysicalDeviceExternalMemoryRDMAFeaturesNV; - -typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryRemoteAddressNV)(VkDevice device, const VkMemoryGetRemoteAddressInfoNV* pMemoryGetRemoteAddressInfo, VkRemoteAddressNV* pAddress); + VkBool32 legacyVertexAttributes; +} VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryRemoteAddressNV( - VkDevice device, - const VkMemoryGetRemoteAddressInfoNV* pMemoryGetRemoteAddressInfo, - VkRemoteAddressNV* pAddress); -#endif +typedef struct VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 nativeUnalignedPerformance; +} VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT; + + + +// VK_EXT_layer_settings is a preprocessor guard. Do not pass it to API calls. 
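/*
 * Illustrative usage sketch (editorial note; not part of the generated header).
 * Layer settings are passed at instance creation by chaining VkLayerSettingsCreateInfoEXT
 * into VkInstanceCreateInfo::pNext. A minimal sketch; the layer and setting names below
 * ("VK_LAYER_KHRONOS_validation", "validate_sync") are examples, and
 * VK_STRUCTURE_TYPE_LAYER_SETTINGS_CREATE_INFO_EXT is defined elsewhere in this header:
 *
 *     const VkBool32 validateSync = VK_TRUE;
 *     const VkLayerSettingEXT setting = {
 *         .pLayerName   = "VK_LAYER_KHRONOS_validation",
 *         .pSettingName = "validate_sync",
 *         .type         = VK_LAYER_SETTING_TYPE_BOOL32_EXT,
 *         .valueCount   = 1,
 *         .pValues      = &validateSync,
 *     };
 *     const VkLayerSettingsCreateInfoEXT layerSettings = {
 *         .sType        = VK_STRUCTURE_TYPE_LAYER_SETTINGS_CREATE_INFO_EXT,
 *         .settingCount = 1,
 *         .pSettings    = &setting,
 *     };
 *     // Set VkInstanceCreateInfo::pNext = &layerSettings before calling vkCreateInstance.
 */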
+#define VK_EXT_layer_settings 1 +#define VK_EXT_LAYER_SETTINGS_SPEC_VERSION 2 +#define VK_EXT_LAYER_SETTINGS_EXTENSION_NAME "VK_EXT_layer_settings" + +typedef enum VkLayerSettingTypeEXT { + VK_LAYER_SETTING_TYPE_BOOL32_EXT = 0, + VK_LAYER_SETTING_TYPE_INT32_EXT = 1, + VK_LAYER_SETTING_TYPE_INT64_EXT = 2, + VK_LAYER_SETTING_TYPE_UINT32_EXT = 3, + VK_LAYER_SETTING_TYPE_UINT64_EXT = 4, + VK_LAYER_SETTING_TYPE_FLOAT32_EXT = 5, + VK_LAYER_SETTING_TYPE_FLOAT64_EXT = 6, + VK_LAYER_SETTING_TYPE_STRING_EXT = 7, + VK_LAYER_SETTING_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkLayerSettingTypeEXT; +typedef struct VkLayerSettingEXT { + const char* pLayerName; + const char* pSettingName; + VkLayerSettingTypeEXT type; + uint32_t valueCount; + const void* pValues; +} VkLayerSettingEXT; + +typedef struct VkLayerSettingsCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t settingCount; + const VkLayerSettingEXT* pSettings; +} VkLayerSettingsCreateInfoEXT; -#define VK_EXT_pipeline_properties 1 -#define VK_EXT_PIPELINE_PROPERTIES_SPEC_VERSION 1 -#define VK_EXT_PIPELINE_PROPERTIES_EXTENSION_NAME "VK_EXT_pipeline_properties" -typedef VkPipelineInfoKHR VkPipelineInfoEXT; -typedef struct VkPipelinePropertiesIdentifierEXT { +// VK_ARM_shader_core_builtins is a preprocessor guard. Do not pass it to API calls. +#define VK_ARM_shader_core_builtins 1 +#define VK_ARM_SHADER_CORE_BUILTINS_SPEC_VERSION 2 +#define VK_ARM_SHADER_CORE_BUILTINS_EXTENSION_NAME "VK_ARM_shader_core_builtins" +typedef struct VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM { VkStructureType sType; void* pNext; - uint8_t pipelineIdentifier[VK_UUID_SIZE]; -} VkPipelinePropertiesIdentifierEXT; + VkBool32 shaderCoreBuiltins; +} VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM; -typedef struct VkPhysicalDevicePipelinePropertiesFeaturesEXT { +typedef struct VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM { VkStructureType sType; void* pNext; - VkBool32 pipelinePropertiesIdentifier; -} VkPhysicalDevicePipelinePropertiesFeaturesEXT; - -typedef VkResult (VKAPI_PTR *PFN_vkGetPipelinePropertiesEXT)(VkDevice device, const VkPipelineInfoEXT* pPipelineInfo, VkBaseOutStructure* pPipelineProperties); + uint64_t shaderCoreMask; + uint32_t shaderCoreCount; + uint32_t shaderWarpsPerCore; +} VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelinePropertiesEXT( - VkDevice device, - const VkPipelineInfoEXT* pPipelineInfo, - VkBaseOutStructure* pPipelineProperties); -#endif -#define VK_EXT_multisampled_render_to_single_sampled 1 -#define VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_SPEC_VERSION 1 -#define VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXTENSION_NAME "VK_EXT_multisampled_render_to_single_sampled" -typedef struct VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT { +// VK_EXT_pipeline_library_group_handles is a preprocessor guard. Do not pass it to API calls. 
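/*
 * Editor's note: usage sketch for VK_EXT_layer_settings above; not part of the
 * patch. It chains VkLayerSettingsCreateInfoEXT into instance creation to pass
 * one boolean setting to a layer. The layer name and setting name below are
 * placeholders, and VK_STRUCTURE_TYPE_LAYER_SETTINGS_CREATE_INFO_EXT is assumed
 * to follow the usual sType naming convention.
 */
#if 0 /* example only */
static VkResult create_instance_with_layer_setting(VkInstance* pInstance)
{
    const VkBool32 enableSetting = VK_TRUE;
    const VkLayerSettingEXT setting = {
        "VK_LAYER_KHRONOS_validation",    /* pLayerName (placeholder)   */
        "validate_sync",                  /* pSettingName (placeholder) */
        VK_LAYER_SETTING_TYPE_BOOL32_EXT, /* type                       */
        1,                                /* valueCount                 */
        &enableSetting                    /* pValues                    */
    };

    VkLayerSettingsCreateInfoEXT layerSettings = {0};
    layerSettings.sType        = VK_STRUCTURE_TYPE_LAYER_SETTINGS_CREATE_INFO_EXT; /* assumed name */
    layerSettings.settingCount = 1;
    layerSettings.pSettings    = &setting;

    VkInstanceCreateInfo ci = {0};
    ci.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    ci.pNext = &layerSettings; /* consumed by layers during vkCreateInstance */
    return vkCreateInstance(&ci, NULL, pInstance);
}
#endif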
+#define VK_EXT_pipeline_library_group_handles 1 +#define VK_EXT_PIPELINE_LIBRARY_GROUP_HANDLES_SPEC_VERSION 1 +#define VK_EXT_PIPELINE_LIBRARY_GROUP_HANDLES_EXTENSION_NAME "VK_EXT_pipeline_library_group_handles" +typedef struct VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 multisampledRenderToSingleSampled; -} VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; + VkBool32 pipelineLibraryGroupHandles; +} VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; -typedef struct VkSubpassResolvePerformanceQueryEXT { + + +// VK_EXT_dynamic_rendering_unused_attachments is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_dynamic_rendering_unused_attachments 1 +#define VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_SPEC_VERSION 1 +#define VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_EXTENSION_NAME "VK_EXT_dynamic_rendering_unused_attachments" +typedef struct VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT { VkStructureType sType; void* pNext; - VkBool32 optimal; -} VkSubpassResolvePerformanceQueryEXT; + VkBool32 dynamicRenderingUnusedAttachments; +} VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; + + + +// VK_NV_low_latency2 is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_low_latency2 1 +#define VK_NV_LOW_LATENCY_2_SPEC_VERSION 2 +#define VK_NV_LOW_LATENCY_2_EXTENSION_NAME "VK_NV_low_latency2" + +typedef enum VkLatencyMarkerNV { + VK_LATENCY_MARKER_SIMULATION_START_NV = 0, + VK_LATENCY_MARKER_SIMULATION_END_NV = 1, + VK_LATENCY_MARKER_RENDERSUBMIT_START_NV = 2, + VK_LATENCY_MARKER_RENDERSUBMIT_END_NV = 3, + VK_LATENCY_MARKER_PRESENT_START_NV = 4, + VK_LATENCY_MARKER_PRESENT_END_NV = 5, + VK_LATENCY_MARKER_INPUT_SAMPLE_NV = 6, + VK_LATENCY_MARKER_TRIGGER_FLASH_NV = 7, + VK_LATENCY_MARKER_OUT_OF_BAND_RENDERSUBMIT_START_NV = 8, + VK_LATENCY_MARKER_OUT_OF_BAND_RENDERSUBMIT_END_NV = 9, + VK_LATENCY_MARKER_OUT_OF_BAND_PRESENT_START_NV = 10, + VK_LATENCY_MARKER_OUT_OF_BAND_PRESENT_END_NV = 11, + VK_LATENCY_MARKER_MAX_ENUM_NV = 0x7FFFFFFF +} VkLatencyMarkerNV; + +typedef enum VkOutOfBandQueueTypeNV { + VK_OUT_OF_BAND_QUEUE_TYPE_RENDER_NV = 0, + VK_OUT_OF_BAND_QUEUE_TYPE_PRESENT_NV = 1, + VK_OUT_OF_BAND_QUEUE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkOutOfBandQueueTypeNV; +typedef struct VkLatencySleepModeInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 lowLatencyMode; + VkBool32 lowLatencyBoost; + uint32_t minimumIntervalUs; +} VkLatencySleepModeInfoNV; -typedef struct VkMultisampledRenderToSingleSampledInfoEXT { - VkStructureType sType; - const void* pNext; - VkBool32 multisampledRenderToSingleSampledEnable; - VkSampleCountFlagBits rasterizationSamples; -} VkMultisampledRenderToSingleSampledInfoEXT; +typedef struct VkLatencySleepInfoNV { + VkStructureType sType; + const void* pNext; + VkSemaphore signalSemaphore; + uint64_t value; +} VkLatencySleepInfoNV; +typedef struct VkSetLatencyMarkerInfoNV { + VkStructureType sType; + const void* pNext; + uint64_t presentID; + VkLatencyMarkerNV marker; +} VkSetLatencyMarkerInfoNV; +typedef struct VkLatencyTimingsFrameReportNV { + VkStructureType sType; + const void* pNext; + uint64_t presentID; + uint64_t inputSampleTimeUs; + uint64_t simStartTimeUs; + uint64_t simEndTimeUs; + uint64_t renderSubmitStartTimeUs; + uint64_t renderSubmitEndTimeUs; + uint64_t presentStartTimeUs; + uint64_t presentEndTimeUs; + uint64_t driverStartTimeUs; + uint64_t driverEndTimeUs; + uint64_t osRenderQueueStartTimeUs; + uint64_t 
osRenderQueueEndTimeUs; + uint64_t gpuRenderStartTimeUs; + uint64_t gpuRenderEndTimeUs; +} VkLatencyTimingsFrameReportNV; + +typedef struct VkGetLatencyMarkerInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t timingCount; + VkLatencyTimingsFrameReportNV* pTimings; +} VkGetLatencyMarkerInfoNV; -#define VK_EXT_extended_dynamic_state2 1 -#define VK_EXT_EXTENDED_DYNAMIC_STATE_2_SPEC_VERSION 1 -#define VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME "VK_EXT_extended_dynamic_state2" -typedef struct VkPhysicalDeviceExtendedDynamicState2FeaturesEXT { +typedef struct VkLatencySubmissionPresentIdNV { VkStructureType sType; - void* pNext; - VkBool32 extendedDynamicState2; - VkBool32 extendedDynamicState2LogicOp; - VkBool32 extendedDynamicState2PatchControlPoints; -} VkPhysicalDeviceExtendedDynamicState2FeaturesEXT; + const void* pNext; + uint64_t presentID; +} VkLatencySubmissionPresentIdNV; -typedef void (VKAPI_PTR *PFN_vkCmdSetPatchControlPointsEXT)(VkCommandBuffer commandBuffer, uint32_t patchControlPoints); -typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizerDiscardEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable); -typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBiasEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable); -typedef void (VKAPI_PTR *PFN_vkCmdSetLogicOpEXT)(VkCommandBuffer commandBuffer, VkLogicOp logicOp); -typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveRestartEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable); +typedef struct VkSwapchainLatencyCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 latencyModeEnable; +} VkSwapchainLatencyCreateInfoNV; + +typedef struct VkOutOfBandQueueTypeInfoNV { + VkStructureType sType; + const void* pNext; + VkOutOfBandQueueTypeNV queueType; +} VkOutOfBandQueueTypeInfoNV; + +typedef struct VkLatencySurfaceCapabilitiesNV { + VkStructureType sType; + const void* pNext; + uint32_t presentModeCount; + VkPresentModeKHR* pPresentModes; +} VkLatencySurfaceCapabilitiesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkSetLatencySleepModeNV)(VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepModeInfoNV* pSleepModeInfo); +typedef VkResult (VKAPI_PTR *PFN_vkLatencySleepNV)(VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepInfoNV* pSleepInfo); +typedef void (VKAPI_PTR *PFN_vkSetLatencyMarkerNV)(VkDevice device, VkSwapchainKHR swapchain, const VkSetLatencyMarkerInfoNV* pLatencyMarkerInfo); +typedef void (VKAPI_PTR *PFN_vkGetLatencyTimingsNV)(VkDevice device, VkSwapchainKHR swapchain, VkGetLatencyMarkerInfoNV* pLatencyMarkerInfo); +typedef void (VKAPI_PTR *PFN_vkQueueNotifyOutOfBandNV)(VkQueue queue, const VkOutOfBandQueueTypeInfoNV* pQueueTypeInfo); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetPatchControlPointsEXT( - VkCommandBuffer commandBuffer, - uint32_t patchControlPoints); +VKAPI_ATTR VkResult VKAPI_CALL vkSetLatencySleepModeNV( + VkDevice device, + VkSwapchainKHR swapchain, + const VkLatencySleepModeInfoNV* pSleepModeInfo); -VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizerDiscardEnableEXT( - VkCommandBuffer commandBuffer, - VkBool32 rasterizerDiscardEnable); +VKAPI_ATTR VkResult VKAPI_CALL vkLatencySleepNV( + VkDevice device, + VkSwapchainKHR swapchain, + const VkLatencySleepInfoNV* pSleepInfo); -VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBiasEnableEXT( - VkCommandBuffer commandBuffer, - VkBool32 depthBiasEnable); +VKAPI_ATTR void VKAPI_CALL vkSetLatencyMarkerNV( + VkDevice device, + VkSwapchainKHR swapchain, + const VkSetLatencyMarkerInfoNV* 
pLatencyMarkerInfo); -VKAPI_ATTR void VKAPI_CALL vkCmdSetLogicOpEXT( - VkCommandBuffer commandBuffer, - VkLogicOp logicOp); +VKAPI_ATTR void VKAPI_CALL vkGetLatencyTimingsNV( + VkDevice device, + VkSwapchainKHR swapchain, + VkGetLatencyMarkerInfoNV* pLatencyMarkerInfo); -VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveRestartEnableEXT( - VkCommandBuffer commandBuffer, - VkBool32 primitiveRestartEnable); +VKAPI_ATTR void VKAPI_CALL vkQueueNotifyOutOfBandNV( + VkQueue queue, + const VkOutOfBandQueueTypeInfoNV* pQueueTypeInfo); #endif - -#define VK_EXT_color_write_enable 1 -#define VK_EXT_COLOR_WRITE_ENABLE_SPEC_VERSION 1 -#define VK_EXT_COLOR_WRITE_ENABLE_EXTENSION_NAME "VK_EXT_color_write_enable" -typedef struct VkPhysicalDeviceColorWriteEnableFeaturesEXT { + +// VK_QCOM_multiview_per_view_render_areas is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_multiview_per_view_render_areas 1 +#define VK_QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_SPEC_VERSION 1 +#define VK_QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_EXTENSION_NAME "VK_QCOM_multiview_per_view_render_areas" +typedef struct VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM { VkStructureType sType; void* pNext; - VkBool32 colorWriteEnable; -} VkPhysicalDeviceColorWriteEnableFeaturesEXT; + VkBool32 multiviewPerViewRenderAreas; +} VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; -typedef struct VkPipelineColorWriteCreateInfoEXT { +typedef struct VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM { VkStructureType sType; const void* pNext; - uint32_t attachmentCount; - const VkBool32* pColorWriteEnables; -} VkPipelineColorWriteCreateInfoEXT; - -typedef void (VKAPI_PTR *PFN_vkCmdSetColorWriteEnableEXT)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32* pColorWriteEnables); + uint32_t perViewRenderAreaCount; + const VkRect2D* pPerViewRenderAreas; +} VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetColorWriteEnableEXT( - VkCommandBuffer commandBuffer, - uint32_t attachmentCount, - const VkBool32* pColorWriteEnables); -#endif -#define VK_EXT_primitives_generated_query 1 -#define VK_EXT_PRIMITIVES_GENERATED_QUERY_SPEC_VERSION 1 -#define VK_EXT_PRIMITIVES_GENERATED_QUERY_EXTENSION_NAME "VK_EXT_primitives_generated_query" -typedef struct VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT { +// VK_NV_per_stage_descriptor_set is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_per_stage_descriptor_set 1 +#define VK_NV_PER_STAGE_DESCRIPTOR_SET_SPEC_VERSION 1 +#define VK_NV_PER_STAGE_DESCRIPTOR_SET_EXTENSION_NAME "VK_NV_per_stage_descriptor_set" +typedef struct VkPhysicalDevicePerStageDescriptorSetFeaturesNV { VkStructureType sType; void* pNext; - VkBool32 primitivesGeneratedQuery; - VkBool32 primitivesGeneratedQueryWithRasterizerDiscard; - VkBool32 primitivesGeneratedQueryWithNonZeroStreams; -} VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; + VkBool32 perStageDescriptorSet; + VkBool32 dynamicPipelineLayout; +} VkPhysicalDevicePerStageDescriptorSetFeaturesNV; -#define VK_EXT_global_priority_query 1 -#define VK_EXT_GLOBAL_PRIORITY_QUERY_SPEC_VERSION 1 -#define VK_EXT_GLOBAL_PRIORITY_QUERY_EXTENSION_NAME "VK_EXT_global_priority_query" -#define VK_MAX_GLOBAL_PRIORITY_SIZE_EXT VK_MAX_GLOBAL_PRIORITY_SIZE_KHR -typedef VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT; +// VK_QCOM_image_processing2 is a preprocessor guard. Do not pass it to API calls. 
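/*
 * Editor's note: frame-pacing sketch for VK_NV_low_latency2 above; not part of
 * the patch. It enables low-latency mode on a swapchain, then each frame asks
 * the driver when simulation may start (vkLatencySleepNV signals the supplied
 * timeline semaphore) and records a latency marker. The VK_STRUCTURE_TYPE_*
 * values for the new structs are assumed to follow the usual naming convention.
 */
#if 0 /* example only */
static void enable_low_latency(VkDevice device, VkSwapchainKHR swapchain)
{
    VkLatencySleepModeInfoNV mode = {0};
    mode.sType             = VK_STRUCTURE_TYPE_LATENCY_SLEEP_MODE_INFO_NV; /* assumed name */
    mode.lowLatencyMode    = VK_TRUE;
    mode.lowLatencyBoost   = VK_FALSE;
    mode.minimumIntervalUs = 0;
    vkSetLatencySleepModeNV(device, swapchain, &mode);
}

static void low_latency_frame(VkDevice device, VkSwapchainKHR swapchain,
                              VkSemaphore timelineSem, uint64_t* pSleepValue,
                              uint64_t presentID)
{
    VkLatencySleepInfoNV sleepInfo = {0};
    sleepInfo.sType           = VK_STRUCTURE_TYPE_LATENCY_SLEEP_INFO_NV; /* assumed name */
    sleepInfo.signalSemaphore = timelineSem;
    sleepInfo.value           = ++(*pSleepValue);
    vkLatencySleepNV(device, swapchain, &sleepInfo);

    /* Block on the timeline semaphore until the driver says simulation may begin. */
    VkSemaphoreWaitInfo waitInfo = {0};
    waitInfo.sType          = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO;
    waitInfo.semaphoreCount = 1;
    waitInfo.pSemaphores    = &timelineSem;
    waitInfo.pValues        = pSleepValue;
    vkWaitSemaphores(device, &waitInfo, UINT64_MAX);

    VkSetLatencyMarkerInfoNV marker = {0};
    marker.sType     = VK_STRUCTURE_TYPE_SET_LATENCY_MARKER_INFO_NV; /* assumed name */
    marker.presentID = presentID;
    marker.marker    = VK_LATENCY_MARKER_SIMULATION_START_NV;
    vkSetLatencyMarkerNV(device, swapchain, &marker);
    /* ... run simulation, then record SIMULATION_END / RENDERSUBMIT markers ... */
}
#endif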
+#define VK_QCOM_image_processing2 1 +#define VK_QCOM_IMAGE_PROCESSING_2_SPEC_VERSION 1 +#define VK_QCOM_IMAGE_PROCESSING_2_EXTENSION_NAME "VK_QCOM_image_processing2" + +typedef enum VkBlockMatchWindowCompareModeQCOM { + VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MIN_QCOM = 0, + VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MAX_QCOM = 1, + VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MAX_ENUM_QCOM = 0x7FFFFFFF +} VkBlockMatchWindowCompareModeQCOM; +typedef struct VkPhysicalDeviceImageProcessing2FeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 textureBlockMatch2; +} VkPhysicalDeviceImageProcessing2FeaturesQCOM; + +typedef struct VkPhysicalDeviceImageProcessing2PropertiesQCOM { + VkStructureType sType; + void* pNext; + VkExtent2D maxBlockMatchWindow; +} VkPhysicalDeviceImageProcessing2PropertiesQCOM; -typedef VkQueueFamilyGlobalPriorityPropertiesKHR VkQueueFamilyGlobalPriorityPropertiesEXT; +typedef struct VkSamplerBlockMatchWindowCreateInfoQCOM { + VkStructureType sType; + const void* pNext; + VkExtent2D windowExtent; + VkBlockMatchWindowCompareModeQCOM windowCompareMode; +} VkSamplerBlockMatchWindowCreateInfoQCOM; -#define VK_EXT_image_view_min_lod 1 -#define VK_EXT_IMAGE_VIEW_MIN_LOD_SPEC_VERSION 1 -#define VK_EXT_IMAGE_VIEW_MIN_LOD_EXTENSION_NAME "VK_EXT_image_view_min_lod" -typedef struct VkPhysicalDeviceImageViewMinLodFeaturesEXT { +// VK_QCOM_filter_cubic_weights is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_filter_cubic_weights 1 +#define VK_QCOM_FILTER_CUBIC_WEIGHTS_SPEC_VERSION 1 +#define VK_QCOM_FILTER_CUBIC_WEIGHTS_EXTENSION_NAME "VK_QCOM_filter_cubic_weights" + +typedef enum VkCubicFilterWeightsQCOM { + VK_CUBIC_FILTER_WEIGHTS_CATMULL_ROM_QCOM = 0, + VK_CUBIC_FILTER_WEIGHTS_ZERO_TANGENT_CARDINAL_QCOM = 1, + VK_CUBIC_FILTER_WEIGHTS_B_SPLINE_QCOM = 2, + VK_CUBIC_FILTER_WEIGHTS_MITCHELL_NETRAVALI_QCOM = 3, + VK_CUBIC_FILTER_WEIGHTS_MAX_ENUM_QCOM = 0x7FFFFFFF +} VkCubicFilterWeightsQCOM; +typedef struct VkPhysicalDeviceCubicWeightsFeaturesQCOM { VkStructureType sType; void* pNext; - VkBool32 minLod; -} VkPhysicalDeviceImageViewMinLodFeaturesEXT; + VkBool32 selectableCubicWeights; +} VkPhysicalDeviceCubicWeightsFeaturesQCOM; -typedef struct VkImageViewMinLodCreateInfoEXT { - VkStructureType sType; - const void* pNext; - float minLod; -} VkImageViewMinLodCreateInfoEXT; +typedef struct VkSamplerCubicWeightsCreateInfoQCOM { + VkStructureType sType; + const void* pNext; + VkCubicFilterWeightsQCOM cubicWeights; +} VkSamplerCubicWeightsCreateInfoQCOM; +typedef struct VkBlitImageCubicWeightsInfoQCOM { + VkStructureType sType; + const void* pNext; + VkCubicFilterWeightsQCOM cubicWeights; +} VkBlitImageCubicWeightsInfoQCOM; -#define VK_EXT_multi_draw 1 -#define VK_EXT_MULTI_DRAW_SPEC_VERSION 1 -#define VK_EXT_MULTI_DRAW_EXTENSION_NAME "VK_EXT_multi_draw" -typedef struct VkPhysicalDeviceMultiDrawFeaturesEXT { + +// VK_QCOM_ycbcr_degamma is a preprocessor guard. Do not pass it to API calls. 
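/*
 * Editor's note: usage sketch for VK_QCOM_filter_cubic_weights above; not part
 * of the patch. It selects Mitchell-Netravali weights for a cubic-filtered
 * sampler by chaining VkSamplerCubicWeightsCreateInfoQCOM into
 * VkSamplerCreateInfo. The VK_STRUCTURE_TYPE_SAMPLER_CUBIC_WEIGHTS_CREATE_INFO_QCOM
 * name is assumed from the usual convention; VK_FILTER_CUBIC_EXT comes from
 * VK_EXT_filter_cubic.
 */
#if 0 /* example only */
static VkResult make_cubic_sampler(VkDevice device, VkSampler* pSampler)
{
    VkSamplerCubicWeightsCreateInfoQCOM weights = {0};
    weights.sType        = VK_STRUCTURE_TYPE_SAMPLER_CUBIC_WEIGHTS_CREATE_INFO_QCOM; /* assumed name */
    weights.cubicWeights = VK_CUBIC_FILTER_WEIGHTS_MITCHELL_NETRAVALI_QCOM;

    VkSamplerCreateInfo ci = {0};
    ci.sType        = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
    ci.pNext        = &weights;
    ci.magFilter    = VK_FILTER_CUBIC_EXT;
    ci.minFilter    = VK_FILTER_CUBIC_EXT;
    ci.addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
    ci.addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
    ci.addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
    return vkCreateSampler(device, &ci, NULL, pSampler);
}
#endif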
+#define VK_QCOM_ycbcr_degamma 1 +#define VK_QCOM_YCBCR_DEGAMMA_SPEC_VERSION 1 +#define VK_QCOM_YCBCR_DEGAMMA_EXTENSION_NAME "VK_QCOM_ycbcr_degamma" +typedef struct VkPhysicalDeviceYcbcrDegammaFeaturesQCOM { VkStructureType sType; void* pNext; - VkBool32 multiDraw; -} VkPhysicalDeviceMultiDrawFeaturesEXT; + VkBool32 ycbcrDegamma; +} VkPhysicalDeviceYcbcrDegammaFeaturesQCOM; -typedef struct VkPhysicalDeviceMultiDrawPropertiesEXT { +typedef struct VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM { VkStructureType sType; void* pNext; - uint32_t maxMultiDrawCount; -} VkPhysicalDeviceMultiDrawPropertiesEXT; + VkBool32 enableYDegamma; + VkBool32 enableCbCrDegamma; +} VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM; -typedef struct VkMultiDrawInfoEXT { - uint32_t firstVertex; - uint32_t vertexCount; -} VkMultiDrawInfoEXT; -typedef struct VkMultiDrawIndexedInfoEXT { - uint32_t firstIndex; - uint32_t indexCount; - int32_t vertexOffset; -} VkMultiDrawIndexedInfoEXT; -typedef void (VKAPI_PTR *PFN_vkCmdDrawMultiEXT)(VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawInfoEXT* pVertexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride); -typedef void (VKAPI_PTR *PFN_vkCmdDrawMultiIndexedEXT)(VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawIndexedInfoEXT* pIndexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, const int32_t* pVertexOffset); +// VK_QCOM_filter_cubic_clamp is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_filter_cubic_clamp 1 +#define VK_QCOM_FILTER_CUBIC_CLAMP_SPEC_VERSION 1 +#define VK_QCOM_FILTER_CUBIC_CLAMP_EXTENSION_NAME "VK_QCOM_filter_cubic_clamp" +typedef struct VkPhysicalDeviceCubicClampFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 cubicRangeClamp; +} VkPhysicalDeviceCubicClampFeaturesQCOM; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdDrawMultiEXT( - VkCommandBuffer commandBuffer, - uint32_t drawCount, - const VkMultiDrawInfoEXT* pVertexInfo, - uint32_t instanceCount, - uint32_t firstInstance, - uint32_t stride); -VKAPI_ATTR void VKAPI_CALL vkCmdDrawMultiIndexedEXT( + +// VK_EXT_attachment_feedback_loop_dynamic_state is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_attachment_feedback_loop_dynamic_state 1 +#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_SPEC_VERSION 1 +#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_attachment_feedback_loop_dynamic_state" +typedef struct VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 attachmentFeedbackLoopDynamicState; +} VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT)(VkCommandBuffer commandBuffer, VkImageAspectFlags aspectMask); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetAttachmentFeedbackLoopEnableEXT( VkCommandBuffer commandBuffer, - uint32_t drawCount, - const VkMultiDrawIndexedInfoEXT* pIndexInfo, - uint32_t instanceCount, - uint32_t firstInstance, - uint32_t stride, - const int32_t* pVertexOffset); + VkImageAspectFlags aspectMask); #endif -#define VK_EXT_image_2d_view_of_3d 1 -#define VK_EXT_IMAGE_2D_VIEW_OF_3D_SPEC_VERSION 1 -#define VK_EXT_IMAGE_2D_VIEW_OF_3D_EXTENSION_NAME "VK_EXT_image_2d_view_of_3d" -typedef struct VkPhysicalDeviceImage2DViewOf3DFeaturesEXT { +// VK_MSFT_layered_driver is a preprocessor guard. Do not pass it to API calls. 
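/*
 * Editor's note: usage sketch for VK_EXT_attachment_feedback_loop_dynamic_state
 * above; not part of the patch. With the feature enabled, the feedback-loop
 * state can be toggled per draw instead of being baked into the pipeline.
 */
#if 0 /* example only */
static void draw_with_color_feedback_loop(VkCommandBuffer cmd)
{
    /* Allow the following draws to read the currently bound color attachment;
       call again with an aspect mask of 0 to turn the loop off. */
    vkCmdSetAttachmentFeedbackLoopEnableEXT(cmd, VK_IMAGE_ASPECT_COLOR_BIT);
    vkCmdDraw(cmd, 3, 1, 0, 0);
}
#endif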
+#define VK_MSFT_layered_driver 1 +#define VK_MSFT_LAYERED_DRIVER_SPEC_VERSION 1 +#define VK_MSFT_LAYERED_DRIVER_EXTENSION_NAME "VK_MSFT_layered_driver" + +typedef enum VkLayeredDriverUnderlyingApiMSFT { + VK_LAYERED_DRIVER_UNDERLYING_API_NONE_MSFT = 0, + VK_LAYERED_DRIVER_UNDERLYING_API_D3D12_MSFT = 1, + VK_LAYERED_DRIVER_UNDERLYING_API_MAX_ENUM_MSFT = 0x7FFFFFFF +} VkLayeredDriverUnderlyingApiMSFT; +typedef struct VkPhysicalDeviceLayeredDriverPropertiesMSFT { + VkStructureType sType; + void* pNext; + VkLayeredDriverUnderlyingApiMSFT underlyingAPI; +} VkPhysicalDeviceLayeredDriverPropertiesMSFT; + + + +// VK_NV_descriptor_pool_overallocation is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_descriptor_pool_overallocation 1 +#define VK_NV_DESCRIPTOR_POOL_OVERALLOCATION_SPEC_VERSION 1 +#define VK_NV_DESCRIPTOR_POOL_OVERALLOCATION_EXTENSION_NAME "VK_NV_descriptor_pool_overallocation" +typedef struct VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV { VkStructureType sType; void* pNext; - VkBool32 image2DViewOf3D; - VkBool32 sampler2DViewOf3D; -} VkPhysicalDeviceImage2DViewOf3DFeaturesEXT; + VkBool32 descriptorPoolOverallocation; +} VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV; -#define VK_EXT_load_store_op_none 1 -#define VK_EXT_LOAD_STORE_OP_NONE_SPEC_VERSION 1 -#define VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME "VK_EXT_load_store_op_none" +// VK_NV_display_stereo is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_display_stereo 1 +#define VK_NV_DISPLAY_STEREO_SPEC_VERSION 1 +#define VK_NV_DISPLAY_STEREO_EXTENSION_NAME "VK_NV_display_stereo" +typedef enum VkDisplaySurfaceStereoTypeNV { + VK_DISPLAY_SURFACE_STEREO_TYPE_NONE_NV = 0, + VK_DISPLAY_SURFACE_STEREO_TYPE_ONBOARD_DIN_NV = 1, + VK_DISPLAY_SURFACE_STEREO_TYPE_HDMI_3D_NV = 2, + VK_DISPLAY_SURFACE_STEREO_TYPE_INBAND_DISPLAYPORT_NV = 3, + VK_DISPLAY_SURFACE_STEREO_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkDisplaySurfaceStereoTypeNV; +typedef struct VkDisplaySurfaceStereoCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkDisplaySurfaceStereoTypeNV stereoType; +} VkDisplaySurfaceStereoCreateInfoNV; -#define VK_EXT_border_color_swizzle 1 -#define VK_EXT_BORDER_COLOR_SWIZZLE_SPEC_VERSION 1 -#define VK_EXT_BORDER_COLOR_SWIZZLE_EXTENSION_NAME "VK_EXT_border_color_swizzle" -typedef struct VkPhysicalDeviceBorderColorSwizzleFeaturesEXT { +typedef struct VkDisplayModeStereoPropertiesNV { VkStructureType sType; - void* pNext; - VkBool32 borderColorSwizzle; - VkBool32 borderColorSwizzleFromImage; -} VkPhysicalDeviceBorderColorSwizzleFeaturesEXT; - -typedef struct VkSamplerBorderColorComponentMappingCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkComponentMapping components; - VkBool32 srgb; -} VkSamplerBorderColorComponentMappingCreateInfoEXT; + const void* pNext; + VkBool32 hdmi3DSupported; +} VkDisplayModeStereoPropertiesNV; -#define VK_EXT_pageable_device_local_memory 1 -#define VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_SPEC_VERSION 1 -#define VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_EXTENSION_NAME "VK_EXT_pageable_device_local_memory" -typedef struct VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT { +// VK_NV_raw_access_chains is a preprocessor guard. Do not pass it to API calls. 
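/*
 * Editor's note: usage sketch for VK_NV_display_stereo above; not part of the
 * patch. It requests HDMI 3D stereo output for a display-plane surface created
 * through VK_KHR_display. Both the pNext-chain target (VkDisplaySurfaceCreateInfoKHR)
 * and the VK_STRUCTURE_TYPE_DISPLAY_SURFACE_STEREO_CREATE_INFO_NV name are
 * assumptions; check VkDisplayModeStereoPropertiesNV::hdmi3DSupported first.
 */
#if 0 /* example only */
static VkResult make_stereo_display_surface(VkInstance instance,
                                            const VkDisplaySurfaceCreateInfoKHR* pBase,
                                            VkSurfaceKHR* pSurface)
{
    VkDisplaySurfaceStereoCreateInfoNV stereo = {0};
    stereo.sType      = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_STEREO_CREATE_INFO_NV; /* assumed name */
    stereo.stereoType = VK_DISPLAY_SURFACE_STEREO_TYPE_HDMI_3D_NV;

    VkDisplaySurfaceCreateInfoKHR ci = *pBase; /* caller-filled VK_KHR_display info */
    ci.pNext = &stereo;
    return vkCreateDisplayPlaneSurfaceKHR(instance, &ci, NULL, pSurface);
}
#endif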
+#define VK_NV_raw_access_chains 1 +#define VK_NV_RAW_ACCESS_CHAINS_SPEC_VERSION 1 +#define VK_NV_RAW_ACCESS_CHAINS_EXTENSION_NAME "VK_NV_raw_access_chains" +typedef struct VkPhysicalDeviceRawAccessChainsFeaturesNV { VkStructureType sType; void* pNext; - VkBool32 pageableDeviceLocalMemory; -} VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; - -typedef void (VKAPI_PTR *PFN_vkSetDeviceMemoryPriorityEXT)(VkDevice device, VkDeviceMemory memory, float priority); + VkBool32 shaderRawAccessChains; +} VkPhysicalDeviceRawAccessChainsFeaturesNV; -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkSetDeviceMemoryPriorityEXT( - VkDevice device, - VkDeviceMemory memory, - float priority); -#endif -#define VK_VALVE_descriptor_set_host_mapping 1 -#define VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_SPEC_VERSION 1 -#define VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_EXTENSION_NAME "VK_VALVE_descriptor_set_host_mapping" -typedef struct VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE { +// VK_NV_command_buffer_inheritance is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_command_buffer_inheritance 1 +#define VK_NV_COMMAND_BUFFER_INHERITANCE_SPEC_VERSION 1 +#define VK_NV_COMMAND_BUFFER_INHERITANCE_EXTENSION_NAME "VK_NV_command_buffer_inheritance" +typedef struct VkPhysicalDeviceCommandBufferInheritanceFeaturesNV { VkStructureType sType; void* pNext; - VkBool32 descriptorSetHostMapping; -} VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; + VkBool32 commandBufferInheritance; +} VkPhysicalDeviceCommandBufferInheritanceFeaturesNV; -typedef struct VkDescriptorSetBindingReferenceVALVE { - VkStructureType sType; - const void* pNext; - VkDescriptorSetLayout descriptorSetLayout; - uint32_t binding; -} VkDescriptorSetBindingReferenceVALVE; -typedef struct VkDescriptorSetLayoutHostMappingInfoVALVE { + +// VK_NV_shader_atomic_float16_vector is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_shader_atomic_float16_vector 1 +#define VK_NV_SHADER_ATOMIC_FLOAT16_VECTOR_SPEC_VERSION 1 +#define VK_NV_SHADER_ATOMIC_FLOAT16_VECTOR_EXTENSION_NAME "VK_NV_shader_atomic_float16_vector" +typedef struct VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV { VkStructureType sType; void* pNext; - size_t descriptorOffset; - uint32_t descriptorSize; -} VkDescriptorSetLayoutHostMappingInfoVALVE; + VkBool32 shaderFloat16VectorAtomics; +} VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV; -typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE)(VkDevice device, const VkDescriptorSetBindingReferenceVALVE* pBindingReference, VkDescriptorSetLayoutHostMappingInfoVALVE* pHostMapping); -typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetHostMappingVALVE)(VkDevice device, VkDescriptorSet descriptorSet, void** ppData); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutHostMappingInfoVALVE( - VkDevice device, - const VkDescriptorSetBindingReferenceVALVE* pBindingReference, - VkDescriptorSetLayoutHostMappingInfoVALVE* pHostMapping); -VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetHostMappingVALVE( - VkDevice device, - VkDescriptorSet descriptorSet, - void** ppData); -#endif +// VK_EXT_shader_replicated_composites is a preprocessor guard. Do not pass it to API calls. 
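/*
 * Editor's note: query sketch, not part of the patch. The feature structs in
 * this region (raw access chains, command buffer inheritance, float16 vector
 * atomics, ...) all follow the standard vkGetPhysicalDeviceFeatures2 pNext-chain
 * pattern; the two VK_STRUCTURE_TYPE_* values below are assumed from the usual
 * naming convention.
 */
#if 0 /* example only */
static void query_nv_shader_features(VkPhysicalDevice physicalDevice)
{
    VkPhysicalDeviceRawAccessChainsFeaturesNV rawChains = {0};
    rawChains.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAW_ACCESS_CHAINS_FEATURES_NV; /* assumed name */

    VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV f16Atomics = {0};
    f16Atomics.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV; /* assumed name */
    f16Atomics.pNext = &rawChains;

    VkPhysicalDeviceFeatures2 features2 = {0};
    features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    features2.pNext = &f16Atomics;
    vkGetPhysicalDeviceFeatures2(physicalDevice, &features2);

    /* rawChains.shaderRawAccessChains and f16Atomics.shaderFloat16VectorAtomics now
       report support; chain the same structs into VkDeviceCreateInfo to enable them. */
}
#endif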
+#define VK_EXT_shader_replicated_composites 1 +#define VK_EXT_SHADER_REPLICATED_COMPOSITES_SPEC_VERSION 1 +#define VK_EXT_SHADER_REPLICATED_COMPOSITES_EXTENSION_NAME "VK_EXT_shader_replicated_composites" +typedef struct VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderReplicatedComposites; +} VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT; -#define VK_EXT_depth_clamp_zero_one 1 -#define VK_EXT_DEPTH_CLAMP_ZERO_ONE_SPEC_VERSION 1 -#define VK_EXT_DEPTH_CLAMP_ZERO_ONE_EXTENSION_NAME "VK_EXT_depth_clamp_zero_one" -typedef struct VkPhysicalDeviceDepthClampZeroOneFeaturesEXT { + +// VK_NV_ray_tracing_validation is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_ray_tracing_validation 1 +#define VK_NV_RAY_TRACING_VALIDATION_SPEC_VERSION 1 +#define VK_NV_RAY_TRACING_VALIDATION_EXTENSION_NAME "VK_NV_ray_tracing_validation" +typedef struct VkPhysicalDeviceRayTracingValidationFeaturesNV { VkStructureType sType; void* pNext; - VkBool32 depthClampZeroOne; -} VkPhysicalDeviceDepthClampZeroOneFeaturesEXT; - + VkBool32 rayTracingValidation; +} VkPhysicalDeviceRayTracingValidationFeaturesNV; + + + +// VK_NV_cluster_acceleration_structure is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_cluster_acceleration_structure 1 +#define VK_NV_CLUSTER_ACCELERATION_STRUCTURE_SPEC_VERSION 2 +#define VK_NV_CLUSTER_ACCELERATION_STRUCTURE_EXTENSION_NAME "VK_NV_cluster_acceleration_structure" + +typedef enum VkClusterAccelerationStructureTypeNV { + VK_CLUSTER_ACCELERATION_STRUCTURE_TYPE_CLUSTERS_BOTTOM_LEVEL_NV = 0, + VK_CLUSTER_ACCELERATION_STRUCTURE_TYPE_TRIANGLE_CLUSTER_NV = 1, + VK_CLUSTER_ACCELERATION_STRUCTURE_TYPE_TRIANGLE_CLUSTER_TEMPLATE_NV = 2, + VK_CLUSTER_ACCELERATION_STRUCTURE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkClusterAccelerationStructureTypeNV; + +typedef enum VkClusterAccelerationStructureOpTypeNV { + VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_MOVE_OBJECTS_NV = 0, + VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_BUILD_CLUSTERS_BOTTOM_LEVEL_NV = 1, + VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_BUILD_TRIANGLE_CLUSTER_NV = 2, + VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_BUILD_TRIANGLE_CLUSTER_TEMPLATE_NV = 3, + VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_INSTANTIATE_TRIANGLE_CLUSTER_NV = 4, + VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkClusterAccelerationStructureOpTypeNV; + +typedef enum VkClusterAccelerationStructureOpModeNV { + VK_CLUSTER_ACCELERATION_STRUCTURE_OP_MODE_IMPLICIT_DESTINATIONS_NV = 0, + VK_CLUSTER_ACCELERATION_STRUCTURE_OP_MODE_EXPLICIT_DESTINATIONS_NV = 1, + VK_CLUSTER_ACCELERATION_STRUCTURE_OP_MODE_COMPUTE_SIZES_NV = 2, + VK_CLUSTER_ACCELERATION_STRUCTURE_OP_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkClusterAccelerationStructureOpModeNV; + +typedef enum VkClusterAccelerationStructureAddressResolutionFlagBitsNV { + VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_DST_IMPLICIT_DATA_BIT_NV = 0x00000001, + VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_SCRATCH_DATA_BIT_NV = 0x00000002, + VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_DST_ADDRESS_ARRAY_BIT_NV = 0x00000004, + VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_DST_SIZES_ARRAY_BIT_NV = 0x00000008, + VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_SRC_INFOS_ARRAY_BIT_NV = 0x00000010, + VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_SRC_INFOS_COUNT_BIT_NV = 0x00000020, + 
VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkClusterAccelerationStructureAddressResolutionFlagBitsNV; +typedef VkFlags VkClusterAccelerationStructureAddressResolutionFlagsNV; + +typedef enum VkClusterAccelerationStructureClusterFlagBitsNV { + VK_CLUSTER_ACCELERATION_STRUCTURE_CLUSTER_ALLOW_DISABLE_OPACITY_MICROMAPS_NV = 0x00000001, + VK_CLUSTER_ACCELERATION_STRUCTURE_CLUSTER_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkClusterAccelerationStructureClusterFlagBitsNV; +typedef VkFlags VkClusterAccelerationStructureClusterFlagsNV; + +typedef enum VkClusterAccelerationStructureGeometryFlagBitsNV { + VK_CLUSTER_ACCELERATION_STRUCTURE_GEOMETRY_CULL_DISABLE_BIT_NV = 0x00000001, + VK_CLUSTER_ACCELERATION_STRUCTURE_GEOMETRY_NO_DUPLICATE_ANYHIT_INVOCATION_BIT_NV = 0x00000002, + VK_CLUSTER_ACCELERATION_STRUCTURE_GEOMETRY_OPAQUE_BIT_NV = 0x00000004, + VK_CLUSTER_ACCELERATION_STRUCTURE_GEOMETRY_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkClusterAccelerationStructureGeometryFlagBitsNV; +typedef VkFlags VkClusterAccelerationStructureGeometryFlagsNV; + +typedef enum VkClusterAccelerationStructureIndexFormatFlagBitsNV { + VK_CLUSTER_ACCELERATION_STRUCTURE_INDEX_FORMAT_8BIT_NV = 0x00000001, + VK_CLUSTER_ACCELERATION_STRUCTURE_INDEX_FORMAT_16BIT_NV = 0x00000002, + VK_CLUSTER_ACCELERATION_STRUCTURE_INDEX_FORMAT_32BIT_NV = 0x00000004, + VK_CLUSTER_ACCELERATION_STRUCTURE_INDEX_FORMAT_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkClusterAccelerationStructureIndexFormatFlagBitsNV; +typedef VkFlags VkClusterAccelerationStructureIndexFormatFlagsNV; +typedef struct VkPhysicalDeviceClusterAccelerationStructureFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 clusterAccelerationStructure; +} VkPhysicalDeviceClusterAccelerationStructureFeaturesNV; +typedef struct VkPhysicalDeviceClusterAccelerationStructurePropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t maxVerticesPerCluster; + uint32_t maxTrianglesPerCluster; + uint32_t clusterScratchByteAlignment; + uint32_t clusterByteAlignment; + uint32_t clusterTemplateByteAlignment; + uint32_t clusterBottomLevelByteAlignment; + uint32_t clusterTemplateBoundsByteAlignment; + uint32_t maxClusterGeometryIndex; +} VkPhysicalDeviceClusterAccelerationStructurePropertiesNV; + +typedef struct VkClusterAccelerationStructureClustersBottomLevelInputNV { + VkStructureType sType; + void* pNext; + uint32_t maxTotalClusterCount; + uint32_t maxClusterCountPerAccelerationStructure; +} VkClusterAccelerationStructureClustersBottomLevelInputNV; -#define VK_EXT_non_seamless_cube_map 1 -#define VK_EXT_NON_SEAMLESS_CUBE_MAP_SPEC_VERSION 1 -#define VK_EXT_NON_SEAMLESS_CUBE_MAP_EXTENSION_NAME "VK_EXT_non_seamless_cube_map" -typedef struct VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT { +typedef struct VkClusterAccelerationStructureTriangleClusterInputNV { VkStructureType sType; void* pNext; - VkBool32 nonSeamlessCubeMap; -} VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT; + VkFormat vertexFormat; + uint32_t maxGeometryIndexValue; + uint32_t maxClusterUniqueGeometryCount; + uint32_t maxClusterTriangleCount; + uint32_t maxClusterVertexCount; + uint32_t maxTotalTriangleCount; + uint32_t maxTotalVertexCount; + uint32_t minPositionTruncateBitCount; +} VkClusterAccelerationStructureTriangleClusterInputNV; + +typedef struct VkClusterAccelerationStructureMoveObjectsInputNV { + VkStructureType sType; + void* pNext; + VkClusterAccelerationStructureTypeNV type; + VkBool32 noMoveOverlap; + VkDeviceSize maxMovedBytes; +} 
VkClusterAccelerationStructureMoveObjectsInputNV; + +typedef union VkClusterAccelerationStructureOpInputNV { + VkClusterAccelerationStructureClustersBottomLevelInputNV* pClustersBottomLevel; + VkClusterAccelerationStructureTriangleClusterInputNV* pTriangleClusters; + VkClusterAccelerationStructureMoveObjectsInputNV* pMoveObjects; +} VkClusterAccelerationStructureOpInputNV; + +typedef struct VkClusterAccelerationStructureInputInfoNV { + VkStructureType sType; + void* pNext; + uint32_t maxAccelerationStructureCount; + VkBuildAccelerationStructureFlagsKHR flags; + VkClusterAccelerationStructureOpTypeNV opType; + VkClusterAccelerationStructureOpModeNV opMode; + VkClusterAccelerationStructureOpInputNV opInput; +} VkClusterAccelerationStructureInputInfoNV; +typedef struct VkStridedDeviceAddressRegionKHR { + VkDeviceAddress deviceAddress; + VkDeviceSize stride; + VkDeviceSize size; +} VkStridedDeviceAddressRegionKHR; +typedef struct VkClusterAccelerationStructureCommandsInfoNV { + VkStructureType sType; + void* pNext; + VkClusterAccelerationStructureInputInfoNV input; + VkDeviceAddress dstImplicitData; + VkDeviceAddress scratchData; + VkStridedDeviceAddressRegionKHR dstAddressesArray; + VkStridedDeviceAddressRegionKHR dstSizesArray; + VkStridedDeviceAddressRegionKHR srcInfosArray; + VkDeviceAddress srcInfosCount; + VkClusterAccelerationStructureAddressResolutionFlagsNV addressResolutionFlags; +} VkClusterAccelerationStructureCommandsInfoNV; + +typedef struct VkStridedDeviceAddressNV { + VkDeviceAddress startAddress; + VkDeviceSize strideInBytes; +} VkStridedDeviceAddressNV; + +typedef struct VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV { + uint32_t geometryIndex:24; + uint32_t reserved:5; + uint32_t geometryFlags:3; +} VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV; + +typedef struct VkClusterAccelerationStructureMoveObjectsInfoNV { + VkDeviceAddress srcAccelerationStructure; +} VkClusterAccelerationStructureMoveObjectsInfoNV; + +typedef struct VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV { + uint32_t clusterReferencesCount; + uint32_t clusterReferencesStride; + VkDeviceAddress clusterReferences; +} VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV; + +typedef struct VkClusterAccelerationStructureBuildTriangleClusterInfoNV { + uint32_t clusterID; + VkClusterAccelerationStructureClusterFlagsNV clusterFlags; + uint32_t triangleCount:9; + uint32_t vertexCount:9; + uint32_t positionTruncateBitCount:6; + uint32_t indexType:4; + uint32_t opacityMicromapIndexType:4; + VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags; + uint16_t indexBufferStride; + uint16_t vertexBufferStride; + uint16_t geometryIndexAndFlagsBufferStride; + uint16_t opacityMicromapIndexBufferStride; + VkDeviceAddress indexBuffer; + VkDeviceAddress vertexBuffer; + VkDeviceAddress geometryIndexAndFlagsBuffer; + VkDeviceAddress opacityMicromapArray; + VkDeviceAddress opacityMicromapIndexBuffer; +} VkClusterAccelerationStructureBuildTriangleClusterInfoNV; + +typedef struct VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV { + uint32_t clusterID; + VkClusterAccelerationStructureClusterFlagsNV clusterFlags; + uint32_t triangleCount:9; + uint32_t vertexCount:9; + uint32_t positionTruncateBitCount:6; + uint32_t indexType:4; + uint32_t opacityMicromapIndexType:4; + VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags; + uint16_t indexBufferStride; + uint16_t vertexBufferStride; + 
uint16_t geometryIndexAndFlagsBufferStride; + uint16_t opacityMicromapIndexBufferStride; + VkDeviceAddress indexBuffer; + VkDeviceAddress vertexBuffer; + VkDeviceAddress geometryIndexAndFlagsBuffer; + VkDeviceAddress opacityMicromapArray; + VkDeviceAddress opacityMicromapIndexBuffer; + VkDeviceAddress instantiationBoundingBoxLimit; +} VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV; + +typedef struct VkClusterAccelerationStructureInstantiateClusterInfoNV { + uint32_t clusterIdOffset; + uint32_t geometryIndexOffset:24; + uint32_t reserved:8; + VkDeviceAddress clusterTemplateAddress; + VkStridedDeviceAddressNV vertexBuffer; +} VkClusterAccelerationStructureInstantiateClusterInfoNV; -#define VK_QCOM_fragment_density_map_offset 1 -#define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_SPEC_VERSION 1 -#define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_EXTENSION_NAME "VK_QCOM_fragment_density_map_offset" -typedef struct VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM { +typedef struct VkAccelerationStructureBuildSizesInfoKHR { VkStructureType sType; - void* pNext; - VkBool32 fragmentDensityMapOffset; -} VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; + const void* pNext; + VkDeviceSize accelerationStructureSize; + VkDeviceSize updateScratchSize; + VkDeviceSize buildScratchSize; +} VkAccelerationStructureBuildSizesInfoKHR; -typedef struct VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM { +typedef struct VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV { VkStructureType sType; void* pNext; - VkExtent2D fragmentDensityOffsetGranularity; -} VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; + VkBool32 allowClusterAccelerationStructure; +} VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV; -typedef struct VkSubpassFragmentDensityMapOffsetEndInfoQCOM { - VkStructureType sType; - const void* pNext; - uint32_t fragmentDensityOffsetCount; - const VkOffset2D* pFragmentDensityOffsets; -} VkSubpassFragmentDensityMapOffsetEndInfoQCOM; +typedef void (VKAPI_PTR *PFN_vkGetClusterAccelerationStructureBuildSizesNV)(VkDevice device, const VkClusterAccelerationStructureInputInfoNV* pInfo, VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBuildClusterAccelerationStructureIndirectNV)(VkCommandBuffer commandBuffer, const VkClusterAccelerationStructureCommandsInfoNV* pCommandInfos); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetClusterAccelerationStructureBuildSizesNV( + VkDevice device, + const VkClusterAccelerationStructureInputInfoNV* pInfo, + VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo); +VKAPI_ATTR void VKAPI_CALL vkCmdBuildClusterAccelerationStructureIndirectNV( + VkCommandBuffer commandBuffer, + const VkClusterAccelerationStructureCommandsInfoNV* pCommandInfos); +#endif -#define VK_NV_linear_color_attachment 1 -#define VK_NV_LINEAR_COLOR_ATTACHMENT_SPEC_VERSION 1 -#define VK_NV_LINEAR_COLOR_ATTACHMENT_EXTENSION_NAME "VK_NV_linear_color_attachment" -typedef struct VkPhysicalDeviceLinearColorAttachmentFeaturesNV { +// VK_NV_partitioned_acceleration_structure is a preprocessor guard. Do not pass it to API calls. 
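/*
 * Editor's note: sizing sketch for VK_NV_cluster_acceleration_structure above;
 * not part of the patch. It queries destination and scratch sizes for an
 * implicit-destination triangle-cluster build. The two
 * VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_* names are assumed from the
 * usual convention; the counts are example budgets only.
 */
#if 0 /* example only */
static void query_cluster_build_sizes(VkDevice device,
                                      VkAccelerationStructureBuildSizesInfoKHR* pSizes)
{
    VkClusterAccelerationStructureTriangleClusterInputNV tri = {0};
    tri.sType                   = VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_TRIANGLE_CLUSTER_INPUT_NV; /* assumed name */
    tri.vertexFormat            = VK_FORMAT_R32G32B32_SFLOAT;
    tri.maxClusterTriangleCount = 128;
    tri.maxClusterVertexCount   = 128;
    tri.maxTotalTriangleCount   = 128 * 1024;
    tri.maxTotalVertexCount     = 128 * 1024;

    VkClusterAccelerationStructureInputInfoNV input = {0};
    input.sType                         = VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_INPUT_INFO_NV; /* assumed name */
    input.maxAccelerationStructureCount = 1024; /* example budget */
    input.opType                        = VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_BUILD_TRIANGLE_CLUSTER_NV;
    input.opMode                        = VK_CLUSTER_ACCELERATION_STRUCTURE_OP_MODE_IMPLICIT_DESTINATIONS_NV;
    input.opInput.pTriangleClusters     = &tri;

    pSizes->sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR;
    vkGetClusterAccelerationStructureBuildSizesNV(device, &input, pSizes);

    /* pSizes->accelerationStructureSize and pSizes->buildScratchSize size the buffers
       consumed later by vkCmdBuildClusterAccelerationStructureIndirectNV. */
}
#endif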
+#define VK_NV_partitioned_acceleration_structure 1 +#define VK_NV_PARTITIONED_ACCELERATION_STRUCTURE_SPEC_VERSION 1 +#define VK_NV_PARTITIONED_ACCELERATION_STRUCTURE_EXTENSION_NAME "VK_NV_partitioned_acceleration_structure" +#define VK_PARTITIONED_ACCELERATION_STRUCTURE_PARTITION_INDEX_GLOBAL_NV (~0U) + +typedef enum VkPartitionedAccelerationStructureOpTypeNV { + VK_PARTITIONED_ACCELERATION_STRUCTURE_OP_TYPE_WRITE_INSTANCE_NV = 0, + VK_PARTITIONED_ACCELERATION_STRUCTURE_OP_TYPE_UPDATE_INSTANCE_NV = 1, + VK_PARTITIONED_ACCELERATION_STRUCTURE_OP_TYPE_WRITE_PARTITION_TRANSLATION_NV = 2, + VK_PARTITIONED_ACCELERATION_STRUCTURE_OP_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkPartitionedAccelerationStructureOpTypeNV; + +typedef enum VkPartitionedAccelerationStructureInstanceFlagBitsNV { + VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_TRIANGLE_FACING_CULL_DISABLE_BIT_NV = 0x00000001, + VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_TRIANGLE_FLIP_FACING_BIT_NV = 0x00000002, + VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_FORCE_OPAQUE_BIT_NV = 0x00000004, + VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_FORCE_NO_OPAQUE_BIT_NV = 0x00000008, + VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_ENABLE_EXPLICIT_BOUNDING_BOX_NV = 0x00000010, + VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkPartitionedAccelerationStructureInstanceFlagBitsNV; +typedef VkFlags VkPartitionedAccelerationStructureInstanceFlagsNV; +typedef struct VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV { VkStructureType sType; void* pNext; - VkBool32 linearColorAttachment; -} VkPhysicalDeviceLinearColorAttachmentFeaturesNV; + VkBool32 partitionedAccelerationStructure; +} VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV; + +typedef struct VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t maxPartitionCount; +} VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV; + +typedef struct VkPartitionedAccelerationStructureFlagsNV { + VkStructureType sType; + void* pNext; + VkBool32 enablePartitionTranslation; +} VkPartitionedAccelerationStructureFlagsNV; + +typedef struct VkBuildPartitionedAccelerationStructureIndirectCommandNV { + VkPartitionedAccelerationStructureOpTypeNV opType; + uint32_t argCount; + VkStridedDeviceAddressNV argData; +} VkBuildPartitionedAccelerationStructureIndirectCommandNV; + +typedef struct VkPartitionedAccelerationStructureWriteInstanceDataNV { + VkTransformMatrixKHR transform; + float explicitAABB[6]; + uint32_t instanceID; + uint32_t instanceMask; + uint32_t instanceContributionToHitGroupIndex; + VkPartitionedAccelerationStructureInstanceFlagsNV instanceFlags; + uint32_t instanceIndex; + uint32_t partitionIndex; + VkDeviceAddress accelerationStructure; +} VkPartitionedAccelerationStructureWriteInstanceDataNV; + +typedef struct VkPartitionedAccelerationStructureUpdateInstanceDataNV { + uint32_t instanceIndex; + uint32_t instanceContributionToHitGroupIndex; + VkDeviceAddress accelerationStructure; +} VkPartitionedAccelerationStructureUpdateInstanceDataNV; + +typedef struct VkPartitionedAccelerationStructureWritePartitionTranslationDataNV { + uint32_t partitionIndex; + float partitionTranslation[3]; +} VkPartitionedAccelerationStructureWritePartitionTranslationDataNV; + +typedef struct VkWriteDescriptorSetPartitionedAccelerationStructureNV { + VkStructureType sType; + void* pNext; + uint32_t accelerationStructureCount; + const VkDeviceAddress* pAccelerationStructures; +} 
VkWriteDescriptorSetPartitionedAccelerationStructureNV; +typedef struct VkPartitionedAccelerationStructureInstancesInputNV { + VkStructureType sType; + void* pNext; + VkBuildAccelerationStructureFlagsKHR flags; + uint32_t instanceCount; + uint32_t maxInstancePerPartitionCount; + uint32_t partitionCount; + uint32_t maxInstanceInGlobalPartitionCount; +} VkPartitionedAccelerationStructureInstancesInputNV; + +typedef struct VkBuildPartitionedAccelerationStructureInfoNV { + VkStructureType sType; + void* pNext; + VkPartitionedAccelerationStructureInstancesInputNV input; + VkDeviceAddress srcAccelerationStructureData; + VkDeviceAddress dstAccelerationStructureData; + VkDeviceAddress scratchData; + VkDeviceAddress srcInfos; + VkDeviceAddress srcInfosCount; +} VkBuildPartitionedAccelerationStructureInfoNV; + +typedef void (VKAPI_PTR *PFN_vkGetPartitionedAccelerationStructuresBuildSizesNV)(VkDevice device, const VkPartitionedAccelerationStructureInstancesInputNV* pInfo, VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBuildPartitionedAccelerationStructuresNV)(VkCommandBuffer commandBuffer, const VkBuildPartitionedAccelerationStructureInfoNV* pBuildInfo); +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPartitionedAccelerationStructuresBuildSizesNV( + VkDevice device, + const VkPartitionedAccelerationStructureInstancesInputNV* pInfo, + VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo); -#define VK_GOOGLE_surfaceless_query 1 -#define VK_GOOGLE_SURFACELESS_QUERY_SPEC_VERSION 2 -#define VK_GOOGLE_SURFACELESS_QUERY_EXTENSION_NAME "VK_GOOGLE_surfaceless_query" +VKAPI_ATTR void VKAPI_CALL vkCmdBuildPartitionedAccelerationStructuresNV( + VkCommandBuffer commandBuffer, + const VkBuildPartitionedAccelerationStructureInfoNV* pBuildInfo); +#endif + + +// VK_EXT_device_generated_commands is a preprocessor guard. Do not pass it to API calls. 
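/*
 * Editor's note: sizing sketch for VK_NV_partitioned_acceleration_structure
 * above; not part of the patch. It queries the memory needed for a partitioned
 * TLAS over an example instance/partition budget. The
 * VK_STRUCTURE_TYPE_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCES_INPUT_NV name
 * is assumed from the usual convention.
 */
#if 0 /* example only */
static void query_ptlas_sizes(VkDevice device,
                              VkAccelerationStructureBuildSizesInfoKHR* pSizes)
{
    VkPartitionedAccelerationStructureInstancesInputNV input = {0};
    input.sType                             = VK_STRUCTURE_TYPE_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCES_INPUT_NV; /* assumed name */
    input.instanceCount                     = 4096; /* example budget */
    input.partitionCount                    = 64;
    input.maxInstancePerPartitionCount      = 256;
    input.maxInstanceInGlobalPartitionCount = 128;  /* instances placed in the
        VK_PARTITIONED_ACCELERATION_STRUCTURE_PARTITION_INDEX_GLOBAL_NV partition */

    pSizes->sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR;
    vkGetPartitionedAccelerationStructuresBuildSizesNV(device, &input, pSizes);
}
#endif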
+#define VK_EXT_device_generated_commands 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectExecutionSetEXT) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutEXT) +#define VK_EXT_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 1 +#define VK_EXT_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_EXT_device_generated_commands" + +typedef enum VkIndirectExecutionSetInfoTypeEXT { + VK_INDIRECT_EXECUTION_SET_INFO_TYPE_PIPELINES_EXT = 0, + VK_INDIRECT_EXECUTION_SET_INFO_TYPE_SHADER_OBJECTS_EXT = 1, + VK_INDIRECT_EXECUTION_SET_INFO_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkIndirectExecutionSetInfoTypeEXT; + +typedef enum VkIndirectCommandsTokenTypeEXT { + VK_INDIRECT_COMMANDS_TOKEN_TYPE_EXECUTION_SET_EXT = 0, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_EXT = 1, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_SEQUENCE_INDEX_EXT = 2, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_EXT = 3, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_EXT = 4, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_EXT = 5, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_EXT = 6, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_COUNT_EXT = 7, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_COUNT_EXT = 8, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_EXT = 9, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV_EXT = 1000202002, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_NV_EXT = 1000202003, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_EXT = 1000328000, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_EXT = 1000328001, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_TRACE_RAYS2_EXT = 1000386004, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkIndirectCommandsTokenTypeEXT; + +typedef enum VkIndirectCommandsInputModeFlagBitsEXT { + VK_INDIRECT_COMMANDS_INPUT_MODE_VULKAN_INDEX_BUFFER_EXT = 0x00000001, + VK_INDIRECT_COMMANDS_INPUT_MODE_DXGI_INDEX_BUFFER_EXT = 0x00000002, + VK_INDIRECT_COMMANDS_INPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkIndirectCommandsInputModeFlagBitsEXT; +typedef VkFlags VkIndirectCommandsInputModeFlagsEXT; + +typedef enum VkIndirectCommandsLayoutUsageFlagBitsEXT { + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_EXT = 0x00000001, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_EXT = 0x00000002, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkIndirectCommandsLayoutUsageFlagBitsEXT; +typedef VkFlags VkIndirectCommandsLayoutUsageFlagsEXT; +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 deviceGeneratedCommands; + VkBool32 dynamicGeneratedPipelineLayout; +} VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxIndirectPipelineCount; + uint32_t maxIndirectShaderObjectCount; + uint32_t maxIndirectSequenceCount; + uint32_t maxIndirectCommandsTokenCount; + uint32_t maxIndirectCommandsTokenOffset; + uint32_t maxIndirectCommandsIndirectStride; + VkIndirectCommandsInputModeFlagsEXT supportedIndirectCommandsInputModes; + VkShaderStageFlags supportedIndirectCommandsShaderStages; + VkShaderStageFlags supportedIndirectCommandsShaderStagesPipelineBinding; + VkShaderStageFlags supportedIndirectCommandsShaderStagesShaderBinding; + VkBool32 deviceGeneratedCommandsTransformFeedback; + VkBool32 deviceGeneratedCommandsMultiDrawIndirectCount; +} VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; + +typedef struct VkGeneratedCommandsMemoryRequirementsInfoEXT { + VkStructureType sType; + const 
void* pNext; + VkIndirectExecutionSetEXT indirectExecutionSet; + VkIndirectCommandsLayoutEXT indirectCommandsLayout; + uint32_t maxSequenceCount; + uint32_t maxDrawCount; +} VkGeneratedCommandsMemoryRequirementsInfoEXT; -#define VK_EXT_image_compression_control_swapchain 1 -#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_SPEC_VERSION 1 -#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_EXTENSION_NAME "VK_EXT_image_compression_control_swapchain" -typedef struct VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT { +typedef struct VkIndirectExecutionSetPipelineInfoEXT { VkStructureType sType; - void* pNext; - VkBool32 imageCompressionControlSwapchain; -} VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; + const void* pNext; + VkPipeline initialPipeline; + uint32_t maxPipelineCount; +} VkIndirectExecutionSetPipelineInfoEXT; +typedef struct VkIndirectExecutionSetShaderLayoutInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t setLayoutCount; + const VkDescriptorSetLayout* pSetLayouts; +} VkIndirectExecutionSetShaderLayoutInfoEXT; +typedef struct VkIndirectExecutionSetShaderInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t shaderCount; + const VkShaderEXT* pInitialShaders; + const VkIndirectExecutionSetShaderLayoutInfoEXT* pSetLayoutInfos; + uint32_t maxShaderCount; + uint32_t pushConstantRangeCount; + const VkPushConstantRange* pPushConstantRanges; +} VkIndirectExecutionSetShaderInfoEXT; + +typedef union VkIndirectExecutionSetInfoEXT { + const VkIndirectExecutionSetPipelineInfoEXT* pPipelineInfo; + const VkIndirectExecutionSetShaderInfoEXT* pShaderInfo; +} VkIndirectExecutionSetInfoEXT; + +typedef struct VkIndirectExecutionSetCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkIndirectExecutionSetInfoTypeEXT type; + VkIndirectExecutionSetInfoEXT info; +} VkIndirectExecutionSetCreateInfoEXT; -#define VK_QCOM_image_processing 1 -#define VK_QCOM_IMAGE_PROCESSING_SPEC_VERSION 1 -#define VK_QCOM_IMAGE_PROCESSING_EXTENSION_NAME "VK_QCOM_image_processing" -typedef struct VkImageViewSampleWeightCreateInfoQCOM { +typedef struct VkGeneratedCommandsInfoEXT { + VkStructureType sType; + const void* pNext; + VkShaderStageFlags shaderStages; + VkIndirectExecutionSetEXT indirectExecutionSet; + VkIndirectCommandsLayoutEXT indirectCommandsLayout; + VkDeviceAddress indirectAddress; + VkDeviceSize indirectAddressSize; + VkDeviceAddress preprocessAddress; + VkDeviceSize preprocessSize; + uint32_t maxSequenceCount; + VkDeviceAddress sequenceCountAddress; + uint32_t maxDrawCount; +} VkGeneratedCommandsInfoEXT; + +typedef struct VkWriteIndirectExecutionSetPipelineEXT { VkStructureType sType; const void* pNext; - VkOffset2D filterCenter; - VkExtent2D filterSize; - uint32_t numPhases; -} VkImageViewSampleWeightCreateInfoQCOM; + uint32_t index; + VkPipeline pipeline; +} VkWriteIndirectExecutionSetPipelineEXT; -typedef struct VkPhysicalDeviceImageProcessingFeaturesQCOM { - VkStructureType sType; - void* pNext; - VkBool32 textureSampleWeighted; - VkBool32 textureBoxFilter; - VkBool32 textureBlockMatch; -} VkPhysicalDeviceImageProcessingFeaturesQCOM; +typedef struct VkIndirectCommandsPushConstantTokenEXT { + VkPushConstantRange updateRange; +} VkIndirectCommandsPushConstantTokenEXT; -typedef struct VkPhysicalDeviceImageProcessingPropertiesQCOM { - VkStructureType sType; - void* pNext; - uint32_t maxWeightFilterPhases; - VkExtent2D maxWeightFilterDimension; - VkExtent2D maxBlockMatchRegion; - VkExtent2D maxBoxFilterBlockSize; -} 
VkPhysicalDeviceImageProcessingPropertiesQCOM; +typedef struct VkIndirectCommandsVertexBufferTokenEXT { + uint32_t vertexBindingUnit; +} VkIndirectCommandsVertexBufferTokenEXT; +typedef struct VkIndirectCommandsIndexBufferTokenEXT { + VkIndirectCommandsInputModeFlagBitsEXT mode; +} VkIndirectCommandsIndexBufferTokenEXT; +typedef struct VkIndirectCommandsExecutionSetTokenEXT { + VkIndirectExecutionSetInfoTypeEXT type; + VkShaderStageFlags shaderStages; +} VkIndirectCommandsExecutionSetTokenEXT; -#define VK_EXT_subpass_merge_feedback 1 -#define VK_EXT_SUBPASS_MERGE_FEEDBACK_SPEC_VERSION 2 -#define VK_EXT_SUBPASS_MERGE_FEEDBACK_EXTENSION_NAME "VK_EXT_subpass_merge_feedback" +typedef union VkIndirectCommandsTokenDataEXT { + const VkIndirectCommandsPushConstantTokenEXT* pPushConstant; + const VkIndirectCommandsVertexBufferTokenEXT* pVertexBuffer; + const VkIndirectCommandsIndexBufferTokenEXT* pIndexBuffer; + const VkIndirectCommandsExecutionSetTokenEXT* pExecutionSet; +} VkIndirectCommandsTokenDataEXT; -typedef enum VkSubpassMergeStatusEXT { - VK_SUBPASS_MERGE_STATUS_MERGED_EXT = 0, - VK_SUBPASS_MERGE_STATUS_DISALLOWED_EXT = 1, - VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SIDE_EFFECTS_EXT = 2, - VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SAMPLES_MISMATCH_EXT = 3, - VK_SUBPASS_MERGE_STATUS_NOT_MERGED_VIEWS_MISMATCH_EXT = 4, - VK_SUBPASS_MERGE_STATUS_NOT_MERGED_ALIASING_EXT = 5, - VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPENDENCIES_EXT = 6, - VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT_EXT = 7, - VK_SUBPASS_MERGE_STATUS_NOT_MERGED_TOO_MANY_ATTACHMENTS_EXT = 8, - VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INSUFFICIENT_STORAGE_EXT = 9, - VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPTH_STENCIL_COUNT_EXT = 10, - VK_SUBPASS_MERGE_STATUS_NOT_MERGED_RESOLVE_ATTACHMENT_REUSE_EXT = 11, - VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SINGLE_SUBPASS_EXT = 12, - VK_SUBPASS_MERGE_STATUS_NOT_MERGED_UNSPECIFIED_EXT = 13, - VK_SUBPASS_MERGE_STATUS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkSubpassMergeStatusEXT; -typedef struct VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT { +typedef struct VkIndirectCommandsLayoutTokenEXT { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsTokenTypeEXT type; + VkIndirectCommandsTokenDataEXT data; + uint32_t offset; +} VkIndirectCommandsLayoutTokenEXT; + +typedef struct VkIndirectCommandsLayoutCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsLayoutUsageFlagsEXT flags; + VkShaderStageFlags shaderStages; + uint32_t indirectStride; + VkPipelineLayout pipelineLayout; + uint32_t tokenCount; + const VkIndirectCommandsLayoutTokenEXT* pTokens; +} VkIndirectCommandsLayoutCreateInfoEXT; + +typedef struct VkDrawIndirectCountIndirectCommandEXT { + VkDeviceAddress bufferAddress; + uint32_t stride; + uint32_t commandCount; +} VkDrawIndirectCountIndirectCommandEXT; + +typedef struct VkBindVertexBufferIndirectCommandEXT { + VkDeviceAddress bufferAddress; + uint32_t size; + uint32_t stride; +} VkBindVertexBufferIndirectCommandEXT; + +typedef struct VkBindIndexBufferIndirectCommandEXT { + VkDeviceAddress bufferAddress; + uint32_t size; + VkIndexType indexType; +} VkBindIndexBufferIndirectCommandEXT; + +typedef struct VkGeneratedCommandsPipelineInfoEXT { VkStructureType sType; void* pNext; - VkBool32 subpassMergeFeedback; -} VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT; + VkPipeline pipeline; +} VkGeneratedCommandsPipelineInfoEXT; -typedef struct VkRenderPassCreationControlEXT { +typedef struct VkGeneratedCommandsShaderInfoEXT { + VkStructureType sType; + void* pNext; + uint32_t 
shaderCount; + const VkShaderEXT* pShaders; +} VkGeneratedCommandsShaderInfoEXT; + +typedef struct VkWriteIndirectExecutionSetShaderEXT { VkStructureType sType; const void* pNext; - VkBool32 disallowMerging; -} VkRenderPassCreationControlEXT; + uint32_t index; + VkShaderEXT shader; +} VkWriteIndirectExecutionSetShaderEXT; + +typedef void (VKAPI_PTR *PFN_vkGetGeneratedCommandsMemoryRequirementsEXT)(VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoEXT* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkCmdPreprocessGeneratedCommandsEXT)(VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo, VkCommandBuffer stateCommandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdExecuteGeneratedCommandsEXT)(VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutEXT)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutEXT* pIndirectCommandsLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutEXT)(VkDevice device, VkIndirectCommandsLayoutEXT indirectCommandsLayout, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectExecutionSetEXT)(VkDevice device, const VkIndirectExecutionSetCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectExecutionSetEXT* pIndirectExecutionSet); +typedef void (VKAPI_PTR *PFN_vkDestroyIndirectExecutionSetEXT)(VkDevice device, VkIndirectExecutionSetEXT indirectExecutionSet, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkUpdateIndirectExecutionSetPipelineEXT)(VkDevice device, VkIndirectExecutionSetEXT indirectExecutionSet, uint32_t executionSetWriteCount, const VkWriteIndirectExecutionSetPipelineEXT* pExecutionSetWrites); +typedef void (VKAPI_PTR *PFN_vkUpdateIndirectExecutionSetShaderEXT)(VkDevice device, VkIndirectExecutionSetEXT indirectExecutionSet, uint32_t executionSetWriteCount, const VkWriteIndirectExecutionSetShaderEXT* pExecutionSetWrites); -typedef struct VkRenderPassCreationFeedbackInfoEXT { - uint32_t postMergeSubpassCount; -} VkRenderPassCreationFeedbackInfoEXT; +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetGeneratedCommandsMemoryRequirementsEXT( + VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoEXT* pInfo, + VkMemoryRequirements2* pMemoryRequirements); -typedef struct VkRenderPassCreationFeedbackCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkRenderPassCreationFeedbackInfoEXT* pRenderPassFeedback; -} VkRenderPassCreationFeedbackCreateInfoEXT; +VKAPI_ATTR void VKAPI_CALL vkCmdPreprocessGeneratedCommandsEXT( + VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo, + VkCommandBuffer stateCommandBuffer); -typedef struct VkRenderPassSubpassFeedbackInfoEXT { - VkSubpassMergeStatusEXT subpassMergeStatus; - char description[VK_MAX_DESCRIPTION_SIZE]; - uint32_t postMergeIndex; -} VkRenderPassSubpassFeedbackInfoEXT; +VKAPI_ATTR void VKAPI_CALL vkCmdExecuteGeneratedCommandsEXT( + VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo); -typedef struct VkRenderPassSubpassFeedbackCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkRenderPassSubpassFeedbackInfoEXT* pSubpassFeedback; -} VkRenderPassSubpassFeedbackCreateInfoEXT; 
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutEXT( + VkDevice device, + const VkIndirectCommandsLayoutCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkIndirectCommandsLayoutEXT* pIndirectCommandsLayout); +VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutEXT( + VkDevice device, + VkIndirectCommandsLayoutEXT indirectCommandsLayout, + const VkAllocationCallbacks* pAllocator); +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectExecutionSetEXT( + VkDevice device, + const VkIndirectExecutionSetCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkIndirectExecutionSetEXT* pIndirectExecutionSet); -#define VK_EXT_shader_module_identifier 1 -#define VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT 32U -#define VK_EXT_SHADER_MODULE_IDENTIFIER_SPEC_VERSION 1 -#define VK_EXT_SHADER_MODULE_IDENTIFIER_EXTENSION_NAME "VK_EXT_shader_module_identifier" -typedef struct VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT { +VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectExecutionSetEXT( + VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkUpdateIndirectExecutionSetPipelineEXT( + VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VkWriteIndirectExecutionSetPipelineEXT* pExecutionSetWrites); + +VKAPI_ATTR void VKAPI_CALL vkUpdateIndirectExecutionSetShaderEXT( + VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VkWriteIndirectExecutionSetShaderEXT* pExecutionSetWrites); +#endif + + +// VK_MESA_image_alignment_control is a preprocessor guard. Do not pass it to API calls. +#define VK_MESA_image_alignment_control 1 +#define VK_MESA_IMAGE_ALIGNMENT_CONTROL_SPEC_VERSION 1 +#define VK_MESA_IMAGE_ALIGNMENT_CONTROL_EXTENSION_NAME "VK_MESA_image_alignment_control" +typedef struct VkPhysicalDeviceImageAlignmentControlFeaturesMESA { VkStructureType sType; void* pNext; - VkBool32 shaderModuleIdentifier; -} VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT; + VkBool32 imageAlignmentControl; +} VkPhysicalDeviceImageAlignmentControlFeaturesMESA; -typedef struct VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT { +typedef struct VkPhysicalDeviceImageAlignmentControlPropertiesMESA { VkStructureType sType; void* pNext; - uint8_t shaderModuleIdentifierAlgorithmUUID[VK_UUID_SIZE]; -} VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT; + uint32_t supportedImageAlignmentMask; +} VkPhysicalDeviceImageAlignmentControlPropertiesMESA; -typedef struct VkPipelineShaderStageModuleIdentifierCreateInfoEXT { +typedef struct VkImageAlignmentControlCreateInfoMESA { VkStructureType sType; const void* pNext; - uint32_t identifierSize; - const uint8_t* pIdentifier; -} VkPipelineShaderStageModuleIdentifierCreateInfoEXT; - -typedef struct VkShaderModuleIdentifierEXT { - VkStructureType sType; - void* pNext; - uint32_t identifierSize; - uint8_t identifier[VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT]; -} VkShaderModuleIdentifierEXT; + uint32_t maximumRequestedAlignment; +} VkImageAlignmentControlCreateInfoMESA; -typedef void (VKAPI_PTR *PFN_vkGetShaderModuleIdentifierEXT)(VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT* pIdentifier); -typedef void (VKAPI_PTR *PFN_vkGetShaderModuleCreateInfoIdentifierEXT)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, VkShaderModuleIdentifierEXT* pIdentifier); -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL 
vkGetShaderModuleIdentifierEXT( - VkDevice device, - VkShaderModule shaderModule, - VkShaderModuleIdentifierEXT* pIdentifier); -VKAPI_ATTR void VKAPI_CALL vkGetShaderModuleCreateInfoIdentifierEXT( - VkDevice device, - const VkShaderModuleCreateInfo* pCreateInfo, - VkShaderModuleIdentifierEXT* pIdentifier); -#endif +// VK_EXT_depth_clamp_control is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_depth_clamp_control 1 +#define VK_EXT_DEPTH_CLAMP_CONTROL_SPEC_VERSION 1 +#define VK_EXT_DEPTH_CLAMP_CONTROL_EXTENSION_NAME "VK_EXT_depth_clamp_control" +typedef struct VkPhysicalDeviceDepthClampControlFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 depthClampControl; +} VkPhysicalDeviceDepthClampControlFeaturesEXT; +typedef struct VkPipelineViewportDepthClampControlCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDepthClampModeEXT depthClampMode; + const VkDepthClampRangeEXT* pDepthClampRange; +} VkPipelineViewportDepthClampControlCreateInfoEXT; -#define VK_EXT_rasterization_order_attachment_access 1 -#define VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION 1 -#define VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME "VK_EXT_rasterization_order_attachment_access" -#define VK_EXT_legacy_dithering 1 -#define VK_EXT_LEGACY_DITHERING_SPEC_VERSION 1 -#define VK_EXT_LEGACY_DITHERING_EXTENSION_NAME "VK_EXT_legacy_dithering" -typedef struct VkPhysicalDeviceLegacyDitheringFeaturesEXT { +// VK_HUAWEI_hdr_vivid is a preprocessor guard. Do not pass it to API calls. +#define VK_HUAWEI_hdr_vivid 1 +#define VK_HUAWEI_HDR_VIVID_SPEC_VERSION 1 +#define VK_HUAWEI_HDR_VIVID_EXTENSION_NAME "VK_HUAWEI_hdr_vivid" +typedef struct VkPhysicalDeviceHdrVividFeaturesHUAWEI { VkStructureType sType; void* pNext; - VkBool32 legacyDithering; -} VkPhysicalDeviceLegacyDitheringFeaturesEXT; + VkBool32 hdrVivid; +} VkPhysicalDeviceHdrVividFeaturesHUAWEI; +typedef struct VkHdrVividDynamicMetadataHUAWEI { + VkStructureType sType; + const void* pNext; + size_t dynamicMetadataSize; + const void* pDynamicMetadata; +} VkHdrVividDynamicMetadataHUAWEI; -#define VK_QCOM_tile_properties 1 -#define VK_QCOM_TILE_PROPERTIES_SPEC_VERSION 1 -#define VK_QCOM_TILE_PROPERTIES_EXTENSION_NAME "VK_QCOM_tile_properties" -typedef struct VkPhysicalDeviceTilePropertiesFeaturesQCOM { + +// VK_NV_cooperative_matrix2 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_cooperative_matrix2 1 +#define VK_NV_COOPERATIVE_MATRIX_2_SPEC_VERSION 1 +#define VK_NV_COOPERATIVE_MATRIX_2_EXTENSION_NAME "VK_NV_cooperative_matrix2" +typedef struct VkCooperativeMatrixFlexibleDimensionsPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t MGranularity; + uint32_t NGranularity; + uint32_t KGranularity; + VkComponentTypeKHR AType; + VkComponentTypeKHR BType; + VkComponentTypeKHR CType; + VkComponentTypeKHR ResultType; + VkBool32 saturatingAccumulation; + VkScopeKHR scope; + uint32_t workgroupInvocations; +} VkCooperativeMatrixFlexibleDimensionsPropertiesNV; + +typedef struct VkPhysicalDeviceCooperativeMatrix2FeaturesNV { VkStructureType sType; void* pNext; - VkBool32 tileProperties; -} VkPhysicalDeviceTilePropertiesFeaturesQCOM; + VkBool32 cooperativeMatrixWorkgroupScope; + VkBool32 cooperativeMatrixFlexibleDimensions; + VkBool32 cooperativeMatrixReductions; + VkBool32 cooperativeMatrixConversions; + VkBool32 cooperativeMatrixPerElementOperations; + VkBool32 cooperativeMatrixTensorAddressing; + VkBool32 cooperativeMatrixBlockLoads; +} VkPhysicalDeviceCooperativeMatrix2FeaturesNV; -typedef struct VkTilePropertiesQCOM { +typedef struct VkPhysicalDeviceCooperativeMatrix2PropertiesNV { VkStructureType sType; void* pNext; - VkExtent3D tileSize; - VkExtent2D apronSize; - VkOffset2D origin; -} VkTilePropertiesQCOM; + uint32_t cooperativeMatrixWorkgroupScopeMaxWorkgroupSize; + uint32_t cooperativeMatrixFlexibleDimensionsMaxDimension; + uint32_t cooperativeMatrixWorkgroupScopeReservedSharedMemory; +} VkPhysicalDeviceCooperativeMatrix2PropertiesNV; -typedef VkResult (VKAPI_PTR *PFN_vkGetFramebufferTilePropertiesQCOM)(VkDevice device, VkFramebuffer framebuffer, uint32_t* pPropertiesCount, VkTilePropertiesQCOM* pProperties); -typedef VkResult (VKAPI_PTR *PFN_vkGetDynamicRenderingTilePropertiesQCOM)(VkDevice device, const VkRenderingInfo* pRenderingInfo, VkTilePropertiesQCOM* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixFlexibleDimensionsPropertiesNV* pProperties); #ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetFramebufferTilePropertiesQCOM( - VkDevice device, - VkFramebuffer framebuffer, - uint32_t* pPropertiesCount, - VkTilePropertiesQCOM* pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetDynamicRenderingTilePropertiesQCOM( - VkDevice device, - const VkRenderingInfo* pRenderingInfo, - VkTilePropertiesQCOM* pProperties); +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkCooperativeMatrixFlexibleDimensionsPropertiesNV* pProperties); #endif -#define VK_SEC_amigo_profiling 1 -#define VK_SEC_AMIGO_PROFILING_SPEC_VERSION 1 -#define VK_SEC_AMIGO_PROFILING_EXTENSION_NAME "VK_SEC_amigo_profiling" -typedef struct VkPhysicalDeviceAmigoProfilingFeaturesSEC { +// VK_ARM_pipeline_opacity_micromap is a preprocessor guard. Do not pass it to API calls. 
+#define VK_ARM_pipeline_opacity_micromap 1 +#define VK_ARM_PIPELINE_OPACITY_MICROMAP_SPEC_VERSION 1 +#define VK_ARM_PIPELINE_OPACITY_MICROMAP_EXTENSION_NAME "VK_ARM_pipeline_opacity_micromap" +typedef struct VkPhysicalDevicePipelineOpacityMicromapFeaturesARM { VkStructureType sType; void* pNext; - VkBool32 amigoProfiling; -} VkPhysicalDeviceAmigoProfilingFeaturesSEC; + VkBool32 pipelineOpacityMicromap; +} VkPhysicalDevicePipelineOpacityMicromapFeaturesARM; -typedef struct VkAmigoProfilingSubmitInfoSEC { - VkStructureType sType; - const void* pNext; - uint64_t firstDrawTimestamp; - uint64_t swapBufferTimestamp; -} VkAmigoProfilingSubmitInfoSEC; +// VK_EXT_vertex_attribute_robustness is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_vertex_attribute_robustness 1 +#define VK_EXT_VERTEX_ATTRIBUTE_ROBUSTNESS_SPEC_VERSION 1 +#define VK_EXT_VERTEX_ATTRIBUTE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_vertex_attribute_robustness" +typedef struct VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 vertexAttributeRobustness; +} VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT; -#define VK_EXT_mutable_descriptor_type 1 -#define VK_EXT_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION 1 -#define VK_EXT_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME "VK_EXT_mutable_descriptor_type" +// VK_KHR_acceleration_structure is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_acceleration_structure 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureKHR) #define VK_KHR_ACCELERATION_STRUCTURE_SPEC_VERSION 13 #define VK_KHR_ACCELERATION_STRUCTURE_EXTENSION_NAME "VK_KHR_acceleration_structure" @@ -14619,30 +21358,13 @@ typedef enum VkBuildAccelerationStructureModeKHR { VK_BUILD_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_KHR = 0x7FFFFFFF } VkBuildAccelerationStructureModeKHR; -typedef enum VkAccelerationStructureBuildTypeKHR { - VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR = 0, - VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR = 1, - VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR = 2, - VK_ACCELERATION_STRUCTURE_BUILD_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkAccelerationStructureBuildTypeKHR; - -typedef enum VkAccelerationStructureCompatibilityKHR { - VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR = 0, - VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR = 1, - VK_ACCELERATION_STRUCTURE_COMPATIBILITY_MAX_ENUM_KHR = 0x7FFFFFFF -} VkAccelerationStructureCompatibilityKHR; - typedef enum VkAccelerationStructureCreateFlagBitsKHR { VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = 0x00000001, + VK_ACCELERATION_STRUCTURE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000008, VK_ACCELERATION_STRUCTURE_CREATE_MOTION_BIT_NV = 0x00000004, VK_ACCELERATION_STRUCTURE_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF } VkAccelerationStructureCreateFlagBitsKHR; typedef VkFlags VkAccelerationStructureCreateFlagsKHR; -typedef union VkDeviceOrHostAddressKHR { - VkDeviceAddress deviceAddress; - void* hostAddress; -} VkDeviceOrHostAddressKHR; - typedef struct VkAccelerationStructureBuildRangeInfoKHR { uint32_t primitiveCount; uint32_t primitiveOffset; @@ -14781,14 +21503,6 @@ typedef struct VkCopyAccelerationStructureInfoKHR { VkCopyAccelerationStructureModeKHR mode; } VkCopyAccelerationStructureInfoKHR; -typedef struct VkAccelerationStructureBuildSizesInfoKHR { - VkStructureType sType; - const void* pNext; - VkDeviceSize accelerationStructureSize; - VkDeviceSize updateScratchSize; - VkDeviceSize buildScratchSize; -} 
VkAccelerationStructureBuildSizesInfoKHR; - typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureKHR)(VkDevice device, const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureKHR* pAccelerationStructure); typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureKHR)(VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator); typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructuresKHR)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); @@ -14901,6 +21615,7 @@ VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureBuildSizesKHR( #endif +// VK_KHR_ray_tracing_pipeline is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_ray_tracing_pipeline 1 #define VK_KHR_RAY_TRACING_PIPELINE_SPEC_VERSION 1 #define VK_KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME "VK_KHR_ray_tracing_pipeline" @@ -14970,12 +21685,6 @@ typedef struct VkPhysicalDeviceRayTracingPipelinePropertiesKHR { uint32_t maxRayHitAttributeSize; } VkPhysicalDeviceRayTracingPipelinePropertiesKHR; -typedef struct VkStridedDeviceAddressRegionKHR { - VkDeviceAddress deviceAddress; - VkDeviceSize stride; - VkDeviceSize size; -} VkStridedDeviceAddressRegionKHR; - typedef struct VkTraceRaysIndirectCommandKHR { uint32_t width; uint32_t height; @@ -15037,6 +21746,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetRayTracingPipelineStackSizeKHR( #endif +// VK_KHR_ray_query is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_ray_query 1 #define VK_KHR_RAY_QUERY_SPEC_VERSION 1 #define VK_KHR_RAY_QUERY_EXTENSION_NAME "VK_KHR_ray_query" @@ -15048,6 +21758,7 @@ typedef struct VkPhysicalDeviceRayQueryFeaturesKHR { +// VK_EXT_mesh_shader is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_mesh_shader 1 #define VK_EXT_MESH_SHADER_SPEC_VERSION 1 #define VK_EXT_MESH_SHADER_EXTENSION_NAME "VK_EXT_mesh_shader" diff --git a/src/libraries/vulkanheaders/vulkan_directfb.h b/src/libraries/vulkanheaders/vulkan_directfb.h index ab3504efa..26a436316 100644 --- a/src/libraries/vulkanheaders/vulkan_directfb.h +++ b/src/libraries/vulkanheaders/vulkan_directfb.h @@ -2,7 +2,7 @@ #define VULKAN_DIRECTFB_H_ 1 /* -** Copyright 2015-2022 The Khronos Group Inc. +** Copyright 2015-2025 The Khronos Group Inc. ** ** SPDX-License-Identifier: Apache-2.0 */ @@ -19,6 +19,7 @@ extern "C" { +// VK_EXT_directfb_surface is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_directfb_surface 1 #define VK_EXT_DIRECTFB_SURFACE_SPEC_VERSION 1 #define VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME "VK_EXT_directfb_surface" diff --git a/src/libraries/vulkanheaders/vulkan_enums.hpp b/src/libraries/vulkanheaders/vulkan_enums.hpp index 1bd58294f..61029121c 100644 --- a/src/libraries/vulkanheaders/vulkan_enums.hpp +++ b/src/libraries/vulkanheaders/vulkan_enums.hpp @@ -1,4 +1,4 @@ -// Copyright 2015-2022 The Khronos Group Inc. +// Copyright 2015-2025 The Khronos Group Inc. 
// // SPDX-License-Identifier: Apache-2.0 OR MIT // @@ -8,19 +8,217 @@ #ifndef VULKAN_ENUMS_HPP #define VULKAN_ENUMS_HPP +// include-what-you-use: make sure, vulkan.hpp is used by code-completers +// IWYU pragma: private; include "vulkan.hpp" + namespace VULKAN_HPP_NAMESPACE { - template <typename EnumType, EnumType value> - struct CppType + template <typename FlagBitsType> + struct FlagTraits { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = false; }; - template <typename Type> - struct isVulkanHandleType + template <typename BitType> + class Flags { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = false; + public: + using MaskType = typename std::underlying_type<BitType>::type; + + // constructors + VULKAN_HPP_CONSTEXPR Flags() VULKAN_HPP_NOEXCEPT : m_mask( 0 ) {} + + VULKAN_HPP_CONSTEXPR Flags( BitType bit ) VULKAN_HPP_NOEXCEPT : m_mask( static_cast<MaskType>( bit ) ) {} + + VULKAN_HPP_CONSTEXPR Flags( Flags const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VULKAN_HPP_CONSTEXPR explicit Flags( MaskType flags ) VULKAN_HPP_NOEXCEPT : m_mask( flags ) {} + + // relational operators +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( Flags const & ) const = default; +#else + VULKAN_HPP_CONSTEXPR bool operator<( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_mask < rhs.m_mask; + } + + VULKAN_HPP_CONSTEXPR bool operator<=( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_mask <= rhs.m_mask; + } + + VULKAN_HPP_CONSTEXPR bool operator>( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_mask > rhs.m_mask; + } + + VULKAN_HPP_CONSTEXPR bool operator>=( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_mask >= rhs.m_mask; + } + + VULKAN_HPP_CONSTEXPR bool operator==( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_mask == rhs.m_mask; + } + + VULKAN_HPP_CONSTEXPR bool operator!=( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_mask != rhs.m_mask; + } +#endif + + // logical operator + VULKAN_HPP_CONSTEXPR bool operator!() const VULKAN_HPP_NOEXCEPT + { + return !m_mask; + } + + // bitwise operators + VULKAN_HPP_CONSTEXPR Flags operator&( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return Flags( m_mask & rhs.m_mask ); + } + + VULKAN_HPP_CONSTEXPR Flags operator|( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return Flags( m_mask | rhs.m_mask ); + } + + VULKAN_HPP_CONSTEXPR Flags operator^( Flags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return Flags( m_mask ^ rhs.m_mask ); + } + + VULKAN_HPP_CONSTEXPR Flags operator~() const VULKAN_HPP_NOEXCEPT + { + return Flags( m_mask ^ FlagTraits<BitType>::allFlags.m_mask ); + } + + // assignment operators + VULKAN_HPP_CONSTEXPR_14 Flags & operator=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VULKAN_HPP_CONSTEXPR_14 Flags & operator|=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT + { + m_mask |= rhs.m_mask; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Flags & operator&=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT + { + m_mask &= rhs.m_mask; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 Flags & operator^=( Flags const & rhs ) VULKAN_HPP_NOEXCEPT + { + m_mask ^= rhs.m_mask; + return *this; + } + + // cast operators + explicit VULKAN_HPP_CONSTEXPR operator bool() const VULKAN_HPP_NOEXCEPT + { + return !!m_mask; + } + + explicit VULKAN_HPP_CONSTEXPR operator MaskType() const VULKAN_HPP_NOEXCEPT + { + return m_mask; + } + +#if defined( VULKAN_HPP_FLAGS_MASK_TYPE_AS_PUBLIC ) + public: +#else + private: +#endif + MaskType m_mask; }; +#if !defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + // relational operators only needed for pre C++20 + template <typename BitType> + VULKAN_HPP_CONSTEXPR
bool operator<( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator>( bit ); + } + + template <typename BitType> + VULKAN_HPP_CONSTEXPR bool operator<=( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator>=( bit ); + } + + template <typename BitType> + VULKAN_HPP_CONSTEXPR bool operator>( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator<( bit ); + } + + template <typename BitType> + VULKAN_HPP_CONSTEXPR bool operator>=( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator<=( bit ); + } + + template <typename BitType> + VULKAN_HPP_CONSTEXPR bool operator==( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator==( bit ); + } + + template <typename BitType> + VULKAN_HPP_CONSTEXPR bool operator!=( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator!=( bit ); + } +#endif + + // bitwise operators + template <typename BitType> + VULKAN_HPP_CONSTEXPR Flags<BitType> operator&( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator&( bit ); + } + + template <typename BitType> + VULKAN_HPP_CONSTEXPR Flags<BitType> operator|( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator|( bit ); + } + + template <typename BitType> + VULKAN_HPP_CONSTEXPR Flags<BitType> operator^( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT + { + return flags.operator^( bit ); + } + + // bitwise operators on BitType + template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true> + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator&( BitType lhs, BitType rhs ) VULKAN_HPP_NOEXCEPT + { + return Flags<BitType>( lhs ) & rhs; + } + + template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true> + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator|( BitType lhs, BitType rhs ) VULKAN_HPP_NOEXCEPT + { + return Flags<BitType>( lhs ) | rhs; + } + + template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true> + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator^( BitType lhs, BitType rhs ) VULKAN_HPP_NOEXCEPT + { + return Flags<BitType>( lhs ) ^ rhs; + } + + template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true> + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator~( BitType bit ) VULKAN_HPP_NOEXCEPT + { + return ~( Flags<BitType>( bit ) ); + } + //============= //=== ENUMs === //============= @@ -29,294 +227,568 @@ namespace VULKAN_HPP_NAMESPACE enum class Result { - eSuccess = VK_SUCCESS, - eNotReady = VK_NOT_READY, - eTimeout = VK_TIMEOUT, - eEventSet = VK_EVENT_SET, - eEventReset = VK_EVENT_RESET, - eIncomplete = VK_INCOMPLETE, - eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY, - eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY, - eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED, - eErrorDeviceLost = VK_ERROR_DEVICE_LOST, - eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED, - eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT, - eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT, - eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT, - eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER, - eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS, - eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED, - eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL, - eErrorUnknown = VK_ERROR_UNKNOWN, - eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY, - eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE, - eErrorFragmentation = VK_ERROR_FRAGMENTATION, - eErrorInvalidOpaqueCaptureAddress = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, - ePipelineCompileRequired = VK_PIPELINE_COMPILE_REQUIRED, - eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR, - eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR, - eSuboptimalKHR =
VK_SUBOPTIMAL_KHR, - eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR, - eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR, - eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT, - eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV, -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - eErrorImageUsageNotSupportedKHR = VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR, - eErrorVideoPictureLayoutNotSupportedKHR = VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR, - eErrorVideoProfileOperationNotSupportedKHR = VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR, - eErrorVideoProfileFormatNotSupportedKHR = VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR, - eErrorVideoProfileCodecNotSupportedKHR = VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR, - eErrorVideoStdVersionNotSupportedKHR = VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR, -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT, + eSuccess = VK_SUCCESS, + eNotReady = VK_NOT_READY, + eTimeout = VK_TIMEOUT, + eEventSet = VK_EVENT_SET, + eEventReset = VK_EVENT_RESET, + eIncomplete = VK_INCOMPLETE, + eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY, + eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY, + eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED, + eErrorDeviceLost = VK_ERROR_DEVICE_LOST, + eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED, + eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT, + eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT, + eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT, + eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER, + eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS, + eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED, + eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL, + eErrorUnknown = VK_ERROR_UNKNOWN, + eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY, + eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR, + eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE, + eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR, + eErrorFragmentation = VK_ERROR_FRAGMENTATION, + eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT, + eErrorInvalidOpaqueCaptureAddress = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, + eErrorInvalidDeviceAddressEXT = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT, + eErrorInvalidOpaqueCaptureAddressKHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR, + ePipelineCompileRequired = VK_PIPELINE_COMPILE_REQUIRED, + ePipelineCompileRequiredEXT = VK_PIPELINE_COMPILE_REQUIRED_EXT, + eErrorPipelineCompileRequiredEXT = VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT, + eErrorNotPermitted = VK_ERROR_NOT_PERMITTED, + eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT, eErrorNotPermittedKHR = VK_ERROR_NOT_PERMITTED_KHR, + eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR, + eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR, + eSuboptimalKHR = VK_SUBOPTIMAL_KHR, + eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR, + eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR, + eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT, + eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV, + eErrorImageUsageNotSupportedKHR = VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR, + eErrorVideoPictureLayoutNotSupportedKHR = VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR, + eErrorVideoProfileOperationNotSupportedKHR = VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR, + eErrorVideoProfileFormatNotSupportedKHR = VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR, + 
eErrorVideoProfileCodecNotSupportedKHR = VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR, + eErrorVideoStdVersionNotSupportedKHR = VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR, + eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT, #if defined( VK_USE_PLATFORM_WIN32_KHR ) eErrorFullScreenExclusiveModeLostEXT = VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT, #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - eThreadIdleKHR = VK_THREAD_IDLE_KHR, - eThreadDoneKHR = VK_THREAD_DONE_KHR, - eOperationDeferredKHR = VK_OPERATION_DEFERRED_KHR, - eOperationNotDeferredKHR = VK_OPERATION_NOT_DEFERRED_KHR, - eErrorCompressionExhaustedEXT = VK_ERROR_COMPRESSION_EXHAUSTED_EXT, - eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT, - eErrorInvalidDeviceAddressEXT = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT, - eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR, - eErrorInvalidOpaqueCaptureAddressKHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR, - eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT, - eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR, - eErrorPipelineCompileRequiredEXT = VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT, - ePipelineCompileRequiredEXT = VK_PIPELINE_COMPILE_REQUIRED_EXT + eThreadIdleKHR = VK_THREAD_IDLE_KHR, + eThreadDoneKHR = VK_THREAD_DONE_KHR, + eOperationDeferredKHR = VK_OPERATION_DEFERRED_KHR, + eOperationNotDeferredKHR = VK_OPERATION_NOT_DEFERRED_KHR, + eErrorInvalidVideoStdParametersKHR = VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR, + eErrorCompressionExhaustedEXT = VK_ERROR_COMPRESSION_EXHAUSTED_EXT, + eIncompatibleShaderBinaryEXT = VK_INCOMPATIBLE_SHADER_BINARY_EXT, + eErrorIncompatibleShaderBinaryEXT = VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT, + ePipelineBinaryMissingKHR = VK_PIPELINE_BINARY_MISSING_KHR, + eErrorNotEnoughSpaceKHR = VK_ERROR_NOT_ENOUGH_SPACE_KHR }; enum class StructureType { - eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO, - eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, - eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, - eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, - eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO, - eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, - eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, - eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, - eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, - eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, - eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, - eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, - eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, - eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO, - eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, - eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, - eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO, - ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO, - ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, - ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO, - ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, - ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, - ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO, - ePipelineRasterizationStateCreateInfo = 
VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO, - ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO, - ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO, - ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO, - ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO, - eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO, - eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO, - ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, - eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, - eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, - eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, - eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, - eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, - eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET, - eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, - eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, - eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, - eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, - eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO, - eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, - eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, - eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, - eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, - eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER, - eLoaderInstanceCreateInfo = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO, - eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO, - ePhysicalDeviceSubgroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES, - eBindBufferMemoryInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, - eBindImageMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, - ePhysicalDevice16BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, - eMemoryDedicatedRequirements = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS, - eMemoryDedicatedAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, - eMemoryAllocateFlagsInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, - eDeviceGroupRenderPassBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, - eDeviceGroupCommandBufferBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO, - eDeviceGroupSubmitInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO, - eDeviceGroupBindSparseInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO, - eBindBufferMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, - eBindImageMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, - ePhysicalDeviceGroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, - eDeviceGroupDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, - eBufferMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, - eImageMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, - eImageSparseMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, - eMemoryRequirements2 = 
VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, - eSparseImageMemoryRequirements2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, - ePhysicalDeviceFeatures2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, - ePhysicalDeviceProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, - eFormatProperties2 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, - eImageFormatProperties2 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, - ePhysicalDeviceImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, - eQueueFamilyProperties2 = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, - ePhysicalDeviceMemoryProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, - eSparseImageFormatProperties2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, - ePhysicalDeviceSparseImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, - ePhysicalDevicePointClippingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES, - eRenderPassInputAttachmentAspectCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, - eImageViewUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, - ePipelineTessellationDomainOriginStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, - eRenderPassMultiviewCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, - ePhysicalDeviceMultiviewFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, - ePhysicalDeviceMultiviewProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, - ePhysicalDeviceVariablePointersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, - eProtectedSubmitInfo = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO, - ePhysicalDeviceProtectedMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES, - ePhysicalDeviceProtectedMemoryProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES, - eDeviceQueueInfo2 = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2, - eSamplerYcbcrConversionCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, - eSamplerYcbcrConversionInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, - eBindImagePlaneMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, - eImagePlaneMemoryRequirementsInfo = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, - ePhysicalDeviceSamplerYcbcrConversionFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES, - eSamplerYcbcrConversionImageFormatProperties = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES, - eDescriptorUpdateTemplateCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, - ePhysicalDeviceExternalImageFormatInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, - eExternalImageFormatProperties = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES, - ePhysicalDeviceExternalBufferInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, - eExternalBufferProperties = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, - ePhysicalDeviceIdProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES, - eExternalMemoryBufferCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO, - eExternalMemoryImageCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, - eExportMemoryAllocateInfo = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, - ePhysicalDeviceExternalFenceInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, - eExternalFenceProperties = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, - 
eExportFenceCreateInfo = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, - eExportSemaphoreCreateInfo = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, - ePhysicalDeviceExternalSemaphoreInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, - eExternalSemaphoreProperties = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, - ePhysicalDeviceMaintenance3Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, - eDescriptorSetLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, - ePhysicalDeviceShaderDrawParametersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, - ePhysicalDeviceVulkan11Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES, - ePhysicalDeviceVulkan11Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES, - ePhysicalDeviceVulkan12Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES, - ePhysicalDeviceVulkan12Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES, - eImageFormatListCreateInfo = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO, - eAttachmentDescription2 = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2, - eAttachmentReference2 = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2, - eSubpassDescription2 = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2, - eSubpassDependency2 = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2, - eRenderPassCreateInfo2 = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2, - eSubpassBeginInfo = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO, - eSubpassEndInfo = VK_STRUCTURE_TYPE_SUBPASS_END_INFO, - ePhysicalDevice8BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES, - ePhysicalDeviceDriverProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES, - ePhysicalDeviceShaderAtomicInt64Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES, - ePhysicalDeviceShaderFloat16Int8Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, - ePhysicalDeviceFloatControlsProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES, - eDescriptorSetLayoutBindingFlagsCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO, - ePhysicalDeviceDescriptorIndexingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES, - ePhysicalDeviceDescriptorIndexingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES, - eDescriptorSetVariableDescriptorCountAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO, - eDescriptorSetVariableDescriptorCountLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT, - ePhysicalDeviceDepthStencilResolveProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES, - eSubpassDescriptionDepthStencilResolve = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE, - ePhysicalDeviceScalarBlockLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES, - eImageStencilUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO, - ePhysicalDeviceSamplerFilterMinmaxProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES, - eSamplerReductionModeCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO, - ePhysicalDeviceVulkanMemoryModelFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES, - ePhysicalDeviceImagelessFramebufferFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES, - eFramebufferAttachmentsCreateInfo = 
VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO, - eFramebufferAttachmentImageInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO, - eRenderPassAttachmentBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO, - ePhysicalDeviceUniformBufferStandardLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES, - ePhysicalDeviceShaderSubgroupExtendedTypesFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES, - ePhysicalDeviceSeparateDepthStencilLayoutsFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES, - eAttachmentReferenceStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT, - eAttachmentDescriptionStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT, - ePhysicalDeviceHostQueryResetFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES, - ePhysicalDeviceTimelineSemaphoreFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES, - ePhysicalDeviceTimelineSemaphoreProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES, - eSemaphoreTypeCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO, - eTimelineSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO, - eSemaphoreWaitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO, - eSemaphoreSignalInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO, - ePhysicalDeviceBufferDeviceAddressFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES, - eBufferDeviceAddressInfo = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO, - eBufferOpaqueCaptureAddressCreateInfo = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO, - eMemoryOpaqueCaptureAddressAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO, - eDeviceMemoryOpaqueCaptureAddressInfo = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO, - ePhysicalDeviceVulkan13Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES, - ePhysicalDeviceVulkan13Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES, - ePipelineCreationFeedbackCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO, - ePhysicalDeviceShaderTerminateInvocationFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES, - ePhysicalDeviceToolProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES, - ePhysicalDeviceShaderDemoteToHelperInvocationFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES, - ePhysicalDevicePrivateDataFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES, - eDevicePrivateDataCreateInfo = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO, - ePrivateDataSlotCreateInfo = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO, - ePhysicalDevicePipelineCreationCacheControlFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES, - eMemoryBarrier2 = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2, - eBufferMemoryBarrier2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2, - eImageMemoryBarrier2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2, - eDependencyInfo = VK_STRUCTURE_TYPE_DEPENDENCY_INFO, - eSubmitInfo2 = VK_STRUCTURE_TYPE_SUBMIT_INFO_2, - eSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO, - eCommandBufferSubmitInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO, - ePhysicalDeviceSynchronization2Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES, - ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES, - ePhysicalDeviceImageRobustnessFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES, - eCopyBufferInfo2 = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2, - eCopyImageInfo2 = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2, - eCopyBufferToImageInfo2 = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2, - eCopyImageToBufferInfo2 = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2, - eBlitImageInfo2 = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2, - eResolveImageInfo2 = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2, - eBufferCopy2 = VK_STRUCTURE_TYPE_BUFFER_COPY_2, - eImageCopy2 = VK_STRUCTURE_TYPE_IMAGE_COPY_2, - eImageBlit2 = VK_STRUCTURE_TYPE_IMAGE_BLIT_2, - eBufferImageCopy2 = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2, - eImageResolve2 = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2, - ePhysicalDeviceSubgroupSizeControlProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES, - ePipelineShaderStageRequiredSubgroupSizeCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO, - ePhysicalDeviceSubgroupSizeControlFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES, - ePhysicalDeviceInlineUniformBlockFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES, - ePhysicalDeviceInlineUniformBlockProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES, - eWriteDescriptorSetInlineUniformBlock = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK, - eDescriptorPoolInlineUniformBlockCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO, - ePhysicalDeviceTextureCompressionAstcHdrFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES, - eRenderingInfo = VK_STRUCTURE_TYPE_RENDERING_INFO, - eRenderingAttachmentInfo = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO, - ePipelineRenderingCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO, - ePhysicalDeviceDynamicRenderingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES, - eCommandBufferInheritanceRenderingInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO, - ePhysicalDeviceShaderIntegerDotProductFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES, - ePhysicalDeviceShaderIntegerDotProductProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES, - ePhysicalDeviceTexelBufferAlignmentProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES, - eFormatProperties3 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3, - ePhysicalDeviceMaintenance4Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES, - ePhysicalDeviceMaintenance4Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES, - eDeviceBufferMemoryRequirements = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS, - eDeviceImageMemoryRequirements = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS, - eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR, - ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, - eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR, - eImageSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR, - eBindImageMemorySwapchainInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR, - eAcquireNextImageInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR, - eDeviceGroupPresentInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR, - 
eDeviceGroupSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR, - eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR, - eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR, - eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR, + eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO, + eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, + eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO, + eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO, + eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO, + eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, + eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, + eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, + eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, + eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO, + eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO, + eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO, + eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, + eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO, + eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, + eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, + eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO, + ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO, + ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, + ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO, + ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO, + ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO, + ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO, + ePipelineRasterizationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO, + ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO, + ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO, + ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO, + ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO, + eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO, + eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO, + ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO, + eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, + eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, + eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO, + eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, + eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET, + eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET, + eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, + eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO, + eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, + eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, + eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO, + eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO, + 
eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, + eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER, + eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, + eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER, + eLoaderInstanceCreateInfo = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO, + eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO, + ePhysicalDeviceSubgroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES, + eBindBufferMemoryInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, + eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR, + eBindImageMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, + eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR, + ePhysicalDevice16BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, + ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR, + eMemoryDedicatedRequirements = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS, + eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR, + eMemoryDedicatedAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, + eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR, + eMemoryAllocateFlagsInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, + eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR, + eDeviceGroupRenderPassBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, + eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR, + eDeviceGroupCommandBufferBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO, + eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR, + eDeviceGroupSubmitInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO, + eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR, + eDeviceGroupBindSparseInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO, + eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR, + eBindBufferMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, + eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR, + eBindImageMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, + eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR, + ePhysicalDeviceGroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, + ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR, + eDeviceGroupDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, + eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR, + eBufferMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, + eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR, + eImageMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, + eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR, + eImageSparseMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, + eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR, + eMemoryRequirements2 = 
VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, + eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR, + eSparseImageMemoryRequirements2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, + eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR, + ePhysicalDeviceFeatures2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, + ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR, + ePhysicalDeviceProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, + ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR, + eFormatProperties2 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, + eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR, + eImageFormatProperties2 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, + eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR, + ePhysicalDeviceImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, + ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR, + eQueueFamilyProperties2 = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, + eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR, + ePhysicalDeviceMemoryProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, + ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR, + eSparseImageFormatProperties2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, + eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR, + ePhysicalDeviceSparseImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, + ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR, + ePhysicalDevicePointClippingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES, + ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR, + eRenderPassInputAttachmentAspectCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, + eRenderPassInputAttachmentAspectCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR, + eImageViewUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, + eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR, + ePipelineTessellationDomainOriginStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, + ePipelineTessellationDomainOriginStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR, + eRenderPassMultiviewCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, + eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR, + ePhysicalDeviceMultiviewFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, + ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR, + ePhysicalDeviceMultiviewProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, + ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR, + ePhysicalDeviceVariablePointersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, + ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, + ePhysicalDeviceVariablePointersFeaturesKHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR, + ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR, + eProtectedSubmitInfo = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO, + ePhysicalDeviceProtectedMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES, + ePhysicalDeviceProtectedMemoryProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES, + eDeviceQueueInfo2 = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2, + eSamplerYcbcrConversionCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, + eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR, + eSamplerYcbcrConversionInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, + eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR, + eBindImagePlaneMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, + eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR, + eImagePlaneMemoryRequirementsInfo = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, + eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR, + ePhysicalDeviceSamplerYcbcrConversionFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES, + ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR, + eSamplerYcbcrConversionImageFormatProperties = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES, + eSamplerYcbcrConversionImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR, + eDescriptorUpdateTemplateCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, + eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR, + ePhysicalDeviceExternalImageFormatInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, + ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR, + eExternalImageFormatProperties = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES, + eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR, + ePhysicalDeviceExternalBufferInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, + ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR, + eExternalBufferProperties = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, + eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR, + ePhysicalDeviceIdProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES, + ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR, + eExternalMemoryBufferCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO, + eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR, + eExternalMemoryImageCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, + eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR, + eExportMemoryAllocateInfo = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, + eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR, + ePhysicalDeviceExternalFenceInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, + ePhysicalDeviceExternalFenceInfoKHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR, + eExternalFenceProperties = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, + eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR, + eExportFenceCreateInfo = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, + eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR, + eExportSemaphoreCreateInfo = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, + eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR, + ePhysicalDeviceExternalSemaphoreInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, + ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR, + eExternalSemaphoreProperties = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, + eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR, + ePhysicalDeviceMaintenance3Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, + ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR, + eDescriptorSetLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, + eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR, + ePhysicalDeviceShaderDrawParametersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, + ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES, + ePhysicalDeviceVulkan11Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES, + ePhysicalDeviceVulkan11Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES, + ePhysicalDeviceVulkan12Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES, + ePhysicalDeviceVulkan12Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES, + eImageFormatListCreateInfo = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO, + eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, + eAttachmentDescription2 = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2, + eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR, + eAttachmentReference2 = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2, + eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, + eSubpassDescription2 = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2, + eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR, + eSubpassDependency2 = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2, + eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR, + eRenderPassCreateInfo2 = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2, + eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR, + eSubpassBeginInfo = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO, + eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, + eSubpassEndInfo = VK_STRUCTURE_TYPE_SUBPASS_END_INFO, + eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR, + ePhysicalDevice8BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES, + ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR, + ePhysicalDeviceDriverProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES, + ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR, + ePhysicalDeviceShaderAtomicInt64Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES, + 
ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR, + ePhysicalDeviceShaderFloat16Int8Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, + ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR, + ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR, + ePhysicalDeviceFloatControlsProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES, + ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR, + eDescriptorSetLayoutBindingFlagsCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO, + eDescriptorSetLayoutBindingFlagsCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT, + ePhysicalDeviceDescriptorIndexingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES, + ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT, + ePhysicalDeviceDescriptorIndexingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES, + ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT, + eDescriptorSetVariableDescriptorCountAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO, + eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT, + eDescriptorSetVariableDescriptorCountLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT, + eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT, + ePhysicalDeviceDepthStencilResolveProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES, + ePhysicalDeviceDepthStencilResolvePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR, + eSubpassDescriptionDepthStencilResolve = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE, + eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR, + ePhysicalDeviceScalarBlockLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES, + ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT, + eImageStencilUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO, + eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT, + ePhysicalDeviceSamplerFilterMinmaxProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES, + ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT, + eSamplerReductionModeCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO, + eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT, + ePhysicalDeviceVulkanMemoryModelFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES, + ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR, + ePhysicalDeviceImagelessFramebufferFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES, + 
ePhysicalDeviceImagelessFramebufferFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR, + eFramebufferAttachmentsCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO, + eFramebufferAttachmentsCreateInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR, + eFramebufferAttachmentImageInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO, + eFramebufferAttachmentImageInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR, + eRenderPassAttachmentBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO, + eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR, + ePhysicalDeviceUniformBufferStandardLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES, + ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR, + ePhysicalDeviceShaderSubgroupExtendedTypesFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES, + ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR, + ePhysicalDeviceSeparateDepthStencilLayoutsFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES, + ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR, + eAttachmentReferenceStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT, + eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR, + eAttachmentDescriptionStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT, + eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR, + ePhysicalDeviceHostQueryResetFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES, + ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT, + ePhysicalDeviceTimelineSemaphoreFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES, + ePhysicalDeviceTimelineSemaphoreFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR, + ePhysicalDeviceTimelineSemaphoreProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES, + ePhysicalDeviceTimelineSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR, + eSemaphoreTypeCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO, + eSemaphoreTypeCreateInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR, + eTimelineSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO, + eTimelineSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR, + eSemaphoreWaitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO, + eSemaphoreWaitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR, + eSemaphoreSignalInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO, + eSemaphoreSignalInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR, + ePhysicalDeviceBufferDeviceAddressFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES, + ePhysicalDeviceBufferDeviceAddressFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR, + eBufferDeviceAddressInfo = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO, + eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT, + 
eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR, + eBufferOpaqueCaptureAddressCreateInfo = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO, + eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR, + eMemoryOpaqueCaptureAddressAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO, + eMemoryOpaqueCaptureAddressAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR, + eDeviceMemoryOpaqueCaptureAddressInfo = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO, + eDeviceMemoryOpaqueCaptureAddressInfoKHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR, + ePhysicalDeviceVulkan13Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES, + ePhysicalDeviceVulkan13Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES, + ePipelineCreationFeedbackCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO, + ePipelineCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT, + ePhysicalDeviceShaderTerminateInvocationFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES, + ePhysicalDeviceShaderTerminateInvocationFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR, + ePhysicalDeviceToolProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES, + ePhysicalDeviceToolPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT, + ePhysicalDeviceShaderDemoteToHelperInvocationFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES, + ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT, + ePhysicalDevicePrivateDataFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES, + ePhysicalDevicePrivateDataFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT, + eDevicePrivateDataCreateInfo = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO, + eDevicePrivateDataCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT, + ePrivateDataSlotCreateInfo = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO, + ePrivateDataSlotCreateInfoEXT = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT, + ePhysicalDevicePipelineCreationCacheControlFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES, + ePhysicalDevicePipelineCreationCacheControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT, + eMemoryBarrier2 = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2, + eMemoryBarrier2KHR = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR, + eBufferMemoryBarrier2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2, + eBufferMemoryBarrier2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR, + eImageMemoryBarrier2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2, + eImageMemoryBarrier2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR, + eDependencyInfo = VK_STRUCTURE_TYPE_DEPENDENCY_INFO, + eDependencyInfoKHR = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR, + eSubmitInfo2 = VK_STRUCTURE_TYPE_SUBMIT_INFO_2, + eSubmitInfo2KHR = VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR, + eSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO, + eSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR, + eCommandBufferSubmitInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO, + eCommandBufferSubmitInfoKHR = 
VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR, + ePhysicalDeviceSynchronization2Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES, + ePhysicalDeviceSynchronization2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR, + ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES, + ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR, + ePhysicalDeviceImageRobustnessFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES, + ePhysicalDeviceImageRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT, + eCopyBufferInfo2 = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2, + eCopyBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR, + eCopyImageInfo2 = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2, + eCopyImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR, + eCopyBufferToImageInfo2 = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2, + eCopyBufferToImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR, + eCopyImageToBufferInfo2 = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2, + eCopyImageToBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR, + eBlitImageInfo2 = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2, + eBlitImageInfo2KHR = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR, + eResolveImageInfo2 = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2, + eResolveImageInfo2KHR = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR, + eBufferCopy2 = VK_STRUCTURE_TYPE_BUFFER_COPY_2, + eBufferCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR, + eImageCopy2 = VK_STRUCTURE_TYPE_IMAGE_COPY_2, + eImageCopy2KHR = VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR, + eImageBlit2 = VK_STRUCTURE_TYPE_IMAGE_BLIT_2, + eImageBlit2KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR, + eBufferImageCopy2 = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2, + eBufferImageCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR, + eImageResolve2 = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2, + eImageResolve2KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR, + ePhysicalDeviceSubgroupSizeControlProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES, + ePhysicalDeviceSubgroupSizeControlPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT, + ePipelineShaderStageRequiredSubgroupSizeCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO, + ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT, + eShaderRequiredSubgroupSizeCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT, + ePhysicalDeviceSubgroupSizeControlFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES, + ePhysicalDeviceSubgroupSizeControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT, + ePhysicalDeviceInlineUniformBlockFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES, + ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT, + ePhysicalDeviceInlineUniformBlockProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES, + ePhysicalDeviceInlineUniformBlockPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT, + eWriteDescriptorSetInlineUniformBlock = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK, + 
eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT, + eDescriptorPoolInlineUniformBlockCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO, + eDescriptorPoolInlineUniformBlockCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT, + ePhysicalDeviceTextureCompressionAstcHdrFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES, + ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT, + eRenderingInfo = VK_STRUCTURE_TYPE_RENDERING_INFO, + eRenderingInfoKHR = VK_STRUCTURE_TYPE_RENDERING_INFO_KHR, + eRenderingAttachmentInfo = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO, + eRenderingAttachmentInfoKHR = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO_KHR, + ePipelineRenderingCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO, + ePipelineRenderingCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO_KHR, + ePhysicalDeviceDynamicRenderingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES, + ePhysicalDeviceDynamicRenderingFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES_KHR, + eCommandBufferInheritanceRenderingInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO, + eCommandBufferInheritanceRenderingInfoKHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO_KHR, + ePhysicalDeviceShaderIntegerDotProductFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES, + ePhysicalDeviceShaderIntegerDotProductFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES_KHR, + ePhysicalDeviceShaderIntegerDotProductProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES, + ePhysicalDeviceShaderIntegerDotProductPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES_KHR, + ePhysicalDeviceTexelBufferAlignmentProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES, + ePhysicalDeviceTexelBufferAlignmentPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT, + eFormatProperties3 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3, + eFormatProperties3KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR, + ePhysicalDeviceMaintenance4Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES, + ePhysicalDeviceMaintenance4FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR, + ePhysicalDeviceMaintenance4Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES, + ePhysicalDeviceMaintenance4PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR, + eDeviceBufferMemoryRequirements = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS, + eDeviceBufferMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR, + eDeviceImageMemoryRequirements = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS, + eDeviceImageMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR, + ePhysicalDeviceVulkan14Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_4_FEATURES, + ePhysicalDeviceVulkan14Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_4_PROPERTIES, + eDeviceQueueGlobalPriorityCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO, + eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT, + 
eDeviceQueueGlobalPriorityCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR, + ePhysicalDeviceGlobalPriorityQueryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES, + ePhysicalDeviceGlobalPriorityQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR, + ePhysicalDeviceGlobalPriorityQueryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT, + eQueueFamilyGlobalPriorityProperties = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES, + eQueueFamilyGlobalPriorityPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR, + eQueueFamilyGlobalPriorityPropertiesEXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT, + ePhysicalDeviceShaderSubgroupRotateFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES, + ePhysicalDeviceShaderSubgroupRotateFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES_KHR, + ePhysicalDeviceShaderFloatControls2Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES, + ePhysicalDeviceShaderFloatControls2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES_KHR, + ePhysicalDeviceShaderExpectAssumeFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES, + ePhysicalDeviceShaderExpectAssumeFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES_KHR, + ePhysicalDeviceLineRasterizationFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES, + ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT, + ePhysicalDeviceLineRasterizationFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_KHR, + ePipelineRasterizationLineStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO, + ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT, + ePipelineRasterizationLineStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_KHR, + ePhysicalDeviceLineRasterizationProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES, + ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT, + ePhysicalDeviceLineRasterizationPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_KHR, + ePhysicalDeviceVertexAttributeDivisorProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES, + ePhysicalDeviceVertexAttributeDivisorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_KHR, + ePipelineVertexInputDivisorStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO, + ePipelineVertexInputDivisorStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT, + ePipelineVertexInputDivisorStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR, + ePhysicalDeviceVertexAttributeDivisorFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES, + ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT, + ePhysicalDeviceVertexAttributeDivisorFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR, + ePhysicalDeviceIndexTypeUint8Features = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES, + ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT, + ePhysicalDeviceIndexTypeUint8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_KHR, + eMemoryMapInfo = VK_STRUCTURE_TYPE_MEMORY_MAP_INFO, + eMemoryMapInfoKHR = VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR, + eMemoryUnmapInfo = VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO, + eMemoryUnmapInfoKHR = VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR, + ePhysicalDeviceMaintenance5Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES, + ePhysicalDeviceMaintenance5FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES_KHR, + ePhysicalDeviceMaintenance5Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES, + ePhysicalDeviceMaintenance5PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR, + eRenderingAreaInfo = VK_STRUCTURE_TYPE_RENDERING_AREA_INFO, + eRenderingAreaInfoKHR = VK_STRUCTURE_TYPE_RENDERING_AREA_INFO_KHR, + eDeviceImageSubresourceInfo = VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO, + eDeviceImageSubresourceInfoKHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO_KHR, + eSubresourceLayout2 = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2, + eSubresourceLayout2EXT = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT, + eSubresourceLayout2KHR = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR, + eImageSubresource2 = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2, + eImageSubresource2EXT = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT, + eImageSubresource2KHR = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR, + ePipelineCreateFlags2CreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO, + ePipelineCreateFlags2CreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR, + eBufferUsageFlags2CreateInfo = VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO, + eBufferUsageFlags2CreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR, + ePhysicalDevicePushDescriptorProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES, + ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR, + ePhysicalDeviceDynamicRenderingLocalReadFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES, + ePhysicalDeviceDynamicRenderingLocalReadFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES_KHR, + eRenderingAttachmentLocationInfo = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_LOCATION_INFO, + eRenderingAttachmentLocationInfoKHR = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_LOCATION_INFO_KHR, + eRenderingInputAttachmentIndexInfo = VK_STRUCTURE_TYPE_RENDERING_INPUT_ATTACHMENT_INDEX_INFO, + eRenderingInputAttachmentIndexInfoKHR = VK_STRUCTURE_TYPE_RENDERING_INPUT_ATTACHMENT_INDEX_INFO_KHR, + ePhysicalDeviceMaintenance6Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES, + ePhysicalDeviceMaintenance6FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES_KHR, + ePhysicalDeviceMaintenance6Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES, + ePhysicalDeviceMaintenance6PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES_KHR, + eBindMemoryStatus = VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS, + eBindMemoryStatusKHR = VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS_KHR, + eBindDescriptorSetsInfo = VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO, + eBindDescriptorSetsInfoKHR = 
VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO_KHR, + ePushConstantsInfo = VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO, + ePushConstantsInfoKHR = VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO_KHR, + ePushDescriptorSetInfo = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO, + ePushDescriptorSetInfoKHR = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO_KHR, + ePushDescriptorSetWithTemplateInfo = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO, + ePushDescriptorSetWithTemplateInfoKHR = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO_KHR, + ePhysicalDevicePipelineProtectedAccessFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES, + ePhysicalDevicePipelineProtectedAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT, + ePipelineRobustnessCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO, + ePipelineRobustnessCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO_EXT, + ePhysicalDevicePipelineRobustnessFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES, + ePhysicalDevicePipelineRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT, + ePhysicalDevicePipelineRobustnessProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES, + ePhysicalDevicePipelineRobustnessPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT, + ePhysicalDeviceHostImageCopyFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES, + ePhysicalDeviceHostImageCopyFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT, + ePhysicalDeviceHostImageCopyProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES, + ePhysicalDeviceHostImageCopyPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT, + eMemoryToImageCopy = VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY, + eMemoryToImageCopyEXT = VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT, + eImageToMemoryCopy = VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY, + eImageToMemoryCopyEXT = VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY_EXT, + eCopyImageToMemoryInfo = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO, + eCopyImageToMemoryInfoEXT = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO_EXT, + eCopyMemoryToImageInfo = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO, + eCopyMemoryToImageInfoEXT = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT, + eHostImageLayoutTransitionInfo = VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO, + eHostImageLayoutTransitionInfoEXT = VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT, + eCopyImageToImageInfo = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO, + eCopyImageToImageInfoEXT = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO_EXT, + eSubresourceHostMemcpySize = VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE, + eSubresourceHostMemcpySizeEXT = VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE_EXT, + eHostImageCopyDevicePerformanceQuery = VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY, + eHostImageCopyDevicePerformanceQueryEXT = VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT, + eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR, + ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, + eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR, + eImageSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR, + eBindImageMemorySwapchainInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR, + eAcquireNextImageInfoKHR = 
VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR, + eDeviceGroupPresentInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR, + eDeviceGroupSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR, + eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR, + eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR, + eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR, #if defined( VK_USE_PLATFORM_XLIB_KHR ) eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR, #endif /*VK_USE_PLATFORM_XLIB_KHR*/ @@ -333,79 +805,78 @@ namespace VULKAN_HPP_NAMESPACE eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR, #endif /*VK_USE_PLATFORM_WIN32_KHR*/ eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, + eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT, ePipelineRasterizationStateRasterizationOrderAMD = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD, eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT, eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT, eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT, -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - eVideoProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR, - eVideoCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR, - eVideoPictureResourceInfoKHR = VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_INFO_KHR, - eVideoSessionMemoryRequirementsKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR, - eBindVideoSessionMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_VIDEO_SESSION_MEMORY_INFO_KHR, - eVideoSessionCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR, - eVideoSessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR, - eVideoSessionParametersUpdateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR, - eVideoBeginCodingInfoKHR = VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR, - eVideoEndCodingInfoKHR = VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR, - eVideoCodingControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR, - eVideoReferenceSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_INFO_KHR, - eQueueFamilyVideoPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR, - eVideoProfileListInfoKHR = VK_STRUCTURE_TYPE_VIDEO_PROFILE_LIST_INFO_KHR, - ePhysicalDeviceVideoFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR, - eVideoFormatPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR, - eQueueFamilyQueryResultStatusPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR, - eVideoDecodeInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR, - eVideoDecodeCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_CAPABILITIES_KHR, - eVideoDecodeUsageInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_USAGE_INFO_KHR, -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV, - eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV, - eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV, - ePhysicalDeviceTransformFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT, - ePhysicalDeviceTransformFeedbackPropertiesEXT = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT, - ePipelineRasterizationStateStreamCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT, - eCuModuleCreateInfoNVX = VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX, - eCuFunctionCreateInfoNVX = VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX, - eCuLaunchInfoNVX = VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX, - eImageViewHandleInfoNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX, - eImageViewAddressPropertiesNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX, -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - eVideoEncodeH264CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_EXT, - eVideoEncodeH264SessionParametersCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT, - eVideoEncodeH264SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT, - eVideoEncodeH264VclFrameInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_VCL_FRAME_INFO_EXT, - eVideoEncodeH264DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_EXT, - eVideoEncodeH264NaluSliceInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_INFO_EXT, - eVideoEncodeH264EmitPictureParametersInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_INFO_EXT, - eVideoEncodeH264ProfileInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_INFO_EXT, - eVideoEncodeH264RateControlInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_EXT, - eVideoEncodeH264RateControlLayerInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_EXT, - eVideoEncodeH264ReferenceListsInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_REFERENCE_LISTS_INFO_EXT, - eVideoEncodeH265CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_CAPABILITIES_EXT, - eVideoEncodeH265SessionParametersCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT, - eVideoEncodeH265SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT, - eVideoEncodeH265VclFrameInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_VCL_FRAME_INFO_EXT, - eVideoEncodeH265DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_EXT, - eVideoEncodeH265NaluSliceSegmentInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_EXT, - eVideoEncodeH265EmitPictureParametersInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_EMIT_PICTURE_PARAMETERS_INFO_EXT, - eVideoEncodeH265ProfileInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_INFO_EXT, - eVideoEncodeH265ReferenceListsInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_REFERENCE_LISTS_INFO_EXT, - eVideoEncodeH265RateControlInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_EXT, - eVideoEncodeH265RateControlLayerInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_EXT, - eVideoDecodeH264CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_EXT, - eVideoDecodeH264PictureInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_EXT, - eVideoDecodeH264MvcInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_MVC_INFO_EXT, - eVideoDecodeH264ProfileInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_INFO_EXT, - eVideoDecodeH264SessionParametersCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT, - eVideoDecodeH264SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT, - eVideoDecodeH264DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_EXT, -#endif 
/*VK_ENABLE_BETA_EXTENSIONS*/ - eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD, - eRenderingFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR, - eRenderingFragmentDensityMapAttachmentInfoEXT = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT, - eAttachmentSampleCountInfoAMD = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, - eMultiviewPerViewAttributesInfoNVX = VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX, + eVideoProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR, + eVideoCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR, + eVideoPictureResourceInfoKHR = VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_INFO_KHR, + eVideoSessionMemoryRequirementsKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR, + eBindVideoSessionMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_VIDEO_SESSION_MEMORY_INFO_KHR, + eVideoSessionCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR, + eVideoSessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoSessionParametersUpdateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR, + eVideoBeginCodingInfoKHR = VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR, + eVideoEndCodingInfoKHR = VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR, + eVideoCodingControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR, + eVideoReferenceSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_INFO_KHR, + eQueueFamilyVideoPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR, + eVideoProfileListInfoKHR = VK_STRUCTURE_TYPE_VIDEO_PROFILE_LIST_INFO_KHR, + ePhysicalDeviceVideoFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR, + eVideoFormatPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR, + eQueueFamilyQueryResultStatusPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR, + eVideoDecodeInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR, + eVideoDecodeCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_CAPABILITIES_KHR, + eVideoDecodeUsageInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_USAGE_INFO_KHR, + eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV, + eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV, + eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV, + ePhysicalDeviceTransformFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT, + ePhysicalDeviceTransformFeedbackPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT, + ePipelineRasterizationStateStreamCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT, + eCuModuleCreateInfoNVX = VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX, + eCuFunctionCreateInfoNVX = VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX, + eCuLaunchInfoNVX = VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX, + eCuModuleTexturingModeCreateInfoNVX = VK_STRUCTURE_TYPE_CU_MODULE_TEXTURING_MODE_CREATE_INFO_NVX, + eImageViewHandleInfoNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX, + eImageViewAddressPropertiesNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX, + eVideoEncodeH264CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_KHR, + eVideoEncodeH264SessionParametersCreateInfoKHR = 
VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoEncodeH264SessionParametersAddInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR, + eVideoEncodeH264PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PICTURE_INFO_KHR, + eVideoEncodeH264DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_KHR, + eVideoEncodeH264NaluSliceInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_INFO_KHR, + eVideoEncodeH264GopRemainingFrameInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_GOP_REMAINING_FRAME_INFO_KHR, + eVideoEncodeH264ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_INFO_KHR, + eVideoEncodeH264RateControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_KHR, + eVideoEncodeH264RateControlLayerInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_KHR, + eVideoEncodeH264SessionCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_CREATE_INFO_KHR, + eVideoEncodeH264QualityLevelPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_QUALITY_LEVEL_PROPERTIES_KHR, + eVideoEncodeH264SessionParametersGetInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_GET_INFO_KHR, + eVideoEncodeH264SessionParametersFeedbackInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_FEEDBACK_INFO_KHR, + eVideoEncodeH265CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_CAPABILITIES_KHR, + eVideoEncodeH265SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoEncodeH265SessionParametersAddInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR, + eVideoEncodeH265PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PICTURE_INFO_KHR, + eVideoEncodeH265DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_KHR, + eVideoEncodeH265NaluSliceSegmentInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_KHR, + eVideoEncodeH265GopRemainingFrameInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_GOP_REMAINING_FRAME_INFO_KHR, + eVideoEncodeH265ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_INFO_KHR, + eVideoEncodeH265RateControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_KHR, + eVideoEncodeH265RateControlLayerInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_KHR, + eVideoEncodeH265SessionCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_CREATE_INFO_KHR, + eVideoEncodeH265QualityLevelPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_QUALITY_LEVEL_PROPERTIES_KHR, + eVideoEncodeH265SessionParametersGetInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_GET_INFO_KHR, + eVideoEncodeH265SessionParametersFeedbackInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_FEEDBACK_INFO_KHR, + eVideoDecodeH264CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_KHR, + eVideoDecodeH264PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_KHR, + eVideoDecodeH264ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_INFO_KHR, + eVideoDecodeH264SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoDecodeH264SessionParametersAddInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR, + eVideoDecodeH264DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR, + eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD, #if defined( VK_USE_PLATFORM_GGP ) 
eStreamDescriptorSurfaceCreateInfoGGP = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP, #endif /*VK_USE_PLATFORM_GGP*/ @@ -421,11 +892,8 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_USE_PLATFORM_VI_NN ) eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN, #endif /*VK_USE_PLATFORM_VI_NN*/ - eImageViewAstcDecodeModeEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT, - ePhysicalDeviceAstcDecodeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT, - ePipelineRobustnessCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO_EXT, - ePhysicalDevicePipelineRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT, - ePhysicalDevicePipelineRobustnessPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT, + eImageViewAstcDecodeModeEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT, + ePhysicalDeviceAstcDecodeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT, #if defined( VK_USE_PLATFORM_WIN32_KHR ) eImportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR, eExportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR, @@ -444,7 +912,6 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_WIN32_KHR*/ eImportSemaphoreFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR, eSemaphoreGetFdInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR, - ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR, eCommandBufferInheritanceConditionalRenderingInfoEXT = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT, ePhysicalDeviceConditionalRenderingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT, eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT, @@ -457,6 +924,7 @@ namespace VULKAN_HPP_NAMESPACE eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT, ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE, ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX, + eMultiviewPerViewAttributesInfoNVX = VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX, ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV, ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT, ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT, @@ -465,6 +933,7 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceDepthClipEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT, ePipelineRasterizationDepthClipStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT, eHdrMetadataEXT = VK_STRUCTURE_TYPE_HDR_METADATA_EXT, + ePhysicalDeviceRelaxedLineRasterizationFeaturesIMG = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RELAXED_LINE_RASTERIZATION_FEATURES_IMG, eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR, #if defined( VK_USE_PLATFORM_WIN32_KHR ) eImportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR, @@ -508,6 +977,15 @@ namespace VULKAN_HPP_NAMESPACE eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID, 
eAndroidHardwareBufferFormatProperties2ANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID, #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + ePhysicalDeviceShaderEnqueueFeaturesAMDX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_FEATURES_AMDX, + ePhysicalDeviceShaderEnqueuePropertiesAMDX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_PROPERTIES_AMDX, + eExecutionGraphPipelineScratchSizeAMDX = VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX, + eExecutionGraphPipelineCreateInfoAMDX = VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX, + ePipelineShaderStageNodeCreateInfoAMDX = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eAttachmentSampleCountInfoAMD = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, + eAttachmentSampleCountInfoNV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV, eSampleLocationsInfoEXT = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, eRenderPassSampleLocationsBeginInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT, ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT, @@ -577,27 +1055,18 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceExternalMemoryHostPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT, ePhysicalDeviceShaderClockFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR, ePipelineCompilerControlCreateInfoAMD = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD, - eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT, ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD, -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - eVideoDecodeH265CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_EXT, - eVideoDecodeH265SessionParametersCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT, - eVideoDecodeH265SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT, - eVideoDecodeH265ProfileInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_EXT, - eVideoDecodeH265PictureInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_EXT, - eVideoDecodeH265DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_EXT, -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - eDeviceQueueGlobalPriorityCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR, - ePhysicalDeviceGlobalPriorityQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR, - eQueueFamilyGlobalPriorityPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR, - eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD, - ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT, - ePipelineVertexInputDivisorStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT, - ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT, + eVideoDecodeH265CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_KHR, + eVideoDecodeH265SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoDecodeH265SessionParametersAddInfoKHR = 
VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR, + eVideoDecodeH265ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_KHR, + eVideoDecodeH265PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_KHR, + eVideoDecodeH265DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR, + eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD, + ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT, #if defined( VK_USE_PLATFORM_GGP ) ePresentFrameTokenGGP = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP, #endif /*VK_USE_PLATFORM_GGP*/ - ePhysicalDeviceComputeShaderDerivativesFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV, ePhysicalDeviceMeshShaderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV, ePhysicalDeviceMeshShaderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV, ePhysicalDeviceShaderImageFootprintFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV, @@ -605,8 +1074,11 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceExclusiveScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV, eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV, eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV, + eQueueFamilyCheckpointProperties2NV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV, + eCheckpointData2NV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV, ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL, eQueryPoolPerformanceQueryCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL, + eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL, eInitializePerformanceApiInfoINTEL = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL, ePerformanceMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL, ePerformanceStreamMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL, @@ -624,20 +1096,24 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceFragmentDensityMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT, ePhysicalDeviceFragmentDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT, eRenderPassFragmentDensityMapCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT, + eRenderingFragmentDensityMapAttachmentInfoEXT = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT, eFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR, ePipelineFragmentShadingRateStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR, ePhysicalDeviceFragmentShadingRatePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR, ePhysicalDeviceFragmentShadingRateFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR, ePhysicalDeviceFragmentShadingRateKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR, + eRenderingFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR, ePhysicalDeviceShaderCoreProperties2AMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD, 
ePhysicalDeviceCoherentMemoryFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD, ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT, + ePhysicalDeviceShaderQuadControlFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_QUAD_CONTROL_FEATURES_KHR, ePhysicalDeviceMemoryBudgetPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT, ePhysicalDeviceMemoryPriorityFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT, eMemoryPriorityAllocateInfoEXT = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT, eSurfaceProtectedCapabilitiesKHR = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR, ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV, ePhysicalDeviceBufferDeviceAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT, + ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT, eBufferDeviceAddressCreateInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT, eValidationFeaturesEXT = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT, ePhysicalDevicePresentWaitFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR, @@ -658,19 +1134,28 @@ namespace VULKAN_HPP_NAMESPACE eSurfaceFullScreenExclusiveWin32InfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT, #endif /*VK_USE_PLATFORM_WIN32_KHR*/ eHeadlessSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT, - ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT, - ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT, - ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT, ePhysicalDeviceShaderAtomicFloatFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT, - ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT, ePhysicalDeviceExtendedDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT, ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR, ePipelineInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR, + ePipelineInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_INFO_EXT, ePipelineExecutablePropertiesKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR, ePipelineExecutableInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR, ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR, ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR, + ePhysicalDeviceMapMemoryPlacedFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_FEATURES_EXT, + ePhysicalDeviceMapMemoryPlacedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_PROPERTIES_EXT, + eMemoryMapPlacedInfoEXT = VK_STRUCTURE_TYPE_MEMORY_MAP_PLACED_INFO_EXT, ePhysicalDeviceShaderAtomicFloat2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT, + eSurfacePresentModeEXT = VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT, + eSurfacePresentScalingCapabilitiesEXT = VK_STRUCTURE_TYPE_SURFACE_PRESENT_SCALING_CAPABILITIES_EXT, + 
eSurfacePresentModeCompatibilityEXT = VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_COMPATIBILITY_EXT, + ePhysicalDeviceSwapchainMaintenance1FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT, + eSwapchainPresentFenceInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_FENCE_INFO_EXT, + eSwapchainPresentModesCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT, + eSwapchainPresentModeInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODE_INFO_EXT, + eSwapchainPresentScalingCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT, + eReleaseSwapchainImagesInfoEXT = VK_STRUCTURE_TYPE_RELEASE_SWAPCHAIN_IMAGES_INFO_EXT, ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV, eGraphicsShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV, eGraphicsPipelineShaderGroupsCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV, @@ -684,6 +1169,9 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceTexelBufferAlignmentFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT, eCommandBufferInheritanceRenderPassTransformInfoQCOM = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM, eRenderPassTransformBeginInfoQCOM = VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM, + ePhysicalDeviceDepthBiasControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_BIAS_CONTROL_FEATURES_EXT, + eDepthBiasInfoEXT = VK_STRUCTURE_TYPE_DEPTH_BIAS_INFO_EXT, + eDepthBiasRepresentationInfoEXT = VK_STRUCTURE_TYPE_DEPTH_BIAS_REPRESENTATION_INFO_EXT, ePhysicalDeviceDeviceMemoryReportFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT, eDeviceDeviceMemoryReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT, eDeviceMemoryReportCallbackDataEXT = VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT, @@ -693,17 +1181,32 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceCustomBorderColorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT, ePhysicalDeviceCustomBorderColorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT, ePipelineLibraryCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR, + ePhysicalDevicePresentBarrierFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV, + eSurfaceCapabilitiesPresentBarrierNV = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_BARRIER_NV, + eSwapchainPresentBarrierCreateInfoNV = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV, ePresentIdKHR = VK_STRUCTURE_TYPE_PRESENT_ID_KHR, ePhysicalDevicePresentIdFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR, + eVideoEncodeInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR, + eVideoEncodeRateControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR, + eVideoEncodeRateControlLayerInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR, + eVideoEncodeCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_CAPABILITIES_KHR, + eVideoEncodeUsageInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_USAGE_INFO_KHR, + eQueryPoolVideoEncodeFeedbackCreateInfoKHR = VK_STRUCTURE_TYPE_QUERY_POOL_VIDEO_ENCODE_FEEDBACK_CREATE_INFO_KHR, + ePhysicalDeviceVideoEncodeQualityLevelInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR, + eVideoEncodeQualityLevelPropertiesKHR = 
VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUALITY_LEVEL_PROPERTIES_KHR, + eVideoEncodeQualityLevelInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR, + eVideoEncodeSessionParametersGetInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_GET_INFO_KHR, + eVideoEncodeSessionParametersFeedbackInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_FEEDBACK_INFO_KHR, + ePhysicalDeviceDiagnosticsConfigFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV, + eDeviceDiagnosticsConfigCreateInfoNV = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV, #if defined( VK_ENABLE_BETA_EXTENSIONS ) - eVideoEncodeInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR, - eVideoEncodeRateControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR, - eVideoEncodeRateControlLayerInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR, - eVideoEncodeCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_CAPABILITIES_KHR, - eVideoEncodeUsageInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_USAGE_INFO_KHR, + eCudaModuleCreateInfoNV = VK_STRUCTURE_TYPE_CUDA_MODULE_CREATE_INFO_NV, + eCudaFunctionCreateInfoNV = VK_STRUCTURE_TYPE_CUDA_FUNCTION_CREATE_INFO_NV, + eCudaLaunchInfoNV = VK_STRUCTURE_TYPE_CUDA_LAUNCH_INFO_NV, + ePhysicalDeviceCudaKernelLaunchFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_FEATURES_NV, + ePhysicalDeviceCudaKernelLaunchPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_PROPERTIES_NV, #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - ePhysicalDeviceDiagnosticsConfigFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV, - eDeviceDiagnosticsConfigCreateInfoNV = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV, + eQueryLowLatencySupportNV = VK_STRUCTURE_TYPE_QUERY_LOW_LATENCY_SUPPORT_NV, #if defined( VK_USE_PLATFORM_METAL_EXT ) eExportMetalObjectCreateInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT, eExportMetalObjectsInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECTS_INFO_EXT, @@ -718,13 +1221,25 @@ namespace VULKAN_HPP_NAMESPACE eExportMetalSharedEventInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT, eImportMetalSharedEventInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT, #endif /*VK_USE_PLATFORM_METAL_EXT*/ - eQueueFamilyCheckpointProperties2NV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV, - eCheckpointData2NV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV, + ePhysicalDeviceDescriptorBufferPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT, + ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT, + ePhysicalDeviceDescriptorBufferFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT, + eDescriptorAddressInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_ADDRESS_INFO_EXT, + eDescriptorGetInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_GET_INFO_EXT, + eBufferCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, + eImageCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, + eImageViewCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, + eSamplerCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, + eOpaqueCaptureDescriptorDataCreateInfoEXT = VK_STRUCTURE_TYPE_OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT, + eDescriptorBufferBindingInfoEXT = 
VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_INFO_EXT, + eDescriptorBufferBindingPushDescriptorBufferHandleEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT, + eAccelerationStructureCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT, ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT, eGraphicsPipelineLibraryCreateInfoEXT = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT, ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD, ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR, + ePhysicalDeviceFragmentShaderBarycentricFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV, ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR, ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR, ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV, @@ -742,11 +1257,12 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR, ePhysicalDeviceImageCompressionControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT, eImageCompressionControlEXT = VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT, - eSubresourceLayout2EXT = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT, - eImageSubresource2EXT = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT, eImageCompressionPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT, ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT, ePhysicalDevice4444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT, + ePhysicalDeviceFaultFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT, + eDeviceFaultCountsEXT = VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT, + eDeviceFaultInfoEXT = VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT, ePhysicalDeviceRgba10X6FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT, #if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) eDirectfbSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT, @@ -755,9 +1271,12 @@ namespace VULKAN_HPP_NAMESPACE eVertexInputBindingDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT, eVertexInputAttributeDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT, ePhysicalDeviceDrmPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT, + ePhysicalDeviceAddressBindingReportFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT, + eDeviceAddressBindingCallbackDataEXT = VK_STRUCTURE_TYPE_DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT, ePhysicalDeviceDepthClipControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT, ePipelineViewportDepthClipControlCreateInfoEXT = 
VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT, ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT, + ePhysicalDevicePresentModeFifoLatestReadyFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_MODE_FIFO_LATEST_READY_FEATURES_EXT, #if defined( VK_USE_PLATFORM_FUCHSIA ) eImportMemoryZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA, eMemoryZirconHandlePropertiesFUCHSIA = VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA, @@ -783,6 +1302,8 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceExternalMemoryRdmaFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV, ePipelinePropertiesIdentifierEXT = VK_STRUCTURE_TYPE_PIPELINE_PROPERTIES_IDENTIFIER_EXT, ePhysicalDevicePipelinePropertiesFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT, + ePhysicalDeviceFrameBoundaryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAME_BOUNDARY_FEATURES_EXT, + eFrameBoundaryEXT = VK_STRUCTURE_TYPE_FRAME_BOUNDARY_EXT, ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT, eSubpassResolvePerformanceQueryEXT = VK_STRUCTURE_TYPE_SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT, eMultisampledRenderToSingleSampledInfoEXT = VK_STRUCTURE_TYPE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT, @@ -790,219 +1311,278 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_USE_PLATFORM_SCREEN_QNX ) eScreenSurfaceCreateInfoQNX = VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX, #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - ePhysicalDeviceColorWriteEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT, - ePipelineColorWriteCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT, - ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT, - ePhysicalDeviceRayTracingMaintenance1FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR, - ePhysicalDeviceImageViewMinLodFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT, - eImageViewMinLodCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT, - ePhysicalDeviceMultiDrawFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT, - ePhysicalDeviceMultiDrawPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT, - ePhysicalDeviceImage2DViewOf3DFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT, + ePhysicalDeviceColorWriteEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT, + ePipelineColorWriteCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT, + ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT, + ePhysicalDeviceRayTracingMaintenance1FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR, + ePhysicalDeviceImageViewMinLodFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT, + eImageViewMinLodCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT, + ePhysicalDeviceMultiDrawFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT, + ePhysicalDeviceMultiDrawPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT, + 
ePhysicalDeviceImage2DViewOf3DFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT, + ePhysicalDeviceShaderTileImageFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_FEATURES_EXT, + ePhysicalDeviceShaderTileImagePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_PROPERTIES_EXT, + eMicromapBuildInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT, + eMicromapVersionInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT, + eCopyMicromapInfoEXT = VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT, + eCopyMicromapToMemoryInfoEXT = VK_STRUCTURE_TYPE_COPY_MICROMAP_TO_MEMORY_INFO_EXT, + eCopyMemoryToMicromapInfoEXT = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_MICROMAP_INFO_EXT, + ePhysicalDeviceOpacityMicromapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT, + ePhysicalDeviceOpacityMicromapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT, + eMicromapCreateInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT, + eMicromapBuildSizesInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT, + eAccelerationStructureTrianglesOpacityMicromapEXT = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + ePhysicalDeviceDisplacementMicromapFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_FEATURES_NV, + ePhysicalDeviceDisplacementMicromapPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_PROPERTIES_NV, + eAccelerationStructureTrianglesDisplacementMicromapNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_DISPLACEMENT_MICROMAP_NV, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_FEATURES_HUAWEI, + ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_PROPERTIES_HUAWEI, + ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_VRS_FEATURES_HUAWEI, ePhysicalDeviceBorderColorSwizzleFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT, eSamplerBorderColorComponentMappingCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT, ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT, + ePhysicalDeviceShaderCorePropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM, + eDeviceQueueShaderCoreControlCreateInfoARM = VK_STRUCTURE_TYPE_DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM, + ePhysicalDeviceSchedulingControlsFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM, + ePhysicalDeviceSchedulingControlsPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM, + ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_SLICED_VIEW_OF_3D_FEATURES_EXT, + eImageViewSlicedCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_SLICED_CREATE_INFO_EXT, ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE, eDescriptorSetBindingReferenceVALVE = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE, eDescriptorSetLayoutHostMappingInfoVALVE = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE, - ePhysicalDeviceDepthClampZeroOneFeaturesEXT = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT, ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT, + ePhysicalDeviceRenderPassStripedFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_FEATURES_ARM, + ePhysicalDeviceRenderPassStripedPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_PROPERTIES_ARM, + eRenderPassStripeBeginInfoARM = VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_BEGIN_INFO_ARM, + eRenderPassStripeInfoARM = VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_INFO_ARM, + eRenderPassStripeSubmitInfoARM = VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_SUBMIT_INFO_ARM, ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM, ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM, eSubpassFragmentDensityMapOffsetEndInfoQCOM = VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM, + ePhysicalDeviceCopyMemoryIndirectFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV, + ePhysicalDeviceCopyMemoryIndirectPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV, + ePhysicalDeviceMemoryDecompressionFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV, + ePhysicalDeviceMemoryDecompressionPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV, + ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_COMPUTE_FEATURES_NV, + eComputePipelineIndirectBufferInfoNV = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_INDIRECT_BUFFER_INFO_NV, + ePipelineIndirectDeviceAddressInfoNV = VK_STRUCTURE_TYPE_PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV, + ePhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_LINEAR_SWEPT_SPHERES_FEATURES_NV, + eAccelerationStructureGeometryLinearSweptSpheresDataNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_LINEAR_SWEPT_SPHERES_DATA_NV, + eAccelerationStructureGeometrySpheresDataNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_SPHERES_DATA_NV, ePhysicalDeviceLinearColorAttachmentFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV, + ePhysicalDeviceShaderMaximalReconvergenceFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MAXIMAL_RECONVERGENCE_FEATURES_KHR, ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT, ePhysicalDeviceImageProcessingFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM, ePhysicalDeviceImageProcessingPropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM, eImageViewSampleWeightCreateInfoQCOM = VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM, + ePhysicalDeviceNestedCommandBufferFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_FEATURES_EXT, + ePhysicalDeviceNestedCommandBufferPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_PROPERTIES_EXT, + eExternalMemoryAcquireUnmodifiedEXT = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXT, + ePhysicalDeviceExtendedDynamicState3FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT, + ePhysicalDeviceExtendedDynamicState3PropertiesEXT = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT, ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT, eRenderPassCreationControlEXT = VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT, eRenderPassCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT, eRenderPassSubpassFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT, + eDirectDriverLoadingInfoLUNARG = VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_INFO_LUNARG, + eDirectDriverLoadingListLUNARG = VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG, ePhysicalDeviceShaderModuleIdentifierFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT, ePhysicalDeviceShaderModuleIdentifierPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT, ePipelineShaderStageModuleIdentifierCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT, eShaderModuleIdentifierEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT, ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT, + ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM, + ePhysicalDeviceOpticalFlowFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV, + ePhysicalDeviceOpticalFlowPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV, + eOpticalFlowImageFormatInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV, + eOpticalFlowImageFormatPropertiesNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV, + eOpticalFlowSessionCreateInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV, + eOpticalFlowExecuteInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV, + eOpticalFlowSessionCreatePrivateDataInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV, ePhysicalDeviceLegacyDitheringFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + ePhysicalDeviceExternalFormatResolveFeaturesANDROID = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_FEATURES_ANDROID, + ePhysicalDeviceExternalFormatResolvePropertiesANDROID = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_PROPERTIES_ANDROID, + eAndroidHardwareBufferFormatResolvePropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_RESOLVE_PROPERTIES_ANDROID, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + ePhysicalDeviceAntiLagFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ANTI_LAG_FEATURES_AMD, + eAntiLagDataAMD = VK_STRUCTURE_TYPE_ANTI_LAG_DATA_AMD, + eAntiLagPresentationInfoAMD = VK_STRUCTURE_TYPE_ANTI_LAG_PRESENTATION_INFO_AMD, + ePhysicalDeviceRayTracingPositionFetchFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR, + ePhysicalDeviceShaderObjectFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT, + ePhysicalDeviceShaderObjectPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT, + eShaderCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT, + ePhysicalDevicePipelineBinaryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_FEATURES_KHR, + ePipelineBinaryCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_BINARY_CREATE_INFO_KHR, + 
ePipelineBinaryInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_BINARY_INFO_KHR, + ePipelineBinaryKeyKHR = VK_STRUCTURE_TYPE_PIPELINE_BINARY_KEY_KHR, + ePhysicalDevicePipelineBinaryPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_PROPERTIES_KHR, + eReleaseCapturedPipelineDataInfoKHR = VK_STRUCTURE_TYPE_RELEASE_CAPTURED_PIPELINE_DATA_INFO_KHR, + ePipelineBinaryDataInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_BINARY_DATA_INFO_KHR, + ePipelineCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_CREATE_INFO_KHR, + eDevicePipelineBinaryInternalCacheControlKHR = VK_STRUCTURE_TYPE_DEVICE_PIPELINE_BINARY_INTERNAL_CACHE_CONTROL_KHR, + ePipelineBinaryHandlesInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_BINARY_HANDLES_INFO_KHR, ePhysicalDeviceTilePropertiesFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM, eTilePropertiesQCOM = VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM, ePhysicalDeviceAmigoProfilingFeaturesSEC = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC, eAmigoProfilingSubmitInfoSEC = VK_STRUCTURE_TYPE_AMIGO_PROFILING_SUBMIT_INFO_SEC, + ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_VIEWPORTS_FEATURES_QCOM, + ePhysicalDeviceRayTracingInvocationReorderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV, + ePhysicalDeviceRayTracingInvocationReorderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV, + ePhysicalDeviceCooperativeVectorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_VECTOR_FEATURES_NV, + ePhysicalDeviceCooperativeVectorPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_VECTOR_PROPERTIES_NV, + eCooperativeVectorPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_VECTOR_PROPERTIES_NV, + eConvertCooperativeVectorMatrixInfoNV = VK_STRUCTURE_TYPE_CONVERT_COOPERATIVE_VECTOR_MATRIX_INFO_NV, + ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_FEATURES_NV, + ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_PROPERTIES_NV, ePhysicalDeviceMutableDescriptorTypeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT, + ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE, eMutableDescriptorTypeCreateInfoEXT = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT, - eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR, - eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR, - eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR, - eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR, - eAttachmentSampleCountInfoNV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV, - eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR, - eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR, - eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR, - eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR, - eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR, - eBlitImageInfo2KHR = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR, - eBufferCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR, - eBufferDeviceAddressInfoEXT = 
VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT, - eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR, - eBufferImageCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR, - eBufferMemoryBarrier2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR, - eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR, - eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR, - eCommandBufferInheritanceRenderingInfoKHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO_KHR, - eCommandBufferSubmitInfoKHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR, - eCopyBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR, - eCopyBufferToImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR, - eCopyImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR, - eCopyImageToBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR, - eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT, - eDependencyInfoKHR = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR, - eDescriptorPoolInlineUniformBlockCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT, - eDescriptorSetLayoutBindingFlagsCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT, - eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR, - eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT, - eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT, - eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR, - eDeviceBufferMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR, - eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR, - eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR, - eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR, - eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR, - eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR, - eDeviceImageMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR, - eDeviceMemoryOpaqueCaptureAddressInfoKHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR, - eDevicePrivateDataCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT, - eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT, - eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR, - eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR, - eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR, - eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR, - eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR, - eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR, - eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR, - eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR, - eExternalSemaphorePropertiesKHR = 
VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR, - eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR, - eFormatProperties3KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR, - eFramebufferAttachmentsCreateInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR, - eFramebufferAttachmentImageInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR, - eImageBlit2KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR, - eImageCopy2KHR = VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR, - eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR, - eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR, - eImageMemoryBarrier2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR, - eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR, - eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR, - eImageResolve2KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR, - eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR, - eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT, - eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR, - eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR, - eMemoryBarrier2KHR = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR, - eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR, - eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR, - eMemoryOpaqueCaptureAddressAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR, - eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR, eMutableDescriptorTypeCreateInfoVALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE, - ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR, - ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR, - ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT, - ePhysicalDeviceBufferDeviceAddressFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR, - ePhysicalDeviceDepthStencilResolvePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR, - ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT, - ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT, - ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR, - ePhysicalDeviceDynamicRenderingFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES_KHR, - ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR, - ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR, - ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR, - ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR, - ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR, - ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR, - ePhysicalDeviceFloatControlsPropertiesKHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR, - ePhysicalDeviceFragmentShaderBarycentricFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV, - ePhysicalDeviceGlobalPriorityQueryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT, - ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR, - ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT, - ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR, - ePhysicalDeviceImagelessFramebufferFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR, - ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR, - ePhysicalDeviceImageRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT, - ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT, - ePhysicalDeviceInlineUniformBlockPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT, - ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR, - ePhysicalDeviceMaintenance4FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR, - ePhysicalDeviceMaintenance4PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR, - ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR, - ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR, - ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR, - ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE, - ePhysicalDevicePipelineCreationCacheControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT, - ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR, - ePhysicalDevicePrivateDataFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT, - ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR, - ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM, - ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT, - ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR, - ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT, - ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR, - ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR, - ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT, - ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES, - ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR, - 
ePhysicalDeviceShaderIntegerDotProductFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES_KHR, - ePhysicalDeviceShaderIntegerDotProductPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES_KHR, - ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR, - ePhysicalDeviceShaderTerminateInvocationFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR, - ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR, - ePhysicalDeviceSubgroupSizeControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT, - ePhysicalDeviceSubgroupSizeControlPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT, - ePhysicalDeviceSynchronization2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR, - ePhysicalDeviceTexelBufferAlignmentPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT, - ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT, - ePhysicalDeviceTimelineSemaphoreFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR, - ePhysicalDeviceTimelineSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR, - ePhysicalDeviceToolPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT, - ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR, - ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR, - ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, - ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR, - ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR, - ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR, - ePipelineCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT, - ePipelineInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_INFO_EXT, - ePipelineRenderingCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO_KHR, - ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT, - ePipelineTessellationDomainOriginStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR, - ePrivateDataSlotCreateInfoEXT = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT, - eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL, - eQueueFamilyGlobalPriorityPropertiesEXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT, - eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR, - eRenderingAttachmentInfoKHR = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO_KHR, - eRenderingInfoKHR = VK_STRUCTURE_TYPE_RENDERING_INFO_KHR, - eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR, - eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR, - eRenderPassInputAttachmentAspectCreateInfoKHR = 
VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR, - eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR, - eResolveImageInfo2KHR = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR, - eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT, - eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR, - eSamplerYcbcrConversionImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR, - eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR, - eSemaphoreSignalInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR, - eSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR, - eSemaphoreTypeCreateInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR, - eSemaphoreWaitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR, - eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR, - eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR, - eSubmitInfo2KHR = VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR, - eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR, - eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR, - eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR, - eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR, - eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR, - eTimelineSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR, - eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT + ePhysicalDeviceLegacyVertexAttributesFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_FEATURES_EXT, + ePhysicalDeviceLegacyVertexAttributesPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_PROPERTIES_EXT, + eLayerSettingsCreateInfoEXT = VK_STRUCTURE_TYPE_LAYER_SETTINGS_CREATE_INFO_EXT, + ePhysicalDeviceShaderCoreBuiltinsFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM, + ePhysicalDeviceShaderCoreBuiltinsPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM, + ePhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_LIBRARY_GROUP_HANDLES_FEATURES_EXT, + ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_FEATURES_EXT, + eLatencySleepModeInfoNV = VK_STRUCTURE_TYPE_LATENCY_SLEEP_MODE_INFO_NV, + eLatencySleepInfoNV = VK_STRUCTURE_TYPE_LATENCY_SLEEP_INFO_NV, + eSetLatencyMarkerInfoNV = VK_STRUCTURE_TYPE_SET_LATENCY_MARKER_INFO_NV, + eGetLatencyMarkerInfoNV = VK_STRUCTURE_TYPE_GET_LATENCY_MARKER_INFO_NV, + eLatencyTimingsFrameReportNV = VK_STRUCTURE_TYPE_LATENCY_TIMINGS_FRAME_REPORT_NV, + eLatencySubmissionPresentIdNV = VK_STRUCTURE_TYPE_LATENCY_SUBMISSION_PRESENT_ID_NV, + eOutOfBandQueueTypeInfoNV = VK_STRUCTURE_TYPE_OUT_OF_BAND_QUEUE_TYPE_INFO_NV, + eSwapchainLatencyCreateInfoNV = VK_STRUCTURE_TYPE_SWAPCHAIN_LATENCY_CREATE_INFO_NV, + eLatencySurfaceCapabilitiesNV = VK_STRUCTURE_TYPE_LATENCY_SURFACE_CAPABILITIES_NV, + ePhysicalDeviceCooperativeMatrixFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_KHR, + eCooperativeMatrixPropertiesKHR = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_KHR, + 
ePhysicalDeviceCooperativeMatrixPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_KHR, + ePhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_RENDER_AREAS_FEATURES_QCOM, + eMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM = VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_RENDER_AREAS_RENDER_PASS_BEGIN_INFO_QCOM, + ePhysicalDeviceComputeShaderDerivativesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_KHR, + ePhysicalDeviceComputeShaderDerivativesFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV, + ePhysicalDeviceComputeShaderDerivativesPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_PROPERTIES_KHR, + eVideoDecodeAv1CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_CAPABILITIES_KHR, + eVideoDecodeAv1PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PICTURE_INFO_KHR, + eVideoDecodeAv1ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PROFILE_INFO_KHR, + eVideoDecodeAv1SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoDecodeAv1DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_DPB_SLOT_INFO_KHR, + eVideoEncodeAv1CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_CAPABILITIES_KHR, + eVideoEncodeAv1SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoEncodeAv1PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_PICTURE_INFO_KHR, + eVideoEncodeAv1DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_DPB_SLOT_INFO_KHR, + ePhysicalDeviceVideoEncodeAv1FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_AV1_FEATURES_KHR, + eVideoEncodeAv1ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_PROFILE_INFO_KHR, + eVideoEncodeAv1RateControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_RATE_CONTROL_INFO_KHR, + eVideoEncodeAv1RateControlLayerInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_RATE_CONTROL_LAYER_INFO_KHR, + eVideoEncodeAv1QualityLevelPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_QUALITY_LEVEL_PROPERTIES_KHR, + eVideoEncodeAv1SessionCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_SESSION_CREATE_INFO_KHR, + eVideoEncodeAv1GopRemainingFrameInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_GOP_REMAINING_FRAME_INFO_KHR, + ePhysicalDeviceVideoMaintenance1FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_1_FEATURES_KHR, + eVideoInlineQueryInfoKHR = VK_STRUCTURE_TYPE_VIDEO_INLINE_QUERY_INFO_KHR, + ePhysicalDevicePerStageDescriptorSetFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PER_STAGE_DESCRIPTOR_SET_FEATURES_NV, + ePhysicalDeviceImageProcessing2FeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_2_FEATURES_QCOM, + ePhysicalDeviceImageProcessing2PropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_2_PROPERTIES_QCOM, + eSamplerBlockMatchWindowCreateInfoQCOM = VK_STRUCTURE_TYPE_SAMPLER_BLOCK_MATCH_WINDOW_CREATE_INFO_QCOM, + eSamplerCubicWeightsCreateInfoQCOM = VK_STRUCTURE_TYPE_SAMPLER_CUBIC_WEIGHTS_CREATE_INFO_QCOM, + ePhysicalDeviceCubicWeightsFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_WEIGHTS_FEATURES_QCOM, + eBlitImageCubicWeightsInfoQCOM = VK_STRUCTURE_TYPE_BLIT_IMAGE_CUBIC_WEIGHTS_INFO_QCOM, + ePhysicalDeviceYcbcrDegammaFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_DEGAMMA_FEATURES_QCOM, + eSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_YCBCR_DEGAMMA_CREATE_INFO_QCOM, + 
ePhysicalDeviceCubicClampFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_CLAMP_FEATURES_QCOM, + ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_FEATURES_EXT, +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + eScreenBufferPropertiesQNX = VK_STRUCTURE_TYPE_SCREEN_BUFFER_PROPERTIES_QNX, + eScreenBufferFormatPropertiesQNX = VK_STRUCTURE_TYPE_SCREEN_BUFFER_FORMAT_PROPERTIES_QNX, + eImportScreenBufferInfoQNX = VK_STRUCTURE_TYPE_IMPORT_SCREEN_BUFFER_INFO_QNX, + eExternalFormatQNX = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_QNX, + ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_SCREEN_BUFFER_FEATURES_QNX, +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + ePhysicalDeviceLayeredDriverPropertiesMSFT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_DRIVER_PROPERTIES_MSFT, + eCalibratedTimestampInfoKHR = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR, + eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT, + eSetDescriptorBufferOffsetsInfoEXT = VK_STRUCTURE_TYPE_SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT, + eBindDescriptorBufferEmbeddedSamplersInfoEXT = VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT, + ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV, + eDisplaySurfaceStereoCreateInfoNV = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_STEREO_CREATE_INFO_NV, + eDisplayModeStereoPropertiesNV = VK_STRUCTURE_TYPE_DISPLAY_MODE_STEREO_PROPERTIES_NV, + eVideoEncodeQuantizationMapCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_CAPABILITIES_KHR, + eVideoFormatQuantizationMapPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_FORMAT_QUANTIZATION_MAP_PROPERTIES_KHR, + eVideoEncodeQuantizationMapInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_INFO_KHR, + eVideoEncodeQuantizationMapSessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_SESSION_PARAMETERS_CREATE_INFO_KHR, + ePhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_QUANTIZATION_MAP_FEATURES_KHR, + eVideoEncodeH264QuantizationMapCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_QUANTIZATION_MAP_CAPABILITIES_KHR, + eVideoEncodeH265QuantizationMapCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_QUANTIZATION_MAP_CAPABILITIES_KHR, + eVideoFormatH265QuantizationMapPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_FORMAT_H265_QUANTIZATION_MAP_PROPERTIES_KHR, + eVideoEncodeAv1QuantizationMapCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_QUANTIZATION_MAP_CAPABILITIES_KHR, + eVideoFormatAv1QuantizationMapPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_FORMAT_AV1_QUANTIZATION_MAP_PROPERTIES_KHR, + ePhysicalDeviceRawAccessChainsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAW_ACCESS_CHAINS_FEATURES_NV, + ePhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_RELAXED_EXTENDED_INSTRUCTION_FEATURES_KHR, + ePhysicalDeviceCommandBufferInheritanceFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMMAND_BUFFER_INHERITANCE_FEATURES_NV, + ePhysicalDeviceMaintenance7FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_FEATURES_KHR, + ePhysicalDeviceMaintenance7PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_PROPERTIES_KHR, + ePhysicalDeviceLayeredApiPropertiesListKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_LIST_KHR, + ePhysicalDeviceLayeredApiPropertiesKHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_KHR, + ePhysicalDeviceLayeredApiVulkanPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_VULKAN_PROPERTIES_KHR, + ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV, + ePhysicalDeviceShaderReplicatedCompositesFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_REPLICATED_COMPOSITES_FEATURES_EXT, + ePhysicalDeviceRayTracingValidationFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_VALIDATION_FEATURES_NV, + ePhysicalDeviceClusterAccelerationStructureFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_ACCELERATION_STRUCTURE_FEATURES_NV, + ePhysicalDeviceClusterAccelerationStructurePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_ACCELERATION_STRUCTURE_PROPERTIES_NV, + eClusterAccelerationStructureClustersBottomLevelInputNV = VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_CLUSTERS_BOTTOM_LEVEL_INPUT_NV, + eClusterAccelerationStructureTriangleClusterInputNV = VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_TRIANGLE_CLUSTER_INPUT_NV, + eClusterAccelerationStructureMoveObjectsInputNV = VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_MOVE_OBJECTS_INPUT_NV, + eClusterAccelerationStructureInputInfoNV = VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_INPUT_INFO_NV, + eClusterAccelerationStructureCommandsInfoNV = VK_STRUCTURE_TYPE_CLUSTER_ACCELERATION_STRUCTURE_COMMANDS_INFO_NV, + eRayTracingPipelineClusterAccelerationStructureCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CLUSTER_ACCELERATION_STRUCTURE_CREATE_INFO_NV, + ePhysicalDevicePartitionedAccelerationStructureFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PARTITIONED_ACCELERATION_STRUCTURE_FEATURES_NV, + ePhysicalDevicePartitionedAccelerationStructurePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PARTITIONED_ACCELERATION_STRUCTURE_PROPERTIES_NV, + eWriteDescriptorSetPartitionedAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_PARTITIONED_ACCELERATION_STRUCTURE_NV, + ePartitionedAccelerationStructureInstancesInputNV = VK_STRUCTURE_TYPE_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCES_INPUT_NV, + eBuildPartitionedAccelerationStructureInfoNV = VK_STRUCTURE_TYPE_BUILD_PARTITIONED_ACCELERATION_STRUCTURE_INFO_NV, + ePartitionedAccelerationStructureFlagsNV = VK_STRUCTURE_TYPE_PARTITIONED_ACCELERATION_STRUCTURE_FLAGS_NV, + ePhysicalDeviceDeviceGeneratedCommandsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_EXT, + ePhysicalDeviceDeviceGeneratedCommandsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_EXT, + eGeneratedCommandsMemoryRequirementsInfoEXT = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_EXT, + eIndirectExecutionSetCreateInfoEXT = VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_CREATE_INFO_EXT, + eGeneratedCommandsInfoEXT = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_EXT, + eIndirectCommandsLayoutCreateInfoEXT = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_EXT, + eIndirectCommandsLayoutTokenEXT = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_EXT, + eWriteIndirectExecutionSetPipelineEXT = VK_STRUCTURE_TYPE_WRITE_INDIRECT_EXECUTION_SET_PIPELINE_EXT, + eWriteIndirectExecutionSetShaderEXT = VK_STRUCTURE_TYPE_WRITE_INDIRECT_EXECUTION_SET_SHADER_EXT, + eIndirectExecutionSetPipelineInfoEXT = VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_PIPELINE_INFO_EXT, + eIndirectExecutionSetShaderInfoEXT = VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_SHADER_INFO_EXT, + 
eIndirectExecutionSetShaderLayoutInfoEXT = VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_SHADER_LAYOUT_INFO_EXT, + eGeneratedCommandsPipelineInfoEXT = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_PIPELINE_INFO_EXT, + eGeneratedCommandsShaderInfoEXT = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_SHADER_INFO_EXT, + ePhysicalDeviceMaintenance8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_8_FEATURES_KHR, + eMemoryBarrierAccessFlags3KHR = VK_STRUCTURE_TYPE_MEMORY_BARRIER_ACCESS_FLAGS_3_KHR, + ePhysicalDeviceImageAlignmentControlFeaturesMESA = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_FEATURES_MESA, + ePhysicalDeviceImageAlignmentControlPropertiesMESA = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_PROPERTIES_MESA, + eImageAlignmentControlCreateInfoMESA = VK_STRUCTURE_TYPE_IMAGE_ALIGNMENT_CONTROL_CREATE_INFO_MESA, + ePhysicalDeviceDepthClampControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_CONTROL_FEATURES_EXT, + ePipelineViewportDepthClampControlCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLAMP_CONTROL_CREATE_INFO_EXT, + ePhysicalDeviceVideoMaintenance2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_2_FEATURES_KHR, + eVideoDecodeH264InlineSessionParametersInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_INLINE_SESSION_PARAMETERS_INFO_KHR, + eVideoDecodeH265InlineSessionParametersInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_INLINE_SESSION_PARAMETERS_INFO_KHR, + eVideoDecodeAv1InlineSessionParametersInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_INLINE_SESSION_PARAMETERS_INFO_KHR, + ePhysicalDeviceHdrVividFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HDR_VIVID_FEATURES_HUAWEI, + eHdrVividDynamicMetadataHUAWEI = VK_STRUCTURE_TYPE_HDR_VIVID_DYNAMIC_METADATA_HUAWEI, + ePhysicalDeviceCooperativeMatrix2FeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_FEATURES_NV, + eCooperativeMatrixFlexibleDimensionsPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_FLEXIBLE_DIMENSIONS_PROPERTIES_NV, + ePhysicalDeviceCooperativeMatrix2PropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_PROPERTIES_NV, + ePhysicalDevicePipelineOpacityMicromapFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_OPACITY_MICROMAP_FEATURES_ARM, +#if defined( VK_USE_PLATFORM_METAL_EXT ) + eImportMemoryMetalHandleInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_METAL_HANDLE_INFO_EXT, + eMemoryMetalHandlePropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_METAL_HANDLE_PROPERTIES_EXT, + eMemoryGetMetalHandleInfoEXT = VK_STRUCTURE_TYPE_MEMORY_GET_METAL_HANDLE_INFO_EXT, +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + ePhysicalDeviceDepthClampZeroOneFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_KHR, + ePhysicalDeviceDepthClampZeroOneFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT, + ePhysicalDeviceVertexAttributeRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_ROBUSTNESS_FEATURES_EXT }; enum class PipelineCacheHeaderVersion @@ -1012,44 +1592,45 @@ namespace VULKAN_HPP_NAMESPACE enum class ObjectType { - eUnknown = VK_OBJECT_TYPE_UNKNOWN, - eInstance = VK_OBJECT_TYPE_INSTANCE, - ePhysicalDevice = VK_OBJECT_TYPE_PHYSICAL_DEVICE, - eDevice = VK_OBJECT_TYPE_DEVICE, - eQueue = VK_OBJECT_TYPE_QUEUE, - eSemaphore = VK_OBJECT_TYPE_SEMAPHORE, - eCommandBuffer = VK_OBJECT_TYPE_COMMAND_BUFFER, - eFence = VK_OBJECT_TYPE_FENCE, - eDeviceMemory = VK_OBJECT_TYPE_DEVICE_MEMORY, - eBuffer = VK_OBJECT_TYPE_BUFFER, - eImage = VK_OBJECT_TYPE_IMAGE, - eEvent = VK_OBJECT_TYPE_EVENT, - 
eQueryPool = VK_OBJECT_TYPE_QUERY_POOL, - eBufferView = VK_OBJECT_TYPE_BUFFER_VIEW, - eImageView = VK_OBJECT_TYPE_IMAGE_VIEW, - eShaderModule = VK_OBJECT_TYPE_SHADER_MODULE, - ePipelineCache = VK_OBJECT_TYPE_PIPELINE_CACHE, - ePipelineLayout = VK_OBJECT_TYPE_PIPELINE_LAYOUT, - eRenderPass = VK_OBJECT_TYPE_RENDER_PASS, - ePipeline = VK_OBJECT_TYPE_PIPELINE, - eDescriptorSetLayout = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT, - eSampler = VK_OBJECT_TYPE_SAMPLER, - eDescriptorPool = VK_OBJECT_TYPE_DESCRIPTOR_POOL, - eDescriptorSet = VK_OBJECT_TYPE_DESCRIPTOR_SET, - eFramebuffer = VK_OBJECT_TYPE_FRAMEBUFFER, - eCommandPool = VK_OBJECT_TYPE_COMMAND_POOL, - eSamplerYcbcrConversion = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, - eDescriptorUpdateTemplate = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, - ePrivateDataSlot = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT, - eSurfaceKHR = VK_OBJECT_TYPE_SURFACE_KHR, - eSwapchainKHR = VK_OBJECT_TYPE_SWAPCHAIN_KHR, - eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR, - eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR, - eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT, -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - eVideoSessionKHR = VK_OBJECT_TYPE_VIDEO_SESSION_KHR, - eVideoSessionParametersKHR = VK_OBJECT_TYPE_VIDEO_SESSION_PARAMETERS_KHR, -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eUnknown = VK_OBJECT_TYPE_UNKNOWN, + eInstance = VK_OBJECT_TYPE_INSTANCE, + ePhysicalDevice = VK_OBJECT_TYPE_PHYSICAL_DEVICE, + eDevice = VK_OBJECT_TYPE_DEVICE, + eQueue = VK_OBJECT_TYPE_QUEUE, + eSemaphore = VK_OBJECT_TYPE_SEMAPHORE, + eCommandBuffer = VK_OBJECT_TYPE_COMMAND_BUFFER, + eFence = VK_OBJECT_TYPE_FENCE, + eDeviceMemory = VK_OBJECT_TYPE_DEVICE_MEMORY, + eBuffer = VK_OBJECT_TYPE_BUFFER, + eImage = VK_OBJECT_TYPE_IMAGE, + eEvent = VK_OBJECT_TYPE_EVENT, + eQueryPool = VK_OBJECT_TYPE_QUERY_POOL, + eBufferView = VK_OBJECT_TYPE_BUFFER_VIEW, + eImageView = VK_OBJECT_TYPE_IMAGE_VIEW, + eShaderModule = VK_OBJECT_TYPE_SHADER_MODULE, + ePipelineCache = VK_OBJECT_TYPE_PIPELINE_CACHE, + ePipelineLayout = VK_OBJECT_TYPE_PIPELINE_LAYOUT, + eRenderPass = VK_OBJECT_TYPE_RENDER_PASS, + ePipeline = VK_OBJECT_TYPE_PIPELINE, + eDescriptorSetLayout = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT, + eSampler = VK_OBJECT_TYPE_SAMPLER, + eDescriptorPool = VK_OBJECT_TYPE_DESCRIPTOR_POOL, + eDescriptorSet = VK_OBJECT_TYPE_DESCRIPTOR_SET, + eFramebuffer = VK_OBJECT_TYPE_FRAMEBUFFER, + eCommandPool = VK_OBJECT_TYPE_COMMAND_POOL, + eSamplerYcbcrConversion = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, + eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR, + eDescriptorUpdateTemplate = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, + eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR, + ePrivateDataSlot = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT, + ePrivateDataSlotEXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT, + eSurfaceKHR = VK_OBJECT_TYPE_SURFACE_KHR, + eSwapchainKHR = VK_OBJECT_TYPE_SWAPCHAIN_KHR, + eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR, + eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR, + eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT, + eVideoSessionKHR = VK_OBJECT_TYPE_VIDEO_SESSION_KHR, + eVideoSessionParametersKHR = VK_OBJECT_TYPE_VIDEO_SESSION_PARAMETERS_KHR, eCuModuleNVX = VK_OBJECT_TYPE_CU_MODULE_NVX, eCuFunctionNVX = VK_OBJECT_TYPE_CU_FUNCTION_NVX, eDebugUtilsMessengerEXT = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT, @@ -1059,22 +1640,31 @@ namespace VULKAN_HPP_NAMESPACE ePerformanceConfigurationINTEL = VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL, 
eDeferredOperationKHR = VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR, eIndirectCommandsLayoutNV = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eCudaModuleNV = VK_OBJECT_TYPE_CUDA_MODULE_NV, + eCudaFunctionNV = VK_OBJECT_TYPE_CUDA_FUNCTION_NV, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) eBufferCollectionFUCHSIA = VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA, #endif /*VK_USE_PLATFORM_FUCHSIA*/ - eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR, - ePrivateDataSlotEXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT, - eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR + eMicromapEXT = VK_OBJECT_TYPE_MICROMAP_EXT, + eOpticalFlowSessionNV = VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV, + eShaderEXT = VK_OBJECT_TYPE_SHADER_EXT, + ePipelineBinaryKHR = VK_OBJECT_TYPE_PIPELINE_BINARY_KHR, + eIndirectCommandsLayoutEXT = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_EXT, + eIndirectExecutionSetEXT = VK_OBJECT_TYPE_INDIRECT_EXECUTION_SET_EXT }; enum class VendorId { + eKhronos = VK_VENDOR_ID_KHRONOS, eVIV = VK_VENDOR_ID_VIV, eVSI = VK_VENDOR_ID_VSI, eKazan = VK_VENDOR_ID_KAZAN, eCodeplay = VK_VENDOR_ID_CODEPLAY, eMESA = VK_VENDOR_ID_MESA, - ePocl = VK_VENDOR_ID_POCL + ePocl = VK_VENDOR_ID_POCL, + eMobileye = VK_VENDOR_ID_MOBILEYE }; enum class Format @@ -1265,59 +1855,117 @@ namespace VULKAN_HPP_NAMESPACE eAstc12x12UnormBlock = VK_FORMAT_ASTC_12x12_UNORM_BLOCK, eAstc12x12SrgbBlock = VK_FORMAT_ASTC_12x12_SRGB_BLOCK, eG8B8G8R8422Unorm = VK_FORMAT_G8B8G8R8_422_UNORM, + eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM_KHR, eB8G8R8G8422Unorm = VK_FORMAT_B8G8R8G8_422_UNORM, + eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM_KHR, eG8B8R83Plane420Unorm = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, + eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR, eG8B8R82Plane420Unorm = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, + eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR, eG8B8R83Plane422Unorm = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM, + eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR, eG8B8R82Plane422Unorm = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM, + eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR, eG8B8R83Plane444Unorm = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM, + eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR, eR10X6UnormPack16 = VK_FORMAT_R10X6_UNORM_PACK16, + eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16_KHR, eR10X6G10X6Unorm2Pack16 = VK_FORMAT_R10X6G10X6_UNORM_2PACK16, + eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR, eR10X6G10X6B10X6A10X6Unorm4Pack16 = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16, + eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR, eG10X6B10X6G10X6R10X6422Unorm4Pack16 = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, + eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR, eB10X6G10X6R10X6G10X6422Unorm4Pack16 = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, + eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR, eG10X6B10X6R10X63Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, + eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR, eG10X6B10X6R10X62Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, + eG10X6B10X6R10X62Plane420Unorm3Pack16KHR = 
VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR, eG10X6B10X6R10X63Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, + eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR, eG10X6B10X6R10X62Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, + eG10X6B10X6R10X62Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR, eG10X6B10X6R10X63Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, + eG10X6B10X6R10X63Plane444Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR, eR12X4UnormPack16 = VK_FORMAT_R12X4_UNORM_PACK16, + eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16_KHR, eR12X4G12X4Unorm2Pack16 = VK_FORMAT_R12X4G12X4_UNORM_2PACK16, + eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR, eR12X4G12X4B12X4A12X4Unorm4Pack16 = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16, + eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR, eG12X4B12X4G12X4R12X4422Unorm4Pack16 = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, + eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR, eB12X4G12X4R12X4G12X4422Unorm4Pack16 = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, + eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR, eG12X4B12X4R12X43Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, + eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR, eG12X4B12X4R12X42Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, + eG12X4B12X4R12X42Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR, eG12X4B12X4R12X43Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, + eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR, eG12X4B12X4R12X42Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, + eG12X4B12X4R12X42Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR, eG12X4B12X4R12X43Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, + eG12X4B12X4R12X43Plane444Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR, eG16B16G16R16422Unorm = VK_FORMAT_G16B16G16R16_422_UNORM, + eG16B16G16R16422UnormKHR = VK_FORMAT_G16B16G16R16_422_UNORM_KHR, eB16G16R16G16422Unorm = VK_FORMAT_B16G16R16G16_422_UNORM, + eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM_KHR, eG16B16R163Plane420Unorm = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, + eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR, eG16B16R162Plane420Unorm = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM, + eG16B16R162Plane420UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR, eG16B16R163Plane422Unorm = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, + eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR, eG16B16R162Plane422Unorm = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, + eG16B16R162Plane422UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR, eG16B16R163Plane444Unorm = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, + eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR, eG8B8R82Plane444Unorm = VK_FORMAT_G8_B8R8_2PLANE_444_UNORM, + eG8B8R82Plane444UnormEXT = VK_FORMAT_G8_B8R8_2PLANE_444_UNORM_EXT, eG10X6B10X6R10X62Plane444Unorm3Pack16 = 
VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16, + eG10X6B10X6R10X62Plane444Unorm3Pack16EXT = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16_EXT, eG12X4B12X4R12X42Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16, + eG12X4B12X4R12X42Plane444Unorm3Pack16EXT = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16_EXT, eG16B16R162Plane444Unorm = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM, + eG16B16R162Plane444UnormEXT = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM_EXT, eA4R4G4B4UnormPack16 = VK_FORMAT_A4R4G4B4_UNORM_PACK16, + eA4R4G4B4UnormPack16EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT, eA4B4G4R4UnormPack16 = VK_FORMAT_A4B4G4R4_UNORM_PACK16, + eA4B4G4R4UnormPack16EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT, eAstc4x4SfloatBlock = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK, + eAstc4x4SfloatBlockEXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT, eAstc5x4SfloatBlock = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK, + eAstc5x4SfloatBlockEXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT, eAstc5x5SfloatBlock = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK, + eAstc5x5SfloatBlockEXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT, eAstc6x5SfloatBlock = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK, + eAstc6x5SfloatBlockEXT = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT, eAstc6x6SfloatBlock = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK, + eAstc6x6SfloatBlockEXT = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT, eAstc8x5SfloatBlock = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK, + eAstc8x5SfloatBlockEXT = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT, eAstc8x6SfloatBlock = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK, + eAstc8x6SfloatBlockEXT = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT, eAstc8x8SfloatBlock = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK, + eAstc8x8SfloatBlockEXT = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT, eAstc10x5SfloatBlock = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK, + eAstc10x5SfloatBlockEXT = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT, eAstc10x6SfloatBlock = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK, + eAstc10x6SfloatBlockEXT = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT, eAstc10x8SfloatBlock = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK, + eAstc10x8SfloatBlockEXT = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT, eAstc10x10SfloatBlock = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK, + eAstc10x10SfloatBlockEXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT, eAstc12x10SfloatBlock = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK, + eAstc12x10SfloatBlockEXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT, eAstc12x12SfloatBlock = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK, + eAstc12x12SfloatBlockEXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT, + eA1B5G5R5UnormPack16 = VK_FORMAT_A1B5G5R5_UNORM_PACK16, + eA1B5G5R5UnormPack16KHR = VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR, + eA8Unorm = VK_FORMAT_A8_UNORM, + eA8UnormKHR = VK_FORMAT_A8_UNORM_KHR, ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG, ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG, ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG, @@ -1326,112 +1974,78 @@ namespace VULKAN_HPP_NAMESPACE ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG, ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG, ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG, - eA4B4G4R4UnormPack16EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT, - eA4R4G4B4UnormPack16EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT, - eAstc10x10SfloatBlockEXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT, - eAstc10x5SfloatBlockEXT = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT, - eAstc10x6SfloatBlockEXT = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT, - eAstc10x8SfloatBlockEXT = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT, - eAstc12x10SfloatBlockEXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT, - 
eAstc12x12SfloatBlockEXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT, - eAstc4x4SfloatBlockEXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT, - eAstc5x4SfloatBlockEXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT, - eAstc5x5SfloatBlockEXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT, - eAstc6x5SfloatBlockEXT = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT, - eAstc6x6SfloatBlockEXT = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT, - eAstc8x5SfloatBlockEXT = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT, - eAstc8x6SfloatBlockEXT = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT, - eAstc8x8SfloatBlockEXT = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT, - eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR, - eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR, - eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM_KHR, - eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM_KHR, - eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR, - eG10X6B10X6R10X62Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR, - eG10X6B10X6R10X62Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR, - eG10X6B10X6R10X62Plane444Unorm3Pack16EXT = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16_EXT, - eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR, - eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR, - eG10X6B10X6R10X63Plane444Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR, - eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR, - eG12X4B12X4R12X42Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR, - eG12X4B12X4R12X42Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR, - eG12X4B12X4R12X42Plane444Unorm3Pack16EXT = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16_EXT, - eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR, - eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR, - eG12X4B12X4R12X43Plane444Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR, - eG16B16G16R16422UnormKHR = VK_FORMAT_G16B16G16R16_422_UNORM_KHR, - eG16B16R162Plane420UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR, - eG16B16R162Plane422UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR, - eG16B16R162Plane444UnormEXT = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM_EXT, - eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR, - eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR, - eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR, - eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM_KHR, - eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR, - eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR, - eG8B8R82Plane444UnormEXT = VK_FORMAT_G8_B8R8_2PLANE_444_UNORM_EXT, - eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR, - eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR, - eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR, - eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR, - eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR, - eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16_KHR, - 
eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR, - eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR, - eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16_KHR + eR16G16Sfixed5NV = VK_FORMAT_R16G16_SFIXED5_NV, + eR16G16S105NV = VK_FORMAT_R16G16_S10_5_NV }; enum class FormatFeatureFlagBits : VkFormatFeatureFlags { - eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT, - eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT, - eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT, - eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT, - eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT, - eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT, - eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT, - eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT, - eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, - eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT, - eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT, - eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT, - eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT, - eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, - eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, - eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, - eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT, - eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT, - eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT, + eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT, + eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT, + eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT, + eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT, + eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT, + eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT, + eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT, + eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT, + eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, + eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT, + eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT, + eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT, + eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT, + eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, + eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR, + eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, + eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR, + eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, + eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR, + eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT, + eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR, + eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT, + eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR, + 
eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT, + eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR, eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, - eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT, - eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, - eSampledImageFilterMinmax = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT, -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - eVideoDecodeOutputKHR = VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR, - eVideoDecodeDpbKHR = VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR, -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR = + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR, + eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT, + eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR, + eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, + eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR, + eSampledImageFilterMinmax = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT, + eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT, + eVideoDecodeOutputKHR = VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR, + eVideoDecodeDpbKHR = VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR, eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR, eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT, + eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG, eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT, eFragmentShadingRateAttachmentKHR = VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - eVideoEncodeInputKHR = VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR, - eVideoEncodeDpbKHR = VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR, -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR, - eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR, - eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR, - eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG, - eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT, - eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR, - eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR = - VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR, - eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR, - eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR, - eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR, - eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR + eVideoEncodeInputKHR = VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR, + eVideoEncodeDpbKHR = VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR + }; + + using FormatFeatureFlags = 
Flags<FormatFeatureFlagBits>; + + template <> + struct FlagTraits<FormatFeatureFlagBits> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR FormatFeatureFlags allFlags = + FormatFeatureFlagBits::eSampledImage | FormatFeatureFlagBits::eStorageImage | FormatFeatureFlagBits::eStorageImageAtomic | + FormatFeatureFlagBits::eUniformTexelBuffer | FormatFeatureFlagBits::eStorageTexelBuffer | FormatFeatureFlagBits::eStorageTexelBufferAtomic | + FormatFeatureFlagBits::eVertexBuffer | FormatFeatureFlagBits::eColorAttachment | FormatFeatureFlagBits::eColorAttachmentBlend | + FormatFeatureFlagBits::eDepthStencilAttachment | FormatFeatureFlagBits::eBlitSrc | FormatFeatureFlagBits::eBlitDst | + FormatFeatureFlagBits::eSampledImageFilterLinear | FormatFeatureFlagBits::eTransferSrc | FormatFeatureFlagBits::eTransferDst | + FormatFeatureFlagBits::eMidpointChromaSamples | FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter | + FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter | + FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit | + FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable | FormatFeatureFlagBits::eDisjoint | + FormatFeatureFlagBits::eCositedChromaSamples | FormatFeatureFlagBits::eSampledImageFilterMinmax | FormatFeatureFlagBits::eVideoDecodeOutputKHR | + FormatFeatureFlagBits::eVideoDecodeDpbKHR | FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR | + FormatFeatureFlagBits::eSampledImageFilterCubicEXT | FormatFeatureFlagBits::eFragmentDensityMapEXT | + FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR | FormatFeatureFlagBits::eVideoEncodeInputKHR | FormatFeatureFlagBits::eVideoEncodeDpbKHR; }; enum class ImageCreateFlagBits : VkImageCreateFlags @@ -1442,29 +2056,47 @@ namespace VULKAN_HPP_NAMESPACE eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT, eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, eAlias = VK_IMAGE_CREATE_ALIAS_BIT, + eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR, eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT, + eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR, e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, + e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR, eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, + eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR, eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT, + eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR, eProtected = VK_IMAGE_CREATE_PROTECTED_BIT, eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT, + eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT_KHR, eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV, eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT, eSubsampledEXT = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT, + eDescriptorBufferCaptureReplayEXT = VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT, eMultisampledRenderToSingleSampledEXT = VK_IMAGE_CREATE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_BIT_EXT, e2DViewCompatibleEXT = VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT, eFragmentDensityMapOffsetQCOM = VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_QCOM, - e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR, - eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR, - eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR, - eDisjointKHR = 
VK_IMAGE_CREATE_DISJOINT_BIT_KHR, - eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR, - eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR + eVideoProfileIndependentKHR = VK_IMAGE_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR }; - enum class ImageTiling + using ImageCreateFlags = Flags<ImageCreateFlagBits>; + + template <> + struct FlagTraits<ImageCreateFlagBits> { - eOptimal = VK_IMAGE_TILING_OPTIMAL, + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ImageCreateFlags allFlags = + ImageCreateFlagBits::eSparseBinding | ImageCreateFlagBits::eSparseResidency | ImageCreateFlagBits::eSparseAliased | ImageCreateFlagBits::eMutableFormat | + ImageCreateFlagBits::eCubeCompatible | ImageCreateFlagBits::eAlias | ImageCreateFlagBits::eSplitInstanceBindRegions | + ImageCreateFlagBits::e2DArrayCompatible | ImageCreateFlagBits::eBlockTexelViewCompatible | ImageCreateFlagBits::eExtendedUsage | + ImageCreateFlagBits::eProtected | ImageCreateFlagBits::eDisjoint | ImageCreateFlagBits::eCornerSampledNV | + ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT | ImageCreateFlagBits::eSubsampledEXT | ImageCreateFlagBits::eDescriptorBufferCaptureReplayEXT | + ImageCreateFlagBits::eMultisampledRenderToSingleSampledEXT | ImageCreateFlagBits::e2DViewCompatibleEXT | + ImageCreateFlagBits::eFragmentDensityMapOffsetQCOM | ImageCreateFlagBits::eVideoProfileIndependentKHR; + }; + + enum class ImageTiling + { + eOptimal = VK_IMAGE_TILING_OPTIMAL, eLinear = VK_IMAGE_TILING_LINEAR, eDrmFormatModifierEXT = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT }; @@ -1478,31 +2110,48 @@ namespace VULKAN_HPP_NAMESPACE enum class ImageUsageFlagBits : VkImageUsageFlags { - eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT, - eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT, - eSampled = VK_IMAGE_USAGE_SAMPLED_BIT, - eStorage = VK_IMAGE_USAGE_STORAGE_BIT, - eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, - eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, - eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, - eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - eVideoDecodeDstKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR, - eVideoDecodeSrcKHR = VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR, - eVideoDecodeDpbKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR, -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT, - eFragmentShadingRateAttachmentKHR = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - eVideoEncodeDstKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR, - eVideoEncodeSrcKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR, - eVideoEncodeDpbKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR, -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - eAttachmentFeedbackLoopEXT = VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT, - eInvocationMaskHUAWEI = VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI, - eSampleWeightQCOM = VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM, - eSampleBlockMatchQCOM = VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM, - eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV + eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT, + eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT, + eSampled = VK_IMAGE_USAGE_SAMPLED_BIT, + eStorage = VK_IMAGE_USAGE_STORAGE_BIT, + eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, + eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, + eTransientAttachment = 
VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, + eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, + eHostTransfer = VK_IMAGE_USAGE_HOST_TRANSFER_BIT, + eHostTransferEXT = VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT, + eVideoDecodeDstKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR, + eVideoDecodeSrcKHR = VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR, + eVideoDecodeDpbKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR, + eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT, + eFragmentShadingRateAttachmentKHR = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, + eVideoEncodeDstKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR, + eVideoEncodeSrcKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR, + eVideoEncodeDpbKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR, + eAttachmentFeedbackLoopEXT = VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT, + eInvocationMaskHUAWEI = VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI, + eSampleWeightQCOM = VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM, + eSampleBlockMatchQCOM = VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM, + eVideoEncodeQuantizationDeltaMapKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR, + eVideoEncodeEmphasisMapKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_EMPHASIS_MAP_BIT_KHR + }; + + using ImageUsageFlags = Flags<ImageUsageFlagBits>; + + template <> + struct FlagTraits<ImageUsageFlagBits> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ImageUsageFlags allFlags = + ImageUsageFlagBits::eTransferSrc | ImageUsageFlagBits::eTransferDst | ImageUsageFlagBits::eSampled | ImageUsageFlagBits::eStorage | + ImageUsageFlagBits::eColorAttachment | ImageUsageFlagBits::eDepthStencilAttachment | ImageUsageFlagBits::eTransientAttachment | + ImageUsageFlagBits::eInputAttachment | ImageUsageFlagBits::eHostTransfer | ImageUsageFlagBits::eVideoDecodeDstKHR | + ImageUsageFlagBits::eVideoDecodeSrcKHR | ImageUsageFlagBits::eVideoDecodeDpbKHR | ImageUsageFlagBits::eFragmentDensityMapEXT | + ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR | ImageUsageFlagBits::eVideoEncodeDstKHR | ImageUsageFlagBits::eVideoEncodeSrcKHR | + ImageUsageFlagBits::eVideoEncodeDpbKHR | ImageUsageFlagBits::eAttachmentFeedbackLoopEXT | ImageUsageFlagBits::eInvocationMaskHUAWEI | + ImageUsageFlagBits::eSampleWeightQCOM | ImageUsageFlagBits::eSampleBlockMatchQCOM | ImageUsageFlagBits::eVideoEncodeQuantizationDeltaMapKHR | + ImageUsageFlagBits::eVideoEncodeEmphasisMapKHR; }; enum class InstanceCreateFlagBits : VkInstanceCreateFlags @@ -1510,6 +2159,15 @@ namespace VULKAN_HPP_NAMESPACE eEnumeratePortabilityKHR = VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR }; + using InstanceCreateFlags = Flags<InstanceCreateFlagBits>; + + template <> + struct FlagTraits<InstanceCreateFlagBits> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR InstanceCreateFlags allFlags = InstanceCreateFlagBits::eEnumeratePortabilityKHR; + }; + enum class InternalAllocationType { eExecutable = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE @@ -1522,6 +2180,15 @@ namespace VULKAN_HPP_NAMESPACE eMultiInstanceKHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR }; + using MemoryHeapFlags = Flags<MemoryHeapFlagBits>; + + template <> + struct FlagTraits<MemoryHeapFlagBits> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryHeapFlags allFlags = MemoryHeapFlagBits::eDeviceLocal | MemoryHeapFlagBits::eMultiInstance; + }; + enum class MemoryPropertyFlagBits : VkMemoryPropertyFlags { eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, @@ -1535,6 +2202,18 @@ 
namespace VULKAN_HPP_NAMESPACE eRdmaCapableNV = VK_MEMORY_PROPERTY_RDMA_CAPABLE_BIT_NV }; + using MemoryPropertyFlags = Flags<MemoryPropertyFlagBits>; + + template <> + struct FlagTraits<MemoryPropertyFlagBits> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryPropertyFlags allFlags = + MemoryPropertyFlagBits::eDeviceLocal | MemoryPropertyFlagBits::eHostVisible | MemoryPropertyFlagBits::eHostCoherent | + MemoryPropertyFlagBits::eHostCached | MemoryPropertyFlagBits::eLazilyAllocated | MemoryPropertyFlagBits::eProtected | + MemoryPropertyFlagBits::eDeviceCoherentAMD | MemoryPropertyFlagBits::eDeviceUncachedAMD | MemoryPropertyFlagBits::eRdmaCapableNV; + }; + enum class PhysicalDeviceType { eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER, @@ -1546,15 +2225,25 @@ namespace VULKAN_HPP_NAMESPACE enum class QueueFlagBits : VkQueueFlags { - eGraphics = VK_QUEUE_GRAPHICS_BIT, - eCompute = VK_QUEUE_COMPUTE_BIT, - eTransfer = VK_QUEUE_TRANSFER_BIT, - eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT, - eProtected = VK_QUEUE_PROTECTED_BIT, -#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eGraphics = VK_QUEUE_GRAPHICS_BIT, + eCompute = VK_QUEUE_COMPUTE_BIT, + eTransfer = VK_QUEUE_TRANSFER_BIT, + eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT, + eProtected = VK_QUEUE_PROTECTED_BIT, eVideoDecodeKHR = VK_QUEUE_VIDEO_DECODE_BIT_KHR, - eVideoEncodeKHR = VK_QUEUE_VIDEO_ENCODE_BIT_KHR -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eVideoEncodeKHR = VK_QUEUE_VIDEO_ENCODE_BIT_KHR, + eOpticalFlowNV = VK_QUEUE_OPTICAL_FLOW_BIT_NV + }; + + using QueueFlags = Flags<QueueFlagBits>; + + template <> + struct FlagTraits<QueueFlagBits> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR QueueFlags allFlags = QueueFlagBits::eGraphics | QueueFlagBits::eCompute | QueueFlagBits::eTransfer | + QueueFlagBits::eSparseBinding | QueueFlagBits::eProtected | QueueFlagBits::eVideoDecodeKHR | + QueueFlagBits::eVideoEncodeKHR | QueueFlagBits::eOpticalFlowNV; }; enum class SampleCountFlagBits : VkSampleCountFlags @@ -1568,6 +2257,17 @@ namespace VULKAN_HPP_NAMESPACE e64 = VK_SAMPLE_COUNT_64_BIT }; + using SampleCountFlags = Flags<SampleCountFlagBits>; + + template <> + struct FlagTraits<SampleCountFlagBits> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SampleCountFlags allFlags = SampleCountFlagBits::e1 | SampleCountFlagBits::e2 | SampleCountFlagBits::e4 | + SampleCountFlagBits::e8 | SampleCountFlagBits::e16 | SampleCountFlagBits::e32 | + SampleCountFlagBits::e64; + }; + enum class SystemAllocationScope { eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND, @@ -1577,8 +2277,31 @@ namespace VULKAN_HPP_NAMESPACE eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE }; - enum class DeviceCreateFlagBits + enum class DeviceCreateFlagBits : VkDeviceCreateFlags + { + }; + + using DeviceCreateFlags = Flags<DeviceCreateFlagBits>; + + template <> + struct FlagTraits<DeviceCreateFlagBits> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceCreateFlags allFlags = {}; + }; + + enum class DeviceQueueCreateFlagBits : VkDeviceQueueCreateFlags + { + eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT + }; + + using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits>; + + template <> + struct FlagTraits<DeviceQueueCreateFlagBits> { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceQueueCreateFlags allFlags = DeviceQueueCreateFlagBits::eProtected; }; enum class PipelineStageFlagBits : VkPipelineStageFlags @@ -1601,25 +2324,54 @@ namespace VULKAN_HPP_NAMESPACE eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, 
eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, eNone = VK_PIPELINE_STAGE_NONE, + eNoneKHR = VK_PIPELINE_STAGE_NONE_KHR, eTransformFeedbackEXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT, eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT, eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, + eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV, eRayTracingShaderKHR = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR, + eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV, eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT, eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV, eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV, + eCommandPreprocessEXT = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_EXT, eTaskShaderEXT = VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT, + eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV, eMeshShaderEXT = VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT, - eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV, - eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV, - eNoneKHR = VK_PIPELINE_STAGE_NONE_KHR, - eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV, - eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV, - eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV + eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV + }; + + using PipelineStageFlags = Flags<PipelineStageFlagBits>; + + template <> + struct FlagTraits<PipelineStageFlagBits> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineStageFlags allFlags = + PipelineStageFlagBits::eTopOfPipe | PipelineStageFlagBits::eDrawIndirect | PipelineStageFlagBits::eVertexInput | PipelineStageFlagBits::eVertexShader | + PipelineStageFlagBits::eTessellationControlShader | PipelineStageFlagBits::eTessellationEvaluationShader | PipelineStageFlagBits::eGeometryShader | + PipelineStageFlagBits::eFragmentShader | PipelineStageFlagBits::eEarlyFragmentTests | PipelineStageFlagBits::eLateFragmentTests | + PipelineStageFlagBits::eColorAttachmentOutput | PipelineStageFlagBits::eComputeShader | PipelineStageFlagBits::eTransfer | + PipelineStageFlagBits::eBottomOfPipe | PipelineStageFlagBits::eHost | PipelineStageFlagBits::eAllGraphics | PipelineStageFlagBits::eAllCommands | + PipelineStageFlagBits::eNone | PipelineStageFlagBits::eTransformFeedbackEXT | PipelineStageFlagBits::eConditionalRenderingEXT | + PipelineStageFlagBits::eAccelerationStructureBuildKHR | PipelineStageFlagBits::eRayTracingShaderKHR | PipelineStageFlagBits::eFragmentDensityProcessEXT | + PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR | PipelineStageFlagBits::eCommandPreprocessNV | PipelineStageFlagBits::eTaskShaderEXT | + PipelineStageFlagBits::eMeshShaderEXT; }; enum class MemoryMapFlagBits : VkMemoryMapFlags { + ePlacedEXT = VK_MEMORY_MAP_PLACED_BIT_EXT + }; + + using MemoryMapFlags = Flags<MemoryMapFlagBits>; + + template <> + struct FlagTraits<MemoryMapFlagBits> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryMapFlags allFlags = MemoryMapFlagBits::ePlacedEXT; }; enum class ImageAspectFlagBits : VkImageAspectFlags @@ -1629,17 +2381,30 @@ namespace VULKAN_HPP_NAMESPACE eStencil = VK_IMAGE_ASPECT_STENCIL_BIT, eMetadata = VK_IMAGE_ASPECT_METADATA_BIT, ePlane0 = VK_IMAGE_ASPECT_PLANE_0_BIT, + ePlane0KHR = 
VK_IMAGE_ASPECT_PLANE_0_BIT_KHR, ePlane1 = VK_IMAGE_ASPECT_PLANE_1_BIT, + ePlane1KHR = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR, ePlane2 = VK_IMAGE_ASPECT_PLANE_2_BIT, + ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR, eNone = VK_IMAGE_ASPECT_NONE, + eNoneKHR = VK_IMAGE_ASPECT_NONE_KHR, eMemoryPlane0EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT, eMemoryPlane1EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT, eMemoryPlane2EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT, - eMemoryPlane3EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT, - eNoneKHR = VK_IMAGE_ASPECT_NONE_KHR, - ePlane0KHR = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR, - ePlane1KHR = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR, - ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR + eMemoryPlane3EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT + }; + + using ImageAspectFlags = Flags<ImageAspectFlagBits>; + + template <> + struct FlagTraits<ImageAspectFlagBits> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ImageAspectFlags allFlags = ImageAspectFlagBits::eColor | ImageAspectFlagBits::eDepth | ImageAspectFlagBits::eStencil | + ImageAspectFlagBits::eMetadata | ImageAspectFlagBits::ePlane0 | + ImageAspectFlagBits::ePlane1 | ImageAspectFlagBits::ePlane2 | ImageAspectFlagBits::eNone | + ImageAspectFlagBits::eMemoryPlane0EXT | ImageAspectFlagBits::eMemoryPlane1EXT | + ImageAspectFlagBits::eMemoryPlane2EXT | ImageAspectFlagBits::eMemoryPlane3EXT; }; enum class SparseImageFormatFlagBits : VkSparseImageFormatFlags @@ -1649,26 +2414,72 @@ namespace VULKAN_HPP_NAMESPACE eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT }; + using SparseImageFormatFlags = Flags<SparseImageFormatFlagBits>; + + template <> + struct FlagTraits<SparseImageFormatFlagBits> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SparseImageFormatFlags allFlags = + SparseImageFormatFlagBits::eSingleMiptail | SparseImageFormatFlagBits::eAlignedMipSize | SparseImageFormatFlagBits::eNonstandardBlockSize; + }; + enum class SparseMemoryBindFlagBits : VkSparseMemoryBindFlags { eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT }; + using SparseMemoryBindFlags = Flags<SparseMemoryBindFlagBits>; + + template <> + struct FlagTraits<SparseMemoryBindFlagBits> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SparseMemoryBindFlags allFlags = SparseMemoryBindFlagBits::eMetadata; + }; + enum class FenceCreateFlagBits : VkFenceCreateFlags { eSignaled = VK_FENCE_CREATE_SIGNALED_BIT }; + using FenceCreateFlags = Flags<FenceCreateFlagBits>; + + template <> + struct FlagTraits<FenceCreateFlagBits> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR FenceCreateFlags allFlags = FenceCreateFlagBits::eSignaled; + }; + enum class SemaphoreCreateFlagBits : VkSemaphoreCreateFlags { }; + using SemaphoreCreateFlags = Flags<SemaphoreCreateFlagBits>; + + template <> + struct FlagTraits<SemaphoreCreateFlagBits> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SemaphoreCreateFlags allFlags = {}; + }; + enum class EventCreateFlagBits : VkEventCreateFlags { eDeviceOnly = VK_EVENT_CREATE_DEVICE_ONLY_BIT, eDeviceOnlyKHR = VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR }; + using EventCreateFlags = Flags<EventCreateFlagBits>; + + template <> + struct FlagTraits<EventCreateFlagBits> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR EventCreateFlags allFlags = EventCreateFlagBits::eDeviceOnly; + }; + enum class QueryPipelineStatisticFlagBits : VkQueryPipelineStatisticFlags { eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT, @@ -1683,7 +2494,24 @@ namespace VULKAN_HPP_NAMESPACE 
eTessellationEvaluationShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT, eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT, eTaskShaderInvocationsEXT = VK_QUERY_PIPELINE_STATISTIC_TASK_SHADER_INVOCATIONS_BIT_EXT, - eMeshShaderInvocationsEXT = VK_QUERY_PIPELINE_STATISTIC_MESH_SHADER_INVOCATIONS_BIT_EXT + eMeshShaderInvocationsEXT = VK_QUERY_PIPELINE_STATISTIC_MESH_SHADER_INVOCATIONS_BIT_EXT, + eClusterCullingShaderInvocationsHUAWEI = VK_QUERY_PIPELINE_STATISTIC_CLUSTER_CULLING_SHADER_INVOCATIONS_BIT_HUAWEI + }; + + using QueryPipelineStatisticFlags = Flags<QueryPipelineStatisticFlagBits>; + + template <> + struct FlagTraits<QueryPipelineStatisticFlagBits> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR QueryPipelineStatisticFlags allFlags = + QueryPipelineStatisticFlagBits::eInputAssemblyVertices | QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives | + QueryPipelineStatisticFlagBits::eVertexShaderInvocations | QueryPipelineStatisticFlagBits::eGeometryShaderInvocations | + QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives | QueryPipelineStatisticFlagBits::eClippingInvocations | + QueryPipelineStatisticFlagBits::eClippingPrimitives | QueryPipelineStatisticFlagBits::eFragmentShaderInvocations | + QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches | QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations | + QueryPipelineStatisticFlagBits::eComputeShaderInvocations | QueryPipelineStatisticFlagBits::eTaskShaderInvocationsEXT | + QueryPipelineStatisticFlagBits::eMeshShaderInvocationsEXT | QueryPipelineStatisticFlagBits::eClusterCullingShaderInvocationsHUAWEI; }; enum class QueryResultFlagBits : VkQueryResultFlags @@ -1692,78 +2520,133 @@ namespace VULKAN_HPP_NAMESPACE eWait = VK_QUERY_RESULT_WAIT_BIT, eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, ePartial = VK_QUERY_RESULT_PARTIAL_BIT, -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - eWithStatusKHR = VK_QUERY_RESULT_WITH_STATUS_BIT_KHR -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eWithStatusKHR = VK_QUERY_RESULT_WITH_STATUS_BIT_KHR + }; + + using QueryResultFlags = Flags<QueryResultFlagBits>; + + template <> + struct FlagTraits<QueryResultFlagBits> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR QueryResultFlags allFlags = QueryResultFlagBits::e64 | QueryResultFlagBits::eWait | + QueryResultFlagBits::eWithAvailability | QueryResultFlagBits::ePartial | + QueryResultFlagBits::eWithStatusKHR; }; enum class QueryType { - eOcclusion = VK_QUERY_TYPE_OCCLUSION, - ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS, - eTimestamp = VK_QUERY_TYPE_TIMESTAMP, -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - eResultStatusOnlyKHR = VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR, -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - eTransformFeedbackStreamEXT = VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT, - ePerformanceQueryKHR = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, - eAccelerationStructureCompactedSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR, - eAccelerationStructureSerializationSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR, - eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV, - ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL, -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - eVideoEncodeBitstreamBufferRangeKHR = VK_QUERY_TYPE_VIDEO_ENCODE_BITSTREAM_BUFFER_RANGE_KHR, -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eOcclusion = VK_QUERY_TYPE_OCCLUSION, 
ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS, + eTimestamp = VK_QUERY_TYPE_TIMESTAMP, + eResultStatusOnlyKHR = VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR, + eTransformFeedbackStreamEXT = VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT, + ePerformanceQueryKHR = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, + eAccelerationStructureCompactedSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR, + eAccelerationStructureSerializationSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR, + eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV, + ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL, + eVideoEncodeFeedbackKHR = VK_QUERY_TYPE_VIDEO_ENCODE_FEEDBACK_KHR, eMeshPrimitivesGeneratedEXT = VK_QUERY_TYPE_MESH_PRIMITIVES_GENERATED_EXT, ePrimitivesGeneratedEXT = VK_QUERY_TYPE_PRIMITIVES_GENERATED_EXT, eAccelerationStructureSerializationBottomLevelPointersKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR, - eAccelerationStructureSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SIZE_KHR + eAccelerationStructureSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SIZE_KHR, + eMicromapSerializationSizeEXT = VK_QUERY_TYPE_MICROMAP_SERIALIZATION_SIZE_EXT, + eMicromapCompactedSizeEXT = VK_QUERY_TYPE_MICROMAP_COMPACTED_SIZE_EXT + }; + + enum class QueryPoolCreateFlagBits : VkQueryPoolCreateFlags + { + }; - enum class QueryPoolCreateFlagBits + using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits>; + + template <> + struct FlagTraits<QueryPoolCreateFlagBits> { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR QueryPoolCreateFlags allFlags = {}; }; enum class BufferCreateFlagBits : VkBufferCreateFlags { - eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT, - eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT, - eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT, - eProtected = VK_BUFFER_CREATE_PROTECTED_BIT, - eDeviceAddressCaptureReplay = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, - eDeviceAddressCaptureReplayEXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT, - eDeviceAddressCaptureReplayKHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR + eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT, + eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT, + eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT, + eProtected = VK_BUFFER_CREATE_PROTECTED_BIT, + eDeviceAddressCaptureReplay = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, + eDeviceAddressCaptureReplayEXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT, + eDeviceAddressCaptureReplayKHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR, + eDescriptorBufferCaptureReplayEXT = VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT, + eVideoProfileIndependentKHR = VK_BUFFER_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR + }; + + using BufferCreateFlags = Flags<BufferCreateFlagBits>; + + template <> + struct FlagTraits<BufferCreateFlagBits> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR BufferCreateFlags allFlags = + BufferCreateFlagBits::eSparseBinding | BufferCreateFlagBits::eSparseResidency | BufferCreateFlagBits::eSparseAliased | BufferCreateFlagBits::eProtected | + BufferCreateFlagBits::eDeviceAddressCaptureReplay | BufferCreateFlagBits::eDescriptorBufferCaptureReplayEXT | + BufferCreateFlagBits::eVideoProfileIndependentKHR; }; enum class BufferUsageFlagBits : VkBufferUsageFlags { - eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT, - eTransferDst = 
VK_BUFFER_USAGE_TRANSFER_DST_BIT, - eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, - eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, - eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, - eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, - eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT, - eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, - eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, - eShaderDeviceAddress = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, + eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT, + eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT, + eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, + eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, + eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, + eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, + eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT, + eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, + eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, + eShaderDeviceAddress = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, + eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT, + eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR, + eVideoDecodeSrcKHR = VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR, + eVideoDecodeDstKHR = VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR, + eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT, + eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT, + eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT, #if defined( VK_ENABLE_BETA_EXTENSIONS ) - eVideoDecodeSrcKHR = VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR, - eVideoDecodeDstKHR = VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR, + eExecutionGraphScratchAMDX = VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX, #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT, - eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT, - eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT, eAccelerationStructureBuildInputReadOnlyKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR, eAccelerationStructureStorageKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR, eShaderBindingTableKHR = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR, + eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, + eVideoEncodeDstKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR, + eVideoEncodeSrcKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR, + eSamplerDescriptorBufferEXT = VK_BUFFER_USAGE_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT, + eResourceDescriptorBufferEXT = VK_BUFFER_USAGE_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT, + ePushDescriptorsDescriptorBufferEXT = VK_BUFFER_USAGE_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT, + eMicromapBuildInputReadOnlyEXT = VK_BUFFER_USAGE_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT, + eMicromapStorageEXT = VK_BUFFER_USAGE_MICROMAP_STORAGE_BIT_EXT + }; + + using BufferUsageFlags = Flags<BufferUsageFlagBits>; + + template <> + struct FlagTraits<BufferUsageFlagBits> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR BufferUsageFlags allFlags = + BufferUsageFlagBits::eTransferSrc | BufferUsageFlagBits::eTransferDst | BufferUsageFlagBits::eUniformTexelBuffer | + BufferUsageFlagBits::eStorageTexelBuffer | BufferUsageFlagBits::eUniformBuffer | BufferUsageFlagBits::eStorageBuffer | BufferUsageFlagBits::eIndexBuffer | 
BufferUsageFlagBits::eVertexBuffer | BufferUsageFlagBits::eIndirectBuffer | BufferUsageFlagBits::eShaderDeviceAddress | + BufferUsageFlagBits::eVideoDecodeSrcKHR | BufferUsageFlagBits::eVideoDecodeDstKHR | BufferUsageFlagBits::eTransformFeedbackBufferEXT | + BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT | BufferUsageFlagBits::eConditionalRenderingEXT #if defined( VK_ENABLE_BETA_EXTENSIONS ) - eVideoEncodeDstKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR, - eVideoEncodeSrcKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR, + | BufferUsageFlagBits::eExecutionGraphScratchAMDX #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, - eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT, - eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR + | BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR | BufferUsageFlagBits::eAccelerationStructureStorageKHR | + BufferUsageFlagBits::eShaderBindingTableKHR | BufferUsageFlagBits::eVideoEncodeDstKHR | BufferUsageFlagBits::eVideoEncodeSrcKHR | + BufferUsageFlagBits::eSamplerDescriptorBufferEXT | BufferUsageFlagBits::eResourceDescriptorBufferEXT | + BufferUsageFlagBits::ePushDescriptorsDescriptorBufferEXT | BufferUsageFlagBits::eMicromapBuildInputReadOnlyEXT | BufferUsageFlagBits::eMicromapStorageEXT; }; enum class SharingMode @@ -1776,49 +2659,57 @@ namespace VULKAN_HPP_NAMESPACE { }; + using BufferViewCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR BufferViewCreateFlags allFlags = {}; + }; + enum class ImageLayout { - eUndefined = VK_IMAGE_LAYOUT_UNDEFINED, - eGeneral = VK_IMAGE_LAYOUT_GENERAL, - eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, - eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, - eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, - eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, - eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, - eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, - ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED, - eDepthReadOnlyStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, - eDepthAttachmentStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, - eDepthAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, - eDepthReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, - eStencilAttachmentOptimal = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, - eStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL, - eReadOnlyOptimal = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL, - eAttachmentOptimal = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL, - ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - eVideoDecodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR, - eVideoDecodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR, - eVideoDecodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR, -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, - eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT, - eFragmentShadingRateAttachmentOptimalKHR = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR, -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - eVideoEncodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR, - eVideoEncodeSrcKHR = 
VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR, - eVideoEncodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR, -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - eAttachmentFeedbackLoopOptimalEXT = VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT, - eAttachmentOptimalKHR = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR, - eDepthAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, + eUndefined = VK_IMAGE_LAYOUT_UNDEFINED, + eGeneral = VK_IMAGE_LAYOUT_GENERAL, + eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, + eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, + eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, + eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, + eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, + eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, + ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED, + eDepthReadOnlyStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, + eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR, + eDepthAttachmentStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR, + eDepthAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, + eDepthAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR, + eDepthReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, eDepthReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR, - eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR, + eStencilAttachmentOptimal = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, + eStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, + eStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL, + eStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR, + eReadOnlyOptimal = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL, eReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR, + eAttachmentOptimal = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL, + eAttachmentOptimalKHR = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR, + eRenderingLocalRead = VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ, + eRenderingLocalReadKHR = VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ_KHR, + ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, + eVideoDecodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR, + eVideoDecodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR, + eVideoDecodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR, + eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, + eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT, + eFragmentShadingRateAttachmentOptimalKHR = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR, eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV, - eStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR, - eStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR + eVideoEncodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR, + eVideoEncodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR, + eVideoEncodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR, + eAttachmentFeedbackLoopOptimalEXT = VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT, + eVideoEncodeQuantizationMapKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_QUANTIZATION_MAP_KHR }; enum class ComponentSwizzle @@ -1834,8 +2725,20 @@ 
namespace VULKAN_HPP_NAMESPACE enum class ImageViewCreateFlagBits : VkImageViewCreateFlags { - eFragmentDensityMapDynamicEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT, - eFragmentDensityMapDeferredEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT + eFragmentDensityMapDynamicEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT, + eDescriptorBufferCaptureReplayEXT = VK_IMAGE_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT, + eFragmentDensityMapDeferredEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT + }; + + using ImageViewCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ImageViewCreateFlags allFlags = ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT | + ImageViewCreateFlagBits::eDescriptorBufferCaptureReplayEXT | + ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT; }; enum class ImageViewType @@ -1853,6 +2756,32 @@ namespace VULKAN_HPP_NAMESPACE { }; + using ShaderModuleCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ShaderModuleCreateFlags allFlags = {}; + }; + + enum class PipelineCacheCreateFlagBits : VkPipelineCacheCreateFlags + { + eExternallySynchronized = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT, + eExternallySynchronizedEXT = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT, + eInternallySynchronizedMergeKHR = VK_PIPELINE_CACHE_CREATE_INTERNALLY_SYNCHRONIZED_MERGE_BIT_KHR + }; + + using PipelineCacheCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCacheCreateFlags allFlags = + PipelineCacheCreateFlagBits::eExternallySynchronized | PipelineCacheCreateFlagBits::eInternallySynchronizedMergeKHR; + }; + enum class BlendFactor { eZero = VK_BLEND_FACTOR_ZERO, @@ -1939,6 +2868,16 @@ namespace VULKAN_HPP_NAMESPACE eA = VK_COLOR_COMPONENT_A_BIT }; + using ColorComponentFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ColorComponentFlags allFlags = + ColorComponentFlagBits::eR | ColorComponentFlagBits::eG | ColorComponentFlagBits::eB | ColorComponentFlagBits::eA; + }; + enum class CompareOp { eNever = VK_COMPARE_OP_NEVER, @@ -1959,60 +2898,108 @@ namespace VULKAN_HPP_NAMESPACE eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK }; + using CullModeFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR CullModeFlags allFlags = + CullModeFlagBits::eNone | CullModeFlagBits::eFront | CullModeFlagBits::eBack | CullModeFlagBits::eFrontAndBack; + }; + enum class DynamicState { - eViewport = VK_DYNAMIC_STATE_VIEWPORT, - eScissor = VK_DYNAMIC_STATE_SCISSOR, - eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH, - eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS, - eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS, - eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS, - eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK, - eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK, - eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE, - eCullMode = VK_DYNAMIC_STATE_CULL_MODE, - eFrontFace = VK_DYNAMIC_STATE_FRONT_FACE, - ePrimitiveTopology = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY, - 
eViewportWithCount = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT, - eScissorWithCount = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT, - eVertexInputBindingStride = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE, - eDepthTestEnable = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE, - eDepthWriteEnable = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE, - eDepthCompareOp = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP, - eDepthBoundsTestEnable = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE, - eStencilTestEnable = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE, - eStencilOp = VK_DYNAMIC_STATE_STENCIL_OP, - eRasterizerDiscardEnable = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE, - eDepthBiasEnable = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE, - ePrimitiveRestartEnable = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE, - eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, - eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, - eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, - eRayTracingPipelineStackSizeKHR = VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR, - eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV, - eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV, - eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV, - eFragmentShadingRateKHR = VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR, - eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT, - eVertexInputEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_EXT, - ePatchControlPointsEXT = VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT, - eLogicOpEXT = VK_DYNAMIC_STATE_LOGIC_OP_EXT, - eColorWriteEnableEXT = VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT, - eCullModeEXT = VK_DYNAMIC_STATE_CULL_MODE_EXT, - eDepthBiasEnableEXT = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT, - eDepthBoundsTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT, - eDepthCompareOpEXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT, - eDepthTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT, - eDepthWriteEnableEXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT, - eFrontFaceEXT = VK_DYNAMIC_STATE_FRONT_FACE_EXT, - ePrimitiveRestartEnableEXT = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT, - ePrimitiveTopologyEXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT, - eRasterizerDiscardEnableEXT = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT, - eScissorWithCountEXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT, - eStencilOpEXT = VK_DYNAMIC_STATE_STENCIL_OP_EXT, - eStencilTestEnableEXT = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT, - eVertexInputBindingStrideEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT, - eViewportWithCountEXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT + eViewport = VK_DYNAMIC_STATE_VIEWPORT, + eScissor = VK_DYNAMIC_STATE_SCISSOR, + eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH, + eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS, + eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS, + eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS, + eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK, + eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK, + eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE, + eCullMode = VK_DYNAMIC_STATE_CULL_MODE, + eCullModeEXT = VK_DYNAMIC_STATE_CULL_MODE_EXT, + eFrontFace = VK_DYNAMIC_STATE_FRONT_FACE, + eFrontFaceEXT = VK_DYNAMIC_STATE_FRONT_FACE_EXT, + ePrimitiveTopology = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY, + ePrimitiveTopologyEXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT, + eViewportWithCount = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT, + eViewportWithCountEXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT, + eScissorWithCount = 
VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT, + eScissorWithCountEXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT, + eVertexInputBindingStride = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE, + eVertexInputBindingStrideEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT, + eDepthTestEnable = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE, + eDepthTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT, + eDepthWriteEnable = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE, + eDepthWriteEnableEXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT, + eDepthCompareOp = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP, + eDepthCompareOpEXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT, + eDepthBoundsTestEnable = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE, + eDepthBoundsTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT, + eStencilTestEnable = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE, + eStencilTestEnableEXT = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT, + eStencilOp = VK_DYNAMIC_STATE_STENCIL_OP, + eStencilOpEXT = VK_DYNAMIC_STATE_STENCIL_OP_EXT, + eRasterizerDiscardEnable = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE, + eRasterizerDiscardEnableEXT = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT, + eDepthBiasEnable = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE, + eDepthBiasEnableEXT = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT, + ePrimitiveRestartEnable = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE, + ePrimitiveRestartEnableEXT = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT, + eLineStipple = VK_DYNAMIC_STATE_LINE_STIPPLE, + eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT, + eLineStippleKHR = VK_DYNAMIC_STATE_LINE_STIPPLE_KHR, + eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, + eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, + eDiscardRectangleEnableEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_ENABLE_EXT, + eDiscardRectangleModeEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_MODE_EXT, + eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, + eRayTracingPipelineStackSizeKHR = VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR, + eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV, + eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV, + eExclusiveScissorEnableNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_ENABLE_NV, + eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV, + eFragmentShadingRateKHR = VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR, + eVertexInputEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_EXT, + ePatchControlPointsEXT = VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT, + eLogicOpEXT = VK_DYNAMIC_STATE_LOGIC_OP_EXT, + eColorWriteEnableEXT = VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT, + eDepthClampEnableEXT = VK_DYNAMIC_STATE_DEPTH_CLAMP_ENABLE_EXT, + ePolygonModeEXT = VK_DYNAMIC_STATE_POLYGON_MODE_EXT, + eRasterizationSamplesEXT = VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT, + eSampleMaskEXT = VK_DYNAMIC_STATE_SAMPLE_MASK_EXT, + eAlphaToCoverageEnableEXT = VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT, + eAlphaToOneEnableEXT = VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT, + eLogicOpEnableEXT = VK_DYNAMIC_STATE_LOGIC_OP_ENABLE_EXT, + eColorBlendEnableEXT = VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT, + eColorBlendEquationEXT = VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT, + eColorWriteMaskEXT = VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, + eTessellationDomainOriginEXT = VK_DYNAMIC_STATE_TESSELLATION_DOMAIN_ORIGIN_EXT, + eRasterizationStreamEXT = VK_DYNAMIC_STATE_RASTERIZATION_STREAM_EXT, + eConservativeRasterizationModeEXT = VK_DYNAMIC_STATE_CONSERVATIVE_RASTERIZATION_MODE_EXT, + 
eExtraPrimitiveOverestimationSizeEXT = VK_DYNAMIC_STATE_EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT, + eDepthClipEnableEXT = VK_DYNAMIC_STATE_DEPTH_CLIP_ENABLE_EXT, + eSampleLocationsEnableEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_ENABLE_EXT, + eColorBlendAdvancedEXT = VK_DYNAMIC_STATE_COLOR_BLEND_ADVANCED_EXT, + eProvokingVertexModeEXT = VK_DYNAMIC_STATE_PROVOKING_VERTEX_MODE_EXT, + eLineRasterizationModeEXT = VK_DYNAMIC_STATE_LINE_RASTERIZATION_MODE_EXT, + eLineStippleEnableEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_ENABLE_EXT, + eDepthClipNegativeOneToOneEXT = VK_DYNAMIC_STATE_DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT, + eViewportWScalingEnableNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_ENABLE_NV, + eViewportSwizzleNV = VK_DYNAMIC_STATE_VIEWPORT_SWIZZLE_NV, + eCoverageToColorEnableNV = VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_ENABLE_NV, + eCoverageToColorLocationNV = VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_LOCATION_NV, + eCoverageModulationModeNV = VK_DYNAMIC_STATE_COVERAGE_MODULATION_MODE_NV, + eCoverageModulationTableEnableNV = VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_ENABLE_NV, + eCoverageModulationTableNV = VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_NV, + eShadingRateImageEnableNV = VK_DYNAMIC_STATE_SHADING_RATE_IMAGE_ENABLE_NV, + eRepresentativeFragmentTestEnableNV = VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV, + eCoverageReductionModeNV = VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV, + eAttachmentFeedbackLoopEnableEXT = VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT, + eDepthClampRangeEXT = VK_DYNAMIC_STATE_DEPTH_CLAMP_RANGE_EXT }; enum class FrontFace @@ -2047,11 +3034,17 @@ namespace VULKAN_HPP_NAMESPACE eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT, eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT, eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, + eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR, eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT, + eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR, eFailOnPipelineCompileRequired = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT, + eFailOnPipelineCompileRequiredEXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT, eEarlyReturnOnFailure = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT, - eRenderingFragmentShadingRateAttachmentKHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, - eRenderingFragmentDensityMapAttachmentEXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + eEarlyReturnOnFailureEXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT, + eNoProtectedAccess = VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT, + eNoProtectedAccessEXT = VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT, + eProtectedAccessOnly = VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT, + eProtectedAccessOnlyEXT = VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT, eRayTracingNoNullAnyHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, eRayTracingNoNullClosestHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, eRayTracingNoNullMissShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, @@ -2060,31 +3053,70 @@ namespace VULKAN_HPP_NAMESPACE eRayTracingSkipAabbsKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR, eRayTracingShaderGroupHandleCaptureReplayKHR = VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR, eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV, + eRenderingFragmentDensityMapAttachmentEXT = 
VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + eVkPipelineRasterizationStateCreateFragmentDensityMapAttachmentEXT = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + eRenderingFragmentShadingRateAttachmentKHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eVkPipelineRasterizationStateCreateFragmentShadingRateAttachmentKHR = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR, eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR, eIndirectBindableNV = VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV, eLibraryKHR = VK_PIPELINE_CREATE_LIBRARY_BIT_KHR, + eDescriptorBufferEXT = VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT, eRetainLinkTimeOptimizationInfoEXT = VK_PIPELINE_CREATE_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT, eLinkTimeOptimizationEXT = VK_PIPELINE_CREATE_LINK_TIME_OPTIMIZATION_BIT_EXT, eRayTracingAllowMotionNV = VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV, eColorAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT, eDepthStencilAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT, - eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR, - eEarlyReturnOnFailureEXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT, - eFailOnPipelineCompileRequiredEXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT, - eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR, - eVkPipelineRasterizationStateCreateFragmentDensityMapAttachmentEXT = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, - eVkPipelineRasterizationStateCreateFragmentShadingRateAttachmentKHR = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR + eRayTracingOpacityMicromapEXT = VK_PIPELINE_CREATE_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eRayTracingDisplacementMicromapNV = VK_PIPELINE_CREATE_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ }; - enum class PipelineShaderStageCreateFlagBits : VkPipelineShaderStageCreateFlags - { - eAllowVaryingSubgroupSize = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT, - eRequireFullSubgroups = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT, + using PipelineCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreateFlags allFlags = + PipelineCreateFlagBits::eDisableOptimization | PipelineCreateFlagBits::eAllowDerivatives | PipelineCreateFlagBits::eDerivative | + PipelineCreateFlagBits::eViewIndexFromDeviceIndex | PipelineCreateFlagBits::eDispatchBase | PipelineCreateFlagBits::eFailOnPipelineCompileRequired | + PipelineCreateFlagBits::eEarlyReturnOnFailure | PipelineCreateFlagBits::eNoProtectedAccess | PipelineCreateFlagBits::eProtectedAccessOnly | + PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR | PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR | + PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR | PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR | + PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR | PipelineCreateFlagBits::eRayTracingSkipAabbsKHR | + PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR | 
PipelineCreateFlagBits::eDeferCompileNV | + PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT | PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR | + PipelineCreateFlagBits::eCaptureStatisticsKHR | PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR | PipelineCreateFlagBits::eIndirectBindableNV | + PipelineCreateFlagBits::eLibraryKHR | PipelineCreateFlagBits::eDescriptorBufferEXT | PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT | + PipelineCreateFlagBits::eLinkTimeOptimizationEXT | PipelineCreateFlagBits::eRayTracingAllowMotionNV | + PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT | PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT | + PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | PipelineCreateFlagBits::eRayTracingDisplacementMicromapNV +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + ; + }; + + enum class PipelineShaderStageCreateFlagBits : VkPipelineShaderStageCreateFlags + { + eAllowVaryingSubgroupSize = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT, eAllowVaryingSubgroupSizeEXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT, + eRequireFullSubgroups = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT, eRequireFullSubgroupsEXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT }; + using PipelineShaderStageCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineShaderStageCreateFlags allFlags = + PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSize | PipelineShaderStageCreateFlagBits::eRequireFullSubgroups; + }; + enum class PolygonMode { eFill = VK_POLYGON_MODE_FILL, @@ -2119,22 +3151,37 @@ namespace VULKAN_HPP_NAMESPACE eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS, eAll = VK_SHADER_STAGE_ALL, eRaygenKHR = VK_SHADER_STAGE_RAYGEN_BIT_KHR, + eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV, eAnyHitKHR = VK_SHADER_STAGE_ANY_HIT_BIT_KHR, + eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV, eClosestHitKHR = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, + eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV, eMissKHR = VK_SHADER_STAGE_MISS_BIT_KHR, + eMissNV = VK_SHADER_STAGE_MISS_BIT_NV, eIntersectionKHR = VK_SHADER_STAGE_INTERSECTION_BIT_KHR, + eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV, eCallableKHR = VK_SHADER_STAGE_CALLABLE_BIT_KHR, + eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV, eTaskEXT = VK_SHADER_STAGE_TASK_BIT_EXT, + eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV, eMeshEXT = VK_SHADER_STAGE_MESH_BIT_EXT, - eSubpassShadingHUAWEI = VK_SHADER_STAGE_SUBPASS_SHADING_BIT_HUAWEI, - eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV, - eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV, - eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV, - eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV, eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV, - eMissNV = VK_SHADER_STAGE_MISS_BIT_NV, - eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV, - eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV + eSubpassShadingHUAWEI = VK_SHADER_STAGE_SUBPASS_SHADING_BIT_HUAWEI, + eClusterCullingHUAWEI = VK_SHADER_STAGE_CLUSTER_CULLING_BIT_HUAWEI + }; + + using ShaderStageFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ShaderStageFlags allFlags = + ShaderStageFlagBits::eVertex | ShaderStageFlagBits::eTessellationControl | ShaderStageFlagBits::eTessellationEvaluation | 
ShaderStageFlagBits::eGeometry | + ShaderStageFlagBits::eFragment | ShaderStageFlagBits::eCompute | ShaderStageFlagBits::eAllGraphics | ShaderStageFlagBits::eAll | + ShaderStageFlagBits::eRaygenKHR | ShaderStageFlagBits::eAnyHitKHR | ShaderStageFlagBits::eClosestHitKHR | ShaderStageFlagBits::eMissKHR | + ShaderStageFlagBits::eIntersectionKHR | ShaderStageFlagBits::eCallableKHR | ShaderStageFlagBits::eTaskEXT | ShaderStageFlagBits::eMeshEXT | + ShaderStageFlagBits::eSubpassShadingHUAWEI | ShaderStageFlagBits::eClusterCullingHUAWEI; }; enum class StencilOp @@ -2155,34 +3202,146 @@ namespace VULKAN_HPP_NAMESPACE eInstance = VK_VERTEX_INPUT_RATE_INSTANCE }; + enum class PipelineColorBlendStateCreateFlagBits : VkPipelineColorBlendStateCreateFlags + { + eRasterizationOrderAttachmentAccessEXT = VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT, + eRasterizationOrderAttachmentAccessARM = VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_ARM + }; + + using PipelineColorBlendStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineColorBlendStateCreateFlags allFlags = + PipelineColorBlendStateCreateFlagBits::eRasterizationOrderAttachmentAccessEXT; + }; + + enum class PipelineDepthStencilStateCreateFlagBits : VkPipelineDepthStencilStateCreateFlags + { + eRasterizationOrderAttachmentDepthAccessEXT = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT, + eRasterizationOrderAttachmentDepthAccessARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM, + eRasterizationOrderAttachmentStencilAccessEXT = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT, + eRasterizationOrderAttachmentStencilAccessARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM + }; + + using PipelineDepthStencilStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineDepthStencilStateCreateFlags allFlags = + PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentDepthAccessEXT | + PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentStencilAccessEXT; + }; + enum class PipelineDynamicStateCreateFlagBits : VkPipelineDynamicStateCreateFlags { }; + using PipelineDynamicStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineDynamicStateCreateFlags allFlags = {}; + }; + enum class PipelineInputAssemblyStateCreateFlagBits : VkPipelineInputAssemblyStateCreateFlags { }; + using PipelineInputAssemblyStateCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineInputAssemblyStateCreateFlags allFlags = {}; + }; + + enum class PipelineLayoutCreateFlagBits : VkPipelineLayoutCreateFlags + { + eIndependentSetsEXT = VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT + }; + + using PipelineLayoutCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineLayoutCreateFlags allFlags = 
PipelineLayoutCreateFlagBits::eIndependentSetsEXT;
+  };
+
   enum class PipelineMultisampleStateCreateFlagBits : VkPipelineMultisampleStateCreateFlags
   {
   };
+  using PipelineMultisampleStateCreateFlags = Flags<PipelineMultisampleStateCreateFlagBits>;
+
+  template <>
+  struct FlagTraits<PipelineMultisampleStateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineMultisampleStateCreateFlags allFlags = {};
+  };
+
   enum class PipelineRasterizationStateCreateFlagBits : VkPipelineRasterizationStateCreateFlags
   {
   };
+  using PipelineRasterizationStateCreateFlags = Flags<PipelineRasterizationStateCreateFlagBits>;
+
+  template <>
+  struct FlagTraits<PipelineRasterizationStateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineRasterizationStateCreateFlags allFlags = {};
+  };
+
   enum class PipelineTessellationStateCreateFlagBits : VkPipelineTessellationStateCreateFlags
   {
   };
+  using PipelineTessellationStateCreateFlags = Flags<PipelineTessellationStateCreateFlagBits>;
+
+  template <>
+  struct FlagTraits<PipelineTessellationStateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineTessellationStateCreateFlags allFlags = {};
+  };
+
   enum class PipelineVertexInputStateCreateFlagBits : VkPipelineVertexInputStateCreateFlags
   {
   };
+  using PipelineVertexInputStateCreateFlags = Flags<PipelineVertexInputStateCreateFlagBits>;
+
+  template <>
+  struct FlagTraits<PipelineVertexInputStateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineVertexInputStateCreateFlags allFlags = {};
+  };
+
   enum class PipelineViewportStateCreateFlagBits : VkPipelineViewportStateCreateFlags
   {
   };
+  using PipelineViewportStateCreateFlags = Flags<PipelineViewportStateCreateFlagBits>;
+
+  template <>
+  struct FlagTraits<PipelineViewportStateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineViewportStateCreateFlags allFlags = {};
+  };
+
   enum class BorderColor
   {
     eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
@@ -2217,10 +3376,22 @@ namespace VULKAN_HPP_NAMESPACE
   {
     eSubsampledEXT = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT,
     eSubsampledCoarseReconstructionEXT = VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT,
+    eDescriptorBufferCaptureReplayEXT = VK_SAMPLER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT,
     eNonSeamlessCubeMapEXT = VK_SAMPLER_CREATE_NON_SEAMLESS_CUBE_MAP_BIT_EXT,
     eImageProcessingQCOM = VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM
   };
+  using SamplerCreateFlags = Flags<SamplerCreateFlagBits>;
+
+  template <>
+  struct FlagTraits<SamplerCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SamplerCreateFlags allFlags =
+      SamplerCreateFlagBits::eSubsampledEXT | SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT |
+      SamplerCreateFlagBits::eDescriptorBufferCaptureReplayEXT | SamplerCreateFlagBits::eNonSeamlessCubeMapEXT | SamplerCreateFlagBits::eImageProcessingQCOM;
+  };
+
   enum class SamplerMipmapMode
   {
     eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST,
@@ -2229,49 +3400,90 @@ namespace VULKAN_HPP_NAMESPACE
   enum class DescriptorPoolCreateFlagBits : VkDescriptorPoolCreateFlags
   {
-    eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
-    eUpdateAfterBind = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT,
-    eHostOnlyEXT = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT,
-    eHostOnlyVALVE = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE,
-    eUpdateAfterBindEXT =
VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT, + eHostOnlyEXT = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT, + eHostOnlyVALVE = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE, + eAllowOverallocationSetsNV = VK_DESCRIPTOR_POOL_CREATE_ALLOW_OVERALLOCATION_SETS_BIT_NV, + eAllowOverallocationPoolsNV = VK_DESCRIPTOR_POOL_CREATE_ALLOW_OVERALLOCATION_POOLS_BIT_NV + }; + + using DescriptorPoolCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorPoolCreateFlags allFlags = + DescriptorPoolCreateFlagBits::eFreeDescriptorSet | DescriptorPoolCreateFlagBits::eUpdateAfterBind | DescriptorPoolCreateFlagBits::eHostOnlyEXT | + DescriptorPoolCreateFlagBits::eAllowOverallocationSetsNV | DescriptorPoolCreateFlagBits::eAllowOverallocationPoolsNV; }; enum class DescriptorSetLayoutCreateFlagBits : VkDescriptorSetLayoutCreateFlags { - eUpdateAfterBindPool = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, - ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, - eHostOnlyPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT, - eHostOnlyPoolVALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE, - eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT + eUpdateAfterBindPool = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, + eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT, + ePushDescriptor = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT, + ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, + eDescriptorBufferEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT, + eEmbeddedImmutableSamplersEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT, + eIndirectBindableNV = VK_DESCRIPTOR_SET_LAYOUT_CREATE_INDIRECT_BINDABLE_BIT_NV, + eHostOnlyPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT, + eHostOnlyPoolVALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE, + ePerStageNV = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PER_STAGE_BIT_NV + }; + + using DescriptorSetLayoutCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorSetLayoutCreateFlags allFlags = + DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool | DescriptorSetLayoutCreateFlagBits::ePushDescriptor | + DescriptorSetLayoutCreateFlagBits::eDescriptorBufferEXT | DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT | + DescriptorSetLayoutCreateFlagBits::eIndirectBindableNV | DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT | + DescriptorSetLayoutCreateFlagBits::ePerStageNV; }; enum class DescriptorType { - eSampler = VK_DESCRIPTOR_TYPE_SAMPLER, - eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, - eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, - eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, - eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, - eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, - eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, - eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, - eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, - eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, - eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, - eInlineUniformBlock = 
VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK, - eAccelerationStructureKHR = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, - eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, - eSampleWeightImageQCOM = VK_DESCRIPTOR_TYPE_SAMPLE_WEIGHT_IMAGE_QCOM, - eBlockMatchImageQCOM = VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM, - eMutableEXT = VK_DESCRIPTOR_TYPE_MUTABLE_EXT, - eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, - eMutableVALVE = VK_DESCRIPTOR_TYPE_MUTABLE_VALVE + eSampler = VK_DESCRIPTOR_TYPE_SAMPLER, + eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, + eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, + eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, + eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, + eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, + eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, + eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, + eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, + eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, + eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, + eInlineUniformBlock = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK, + eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, + eAccelerationStructureKHR = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, + eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, + eSampleWeightImageQCOM = VK_DESCRIPTOR_TYPE_SAMPLE_WEIGHT_IMAGE_QCOM, + eBlockMatchImageQCOM = VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM, + eMutableEXT = VK_DESCRIPTOR_TYPE_MUTABLE_EXT, + eMutableVALVE = VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, + ePartitionedAccelerationStructureNV = VK_DESCRIPTOR_TYPE_PARTITIONED_ACCELERATION_STRUCTURE_NV }; enum class DescriptorPoolResetFlagBits : VkDescriptorPoolResetFlags { }; + using DescriptorPoolResetFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorPoolResetFlags allFlags = {}; + }; + enum class AccessFlagBits : VkAccessFlags { eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT, @@ -2292,21 +3504,40 @@ namespace VULKAN_HPP_NAMESPACE eMemoryRead = VK_ACCESS_MEMORY_READ_BIT, eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT, eNone = VK_ACCESS_NONE, + eNoneKHR = VK_ACCESS_NONE_KHR, eTransformFeedbackWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, eTransformFeedbackCounterReadEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, eTransformFeedbackCounterWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, eConditionalRenderingReadEXT = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT, eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT, eAccelerationStructureReadKHR = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR, + eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV, eAccelerationStructureWriteKHR = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, + eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV, eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR, + eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV, eCommandPreprocessReadNV = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV, + eCommandPreprocessReadEXT = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_EXT, eCommandPreprocessWriteNV = 
VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV, - eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV, - eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV, - eNoneKHR = VK_ACCESS_NONE_KHR, - eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV + eCommandPreprocessWriteEXT = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_EXT + }; + + using AccessFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR AccessFlags allFlags = + AccessFlagBits::eIndirectCommandRead | AccessFlagBits::eIndexRead | AccessFlagBits::eVertexAttributeRead | AccessFlagBits::eUniformRead | + AccessFlagBits::eInputAttachmentRead | AccessFlagBits::eShaderRead | AccessFlagBits::eShaderWrite | AccessFlagBits::eColorAttachmentRead | + AccessFlagBits::eColorAttachmentWrite | AccessFlagBits::eDepthStencilAttachmentRead | AccessFlagBits::eDepthStencilAttachmentWrite | + AccessFlagBits::eTransferRead | AccessFlagBits::eTransferWrite | AccessFlagBits::eHostRead | AccessFlagBits::eHostWrite | AccessFlagBits::eMemoryRead | + AccessFlagBits::eMemoryWrite | AccessFlagBits::eNone | AccessFlagBits::eTransformFeedbackWriteEXT | AccessFlagBits::eTransformFeedbackCounterReadEXT | + AccessFlagBits::eTransformFeedbackCounterWriteEXT | AccessFlagBits::eConditionalRenderingReadEXT | AccessFlagBits::eColorAttachmentReadNoncoherentEXT | + AccessFlagBits::eAccelerationStructureReadKHR | AccessFlagBits::eAccelerationStructureWriteKHR | AccessFlagBits::eFragmentDensityMapReadEXT | + AccessFlagBits::eFragmentShadingRateAttachmentReadKHR | AccessFlagBits::eCommandPreprocessReadNV | AccessFlagBits::eCommandPreprocessWriteNV; }; enum class AttachmentDescriptionFlagBits : VkAttachmentDescriptionFlags @@ -2314,12 +3545,23 @@ namespace VULKAN_HPP_NAMESPACE eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT }; + using AttachmentDescriptionFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR AttachmentDescriptionFlags allFlags = AttachmentDescriptionFlagBits::eMayAlias; + }; + enum class AttachmentLoadOp { eLoad = VK_ATTACHMENT_LOAD_OP_LOAD, eClear = VK_ATTACHMENT_LOAD_OP_CLEAR, eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE, - eNoneEXT = VK_ATTACHMENT_LOAD_OP_NONE_EXT + eNone = VK_ATTACHMENT_LOAD_OP_NONE, + eNoneEXT = VK_ATTACHMENT_LOAD_OP_NONE_EXT, + eNoneKHR = VK_ATTACHMENT_LOAD_OP_NONE_KHR }; enum class AttachmentStoreOp @@ -2327,19 +3569,31 @@ namespace VULKAN_HPP_NAMESPACE eStore = VK_ATTACHMENT_STORE_OP_STORE, eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE, eNone = VK_ATTACHMENT_STORE_OP_NONE, - eNoneEXT = VK_ATTACHMENT_STORE_OP_NONE_EXT, eNoneKHR = VK_ATTACHMENT_STORE_OP_NONE_KHR, - eNoneQCOM = VK_ATTACHMENT_STORE_OP_NONE_QCOM + eNoneQCOM = VK_ATTACHMENT_STORE_OP_NONE_QCOM, + eNoneEXT = VK_ATTACHMENT_STORE_OP_NONE_EXT }; enum class DependencyFlagBits : VkDependencyFlags { - eByRegion = VK_DEPENDENCY_BY_REGION_BIT, - eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT, - eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT, - eFeedbackLoopEXT = VK_DEPENDENCY_FEEDBACK_LOOP_BIT_EXT, - eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR, - eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR + eByRegion = VK_DEPENDENCY_BY_REGION_BIT, + eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT, + eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR, + eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT, + eViewLocalKHR = 
VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR, + eFeedbackLoopEXT = VK_DEPENDENCY_FEEDBACK_LOOP_BIT_EXT, + eQueueFamilyOwnershipTransferUseAllStagesKHR = VK_DEPENDENCY_QUEUE_FAMILY_OWNERSHIP_TRANSFER_USE_ALL_STAGES_BIT_KHR + }; + + using DependencyFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DependencyFlags allFlags = DependencyFlagBits::eByRegion | DependencyFlagBits::eDeviceGroup | + DependencyFlagBits::eViewLocal | DependencyFlagBits::eFeedbackLoopEXT | + DependencyFlagBits::eQueueFamilyOwnershipTransferUseAllStagesKHR; }; enum class FramebufferCreateFlagBits : VkFramebufferCreateFlags @@ -2348,13 +3602,25 @@ namespace VULKAN_HPP_NAMESPACE eImagelessKHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR }; + using FramebufferCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR FramebufferCreateFlags allFlags = FramebufferCreateFlagBits::eImageless; + }; + enum class PipelineBindPoint { - eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS, - eCompute = VK_PIPELINE_BIND_POINT_COMPUTE, + eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS, + eCompute = VK_PIPELINE_BIND_POINT_COMPUTE, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eExecutionGraphAMDX = VK_PIPELINE_BIND_POINT_EXECUTION_GRAPH_AMDX, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ eRayTracingKHR = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, - eSubpassShadingHUAWEI = VK_PIPELINE_BIND_POINT_SUBPASS_SHADING_HUAWEI, - eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV + eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, + eSubpassShadingHUAWEI = VK_PIPELINE_BIND_POINT_SUBPASS_SHADING_HUAWEI }; enum class RenderPassCreateFlagBits : VkRenderPassCreateFlags @@ -2362,6 +3628,15 @@ namespace VULKAN_HPP_NAMESPACE eTransformQCOM = VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM }; + using RenderPassCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR RenderPassCreateFlags allFlags = RenderPassCreateFlagBits::eTransformQCOM; + }; + enum class SubpassDescriptionFlagBits : VkSubpassDescriptionFlags { ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX, @@ -2369,12 +3644,25 @@ namespace VULKAN_HPP_NAMESPACE eFragmentRegionQCOM = VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM, eShaderResolveQCOM = VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM, eRasterizationOrderAttachmentColorAccessEXT = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT, - eRasterizationOrderAttachmentDepthAccessEXT = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT, - eRasterizationOrderAttachmentStencilAccessEXT = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT, - eEnableLegacyDitheringEXT = VK_SUBPASS_DESCRIPTION_ENABLE_LEGACY_DITHERING_BIT_EXT, eRasterizationOrderAttachmentColorAccessARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_ARM, + eRasterizationOrderAttachmentDepthAccessEXT = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT, eRasterizationOrderAttachmentDepthAccessARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM, - eRasterizationOrderAttachmentStencilAccessARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM + eRasterizationOrderAttachmentStencilAccessEXT = 
VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT, + eRasterizationOrderAttachmentStencilAccessARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM, + eEnableLegacyDitheringEXT = VK_SUBPASS_DESCRIPTION_ENABLE_LEGACY_DITHERING_BIT_EXT + }; + + using SubpassDescriptionFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SubpassDescriptionFlags allFlags = + SubpassDescriptionFlagBits::ePerViewAttributesNVX | SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX | + SubpassDescriptionFlagBits::eFragmentRegionQCOM | SubpassDescriptionFlagBits::eShaderResolveQCOM | + SubpassDescriptionFlagBits::eRasterizationOrderAttachmentColorAccessEXT | SubpassDescriptionFlagBits::eRasterizationOrderAttachmentDepthAccessEXT | + SubpassDescriptionFlagBits::eRasterizationOrderAttachmentStencilAccessEXT | SubpassDescriptionFlagBits::eEnableLegacyDitheringEXT; }; enum class CommandPoolCreateFlagBits : VkCommandPoolCreateFlags @@ -2384,11 +3672,30 @@ namespace VULKAN_HPP_NAMESPACE eProtected = VK_COMMAND_POOL_CREATE_PROTECTED_BIT }; + using CommandPoolCreateFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR CommandPoolCreateFlags allFlags = + CommandPoolCreateFlagBits::eTransient | CommandPoolCreateFlagBits::eResetCommandBuffer | CommandPoolCreateFlagBits::eProtected; + }; + enum class CommandPoolResetFlagBits : VkCommandPoolResetFlags { eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT }; + using CommandPoolResetFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR CommandPoolResetFlags allFlags = CommandPoolResetFlagBits::eReleaseResources; + }; + enum class CommandBufferLevel { ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY, @@ -2400,6 +3707,15 @@ namespace VULKAN_HPP_NAMESPACE eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT }; + using CommandBufferResetFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR CommandBufferResetFlags allFlags = CommandBufferResetFlagBits::eReleaseResources; + }; + enum class CommandBufferUsageFlagBits : VkCommandBufferUsageFlags { eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, @@ -2407,47 +3723,141 @@ namespace VULKAN_HPP_NAMESPACE eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT }; + using CommandBufferUsageFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR CommandBufferUsageFlags allFlags = + CommandBufferUsageFlagBits::eOneTimeSubmit | CommandBufferUsageFlagBits::eRenderPassContinue | CommandBufferUsageFlagBits::eSimultaneousUse; + }; + enum class QueryControlFlagBits : VkQueryControlFlags { ePrecise = VK_QUERY_CONTROL_PRECISE_BIT }; + using QueryControlFlags = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR QueryControlFlags allFlags = QueryControlFlagBits::ePrecise; + }; + enum class IndexType { eUint16 = VK_INDEX_TYPE_UINT16, eUint32 = VK_INDEX_TYPE_UINT32, - eNoneKHR = VK_INDEX_TYPE_NONE_KHR, + eUint8 = VK_INDEX_TYPE_UINT8, eUint8EXT = 
VK_INDEX_TYPE_UINT8_EXT,
+    eUint8KHR = VK_INDEX_TYPE_UINT8_KHR,
+    eNoneKHR = VK_INDEX_TYPE_NONE_KHR,
     eNoneNV = VK_INDEX_TYPE_NONE_NV
   };
-  enum class StencilFaceFlagBits : VkStencilFaceFlags
+  //=========================
+  //=== Index Type Traits ===
+  //=========================
+
+  template <typename T>
+  struct IndexTypeValue
   {
-    eFront = VK_STENCIL_FACE_FRONT_BIT,
-    eBack = VK_STENCIL_FACE_BACK_BIT,
-    eFrontAndBack = VK_STENCIL_FACE_FRONT_AND_BACK,
-    eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK
   };
-  enum class SubpassContents
+  template <>
+  struct IndexTypeValue<uint16_t>
   {
-    eInline = VK_SUBPASS_CONTENTS_INLINE,
-    eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint16;
   };
-  //=== VK_VERSION_1_1 ===
-
+  template <>
+  struct CppType<IndexType, IndexType::eUint16>
+  {
+    using Type = uint16_t;
+  };
+
+  template <>
+  struct IndexTypeValue<uint32_t>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint32;
+  };
+
+  template <>
+  struct CppType<IndexType, IndexType::eUint32>
+  {
+    using Type = uint32_t;
+  };
+
+  template <>
+  struct IndexTypeValue<uint8_t>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint8;
+  };
+
+  template <>
+  struct CppType<IndexType, IndexType::eUint8>
+  {
+    using Type = uint8_t;
+  };
+
+  enum class StencilFaceFlagBits : VkStencilFaceFlags
+  {
+    eFront = VK_STENCIL_FACE_FRONT_BIT,
+    eBack = VK_STENCIL_FACE_BACK_BIT,
+    eFrontAndBack = VK_STENCIL_FACE_FRONT_AND_BACK,
+    eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK
+  };
+
+  using StencilFaceFlags = Flags<StencilFaceFlagBits>;
+
+  template <>
+  struct FlagTraits<StencilFaceFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StencilFaceFlags allFlags =
+      StencilFaceFlagBits::eFront | StencilFaceFlagBits::eBack | StencilFaceFlagBits::eFrontAndBack;
+  };
+
+  enum class SubpassContents
+  {
+    eInline = VK_SUBPASS_CONTENTS_INLINE,
+    eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS,
+    eInlineAndSecondaryCommandBuffersKHR = VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_KHR,
+    eInlineAndSecondaryCommandBuffersEXT = VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_EXT
+  };
+
+  //=== VK_VERSION_1_1 ===
+
   enum class SubgroupFeatureFlagBits : VkSubgroupFeatureFlags
   {
-    eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT,
-    eVote = VK_SUBGROUP_FEATURE_VOTE_BIT,
-    eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT,
-    eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT,
-    eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT,
-    eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT,
-    eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT,
-    eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT,
-    ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV
+    eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT,
+    eVote = VK_SUBGROUP_FEATURE_VOTE_BIT,
+    eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT,
+    eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT,
+    eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT,
+    eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT,
+    eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT,
+    eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT,
+    eRotate = VK_SUBGROUP_FEATURE_ROTATE_BIT,
+    eRotateKHR = VK_SUBGROUP_FEATURE_ROTATE_BIT_KHR,
+    eRotateClustered = VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT,
+    eRotateClusteredKHR = VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT_KHR,
+    ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV
+  };
+
+  using SubgroupFeatureFlags = Flags<SubgroupFeatureFlagBits>;
+
+  template <>
+  struct FlagTraits<SubgroupFeatureFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SubgroupFeatureFlags
allFlags = + SubgroupFeatureFlagBits::eBasic | SubgroupFeatureFlagBits::eVote | SubgroupFeatureFlagBits::eArithmetic | SubgroupFeatureFlagBits::eBallot | + SubgroupFeatureFlagBits::eShuffle | SubgroupFeatureFlagBits::eShuffleRelative | SubgroupFeatureFlagBits::eClustered | SubgroupFeatureFlagBits::eQuad | + SubgroupFeatureFlagBits::eRotate | SubgroupFeatureFlagBits::eRotateClustered | SubgroupFeatureFlagBits::ePartitionedNV; }; enum class PeerMemoryFeatureFlagBits : VkPeerMemoryFeatureFlags @@ -2459,6 +3869,17 @@ namespace VULKAN_HPP_NAMESPACE }; using PeerMemoryFeatureFlagBitsKHR = PeerMemoryFeatureFlagBits; + using PeerMemoryFeatureFlags = Flags; + using PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PeerMemoryFeatureFlags allFlags = PeerMemoryFeatureFlagBits::eCopySrc | PeerMemoryFeatureFlagBits::eCopyDst | + PeerMemoryFeatureFlagBits::eGenericSrc | PeerMemoryFeatureFlagBits::eGenericDst; + }; + enum class MemoryAllocateFlagBits : VkMemoryAllocateFlags { eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT, @@ -2467,10 +3888,31 @@ namespace VULKAN_HPP_NAMESPACE }; using MemoryAllocateFlagBitsKHR = MemoryAllocateFlagBits; + using MemoryAllocateFlags = Flags; + using MemoryAllocateFlagsKHR = MemoryAllocateFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryAllocateFlags allFlags = + MemoryAllocateFlagBits::eDeviceMask | MemoryAllocateFlagBits::eDeviceAddress | MemoryAllocateFlagBits::eDeviceAddressCaptureReplay; + }; + enum class CommandPoolTrimFlagBits : VkCommandPoolTrimFlags { }; + using CommandPoolTrimFlags = Flags; + using CommandPoolTrimFlagsKHR = CommandPoolTrimFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR CommandPoolTrimFlags allFlags = {}; + }; + enum class PointClippingBehavior { eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, @@ -2485,11 +3927,6 @@ namespace VULKAN_HPP_NAMESPACE }; using TessellationDomainOriginKHR = TessellationDomainOrigin; - enum class DeviceQueueCreateFlagBits : VkDeviceQueueCreateFlags - { - eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT - }; - enum class SamplerYcbcrModelConversion { eRgbIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, @@ -2516,8 +3953,8 @@ namespace VULKAN_HPP_NAMESPACE enum class DescriptorUpdateTemplateType { - eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, - ePushDescriptorsKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR + eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + ePushDescriptors = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS }; using DescriptorUpdateTemplateTypeKHR = DescriptorUpdateTemplateType; @@ -2525,6 +3962,16 @@ namespace VULKAN_HPP_NAMESPACE { }; + using DescriptorUpdateTemplateCreateFlags = Flags; + using DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorUpdateTemplateCreateFlags allFlags = {}; + }; + enum class ExternalMemoryHandleTypeFlagBits : VkExternalMemoryHandleTypeFlags { eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, @@ -2543,10 +3990,46 @@ namespace VULKAN_HPP_NAMESPACE #if defined( 
VK_USE_PLATFORM_FUCHSIA ) eZirconVmoFUCHSIA = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA, #endif /*VK_USE_PLATFORM_FUCHSIA*/ - eRdmaAddressNV = VK_EXTERNAL_MEMORY_HANDLE_TYPE_RDMA_ADDRESS_BIT_NV + eRdmaAddressNV = VK_EXTERNAL_MEMORY_HANDLE_TYPE_RDMA_ADDRESS_BIT_NV, +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + eScreenBufferQNX = VK_EXTERNAL_MEMORY_HANDLE_TYPE_SCREEN_BUFFER_BIT_QNX, +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ +#if defined( VK_USE_PLATFORM_METAL_EXT ) + eMtlbufferEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLBUFFER_BIT_EXT, + eMtltextureEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLTEXTURE_BIT_EXT, + eMtlheapEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_MTLHEAP_BIT_EXT +#endif /*VK_USE_PLATFORM_METAL_EXT*/ }; using ExternalMemoryHandleTypeFlagBitsKHR = ExternalMemoryHandleTypeFlagBits; + using ExternalMemoryHandleTypeFlags = Flags; + using ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalMemoryHandleTypeFlags allFlags = + ExternalMemoryHandleTypeFlagBits::eOpaqueFd | ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 | ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt | + ExternalMemoryHandleTypeFlagBits::eD3D11Texture | ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt | ExternalMemoryHandleTypeFlagBits::eD3D12Heap | + ExternalMemoryHandleTypeFlagBits::eD3D12Resource | ExternalMemoryHandleTypeFlagBits::eDmaBufEXT +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + | ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + | ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT | ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT +#if defined( VK_USE_PLATFORM_FUCHSIA ) + | ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + | ExternalMemoryHandleTypeFlagBits::eRdmaAddressNV +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + | ExternalMemoryHandleTypeFlagBits::eScreenBufferQNX +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ +#if defined( VK_USE_PLATFORM_METAL_EXT ) + | ExternalMemoryHandleTypeFlagBits::eMtlbufferEXT | ExternalMemoryHandleTypeFlagBits::eMtltextureEXT | ExternalMemoryHandleTypeFlagBits::eMtlheapEXT +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + ; + }; + enum class ExternalMemoryFeatureFlagBits : VkExternalMemoryFeatureFlags { eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT, @@ -2555,6 +4038,17 @@ namespace VULKAN_HPP_NAMESPACE }; using ExternalMemoryFeatureFlagBitsKHR = ExternalMemoryFeatureFlagBits; + using ExternalMemoryFeatureFlags = Flags; + using ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalMemoryFeatureFlags allFlags = + ExternalMemoryFeatureFlagBits::eDedicatedOnly | ExternalMemoryFeatureFlagBits::eExportable | ExternalMemoryFeatureFlagBits::eImportable; + }; + enum class ExternalFenceHandleTypeFlagBits : VkExternalFenceHandleTypeFlags { eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT, @@ -2564,6 +4058,18 @@ namespace VULKAN_HPP_NAMESPACE }; using ExternalFenceHandleTypeFlagBitsKHR = ExternalFenceHandleTypeFlagBits; + using ExternalFenceHandleTypeFlags = Flags; + using ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR ExternalFenceHandleTypeFlags allFlags = + ExternalFenceHandleTypeFlagBits::eOpaqueFd | ExternalFenceHandleTypeFlagBits::eOpaqueWin32 | ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt | + ExternalFenceHandleTypeFlagBits::eSyncFd; + }; + enum class ExternalFenceFeatureFlagBits : VkExternalFenceFeatureFlags { eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT, @@ -2571,32 +4077,79 @@ namespace VULKAN_HPP_NAMESPACE }; using ExternalFenceFeatureFlagBitsKHR = ExternalFenceFeatureFlagBits; + using ExternalFenceFeatureFlags = Flags; + using ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalFenceFeatureFlags allFlags = + ExternalFenceFeatureFlagBits::eExportable | ExternalFenceFeatureFlagBits::eImportable; + }; + enum class FenceImportFlagBits : VkFenceImportFlags { eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT }; using FenceImportFlagBitsKHR = FenceImportFlagBits; + using FenceImportFlags = Flags; + using FenceImportFlagsKHR = FenceImportFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR FenceImportFlags allFlags = FenceImportFlagBits::eTemporary; + }; + enum class SemaphoreImportFlagBits : VkSemaphoreImportFlags { eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT }; using SemaphoreImportFlagBitsKHR = SemaphoreImportFlagBits; + using SemaphoreImportFlags = Flags; + using SemaphoreImportFlagsKHR = SemaphoreImportFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SemaphoreImportFlags allFlags = SemaphoreImportFlagBits::eTemporary; + }; + enum class ExternalSemaphoreHandleTypeFlagBits : VkExternalSemaphoreHandleTypeFlags { eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, + eD3D11Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT, eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT, #if defined( VK_USE_PLATFORM_FUCHSIA ) - eZirconEventFUCHSIA = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA, + eZirconEventFUCHSIA = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA #endif /*VK_USE_PLATFORM_FUCHSIA*/ - eD3D11Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT }; using ExternalSemaphoreHandleTypeFlagBitsKHR = ExternalSemaphoreHandleTypeFlagBits; + using ExternalSemaphoreHandleTypeFlags = Flags; + using ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalSemaphoreHandleTypeFlags allFlags = + ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd | ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 | + ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt | ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence | ExternalSemaphoreHandleTypeFlagBits::eSyncFd +#if defined( VK_USE_PLATFORM_FUCHSIA ) + | ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + ; + }; + enum class ExternalSemaphoreFeatureFlagBits : VkExternalSemaphoreFeatureFlags { eExportable = 
VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT, @@ -2604,33 +4157,48 @@ namespace VULKAN_HPP_NAMESPACE }; using ExternalSemaphoreFeatureFlagBitsKHR = ExternalSemaphoreFeatureFlagBits; + using ExternalSemaphoreFeatureFlags = Flags; + using ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalSemaphoreFeatureFlags allFlags = + ExternalSemaphoreFeatureFlagBits::eExportable | ExternalSemaphoreFeatureFlagBits::eImportable; + }; + //=== VK_VERSION_1_2 === enum class DriverId { - eAmdProprietary = VK_DRIVER_ID_AMD_PROPRIETARY, - eAmdOpenSource = VK_DRIVER_ID_AMD_OPEN_SOURCE, - eMesaRadv = VK_DRIVER_ID_MESA_RADV, - eNvidiaProprietary = VK_DRIVER_ID_NVIDIA_PROPRIETARY, - eIntelProprietaryWindows = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS, - eIntelOpenSourceMESA = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA, - eImaginationProprietary = VK_DRIVER_ID_IMAGINATION_PROPRIETARY, - eQualcommProprietary = VK_DRIVER_ID_QUALCOMM_PROPRIETARY, - eArmProprietary = VK_DRIVER_ID_ARM_PROPRIETARY, - eGoogleSwiftshader = VK_DRIVER_ID_GOOGLE_SWIFTSHADER, - eGgpProprietary = VK_DRIVER_ID_GGP_PROPRIETARY, - eBroadcomProprietary = VK_DRIVER_ID_BROADCOM_PROPRIETARY, - eMesaLlvmpipe = VK_DRIVER_ID_MESA_LLVMPIPE, - eMoltenvk = VK_DRIVER_ID_MOLTENVK, - eCoreaviProprietary = VK_DRIVER_ID_COREAVI_PROPRIETARY, - eJuiceProprietary = VK_DRIVER_ID_JUICE_PROPRIETARY, - eVerisiliconProprietary = VK_DRIVER_ID_VERISILICON_PROPRIETARY, - eMesaTurnip = VK_DRIVER_ID_MESA_TURNIP, - eMesaV3Dv = VK_DRIVER_ID_MESA_V3DV, - eMesaPanvk = VK_DRIVER_ID_MESA_PANVK, - eSamsungProprietary = VK_DRIVER_ID_SAMSUNG_PROPRIETARY, - eMesaVenus = VK_DRIVER_ID_MESA_VENUS, - eMesaDozen = VK_DRIVER_ID_MESA_DOZEN + eAmdProprietary = VK_DRIVER_ID_AMD_PROPRIETARY, + eAmdOpenSource = VK_DRIVER_ID_AMD_OPEN_SOURCE, + eMesaRadv = VK_DRIVER_ID_MESA_RADV, + eNvidiaProprietary = VK_DRIVER_ID_NVIDIA_PROPRIETARY, + eIntelProprietaryWindows = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS, + eIntelOpenSourceMESA = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA, + eImaginationProprietary = VK_DRIVER_ID_IMAGINATION_PROPRIETARY, + eQualcommProprietary = VK_DRIVER_ID_QUALCOMM_PROPRIETARY, + eArmProprietary = VK_DRIVER_ID_ARM_PROPRIETARY, + eGoogleSwiftshader = VK_DRIVER_ID_GOOGLE_SWIFTSHADER, + eGgpProprietary = VK_DRIVER_ID_GGP_PROPRIETARY, + eBroadcomProprietary = VK_DRIVER_ID_BROADCOM_PROPRIETARY, + eMesaLlvmpipe = VK_DRIVER_ID_MESA_LLVMPIPE, + eMoltenvk = VK_DRIVER_ID_MOLTENVK, + eCoreaviProprietary = VK_DRIVER_ID_COREAVI_PROPRIETARY, + eJuiceProprietary = VK_DRIVER_ID_JUICE_PROPRIETARY, + eVerisiliconProprietary = VK_DRIVER_ID_VERISILICON_PROPRIETARY, + eMesaTurnip = VK_DRIVER_ID_MESA_TURNIP, + eMesaV3Dv = VK_DRIVER_ID_MESA_V3DV, + eMesaPanvk = VK_DRIVER_ID_MESA_PANVK, + eSamsungProprietary = VK_DRIVER_ID_SAMSUNG_PROPRIETARY, + eMesaVenus = VK_DRIVER_ID_MESA_VENUS, + eMesaDozen = VK_DRIVER_ID_MESA_DOZEN, + eMesaNvk = VK_DRIVER_ID_MESA_NVK, + eImaginationOpenSourceMESA = VK_DRIVER_ID_IMAGINATION_OPEN_SOURCE_MESA, + eMesaHoneykrisp = VK_DRIVER_ID_MESA_HONEYKRISP, + eVulkanScEmulationOnVulkan = VK_DRIVER_ID_VULKAN_SC_EMULATION_ON_VULKAN }; using DriverIdKHR = DriverId; @@ -2651,21 +4219,52 @@ namespace VULKAN_HPP_NAMESPACE }; using DescriptorBindingFlagBitsEXT = DescriptorBindingFlagBits; + using DescriptorBindingFlags = Flags; + using DescriptorBindingFlagsEXT = DescriptorBindingFlags; + + template <> + struct FlagTraits + { + static 
VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorBindingFlags allFlags = + DescriptorBindingFlagBits::eUpdateAfterBind | DescriptorBindingFlagBits::eUpdateUnusedWhilePending | DescriptorBindingFlagBits::ePartiallyBound | + DescriptorBindingFlagBits::eVariableDescriptorCount; + }; + enum class ResolveModeFlagBits : VkResolveModeFlags { eNone = VK_RESOLVE_MODE_NONE, eSampleZero = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT, eAverage = VK_RESOLVE_MODE_AVERAGE_BIT, eMin = VK_RESOLVE_MODE_MIN_BIT, - eMax = VK_RESOLVE_MODE_MAX_BIT + eMax = VK_RESOLVE_MODE_MAX_BIT, +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + eExternalFormatDownsampleANDROID = VK_RESOLVE_MODE_EXTERNAL_FORMAT_DOWNSAMPLE_ANDROID +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ }; using ResolveModeFlagBitsKHR = ResolveModeFlagBits; + using ResolveModeFlags = Flags; + using ResolveModeFlagsKHR = ResolveModeFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ResolveModeFlags allFlags = ResolveModeFlagBits::eNone | ResolveModeFlagBits::eSampleZero | + ResolveModeFlagBits::eAverage | ResolveModeFlagBits::eMin | ResolveModeFlagBits::eMax +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + | ResolveModeFlagBits::eExternalFormatDownsampleANDROID +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + ; + }; + enum class SamplerReductionMode { - eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE, - eMin = VK_SAMPLER_REDUCTION_MODE_MIN, - eMax = VK_SAMPLER_REDUCTION_MODE_MAX + eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE, + eMin = VK_SAMPLER_REDUCTION_MODE_MIN, + eMax = VK_SAMPLER_REDUCTION_MODE_MAX, + eWeightedAverageRangeclampQCOM = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_RANGECLAMP_QCOM }; using SamplerReductionModeEXT = SamplerReductionMode; @@ -2682,6 +4281,16 @@ namespace VULKAN_HPP_NAMESPACE }; using SemaphoreWaitFlagBitsKHR = SemaphoreWaitFlagBits; + using SemaphoreWaitFlags = Flags; + using SemaphoreWaitFlagsKHR = SemaphoreWaitFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SemaphoreWaitFlags allFlags = SemaphoreWaitFlagBits::eAny; + }; + //=== VK_VERSION_1_3 === enum class PipelineCreationFeedbackFlagBits : VkPipelineCreationFeedbackFlags @@ -2692,6 +4301,18 @@ namespace VULKAN_HPP_NAMESPACE }; using PipelineCreationFeedbackFlagBitsEXT = PipelineCreationFeedbackFlagBits; + using PipelineCreationFeedbackFlags = Flags; + using PipelineCreationFeedbackFlagsEXT = PipelineCreationFeedbackFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreationFeedbackFlags allFlags = PipelineCreationFeedbackFlagBits::eValid | + PipelineCreationFeedbackFlagBits::eApplicationPipelineCacheHit | + PipelineCreationFeedbackFlagBits::eBasePipelineAcceleration; + }; + enum class ToolPurposeFlagBits : VkToolPurposeFlags { eValidation = VK_TOOL_PURPOSE_VALIDATION_BIT, @@ -2704,126 +4325,228 @@ namespace VULKAN_HPP_NAMESPACE }; using ToolPurposeFlagBitsEXT = ToolPurposeFlagBits; + using ToolPurposeFlags = Flags; + using ToolPurposeFlagsEXT = ToolPurposeFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ToolPurposeFlags allFlags = + ToolPurposeFlagBits::eValidation | ToolPurposeFlagBits::eProfiling | 
ToolPurposeFlagBits::eTracing | ToolPurposeFlagBits::eAdditionalFeatures | + ToolPurposeFlagBits::eModifyingFeatures | ToolPurposeFlagBits::eDebugReportingEXT | ToolPurposeFlagBits::eDebugMarkersEXT; + }; + enum class PrivateDataSlotCreateFlagBits : VkPrivateDataSlotCreateFlags { }; using PrivateDataSlotCreateFlagBitsEXT = PrivateDataSlotCreateFlagBits; + using PrivateDataSlotCreateFlags = Flags; + using PrivateDataSlotCreateFlagsEXT = PrivateDataSlotCreateFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PrivateDataSlotCreateFlags allFlags = {}; + }; + enum class PipelineStageFlagBits2 : VkPipelineStageFlags2 { - eNone = VK_PIPELINE_STAGE_2_NONE, - eTopOfPipe = VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT, - eDrawIndirect = VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT, - eVertexInput = VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT, - eVertexShader = VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT, - eTessellationControlShader = VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT, - eTessellationEvaluationShader = VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT, - eGeometryShader = VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT, - eFragmentShader = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT, - eEarlyFragmentTests = VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT, - eLateFragmentTests = VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT, - eColorAttachmentOutput = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT, - eComputeShader = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT, - eAllTransfer = VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT, - eBottomOfPipe = VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT, - eHost = VK_PIPELINE_STAGE_2_HOST_BIT, - eAllGraphics = VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT, - eAllCommands = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT, - eCopy = VK_PIPELINE_STAGE_2_COPY_BIT, - eResolve = VK_PIPELINE_STAGE_2_RESOLVE_BIT, - eBlit = VK_PIPELINE_STAGE_2_BLIT_BIT, - eClear = VK_PIPELINE_STAGE_2_CLEAR_BIT, - eIndexInput = VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT, - eVertexAttributeInput = VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT, - ePreRasterizationShaders = VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT, -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - eVideoDecodeKHR = VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR, - eVideoEncodeKHR = VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR, -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eNone = VK_PIPELINE_STAGE_2_NONE, + eTopOfPipe = VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT, + eDrawIndirect = VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT, + eVertexInput = VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT, + eVertexShader = VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT, + eTessellationControlShader = VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT, + eTessellationEvaluationShader = VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT, + eGeometryShader = VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT, + eFragmentShader = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT, + eEarlyFragmentTests = VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT, + eLateFragmentTests = VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT, + eColorAttachmentOutput = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT, + eComputeShader = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT, + eAllTransfer = VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT, + eTransfer = VK_PIPELINE_STAGE_2_TRANSFER_BIT, + eBottomOfPipe = VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT, + eHost = VK_PIPELINE_STAGE_2_HOST_BIT, + eAllGraphics = VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT, + eAllCommands = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT, + eCopy = 
VK_PIPELINE_STAGE_2_COPY_BIT, + eResolve = VK_PIPELINE_STAGE_2_RESOLVE_BIT, + eBlit = VK_PIPELINE_STAGE_2_BLIT_BIT, + eClear = VK_PIPELINE_STAGE_2_CLEAR_BIT, + eIndexInput = VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT, + eVertexAttributeInput = VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT, + ePreRasterizationShaders = VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT, + eVideoDecodeKHR = VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR, + eVideoEncodeKHR = VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR, eTransformFeedbackEXT = VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT, eConditionalRenderingEXT = VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT, eCommandPreprocessNV = VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV, + eCommandPreprocessEXT = VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_EXT, eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eShadingRateImageNV = VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV, eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, + eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV, eRayTracingShaderKHR = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR, + eRayTracingShaderNV = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV, eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT, eTaskShaderEXT = VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT, + eTaskShaderNV = VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV, eMeshShaderEXT = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT, + eMeshShaderNV = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV, + eSubpassShaderHUAWEI = VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI, eSubpassShadingHUAWEI = VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI, eInvocationMaskHUAWEI = VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI, eAccelerationStructureCopyKHR = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR, - eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV, - eMeshShaderNV = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV, - eRayTracingShaderNV = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV, - eShadingRateImageNV = VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV, - eTaskShaderNV = VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV, - eTransfer = VK_PIPELINE_STAGE_2_TRANSFER_BIT + eMicromapBuildEXT = VK_PIPELINE_STAGE_2_MICROMAP_BUILD_BIT_EXT, + eClusterCullingShaderHUAWEI = VK_PIPELINE_STAGE_2_CLUSTER_CULLING_SHADER_BIT_HUAWEI, + eOpticalFlowNV = VK_PIPELINE_STAGE_2_OPTICAL_FLOW_BIT_NV, + eConvertCooperativeVectorMatrixNV = VK_PIPELINE_STAGE_2_CONVERT_COOPERATIVE_VECTOR_MATRIX_BIT_NV }; using PipelineStageFlagBits2KHR = PipelineStageFlagBits2; + using PipelineStageFlags2 = Flags; + using PipelineStageFlags2KHR = PipelineStageFlags2; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineStageFlags2 allFlags = + PipelineStageFlagBits2::eNone | PipelineStageFlagBits2::eTopOfPipe | PipelineStageFlagBits2::eDrawIndirect | PipelineStageFlagBits2::eVertexInput | + PipelineStageFlagBits2::eVertexShader | PipelineStageFlagBits2::eTessellationControlShader | PipelineStageFlagBits2::eTessellationEvaluationShader | + PipelineStageFlagBits2::eGeometryShader | PipelineStageFlagBits2::eFragmentShader | PipelineStageFlagBits2::eEarlyFragmentTests | + PipelineStageFlagBits2::eLateFragmentTests | PipelineStageFlagBits2::eColorAttachmentOutput | PipelineStageFlagBits2::eComputeShader | + PipelineStageFlagBits2::eAllTransfer | 
PipelineStageFlagBits2::eBottomOfPipe | PipelineStageFlagBits2::eHost | PipelineStageFlagBits2::eAllGraphics | + PipelineStageFlagBits2::eAllCommands | PipelineStageFlagBits2::eCopy | PipelineStageFlagBits2::eResolve | PipelineStageFlagBits2::eBlit | + PipelineStageFlagBits2::eClear | PipelineStageFlagBits2::eIndexInput | PipelineStageFlagBits2::eVertexAttributeInput | + PipelineStageFlagBits2::ePreRasterizationShaders | PipelineStageFlagBits2::eVideoDecodeKHR | PipelineStageFlagBits2::eVideoEncodeKHR | + PipelineStageFlagBits2::eTransformFeedbackEXT | PipelineStageFlagBits2::eConditionalRenderingEXT | PipelineStageFlagBits2::eCommandPreprocessNV | + PipelineStageFlagBits2::eFragmentShadingRateAttachmentKHR | PipelineStageFlagBits2::eAccelerationStructureBuildKHR | + PipelineStageFlagBits2::eRayTracingShaderKHR | PipelineStageFlagBits2::eFragmentDensityProcessEXT | PipelineStageFlagBits2::eTaskShaderEXT | + PipelineStageFlagBits2::eMeshShaderEXT | PipelineStageFlagBits2::eSubpassShaderHUAWEI | PipelineStageFlagBits2::eInvocationMaskHUAWEI | + PipelineStageFlagBits2::eAccelerationStructureCopyKHR | PipelineStageFlagBits2::eMicromapBuildEXT | PipelineStageFlagBits2::eClusterCullingShaderHUAWEI | + PipelineStageFlagBits2::eOpticalFlowNV | PipelineStageFlagBits2::eConvertCooperativeVectorMatrixNV; + }; + enum class AccessFlagBits2 : VkAccessFlags2 { - eNone = VK_ACCESS_2_NONE, - eIndirectCommandRead = VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT, - eIndexRead = VK_ACCESS_2_INDEX_READ_BIT, - eVertexAttributeRead = VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT, - eUniformRead = VK_ACCESS_2_UNIFORM_READ_BIT, - eInputAttachmentRead = VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT, - eShaderRead = VK_ACCESS_2_SHADER_READ_BIT, - eShaderWrite = VK_ACCESS_2_SHADER_WRITE_BIT, - eColorAttachmentRead = VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT, - eColorAttachmentWrite = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT, - eDepthStencilAttachmentRead = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT, - eDepthStencilAttachmentWrite = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, - eTransferRead = VK_ACCESS_2_TRANSFER_READ_BIT, - eTransferWrite = VK_ACCESS_2_TRANSFER_WRITE_BIT, - eHostRead = VK_ACCESS_2_HOST_READ_BIT, - eHostWrite = VK_ACCESS_2_HOST_WRITE_BIT, - eMemoryRead = VK_ACCESS_2_MEMORY_READ_BIT, - eMemoryWrite = VK_ACCESS_2_MEMORY_WRITE_BIT, - eShaderSampledRead = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT, - eShaderStorageRead = VK_ACCESS_2_SHADER_STORAGE_READ_BIT, - eShaderStorageWrite = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT, -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - eVideoDecodeReadKHR = VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR, - eVideoDecodeWriteKHR = VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR, - eVideoEncodeReadKHR = VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR, - eVideoEncodeWriteKHR = VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR, -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eNone = VK_ACCESS_2_NONE, + eIndirectCommandRead = VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT, + eIndexRead = VK_ACCESS_2_INDEX_READ_BIT, + eVertexAttributeRead = VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT, + eUniformRead = VK_ACCESS_2_UNIFORM_READ_BIT, + eInputAttachmentRead = VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT, + eShaderRead = VK_ACCESS_2_SHADER_READ_BIT, + eShaderWrite = VK_ACCESS_2_SHADER_WRITE_BIT, + eColorAttachmentRead = VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT, + eColorAttachmentWrite = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT, + eDepthStencilAttachmentRead = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT, + eDepthStencilAttachmentWrite = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, + 
eTransferRead = VK_ACCESS_2_TRANSFER_READ_BIT, + eTransferWrite = VK_ACCESS_2_TRANSFER_WRITE_BIT, + eHostRead = VK_ACCESS_2_HOST_READ_BIT, + eHostWrite = VK_ACCESS_2_HOST_WRITE_BIT, + eMemoryRead = VK_ACCESS_2_MEMORY_READ_BIT, + eMemoryWrite = VK_ACCESS_2_MEMORY_WRITE_BIT, + eShaderSampledRead = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT, + eShaderStorageRead = VK_ACCESS_2_SHADER_STORAGE_READ_BIT, + eShaderStorageWrite = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT, + eVideoDecodeReadKHR = VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR, + eVideoDecodeWriteKHR = VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR, + eVideoEncodeReadKHR = VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR, + eVideoEncodeWriteKHR = VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR, eTransformFeedbackWriteEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, eTransformFeedbackCounterReadEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, eTransformFeedbackCounterWriteEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, eConditionalRenderingReadEXT = VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT, eCommandPreprocessReadNV = VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV, + eCommandPreprocessReadEXT = VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_EXT, eCommandPreprocessWriteNV = VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV, + eCommandPreprocessWriteEXT = VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_EXT, eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR, + eShadingRateImageReadNV = VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV, eAccelerationStructureReadKHR = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR, + eAccelerationStructureReadNV = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV, eAccelerationStructureWriteKHR = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, + eAccelerationStructureWriteNV = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV, eFragmentDensityMapReadEXT = VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, eColorAttachmentReadNoncoherentEXT = VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT, + eDescriptorBufferReadEXT = VK_ACCESS_2_DESCRIPTOR_BUFFER_READ_BIT_EXT, eInvocationMaskReadHUAWEI = VK_ACCESS_2_INVOCATION_MASK_READ_BIT_HUAWEI, eShaderBindingTableReadKHR = VK_ACCESS_2_SHADER_BINDING_TABLE_READ_BIT_KHR, - eAccelerationStructureReadNV = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV, - eAccelerationStructureWriteNV = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV, - eShadingRateImageReadNV = VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV + eMicromapReadEXT = VK_ACCESS_2_MICROMAP_READ_BIT_EXT, + eMicromapWriteEXT = VK_ACCESS_2_MICROMAP_WRITE_BIT_EXT, + eOpticalFlowReadNV = VK_ACCESS_2_OPTICAL_FLOW_READ_BIT_NV, + eOpticalFlowWriteNV = VK_ACCESS_2_OPTICAL_FLOW_WRITE_BIT_NV }; using AccessFlagBits2KHR = AccessFlagBits2; + using AccessFlags2 = Flags; + using AccessFlags2KHR = AccessFlags2; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR AccessFlags2 allFlags = + AccessFlagBits2::eNone | AccessFlagBits2::eIndirectCommandRead | AccessFlagBits2::eIndexRead | AccessFlagBits2::eVertexAttributeRead | + AccessFlagBits2::eUniformRead | AccessFlagBits2::eInputAttachmentRead | AccessFlagBits2::eShaderRead | AccessFlagBits2::eShaderWrite | + AccessFlagBits2::eColorAttachmentRead | AccessFlagBits2::eColorAttachmentWrite | AccessFlagBits2::eDepthStencilAttachmentRead | + AccessFlagBits2::eDepthStencilAttachmentWrite | AccessFlagBits2::eTransferRead | AccessFlagBits2::eTransferWrite | AccessFlagBits2::eHostRead | + 
AccessFlagBits2::eHostWrite | AccessFlagBits2::eMemoryRead | AccessFlagBits2::eMemoryWrite | AccessFlagBits2::eShaderSampledRead | + AccessFlagBits2::eShaderStorageRead | AccessFlagBits2::eShaderStorageWrite | AccessFlagBits2::eVideoDecodeReadKHR | + AccessFlagBits2::eVideoDecodeWriteKHR | AccessFlagBits2::eVideoEncodeReadKHR | AccessFlagBits2::eVideoEncodeWriteKHR | + AccessFlagBits2::eTransformFeedbackWriteEXT | AccessFlagBits2::eTransformFeedbackCounterReadEXT | AccessFlagBits2::eTransformFeedbackCounterWriteEXT | + AccessFlagBits2::eConditionalRenderingReadEXT | AccessFlagBits2::eCommandPreprocessReadNV | AccessFlagBits2::eCommandPreprocessWriteNV | + AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR | AccessFlagBits2::eAccelerationStructureReadKHR | + AccessFlagBits2::eAccelerationStructureWriteKHR | AccessFlagBits2::eFragmentDensityMapReadEXT | AccessFlagBits2::eColorAttachmentReadNoncoherentEXT | + AccessFlagBits2::eDescriptorBufferReadEXT | AccessFlagBits2::eInvocationMaskReadHUAWEI | AccessFlagBits2::eShaderBindingTableReadKHR | + AccessFlagBits2::eMicromapReadEXT | AccessFlagBits2::eMicromapWriteEXT | AccessFlagBits2::eOpticalFlowReadNV | AccessFlagBits2::eOpticalFlowWriteNV; + }; + enum class SubmitFlagBits : VkSubmitFlags { eProtected = VK_SUBMIT_PROTECTED_BIT }; using SubmitFlagBitsKHR = SubmitFlagBits; + using SubmitFlags = Flags; + using SubmitFlagsKHR = SubmitFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SubmitFlags allFlags = SubmitFlagBits::eProtected; + }; + enum class RenderingFlagBits : VkRenderingFlags { eContentsSecondaryCommandBuffers = VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT, eSuspending = VK_RENDERING_SUSPENDING_BIT, eResuming = VK_RENDERING_RESUMING_BIT, - eEnableLegacyDitheringEXT = VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT + eEnableLegacyDitheringEXT = VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT, + eContentsInlineKHR = VK_RENDERING_CONTENTS_INLINE_BIT_KHR, + eContentsInlineEXT = VK_RENDERING_CONTENTS_INLINE_BIT_EXT }; using RenderingFlagBitsKHR = RenderingFlagBits; + using RenderingFlags = Flags; + using RenderingFlagsKHR = RenderingFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR RenderingFlags allFlags = RenderingFlagBits::eContentsSecondaryCommandBuffers | RenderingFlagBits::eSuspending | + RenderingFlagBits::eResuming | RenderingFlagBits::eEnableLegacyDitheringEXT | + RenderingFlagBits::eContentsInlineKHR; + }; + enum class FormatFeatureFlagBits2 : VkFormatFeatureFlags2 { eSampledImage = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT, @@ -2839,7 +4562,6 @@ namespace VULKAN_HPP_NAMESPACE eBlitSrc = VK_FORMAT_FEATURE_2_BLIT_SRC_BIT, eBlitDst = VK_FORMAT_FEATURE_2_BLIT_DST_BIT, eSampledImageFilterLinear = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT, - eSampledImageFilterCubic = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT, eTransferSrc = VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT, eTransferDst = VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT, eSampledImageFilterMinmax = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT, @@ -2849,35 +4571,282 @@ namespace VULKAN_HPP_NAMESPACE eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT, eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = 
VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, - eDisjoint = VK_FORMAT_FEATURE_2_DISJOINT_BIT, - eCositedChromaSamples = VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT, - eStorageReadWithoutFormat = VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT, - eStorageWriteWithoutFormat = VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT, - eSampledImageDepthComparison = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT, -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - eVideoDecodeOutputKHR = VK_FORMAT_FEATURE_2_VIDEO_DECODE_OUTPUT_BIT_KHR, - eVideoDecodeDpbKHR = VK_FORMAT_FEATURE_2_VIDEO_DECODE_DPB_BIT_KHR, -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eDisjoint = VK_FORMAT_FEATURE_2_DISJOINT_BIT, + eCositedChromaSamples = VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT, + eStorageReadWithoutFormat = VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT, + eStorageWriteWithoutFormat = VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT, + eSampledImageDepthComparison = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT, + eSampledImageFilterCubic = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT, + eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT, + eHostImageTransfer = VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT, + eHostImageTransferEXT = VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT_EXT, + eVideoDecodeOutputKHR = VK_FORMAT_FEATURE_2_VIDEO_DECODE_OUTPUT_BIT_KHR, + eVideoDecodeDpbKHR = VK_FORMAT_FEATURE_2_VIDEO_DECODE_DPB_BIT_KHR, eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR, eFragmentDensityMapEXT = VK_FORMAT_FEATURE_2_FRAGMENT_DENSITY_MAP_BIT_EXT, eFragmentShadingRateAttachmentKHR = VK_FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - eVideoEncodeInputKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR, - eVideoEncodeDpbKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR, -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - eLinearColorAttachmentNV = VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV, - eWeightImageQCOM = VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM, - eWeightSampledImageQCOM = VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM, - eBlockMatchingQCOM = VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM, - eBoxFilterSampledQCOM = VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM, - eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT + eVideoEncodeInputKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR, + eVideoEncodeDpbKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR, + eAccelerationStructureRadiusBufferNV = VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_RADIUS_BUFFER_BIT_NV, + eLinearColorAttachmentNV = VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV, + eWeightImageQCOM = VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM, + eWeightSampledImageQCOM = VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM, + eBlockMatchingQCOM = VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM, + eBoxFilterSampledQCOM = VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM, + eOpticalFlowImageNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV, + eOpticalFlowVectorNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV, + eOpticalFlowCostNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV, + eVideoEncodeQuantizationDeltaMapKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR, + eVideoEncodeEmphasisMapKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_EMPHASIS_MAP_BIT_KHR }; using FormatFeatureFlagBits2KHR = 
FormatFeatureFlagBits2;
-  //=== VK_KHR_surface ===
+  using FormatFeatureFlags2 = Flags<FormatFeatureFlagBits2>;
+  using FormatFeatureFlags2KHR = FormatFeatureFlags2;
-  enum class SurfaceTransformFlagBitsKHR : VkSurfaceTransformFlagsKHR
-  {
+  template <>
+  struct FlagTraits<FormatFeatureFlagBits2>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR FormatFeatureFlags2 allFlags =
+      FormatFeatureFlagBits2::eSampledImage | FormatFeatureFlagBits2::eStorageImage | FormatFeatureFlagBits2::eStorageImageAtomic |
+      FormatFeatureFlagBits2::eUniformTexelBuffer | FormatFeatureFlagBits2::eStorageTexelBuffer | FormatFeatureFlagBits2::eStorageTexelBufferAtomic |
+      FormatFeatureFlagBits2::eVertexBuffer | FormatFeatureFlagBits2::eColorAttachment | FormatFeatureFlagBits2::eColorAttachmentBlend |
+      FormatFeatureFlagBits2::eDepthStencilAttachment | FormatFeatureFlagBits2::eBlitSrc | FormatFeatureFlagBits2::eBlitDst |
+      FormatFeatureFlagBits2::eSampledImageFilterLinear | FormatFeatureFlagBits2::eTransferSrc | FormatFeatureFlagBits2::eTransferDst |
+      FormatFeatureFlagBits2::eSampledImageFilterMinmax | FormatFeatureFlagBits2::eMidpointChromaSamples |
+      FormatFeatureFlagBits2::eSampledImageYcbcrConversionLinearFilter | FormatFeatureFlagBits2::eSampledImageYcbcrConversionSeparateReconstructionFilter |
+      FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicit |
+      FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable | FormatFeatureFlagBits2::eDisjoint |
+      FormatFeatureFlagBits2::eCositedChromaSamples | FormatFeatureFlagBits2::eStorageReadWithoutFormat | FormatFeatureFlagBits2::eStorageWriteWithoutFormat |
+      FormatFeatureFlagBits2::eSampledImageDepthComparison | FormatFeatureFlagBits2::eSampledImageFilterCubic | FormatFeatureFlagBits2::eHostImageTransfer |
+      FormatFeatureFlagBits2::eVideoDecodeOutputKHR | FormatFeatureFlagBits2::eVideoDecodeDpbKHR |
+      FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR | FormatFeatureFlagBits2::eFragmentDensityMapEXT |
+      FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR | FormatFeatureFlagBits2::eVideoEncodeInputKHR | FormatFeatureFlagBits2::eVideoEncodeDpbKHR |
+      FormatFeatureFlagBits2::eAccelerationStructureRadiusBufferNV | FormatFeatureFlagBits2::eLinearColorAttachmentNV |
+      FormatFeatureFlagBits2::eWeightImageQCOM | FormatFeatureFlagBits2::eWeightSampledImageQCOM | FormatFeatureFlagBits2::eBlockMatchingQCOM |
+      FormatFeatureFlagBits2::eBoxFilterSampledQCOM | FormatFeatureFlagBits2::eOpticalFlowImageNV | FormatFeatureFlagBits2::eOpticalFlowVectorNV |
+      FormatFeatureFlagBits2::eOpticalFlowCostNV | FormatFeatureFlagBits2::eVideoEncodeQuantizationDeltaMapKHR |
+      FormatFeatureFlagBits2::eVideoEncodeEmphasisMapKHR;
+  };
+
+  //=== VK_VERSION_1_4 ===
+
+  enum class QueueGlobalPriority
+  {
+    eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW,
+    eLowKHR = VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR,
+    eMedium = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM,
+    eMediumKHR = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR,
+    eHigh = VK_QUEUE_GLOBAL_PRIORITY_HIGH,
+    eHighKHR = VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR,
+    eRealtime = VK_QUEUE_GLOBAL_PRIORITY_REALTIME,
+    eRealtimeKHR = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR
+  };
+  using QueueGlobalPriorityEXT = QueueGlobalPriority;
+  using QueueGlobalPriorityKHR = QueueGlobalPriority;
+
+  enum class LineRasterizationMode
+  {
+    eDefault = VK_LINE_RASTERIZATION_MODE_DEFAULT,
+    eDefaultKHR = VK_LINE_RASTERIZATION_MODE_DEFAULT_KHR,
+    eRectangular = VK_LINE_RASTERIZATION_MODE_RECTANGULAR,
+    eRectangularKHR =
VK_LINE_RASTERIZATION_MODE_RECTANGULAR_KHR, + eBresenham = VK_LINE_RASTERIZATION_MODE_BRESENHAM, + eBresenhamKHR = VK_LINE_RASTERIZATION_MODE_BRESENHAM_KHR, + eRectangularSmooth = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH, + eRectangularSmoothKHR = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_KHR + }; + using LineRasterizationModeEXT = LineRasterizationMode; + using LineRasterizationModeKHR = LineRasterizationMode; + + enum class MemoryUnmapFlagBits : VkMemoryUnmapFlags + { + eReserveEXT = VK_MEMORY_UNMAP_RESERVE_BIT_EXT + }; + using MemoryUnmapFlagBitsKHR = MemoryUnmapFlagBits; + + using MemoryUnmapFlags = Flags; + using MemoryUnmapFlagsKHR = MemoryUnmapFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryUnmapFlags allFlags = MemoryUnmapFlagBits::eReserveEXT; + }; + + enum class PipelineCreateFlagBits2 : VkPipelineCreateFlags2 + { + eDisableOptimization = VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT, + eAllowDerivatives = VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT, + eDerivative = VK_PIPELINE_CREATE_2_DERIVATIVE_BIT, + eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, + eDispatchBase = VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT, + eFailOnPipelineCompileRequired = VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT, + eEarlyReturnOnFailure = VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT, + eNoProtectedAccess = VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT, + eNoProtectedAccessEXT = VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT_EXT, + eProtectedAccessOnly = VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT, + eProtectedAccessOnlyEXT = VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eExecutionGraphAMDX = VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eRayTracingAllowSpheresAndLinearSweptSpheresNV = VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_SPHERES_AND_LINEAR_SWEPT_SPHERES_BIT_NV, + eEnableLegacyDitheringEXT = VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT, + eDeferCompileNV = VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_NV, + eCaptureStatisticsKHR = VK_PIPELINE_CREATE_2_CAPTURE_STATISTICS_BIT_KHR, + eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_2_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR, + eLinkTimeOptimizationEXT = VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_EXT, + eRetainLinkTimeOptimizationInfoEXT = VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT, + eLibraryKHR = VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR, + eRayTracingSkipTrianglesKHR = VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR, + eRayTracingSkipBuiltInPrimitives = VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_BUILT_IN_PRIMITIVES_BIT_KHR, + eRayTracingSkipAabbsKHR = VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_AABBS_BIT_KHR, + eRayTracingNoNullAnyHitShadersKHR = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullClosestHitShadersKHR = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullMissShadersKHR = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, + eRayTracingNoNullIntersectionShadersKHR = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR, + eRayTracingShaderGroupHandleCaptureReplayKHR = VK_PIPELINE_CREATE_2_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR, + eIndirectBindableNV = VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_NV, + eRayTracingAllowMotionNV = 
VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_NV, + eRenderingFragmentShadingRateAttachmentKHR = VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eRenderingFragmentDensityMapAttachmentEXT = VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + eRayTracingOpacityMicromapEXT = VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT, + eColorAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT, + eDepthStencilAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT, + eRayTracingDisplacementMicromapNV = VK_PIPELINE_CREATE_2_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV, + eDescriptorBufferEXT = VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT, + eDisallowOpacityMicromapARM = VK_PIPELINE_CREATE_2_DISALLOW_OPACITY_MICROMAP_BIT_ARM, + eCaptureDataKHR = VK_PIPELINE_CREATE_2_CAPTURE_DATA_BIT_KHR, + eIndirectBindableEXT = VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_EXT + }; + using PipelineCreateFlagBits2KHR = PipelineCreateFlagBits2; + + using PipelineCreateFlags2 = Flags; + using PipelineCreateFlags2KHR = PipelineCreateFlags2; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreateFlags2 allFlags = + PipelineCreateFlagBits2::eDisableOptimization | PipelineCreateFlagBits2::eAllowDerivatives | PipelineCreateFlagBits2::eDerivative | + PipelineCreateFlagBits2::eViewIndexFromDeviceIndex | PipelineCreateFlagBits2::eDispatchBase | PipelineCreateFlagBits2::eFailOnPipelineCompileRequired | + PipelineCreateFlagBits2::eEarlyReturnOnFailure | PipelineCreateFlagBits2::eNoProtectedAccess | PipelineCreateFlagBits2::eProtectedAccessOnly +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | PipelineCreateFlagBits2::eExecutionGraphAMDX +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | PipelineCreateFlagBits2::eRayTracingAllowSpheresAndLinearSweptSpheresNV | PipelineCreateFlagBits2::eEnableLegacyDitheringEXT | + PipelineCreateFlagBits2::eDeferCompileNV | PipelineCreateFlagBits2::eCaptureStatisticsKHR | PipelineCreateFlagBits2::eCaptureInternalRepresentationsKHR | + PipelineCreateFlagBits2::eLinkTimeOptimizationEXT | PipelineCreateFlagBits2::eRetainLinkTimeOptimizationInfoEXT | PipelineCreateFlagBits2::eLibraryKHR | + PipelineCreateFlagBits2::eRayTracingSkipTrianglesKHR | PipelineCreateFlagBits2::eRayTracingSkipAabbsKHR | + PipelineCreateFlagBits2::eRayTracingNoNullAnyHitShadersKHR | PipelineCreateFlagBits2::eRayTracingNoNullClosestHitShadersKHR | + PipelineCreateFlagBits2::eRayTracingNoNullMissShadersKHR | PipelineCreateFlagBits2::eRayTracingNoNullIntersectionShadersKHR | + PipelineCreateFlagBits2::eRayTracingShaderGroupHandleCaptureReplayKHR | PipelineCreateFlagBits2::eIndirectBindableNV | + PipelineCreateFlagBits2::eRayTracingAllowMotionNV | PipelineCreateFlagBits2::eRenderingFragmentShadingRateAttachmentKHR | + PipelineCreateFlagBits2::eRenderingFragmentDensityMapAttachmentEXT | PipelineCreateFlagBits2::eRayTracingOpacityMicromapEXT | + PipelineCreateFlagBits2::eColorAttachmentFeedbackLoopEXT | PipelineCreateFlagBits2::eDepthStencilAttachmentFeedbackLoopEXT | + PipelineCreateFlagBits2::eRayTracingDisplacementMicromapNV | PipelineCreateFlagBits2::eDescriptorBufferEXT | + PipelineCreateFlagBits2::eDisallowOpacityMicromapARM | PipelineCreateFlagBits2::eCaptureDataKHR | PipelineCreateFlagBits2::eIndirectBindableEXT; + }; + + enum class BufferUsageFlagBits2 : VkBufferUsageFlags2 + { + eTransferSrc = 
VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT, + eTransferDst = VK_BUFFER_USAGE_2_TRANSFER_DST_BIT, + eUniformTexelBuffer = VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT, + eStorageTexelBuffer = VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT, + eUniformBuffer = VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT, + eStorageBuffer = VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT, + eIndexBuffer = VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT, + eVertexBuffer = VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT, + eIndirectBuffer = VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT, + eShaderDeviceAddress = VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eExecutionGraphScratchAMDX = VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eConditionalRenderingEXT = VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_EXT, + eShaderBindingTableKHR = VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR, + eRayTracingNV = VK_BUFFER_USAGE_2_RAY_TRACING_BIT_NV, + eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT, + eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT, + eVideoDecodeSrcKHR = VK_BUFFER_USAGE_2_VIDEO_DECODE_SRC_BIT_KHR, + eVideoDecodeDstKHR = VK_BUFFER_USAGE_2_VIDEO_DECODE_DST_BIT_KHR, + eVideoEncodeDstKHR = VK_BUFFER_USAGE_2_VIDEO_ENCODE_DST_BIT_KHR, + eVideoEncodeSrcKHR = VK_BUFFER_USAGE_2_VIDEO_ENCODE_SRC_BIT_KHR, + eAccelerationStructureBuildInputReadOnlyKHR = VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR, + eAccelerationStructureStorageKHR = VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR, + eSamplerDescriptorBufferEXT = VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT, + eResourceDescriptorBufferEXT = VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT, + ePushDescriptorsDescriptorBufferEXT = VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT, + eMicromapBuildInputReadOnlyEXT = VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT, + eMicromapStorageEXT = VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_EXT, + ePreprocessBufferEXT = VK_BUFFER_USAGE_2_PREPROCESS_BUFFER_BIT_EXT + }; + using BufferUsageFlagBits2KHR = BufferUsageFlagBits2; + + using BufferUsageFlags2 = Flags; + using BufferUsageFlags2KHR = BufferUsageFlags2; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR BufferUsageFlags2 allFlags = + BufferUsageFlagBits2::eTransferSrc | BufferUsageFlagBits2::eTransferDst | BufferUsageFlagBits2::eUniformTexelBuffer | + BufferUsageFlagBits2::eStorageTexelBuffer | BufferUsageFlagBits2::eUniformBuffer | BufferUsageFlagBits2::eStorageBuffer | + BufferUsageFlagBits2::eIndexBuffer | BufferUsageFlagBits2::eVertexBuffer | BufferUsageFlagBits2::eIndirectBuffer | + BufferUsageFlagBits2::eShaderDeviceAddress +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | BufferUsageFlagBits2::eExecutionGraphScratchAMDX +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | BufferUsageFlagBits2::eConditionalRenderingEXT | BufferUsageFlagBits2::eShaderBindingTableKHR | BufferUsageFlagBits2::eTransformFeedbackBufferEXT | + BufferUsageFlagBits2::eTransformFeedbackCounterBufferEXT | BufferUsageFlagBits2::eVideoDecodeSrcKHR | BufferUsageFlagBits2::eVideoDecodeDstKHR | + BufferUsageFlagBits2::eVideoEncodeDstKHR | BufferUsageFlagBits2::eVideoEncodeSrcKHR | BufferUsageFlagBits2::eAccelerationStructureBuildInputReadOnlyKHR | + BufferUsageFlagBits2::eAccelerationStructureStorageKHR | BufferUsageFlagBits2::eSamplerDescriptorBufferEXT | + 
BufferUsageFlagBits2::eResourceDescriptorBufferEXT | BufferUsageFlagBits2::ePushDescriptorsDescriptorBufferEXT |
+      BufferUsageFlagBits2::eMicromapBuildInputReadOnlyEXT | BufferUsageFlagBits2::eMicromapStorageEXT | BufferUsageFlagBits2::ePreprocessBufferEXT;
+  };
+
+  enum class PipelineRobustnessBufferBehavior
+  {
+    eDeviceDefault = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT,
+    eDisabled = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED,
+    eRobustBufferAccess = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS,
+    eRobustBufferAccess2 = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2
+  };
+  using PipelineRobustnessBufferBehaviorEXT = PipelineRobustnessBufferBehavior;
+
+  enum class PipelineRobustnessImageBehavior
+  {
+    eDeviceDefault = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT,
+    eDisabled = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED,
+    eRobustImageAccess = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS,
+    eRobustImageAccess2 = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2
+  };
+  using PipelineRobustnessImageBehaviorEXT = PipelineRobustnessImageBehavior;
+
+  enum class HostImageCopyFlagBits : VkHostImageCopyFlags
+  {
+    eMemcpy = VK_HOST_IMAGE_COPY_MEMCPY
+  };
+  using HostImageCopyFlagBitsEXT = HostImageCopyFlagBits;
+
+  using HostImageCopyFlags = Flags<HostImageCopyFlagBits>;
+  using HostImageCopyFlagsEXT = HostImageCopyFlags;
+
+  template <>
+  struct FlagTraits<HostImageCopyFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR HostImageCopyFlags allFlags = HostImageCopyFlagBits::eMemcpy;
+  };
+
+  //=== VK_KHR_surface ===
+
+  enum class SurfaceTransformFlagBitsKHR : VkSurfaceTransformFlagsKHR
+  {
     eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
     eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
     eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
@@ -2889,6 +4858,18 @@ namespace VULKAN_HPP_NAMESPACE
     eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR
   };
+  using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR>;
+
+  template <>
+  struct FlagTraits<SurfaceTransformFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SurfaceTransformFlagsKHR allFlags =
+      SurfaceTransformFlagBitsKHR::eIdentity | SurfaceTransformFlagBitsKHR::eRotate90 | SurfaceTransformFlagBitsKHR::eRotate180 |
+      SurfaceTransformFlagBitsKHR::eRotate270 | SurfaceTransformFlagBitsKHR::eHorizontalMirror | SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 |
+      SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 | SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 | SurfaceTransformFlagBitsKHR::eInherit;
+  };
+
   enum class PresentModeKHR
   {
     eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR,
@@ -2896,29 +4877,30 @@ namespace VULKAN_HPP_NAMESPACE
     eFifo = VK_PRESENT_MODE_FIFO_KHR,
     eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR,
     eSharedDemandRefresh = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR,
-    eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR
+    eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR,
+    eFifoLatestReadyEXT = VK_PRESENT_MODE_FIFO_LATEST_READY_EXT
   };
 
   enum class ColorSpaceKHR
   {
     eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
+    eVkColorspaceSrgbNonlinear = VK_COLORSPACE_SRGB_NONLINEAR_KHR,
     eDisplayP3NonlinearEXT = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT,
     eExtendedSrgbLinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT,
     eDisplayP3LinearEXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT,
+    eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT,
+    eDciP3NonlinearEXT =
VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT, eBt709LinearEXT = VK_COLOR_SPACE_BT709_LINEAR_EXT, eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT, eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT, eHdr10St2084EXT = VK_COLOR_SPACE_HDR10_ST2084_EXT, - eDolbyvisionEXT = VK_COLOR_SPACE_DOLBYVISION_EXT, - eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT, - eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT, - eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT, - ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT, - eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT, - eDisplayNativeAMD = VK_COLOR_SPACE_DISPLAY_NATIVE_AMD, - eVkColorspaceSrgbNonlinear = VK_COLORSPACE_SRGB_NONLINEAR_KHR, - eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT + eDolbyvisionEXT VULKAN_HPP_DEPRECATED_17( "eDolbyvisionEXT is deprecated, but no reason was given in the API XML" ) = VK_COLOR_SPACE_DOLBYVISION_EXT, + eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT, + eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT, + eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT, + ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT, + eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT, + eDisplayNativeAMD = VK_COLOR_SPACE_DISPLAY_NATIVE_AMD }; enum class CompositeAlphaFlagBitsKHR : VkCompositeAlphaFlagsKHR @@ -2929,13 +4911,35 @@ namespace VULKAN_HPP_NAMESPACE eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR }; + using CompositeAlphaFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR CompositeAlphaFlagsKHR allFlags = CompositeAlphaFlagBitsKHR::eOpaque | CompositeAlphaFlagBitsKHR::ePreMultiplied | + CompositeAlphaFlagBitsKHR::ePostMultiplied | CompositeAlphaFlagBitsKHR::eInherit; + }; + //=== VK_KHR_swapchain === enum class SwapchainCreateFlagBitsKHR : VkSwapchainCreateFlagsKHR { - eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR, - eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR, - eMutableFormat = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR + eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR, + eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR, + eMutableFormat = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR, + eDeferredMemoryAllocationEXT = VK_SWAPCHAIN_CREATE_DEFERRED_MEMORY_ALLOCATION_BIT_EXT + }; + + using SwapchainCreateFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SwapchainCreateFlagsKHR allFlags = + SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions | SwapchainCreateFlagBitsKHR::eProtected | SwapchainCreateFlagBitsKHR::eMutableFormat | + SwapchainCreateFlagBitsKHR::eDeferredMemoryAllocationEXT; }; enum class DeviceGroupPresentModeFlagBitsKHR : VkDeviceGroupPresentModeFlagsKHR @@ -2946,6 +4950,17 @@ namespace VULKAN_HPP_NAMESPACE eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR }; + using DeviceGroupPresentModeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceGroupPresentModeFlagsKHR allFlags = + DeviceGroupPresentModeFlagBitsKHR::eLocal | DeviceGroupPresentModeFlagBitsKHR::eRemote | DeviceGroupPresentModeFlagBitsKHR::eSum | + DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice; + }; + //=== VK_KHR_display === enum class 
DisplayPlaneAlphaFlagBitsKHR : VkDisplayPlaneAlphaFlagsKHR @@ -2956,20 +4971,58 @@ namespace VULKAN_HPP_NAMESPACE ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR }; + using DisplayPlaneAlphaFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DisplayPlaneAlphaFlagsKHR allFlags = DisplayPlaneAlphaFlagBitsKHR::eOpaque | DisplayPlaneAlphaFlagBitsKHR::eGlobal | + DisplayPlaneAlphaFlagBitsKHR::ePerPixel | + DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied; + }; + enum class DisplayModeCreateFlagBitsKHR : VkDisplayModeCreateFlagsKHR { }; + using DisplayModeCreateFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DisplayModeCreateFlagsKHR allFlags = {}; + }; + enum class DisplaySurfaceCreateFlagBitsKHR : VkDisplaySurfaceCreateFlagsKHR { }; + using DisplaySurfaceCreateFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DisplaySurfaceCreateFlagsKHR allFlags = {}; + }; + #if defined( VK_USE_PLATFORM_XLIB_KHR ) //=== VK_KHR_xlib_surface === enum class XlibSurfaceCreateFlagBitsKHR : VkXlibSurfaceCreateFlagsKHR { }; + + using XlibSurfaceCreateFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR XlibSurfaceCreateFlagsKHR allFlags = {}; + }; #endif /*VK_USE_PLATFORM_XLIB_KHR*/ #if defined( VK_USE_PLATFORM_XCB_KHR ) @@ -2978,6 +5031,15 @@ namespace VULKAN_HPP_NAMESPACE enum class XcbSurfaceCreateFlagBitsKHR : VkXcbSurfaceCreateFlagsKHR { }; + + using XcbSurfaceCreateFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR XcbSurfaceCreateFlagsKHR allFlags = {}; + }; #endif /*VK_USE_PLATFORM_XCB_KHR*/ #if defined( VK_USE_PLATFORM_WAYLAND_KHR ) @@ -2986,6 +5048,15 @@ namespace VULKAN_HPP_NAMESPACE enum class WaylandSurfaceCreateFlagBitsKHR : VkWaylandSurfaceCreateFlagsKHR { }; + + using WaylandSurfaceCreateFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR WaylandSurfaceCreateFlagsKHR allFlags = {}; + }; #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ #if defined( VK_USE_PLATFORM_ANDROID_KHR ) @@ -2994,6 +5065,15 @@ namespace VULKAN_HPP_NAMESPACE enum class AndroidSurfaceCreateFlagBitsKHR : VkAndroidSurfaceCreateFlagsKHR { }; + + using AndroidSurfaceCreateFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR AndroidSurfaceCreateFlagsKHR allFlags = {}; + }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined( VK_USE_PLATFORM_WIN32_KHR ) @@ -3002,6 +5082,15 @@ namespace VULKAN_HPP_NAMESPACE enum class Win32SurfaceCreateFlagBitsKHR : VkWin32SurfaceCreateFlagsKHR { }; + + using Win32SurfaceCreateFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR Win32SurfaceCreateFlagsKHR allFlags = {}; + }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_EXT_debug_report === @@ -3015,53 +5104,68 @@ namespace VULKAN_HPP_NAMESPACE eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT }; 
+ using DebugReportFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DebugReportFlagsEXT allFlags = DebugReportFlagBitsEXT::eInformation | DebugReportFlagBitsEXT::eWarning | + DebugReportFlagBitsEXT::ePerformanceWarning | DebugReportFlagBitsEXT::eError | + DebugReportFlagBitsEXT::eDebug; + }; + enum class DebugReportObjectTypeEXT { - eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, - eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, - ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, - eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, - eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, - eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, - eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, - eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, - eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, - eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, - eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, - eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, - eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, - eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, - eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, - eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, - ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, - ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, - eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, - ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, - eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, - eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, - eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, - eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, - eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, - eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, - eSurfaceKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, - eSwapchainKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, - eDebugReportCallbackEXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, - eDisplayKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT, - eDisplayModeKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT, - eValidationCacheEXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, - eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, - eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, - eCuModuleNVX = VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT, - eCuFunctionNVX = VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT, - eAccelerationStructureKHR = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT, - eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT, -#if defined( VK_USE_PLATFORM_FUCHSIA ) - eBufferCollectionFUCHSIA = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT, -#endif /*VK_USE_PLATFORM_FUCHSIA*/ + eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, + eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, + ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, + eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, + eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, + eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, + eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, + eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, + eDeviceMemory = 
VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, + eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, + eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, + eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, + eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, + eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, + eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, + eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, + ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, + ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, + eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, + ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, + eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, + eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, + eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, + eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, + eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, + eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, + eSurfaceKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, + eSwapchainKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, + eDebugReportCallbackEXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, - eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT, + eDisplayKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT, + eDisplayModeKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT, + eValidationCacheEXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, + eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT, + eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT, - eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT + eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, + eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT, + eCuModuleNVX = VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT, + eCuFunctionNVX = VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT, + eAccelerationStructureKHR = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT, + eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eCudaModuleNV = VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_MODULE_NV_EXT, + eCudaFunctionNV = VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_FUNCTION_NV_EXT, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#if defined( VK_USE_PLATFORM_FUCHSIA ) + eBufferCollectionFUCHSIA = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT +#endif /*VK_USE_PLATFORM_FUCHSIA*/ }; //=== VK_AMD_rasterization_order === @@ -3072,18 +5176,29 @@ namespace VULKAN_HPP_NAMESPACE eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD }; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_queue === enum class VideoCodecOperationFlagBitsKHR : VkVideoCodecOperationFlagsKHR { - eNone = VK_VIDEO_CODEC_OPERATION_NONE_KHR, -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - eEncodeH264EXT = VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_EXT, - eEncodeH265EXT = VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_EXT, - eDecodeH264EXT = VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_EXT, - eDecodeH265EXT = VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_EXT -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eNone = 
VK_VIDEO_CODEC_OPERATION_NONE_KHR, + eEncodeH264 = VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_KHR, + eEncodeH265 = VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_KHR, + eDecodeH264 = VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR, + eDecodeH265 = VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR, + eDecodeAv1 = VK_VIDEO_CODEC_OPERATION_DECODE_AV1_BIT_KHR, + eEncodeAv1 = VK_VIDEO_CODEC_OPERATION_ENCODE_AV1_BIT_KHR + }; + + using VideoCodecOperationFlagsKHR = Flags<VideoCodecOperationFlagBitsKHR>; + + template <> + struct FlagTraits<VideoCodecOperationFlagBitsKHR> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoCodecOperationFlagsKHR allFlags = + VideoCodecOperationFlagBitsKHR::eNone | VideoCodecOperationFlagBitsKHR::eEncodeH264 | VideoCodecOperationFlagBitsKHR::eEncodeH265 | + VideoCodecOperationFlagBitsKHR::eDecodeH264 | VideoCodecOperationFlagBitsKHR::eDecodeH265 | VideoCodecOperationFlagBitsKHR::eDecodeAv1 | + VideoCodecOperationFlagBitsKHR::eEncodeAv1; }; enum class VideoChromaSubsamplingFlagBitsKHR : VkVideoChromaSubsamplingFlagsKHR @@ -3095,6 +5210,17 @@ namespace VULKAN_HPP_NAMESPACE e444 = VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR }; + using VideoChromaSubsamplingFlagsKHR = Flags<VideoChromaSubsamplingFlagBitsKHR>; + + template <> + struct FlagTraits<VideoChromaSubsamplingFlagBitsKHR> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoChromaSubsamplingFlagsKHR allFlags = + VideoChromaSubsamplingFlagBitsKHR::eInvalid | VideoChromaSubsamplingFlagBitsKHR::eMonochrome | VideoChromaSubsamplingFlagBitsKHR::e420 | + VideoChromaSubsamplingFlagBitsKHR::e422 | VideoChromaSubsamplingFlagBitsKHR::e444; + }; + enum class VideoComponentBitDepthFlagBitsKHR : VkVideoComponentBitDepthFlagsKHR { eInvalid = VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR, @@ -3103,47 +5229,120 @@ namespace VULKAN_HPP_NAMESPACE e12 = VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR }; + using VideoComponentBitDepthFlagsKHR = Flags<VideoComponentBitDepthFlagBitsKHR>; + + template <> + struct FlagTraits<VideoComponentBitDepthFlagBitsKHR> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoComponentBitDepthFlagsKHR allFlags = + VideoComponentBitDepthFlagBitsKHR::eInvalid | VideoComponentBitDepthFlagBitsKHR::e8 | VideoComponentBitDepthFlagBitsKHR::e10 | + VideoComponentBitDepthFlagBitsKHR::e12; + }; + enum class VideoCapabilityFlagBitsKHR : VkVideoCapabilityFlagsKHR { eProtectedContent = VK_VIDEO_CAPABILITY_PROTECTED_CONTENT_BIT_KHR, eSeparateReferenceImages = VK_VIDEO_CAPABILITY_SEPARATE_REFERENCE_IMAGES_BIT_KHR }; + using VideoCapabilityFlagsKHR = Flags<VideoCapabilityFlagBitsKHR>; + + template <> + struct FlagTraits<VideoCapabilityFlagBitsKHR> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoCapabilityFlagsKHR allFlags = + VideoCapabilityFlagBitsKHR::eProtectedContent | VideoCapabilityFlagBitsKHR::eSeparateReferenceImages; + }; + enum class VideoSessionCreateFlagBitsKHR : VkVideoSessionCreateFlagsKHR { - eProtectedContent = VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR + eProtectedContent = VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR, + eAllowEncodeParameterOptimizations = VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_PARAMETER_OPTIMIZATIONS_BIT_KHR, + eInlineQueries = VK_VIDEO_SESSION_CREATE_INLINE_QUERIES_BIT_KHR, + eAllowEncodeQuantizationDeltaMap = VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR, + eAllowEncodeEmphasisMap = VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_EMPHASIS_MAP_BIT_KHR, + eInlineSessionParameters = VK_VIDEO_SESSION_CREATE_INLINE_SESSION_PARAMETERS_BIT_KHR + }; + + using VideoSessionCreateFlagsKHR = Flags<VideoSessionCreateFlagBitsKHR>; + + template <> + struct 
FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoSessionCreateFlagsKHR allFlags = + VideoSessionCreateFlagBitsKHR::eProtectedContent | VideoSessionCreateFlagBitsKHR::eAllowEncodeParameterOptimizations | + VideoSessionCreateFlagBitsKHR::eInlineQueries | VideoSessionCreateFlagBitsKHR::eAllowEncodeQuantizationDeltaMap | + VideoSessionCreateFlagBitsKHR::eAllowEncodeEmphasisMap | VideoSessionCreateFlagBitsKHR::eInlineSessionParameters; }; enum class VideoCodingControlFlagBitsKHR : VkVideoCodingControlFlagsKHR { - eReset = VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR, -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - eEncodeRateControl = VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_BIT_KHR, - eEncodeRateControlLayer = VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_LAYER_BIT_KHR -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eReset = VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR, + eEncodeRateControl = VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_BIT_KHR, + eEncodeQualityLevel = VK_VIDEO_CODING_CONTROL_ENCODE_QUALITY_LEVEL_BIT_KHR + }; + + using VideoCodingControlFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoCodingControlFlagsKHR allFlags = + VideoCodingControlFlagBitsKHR::eReset | VideoCodingControlFlagBitsKHR::eEncodeRateControl | VideoCodingControlFlagBitsKHR::eEncodeQualityLevel; }; enum class QueryResultStatusKHR { - eError = VK_QUERY_RESULT_STATUS_ERROR_KHR, - eNotReady = VK_QUERY_RESULT_STATUS_NOT_READY_KHR, - eComplete = VK_QUERY_RESULT_STATUS_COMPLETE_KHR + eError = VK_QUERY_RESULT_STATUS_ERROR_KHR, + eNotReady = VK_QUERY_RESULT_STATUS_NOT_READY_KHR, + eComplete = VK_QUERY_RESULT_STATUS_COMPLETE_KHR, + eInsufficientBitstreamBufferRange = VK_QUERY_RESULT_STATUS_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_KHR }; enum class VideoSessionParametersCreateFlagBitsKHR : VkVideoSessionParametersCreateFlagsKHR { + eQuantizationMapCompatible = VK_VIDEO_SESSION_PARAMETERS_CREATE_QUANTIZATION_MAP_COMPATIBLE_BIT_KHR + }; + + using VideoSessionParametersCreateFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoSessionParametersCreateFlagsKHR allFlags = VideoSessionParametersCreateFlagBitsKHR::eQuantizationMapCompatible; }; enum class VideoBeginCodingFlagBitsKHR : VkVideoBeginCodingFlagsKHR { }; + using VideoBeginCodingFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoBeginCodingFlagsKHR allFlags = {}; + }; + enum class VideoEndCodingFlagBitsKHR : VkVideoEndCodingFlagsKHR { }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) + using VideoEndCodingFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEndCodingFlagsKHR allFlags = {}; + }; + //=== VK_KHR_video_decode_queue === enum class VideoDecodeCapabilityFlagBitsKHR : VkVideoDecodeCapabilityFlagsKHR @@ -3152,6 +5351,16 @@ namespace VULKAN_HPP_NAMESPACE eDpbAndOutputDistinct = VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_DISTINCT_BIT_KHR }; + using VideoDecodeCapabilityFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
VideoDecodeCapabilityFlagsKHR allFlags = + VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputCoincide | VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputDistinct; + }; + enum class VideoDecodeUsageFlagBitsKHR : VkVideoDecodeUsageFlagsKHR { eDefault = VK_VIDEO_DECODE_USAGE_DEFAULT_KHR, @@ -3160,10 +5369,28 @@ namespace VULKAN_HPP_NAMESPACE eStreaming = VK_VIDEO_DECODE_USAGE_STREAMING_BIT_KHR }; + using VideoDecodeUsageFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoDecodeUsageFlagsKHR allFlags = VideoDecodeUsageFlagBitsKHR::eDefault | VideoDecodeUsageFlagBitsKHR::eTranscoding | + VideoDecodeUsageFlagBitsKHR::eOffline | VideoDecodeUsageFlagBitsKHR::eStreaming; + }; + enum class VideoDecodeFlagBitsKHR : VkVideoDecodeFlagsKHR { }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + using VideoDecodeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoDecodeFlagsKHR allFlags = {}; + }; //=== VK_EXT_transform_feedback === @@ -3171,168 +5398,310 @@ namespace VULKAN_HPP_NAMESPACE { }; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_EXT_video_encode_h264 === + using PipelineRasterizationStateStreamCreateFlagsEXT = Flags; - enum class VideoEncodeH264CapabilityFlagBitsEXT : VkVideoEncodeH264CapabilityFlagsEXT + template <> + struct FlagTraits { - eDirect8X8InferenceEnabled = VK_VIDEO_ENCODE_H264_CAPABILITY_DIRECT_8X8_INFERENCE_ENABLED_BIT_EXT, - eDirect8X8InferenceDisabled = VK_VIDEO_ENCODE_H264_CAPABILITY_DIRECT_8X8_INFERENCE_DISABLED_BIT_EXT, - eSeparateColourPlane = VK_VIDEO_ENCODE_H264_CAPABILITY_SEPARATE_COLOUR_PLANE_BIT_EXT, - eQpprimeYZeroTransformBypass = VK_VIDEO_ENCODE_H264_CAPABILITY_QPPRIME_Y_ZERO_TRANSFORM_BYPASS_BIT_EXT, - eScalingLists = VK_VIDEO_ENCODE_H264_CAPABILITY_SCALING_LISTS_BIT_EXT, - eHrdCompliance = VK_VIDEO_ENCODE_H264_CAPABILITY_HRD_COMPLIANCE_BIT_EXT, - eChromaQpOffset = VK_VIDEO_ENCODE_H264_CAPABILITY_CHROMA_QP_OFFSET_BIT_EXT, - eSecondChromaQpOffset = VK_VIDEO_ENCODE_H264_CAPABILITY_SECOND_CHROMA_QP_OFFSET_BIT_EXT, - ePicInitQpMinus26 = VK_VIDEO_ENCODE_H264_CAPABILITY_PIC_INIT_QP_MINUS26_BIT_EXT, - eWeightedPred = VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_PRED_BIT_EXT, - eWeightedBipredExplicit = VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BIPRED_EXPLICIT_BIT_EXT, - eWeightedBipredImplicit = VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BIPRED_IMPLICIT_BIT_EXT, - eWeightedPredNoTable = VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_PRED_NO_TABLE_BIT_EXT, - eTransform8X8 = VK_VIDEO_ENCODE_H264_CAPABILITY_TRANSFORM_8X8_BIT_EXT, - eCabac = VK_VIDEO_ENCODE_H264_CAPABILITY_CABAC_BIT_EXT, - eCavlc = VK_VIDEO_ENCODE_H264_CAPABILITY_CAVLC_BIT_EXT, - eDeblockingFilterDisabled = VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_DISABLED_BIT_EXT, - eDeblockingFilterEnabled = VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_ENABLED_BIT_EXT, - eDeblockingFilterPartial = VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_PARTIAL_BIT_EXT, - eDisableDirectSpatialMvPred = VK_VIDEO_ENCODE_H264_CAPABILITY_DISABLE_DIRECT_SPATIAL_MV_PRED_BIT_EXT, - eMultipleSlicePerFrame = VK_VIDEO_ENCODE_H264_CAPABILITY_MULTIPLE_SLICE_PER_FRAME_BIT_EXT, - eSliceMbCount = VK_VIDEO_ENCODE_H264_CAPABILITY_SLICE_MB_COUNT_BIT_EXT, - eRowUnalignedSlice = VK_VIDEO_ENCODE_H264_CAPABILITY_ROW_UNALIGNED_SLICE_BIT_EXT, - eDifferentSliceType = VK_VIDEO_ENCODE_H264_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_EXT, - 
eBFrameInL1List = VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_EXT + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineRasterizationStateStreamCreateFlagsEXT allFlags = {}; }; - enum class VideoEncodeH264InputModeFlagBitsEXT : VkVideoEncodeH264InputModeFlagsEXT + //=== VK_KHR_video_encode_h264 === + + enum class VideoEncodeH264CapabilityFlagBitsKHR : VkVideoEncodeH264CapabilityFlagsKHR { - eFrame = VK_VIDEO_ENCODE_H264_INPUT_MODE_FRAME_BIT_EXT, - eSlice = VK_VIDEO_ENCODE_H264_INPUT_MODE_SLICE_BIT_EXT, - eNonVcl = VK_VIDEO_ENCODE_H264_INPUT_MODE_NON_VCL_BIT_EXT + eHrdCompliance = VK_VIDEO_ENCODE_H264_CAPABILITY_HRD_COMPLIANCE_BIT_KHR, + ePredictionWeightTableGenerated = VK_VIDEO_ENCODE_H264_CAPABILITY_PREDICTION_WEIGHT_TABLE_GENERATED_BIT_KHR, + eRowUnalignedSlice = VK_VIDEO_ENCODE_H264_CAPABILITY_ROW_UNALIGNED_SLICE_BIT_KHR, + eDifferentSliceType = VK_VIDEO_ENCODE_H264_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_KHR, + eBFrameInL0List = VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L0_LIST_BIT_KHR, + eBFrameInL1List = VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_KHR, + ePerPictureTypeMinMaxQp = VK_VIDEO_ENCODE_H264_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR, + ePerSliceConstantQp = VK_VIDEO_ENCODE_H264_CAPABILITY_PER_SLICE_CONSTANT_QP_BIT_KHR, + eGeneratePrefixNalu = VK_VIDEO_ENCODE_H264_CAPABILITY_GENERATE_PREFIX_NALU_BIT_KHR, + eMbQpDiffWraparound = VK_VIDEO_ENCODE_H264_CAPABILITY_MB_QP_DIFF_WRAPAROUND_BIT_KHR }; - enum class VideoEncodeH264OutputModeFlagBitsEXT : VkVideoEncodeH264OutputModeFlagsEXT + using VideoEncodeH264CapabilityFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH264CapabilityFlagsKHR allFlags = + VideoEncodeH264CapabilityFlagBitsKHR::eHrdCompliance | VideoEncodeH264CapabilityFlagBitsKHR::ePredictionWeightTableGenerated | + VideoEncodeH264CapabilityFlagBitsKHR::eRowUnalignedSlice | VideoEncodeH264CapabilityFlagBitsKHR::eDifferentSliceType | + VideoEncodeH264CapabilityFlagBitsKHR::eBFrameInL0List | VideoEncodeH264CapabilityFlagBitsKHR::eBFrameInL1List | + VideoEncodeH264CapabilityFlagBitsKHR::ePerPictureTypeMinMaxQp | VideoEncodeH264CapabilityFlagBitsKHR::ePerSliceConstantQp | + VideoEncodeH264CapabilityFlagBitsKHR::eGeneratePrefixNalu | VideoEncodeH264CapabilityFlagBitsKHR::eMbQpDiffWraparound; + }; + + enum class VideoEncodeH264StdFlagBitsKHR : VkVideoEncodeH264StdFlagsKHR + { + eSeparateColorPlaneFlagSet = VK_VIDEO_ENCODE_H264_STD_SEPARATE_COLOR_PLANE_FLAG_SET_BIT_KHR, + eQpprimeYZeroTransformBypassFlagSet = VK_VIDEO_ENCODE_H264_STD_QPPRIME_Y_ZERO_TRANSFORM_BYPASS_FLAG_SET_BIT_KHR, + eScalingMatrixPresentFlagSet = VK_VIDEO_ENCODE_H264_STD_SCALING_MATRIX_PRESENT_FLAG_SET_BIT_KHR, + eChromaQpIndexOffset = VK_VIDEO_ENCODE_H264_STD_CHROMA_QP_INDEX_OFFSET_BIT_KHR, + eSecondChromaQpIndexOffset = VK_VIDEO_ENCODE_H264_STD_SECOND_CHROMA_QP_INDEX_OFFSET_BIT_KHR, + ePicInitQpMinus26 = VK_VIDEO_ENCODE_H264_STD_PIC_INIT_QP_MINUS26_BIT_KHR, + eWeightedPredFlagSet = VK_VIDEO_ENCODE_H264_STD_WEIGHTED_PRED_FLAG_SET_BIT_KHR, + eWeightedBipredIdcExplicit = VK_VIDEO_ENCODE_H264_STD_WEIGHTED_BIPRED_IDC_EXPLICIT_BIT_KHR, + eWeightedBipredIdcImplicit = VK_VIDEO_ENCODE_H264_STD_WEIGHTED_BIPRED_IDC_IMPLICIT_BIT_KHR, + eTransform8X8ModeFlagSet = VK_VIDEO_ENCODE_H264_STD_TRANSFORM_8X8_MODE_FLAG_SET_BIT_KHR, + eDirectSpatialMvPredFlagUnset = VK_VIDEO_ENCODE_H264_STD_DIRECT_SPATIAL_MV_PRED_FLAG_UNSET_BIT_KHR, 
+ eEntropyCodingModeFlagUnset = VK_VIDEO_ENCODE_H264_STD_ENTROPY_CODING_MODE_FLAG_UNSET_BIT_KHR, + eEntropyCodingModeFlagSet = VK_VIDEO_ENCODE_H264_STD_ENTROPY_CODING_MODE_FLAG_SET_BIT_KHR, + eDirect8X8InferenceFlagUnset = VK_VIDEO_ENCODE_H264_STD_DIRECT_8X8_INFERENCE_FLAG_UNSET_BIT_KHR, + eConstrainedIntraPredFlagSet = VK_VIDEO_ENCODE_H264_STD_CONSTRAINED_INTRA_PRED_FLAG_SET_BIT_KHR, + eDeblockingFilterDisabled = VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_DISABLED_BIT_KHR, + eDeblockingFilterEnabled = VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_ENABLED_BIT_KHR, + eDeblockingFilterPartial = VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_PARTIAL_BIT_KHR, + eSliceQpDelta = VK_VIDEO_ENCODE_H264_STD_SLICE_QP_DELTA_BIT_KHR, + eDifferentSliceQpDelta = VK_VIDEO_ENCODE_H264_STD_DIFFERENT_SLICE_QP_DELTA_BIT_KHR + }; + + using VideoEncodeH264StdFlagsKHR = Flags; + + template <> + struct FlagTraits { - eFrame = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_FRAME_BIT_EXT, - eSlice = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_SLICE_BIT_EXT, - eNonVcl = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_NON_VCL_BIT_EXT + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH264StdFlagsKHR allFlags = + VideoEncodeH264StdFlagBitsKHR::eSeparateColorPlaneFlagSet | VideoEncodeH264StdFlagBitsKHR::eQpprimeYZeroTransformBypassFlagSet | + VideoEncodeH264StdFlagBitsKHR::eScalingMatrixPresentFlagSet | VideoEncodeH264StdFlagBitsKHR::eChromaQpIndexOffset | + VideoEncodeH264StdFlagBitsKHR::eSecondChromaQpIndexOffset | VideoEncodeH264StdFlagBitsKHR::ePicInitQpMinus26 | + VideoEncodeH264StdFlagBitsKHR::eWeightedPredFlagSet | VideoEncodeH264StdFlagBitsKHR::eWeightedBipredIdcExplicit | + VideoEncodeH264StdFlagBitsKHR::eWeightedBipredIdcImplicit | VideoEncodeH264StdFlagBitsKHR::eTransform8X8ModeFlagSet | + VideoEncodeH264StdFlagBitsKHR::eDirectSpatialMvPredFlagUnset | VideoEncodeH264StdFlagBitsKHR::eEntropyCodingModeFlagUnset | + VideoEncodeH264StdFlagBitsKHR::eEntropyCodingModeFlagSet | VideoEncodeH264StdFlagBitsKHR::eDirect8X8InferenceFlagUnset | + VideoEncodeH264StdFlagBitsKHR::eConstrainedIntraPredFlagSet | VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterDisabled | + VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterEnabled | VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterPartial | + VideoEncodeH264StdFlagBitsKHR::eSliceQpDelta | VideoEncodeH264StdFlagBitsKHR::eDifferentSliceQpDelta; }; - enum class VideoEncodeH264RateControlStructureEXT + enum class VideoEncodeH264RateControlFlagBitsKHR : VkVideoEncodeH264RateControlFlagsKHR { - eUnknown = VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT, - eFlat = VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_FLAT_EXT, - eDyadic = VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_DYADIC_EXT + eAttemptHrdCompliance = VK_VIDEO_ENCODE_H264_RATE_CONTROL_ATTEMPT_HRD_COMPLIANCE_BIT_KHR, + eRegularGop = VK_VIDEO_ENCODE_H264_RATE_CONTROL_REGULAR_GOP_BIT_KHR, + eReferencePatternFlat = VK_VIDEO_ENCODE_H264_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR, + eReferencePatternDyadic = VK_VIDEO_ENCODE_H264_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR, + eTemporalLayerPatternDyadic = VK_VIDEO_ENCODE_H264_RATE_CONTROL_TEMPORAL_LAYER_PATTERN_DYADIC_BIT_KHR }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_EXT_video_encode_h265 === + using VideoEncodeH264RateControlFlagsKHR = Flags; - enum class VideoEncodeH265CapabilityFlagBitsEXT : VkVideoEncodeH265CapabilityFlagsEXT + template <> + struct FlagTraits { - eSeparateColourPlane = 
VK_VIDEO_ENCODE_H265_CAPABILITY_SEPARATE_COLOUR_PLANE_BIT_EXT, - eScalingLists = VK_VIDEO_ENCODE_H265_CAPABILITY_SCALING_LISTS_BIT_EXT, - eSampleAdaptiveOffsetEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_SAMPLE_ADAPTIVE_OFFSET_ENABLED_BIT_EXT, - ePcmEnable = VK_VIDEO_ENCODE_H265_CAPABILITY_PCM_ENABLE_BIT_EXT, - eSpsTemporalMvpEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_SPS_TEMPORAL_MVP_ENABLED_BIT_EXT, - eHrdCompliance = VK_VIDEO_ENCODE_H265_CAPABILITY_HRD_COMPLIANCE_BIT_EXT, - eInitQpMinus26 = VK_VIDEO_ENCODE_H265_CAPABILITY_INIT_QP_MINUS26_BIT_EXT, - eLog2ParallelMergeLevelMinus2 = VK_VIDEO_ENCODE_H265_CAPABILITY_LOG2_PARALLEL_MERGE_LEVEL_MINUS2_BIT_EXT, - eSignDataHidingEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_SIGN_DATA_HIDING_ENABLED_BIT_EXT, - eTransformSkipEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSFORM_SKIP_ENABLED_BIT_EXT, - eTransformSkipDisabled = VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSFORM_SKIP_DISABLED_BIT_EXT, - ePpsSliceChromaQpOffsetsPresent = VK_VIDEO_ENCODE_H265_CAPABILITY_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_BIT_EXT, - eWeightedPred = VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_PRED_BIT_EXT, - eWeightedBipred = VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_BIPRED_BIT_EXT, - eWeightedPredNoTable = VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_PRED_NO_TABLE_BIT_EXT, - eTransquantBypassEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSQUANT_BYPASS_ENABLED_BIT_EXT, - eEntropyCodingSyncEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_ENTROPY_CODING_SYNC_ENABLED_BIT_EXT, - eDeblockingFilterOverrideEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_DEBLOCKING_FILTER_OVERRIDE_ENABLED_BIT_EXT, - eMultipleTilePerFrame = VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILE_PER_FRAME_BIT_EXT, - eMultipleSlicePerTile = VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_PER_TILE_BIT_EXT, - eMultipleTilePerSlice = VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILE_PER_SLICE_BIT_EXT, - eSliceSegmentCtbCount = VK_VIDEO_ENCODE_H265_CAPABILITY_SLICE_SEGMENT_CTB_COUNT_BIT_EXT, - eRowUnalignedSliceSegment = VK_VIDEO_ENCODE_H265_CAPABILITY_ROW_UNALIGNED_SLICE_SEGMENT_BIT_EXT, - eDependentSliceSegment = VK_VIDEO_ENCODE_H265_CAPABILITY_DEPENDENT_SLICE_SEGMENT_BIT_EXT, - eDifferentSliceType = VK_VIDEO_ENCODE_H265_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_EXT, - eBFrameInL1List = VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_EXT + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH264RateControlFlagsKHR allFlags = + VideoEncodeH264RateControlFlagBitsKHR::eAttemptHrdCompliance | VideoEncodeH264RateControlFlagBitsKHR::eRegularGop | + VideoEncodeH264RateControlFlagBitsKHR::eReferencePatternFlat | VideoEncodeH264RateControlFlagBitsKHR::eReferencePatternDyadic | + VideoEncodeH264RateControlFlagBitsKHR::eTemporalLayerPatternDyadic; }; - enum class VideoEncodeH265InputModeFlagBitsEXT : VkVideoEncodeH265InputModeFlagsEXT + //=== VK_KHR_video_encode_h265 === + + enum class VideoEncodeH265CapabilityFlagBitsKHR : VkVideoEncodeH265CapabilityFlagsKHR { - eFrame = VK_VIDEO_ENCODE_H265_INPUT_MODE_FRAME_BIT_EXT, - eSliceSegment = VK_VIDEO_ENCODE_H265_INPUT_MODE_SLICE_SEGMENT_BIT_EXT, - eNonVcl = VK_VIDEO_ENCODE_H265_INPUT_MODE_NON_VCL_BIT_EXT + eHrdCompliance = VK_VIDEO_ENCODE_H265_CAPABILITY_HRD_COMPLIANCE_BIT_KHR, + ePredictionWeightTableGenerated = VK_VIDEO_ENCODE_H265_CAPABILITY_PREDICTION_WEIGHT_TABLE_GENERATED_BIT_KHR, + eRowUnalignedSliceSegment = VK_VIDEO_ENCODE_H265_CAPABILITY_ROW_UNALIGNED_SLICE_SEGMENT_BIT_KHR, + eDifferentSliceSegmentType = 
VK_VIDEO_ENCODE_H265_CAPABILITY_DIFFERENT_SLICE_SEGMENT_TYPE_BIT_KHR, + eBFrameInL0List = VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L0_LIST_BIT_KHR, + eBFrameInL1List = VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_KHR, + ePerPictureTypeMinMaxQp = VK_VIDEO_ENCODE_H265_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR, + ePerSliceSegmentConstantQp = VK_VIDEO_ENCODE_H265_CAPABILITY_PER_SLICE_SEGMENT_CONSTANT_QP_BIT_KHR, + eMultipleTilesPerSliceSegment = VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILES_PER_SLICE_SEGMENT_BIT_KHR, + eMultipleSliceSegmentsPerTile = VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_SEGMENTS_PER_TILE_BIT_KHR, + eCuQpDiffWraparound = VK_VIDEO_ENCODE_H265_CAPABILITY_CU_QP_DIFF_WRAPAROUND_BIT_KHR }; - enum class VideoEncodeH265OutputModeFlagBitsEXT : VkVideoEncodeH265OutputModeFlagsEXT + using VideoEncodeH265CapabilityFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265CapabilityFlagsKHR allFlags = + VideoEncodeH265CapabilityFlagBitsKHR::eHrdCompliance | VideoEncodeH265CapabilityFlagBitsKHR::ePredictionWeightTableGenerated | + VideoEncodeH265CapabilityFlagBitsKHR::eRowUnalignedSliceSegment | VideoEncodeH265CapabilityFlagBitsKHR::eDifferentSliceSegmentType | + VideoEncodeH265CapabilityFlagBitsKHR::eBFrameInL0List | VideoEncodeH265CapabilityFlagBitsKHR::eBFrameInL1List | + VideoEncodeH265CapabilityFlagBitsKHR::ePerPictureTypeMinMaxQp | VideoEncodeH265CapabilityFlagBitsKHR::ePerSliceSegmentConstantQp | + VideoEncodeH265CapabilityFlagBitsKHR::eMultipleTilesPerSliceSegment | VideoEncodeH265CapabilityFlagBitsKHR::eMultipleSliceSegmentsPerTile | + VideoEncodeH265CapabilityFlagBitsKHR::eCuQpDiffWraparound; + }; + + enum class VideoEncodeH265StdFlagBitsKHR : VkVideoEncodeH265StdFlagsKHR + { + eSeparateColorPlaneFlagSet = VK_VIDEO_ENCODE_H265_STD_SEPARATE_COLOR_PLANE_FLAG_SET_BIT_KHR, + eSampleAdaptiveOffsetEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_SAMPLE_ADAPTIVE_OFFSET_ENABLED_FLAG_SET_BIT_KHR, + eScalingListDataPresentFlagSet = VK_VIDEO_ENCODE_H265_STD_SCALING_LIST_DATA_PRESENT_FLAG_SET_BIT_KHR, + ePcmEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_PCM_ENABLED_FLAG_SET_BIT_KHR, + eSpsTemporalMvpEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_SPS_TEMPORAL_MVP_ENABLED_FLAG_SET_BIT_KHR, + eInitQpMinus26 = VK_VIDEO_ENCODE_H265_STD_INIT_QP_MINUS26_BIT_KHR, + eWeightedPredFlagSet = VK_VIDEO_ENCODE_H265_STD_WEIGHTED_PRED_FLAG_SET_BIT_KHR, + eWeightedBipredFlagSet = VK_VIDEO_ENCODE_H265_STD_WEIGHTED_BIPRED_FLAG_SET_BIT_KHR, + eLog2ParallelMergeLevelMinus2 = VK_VIDEO_ENCODE_H265_STD_LOG2_PARALLEL_MERGE_LEVEL_MINUS2_BIT_KHR, + eSignDataHidingEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_SIGN_DATA_HIDING_ENABLED_FLAG_SET_BIT_KHR, + eTransformSkipEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_TRANSFORM_SKIP_ENABLED_FLAG_SET_BIT_KHR, + eTransformSkipEnabledFlagUnset = VK_VIDEO_ENCODE_H265_STD_TRANSFORM_SKIP_ENABLED_FLAG_UNSET_BIT_KHR, + ePpsSliceChromaQpOffsetsPresentFlagSet = VK_VIDEO_ENCODE_H265_STD_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_FLAG_SET_BIT_KHR, + eTransquantBypassEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_TRANSQUANT_BYPASS_ENABLED_FLAG_SET_BIT_KHR, + eConstrainedIntraPredFlagSet = VK_VIDEO_ENCODE_H265_STD_CONSTRAINED_INTRA_PRED_FLAG_SET_BIT_KHR, + eEntropyCodingSyncEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_ENTROPY_CODING_SYNC_ENABLED_FLAG_SET_BIT_KHR, + eDeblockingFilterOverrideEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_SET_BIT_KHR, + 
eDependentSliceSegmentsEnabledFlagSet = VK_VIDEO_ENCODE_H265_STD_DEPENDENT_SLICE_SEGMENTS_ENABLED_FLAG_SET_BIT_KHR, + eDependentSliceSegmentFlagSet = VK_VIDEO_ENCODE_H265_STD_DEPENDENT_SLICE_SEGMENT_FLAG_SET_BIT_KHR, + eSliceQpDelta = VK_VIDEO_ENCODE_H265_STD_SLICE_QP_DELTA_BIT_KHR, + eDifferentSliceQpDelta = VK_VIDEO_ENCODE_H265_STD_DIFFERENT_SLICE_QP_DELTA_BIT_KHR + }; + + using VideoEncodeH265StdFlagsKHR = Flags; + + template <> + struct FlagTraits { - eFrame = VK_VIDEO_ENCODE_H265_OUTPUT_MODE_FRAME_BIT_EXT, - eSliceSegment = VK_VIDEO_ENCODE_H265_OUTPUT_MODE_SLICE_SEGMENT_BIT_EXT, - eNonVcl = VK_VIDEO_ENCODE_H265_OUTPUT_MODE_NON_VCL_BIT_EXT + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265StdFlagsKHR allFlags = + VideoEncodeH265StdFlagBitsKHR::eSeparateColorPlaneFlagSet | VideoEncodeH265StdFlagBitsKHR::eSampleAdaptiveOffsetEnabledFlagSet | + VideoEncodeH265StdFlagBitsKHR::eScalingListDataPresentFlagSet | VideoEncodeH265StdFlagBitsKHR::ePcmEnabledFlagSet | + VideoEncodeH265StdFlagBitsKHR::eSpsTemporalMvpEnabledFlagSet | VideoEncodeH265StdFlagBitsKHR::eInitQpMinus26 | + VideoEncodeH265StdFlagBitsKHR::eWeightedPredFlagSet | VideoEncodeH265StdFlagBitsKHR::eWeightedBipredFlagSet | + VideoEncodeH265StdFlagBitsKHR::eLog2ParallelMergeLevelMinus2 | VideoEncodeH265StdFlagBitsKHR::eSignDataHidingEnabledFlagSet | + VideoEncodeH265StdFlagBitsKHR::eTransformSkipEnabledFlagSet | VideoEncodeH265StdFlagBitsKHR::eTransformSkipEnabledFlagUnset | + VideoEncodeH265StdFlagBitsKHR::ePpsSliceChromaQpOffsetsPresentFlagSet | VideoEncodeH265StdFlagBitsKHR::eTransquantBypassEnabledFlagSet | + VideoEncodeH265StdFlagBitsKHR::eConstrainedIntraPredFlagSet | VideoEncodeH265StdFlagBitsKHR::eEntropyCodingSyncEnabledFlagSet | + VideoEncodeH265StdFlagBitsKHR::eDeblockingFilterOverrideEnabledFlagSet | VideoEncodeH265StdFlagBitsKHR::eDependentSliceSegmentsEnabledFlagSet | + VideoEncodeH265StdFlagBitsKHR::eDependentSliceSegmentFlagSet | VideoEncodeH265StdFlagBitsKHR::eSliceQpDelta | + VideoEncodeH265StdFlagBitsKHR::eDifferentSliceQpDelta; }; - enum class VideoEncodeH265CtbSizeFlagBitsEXT : VkVideoEncodeH265CtbSizeFlagsEXT + enum class VideoEncodeH265CtbSizeFlagBitsKHR : VkVideoEncodeH265CtbSizeFlagsKHR { - e16 = VK_VIDEO_ENCODE_H265_CTB_SIZE_16_BIT_EXT, - e32 = VK_VIDEO_ENCODE_H265_CTB_SIZE_32_BIT_EXT, - e64 = VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_EXT + e16 = VK_VIDEO_ENCODE_H265_CTB_SIZE_16_BIT_KHR, + e32 = VK_VIDEO_ENCODE_H265_CTB_SIZE_32_BIT_KHR, + e64 = VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_KHR }; - enum class VideoEncodeH265TransformBlockSizeFlagBitsEXT : VkVideoEncodeH265TransformBlockSizeFlagsEXT + using VideoEncodeH265CtbSizeFlagsKHR = Flags; + + template <> + struct FlagTraits { - e4 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_4_BIT_EXT, - e8 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_8_BIT_EXT, - e16 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_16_BIT_EXT, - e32 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_32_BIT_EXT + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265CtbSizeFlagsKHR allFlags = + VideoEncodeH265CtbSizeFlagBitsKHR::e16 | VideoEncodeH265CtbSizeFlagBitsKHR::e32 | VideoEncodeH265CtbSizeFlagBitsKHR::e64; }; - enum class VideoEncodeH265RateControlStructureEXT + enum class VideoEncodeH265TransformBlockSizeFlagBitsKHR : VkVideoEncodeH265TransformBlockSizeFlagsKHR { - eUnknown = VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT, - eFlat = 
VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_FLAT_EXT, - eDyadic = VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_DYADIC_EXT + e4 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_4_BIT_KHR, + e8 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_8_BIT_KHR, + e16 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_16_BIT_KHR, + e32 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_32_BIT_KHR }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_EXT_video_decode_h264 === + using VideoEncodeH265TransformBlockSizeFlagsKHR = Flags; - enum class VideoDecodeH264PictureLayoutFlagBitsEXT : VkVideoDecodeH264PictureLayoutFlagsEXT + template <> + struct FlagTraits { - eProgressive = VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_PROGRESSIVE_EXT, - eInterlacedInterleavedLines = VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_INTERLEAVED_LINES_BIT_EXT, - eInterlacedSeparatePlanes = VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_SEPARATE_PLANES_BIT_EXT + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265TransformBlockSizeFlagsKHR allFlags = + VideoEncodeH265TransformBlockSizeFlagBitsKHR::e4 | VideoEncodeH265TransformBlockSizeFlagBitsKHR::e8 | VideoEncodeH265TransformBlockSizeFlagBitsKHR::e16 | + VideoEncodeH265TransformBlockSizeFlagBitsKHR::e32; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - //=== VK_AMD_shader_info === - enum class ShaderInfoTypeAMD + enum class VideoEncodeH265RateControlFlagBitsKHR : VkVideoEncodeH265RateControlFlagsKHR { - eStatistics = VK_SHADER_INFO_TYPE_STATISTICS_AMD, - eBinary = VK_SHADER_INFO_TYPE_BINARY_AMD, - eDisassembly = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD + eAttemptHrdCompliance = VK_VIDEO_ENCODE_H265_RATE_CONTROL_ATTEMPT_HRD_COMPLIANCE_BIT_KHR, + eRegularGop = VK_VIDEO_ENCODE_H265_RATE_CONTROL_REGULAR_GOP_BIT_KHR, + eReferencePatternFlat = VK_VIDEO_ENCODE_H265_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR, + eReferencePatternDyadic = VK_VIDEO_ENCODE_H265_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR, + eTemporalSubLayerPatternDyadic = VK_VIDEO_ENCODE_H265_RATE_CONTROL_TEMPORAL_SUB_LAYER_PATTERN_DYADIC_BIT_KHR }; -#if defined( VK_USE_PLATFORM_GGP ) - //=== VK_GGP_stream_descriptor_surface === + using VideoEncodeH265RateControlFlagsKHR = Flags; - enum class StreamDescriptorSurfaceCreateFlagBitsGGP : VkStreamDescriptorSurfaceCreateFlagsGGP + template <> + struct FlagTraits { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265RateControlFlagsKHR allFlags = + VideoEncodeH265RateControlFlagBitsKHR::eAttemptHrdCompliance | VideoEncodeH265RateControlFlagBitsKHR::eRegularGop | + VideoEncodeH265RateControlFlagBitsKHR::eReferencePatternFlat | VideoEncodeH265RateControlFlagBitsKHR::eReferencePatternDyadic | + VideoEncodeH265RateControlFlagBitsKHR::eTemporalSubLayerPatternDyadic; }; -#endif /*VK_USE_PLATFORM_GGP*/ - //=== VK_NV_external_memory_capabilities === + //=== VK_KHR_video_decode_h264 === - enum class ExternalMemoryHandleTypeFlagBitsNV : VkExternalMemoryHandleTypeFlagsNV + enum class VideoDecodeH264PictureLayoutFlagBitsKHR : VkVideoDecodeH264PictureLayoutFlagsKHR { - eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV, + eProgressive = VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_PROGRESSIVE_KHR, + eInterlacedInterleavedLines = VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_INTERLEAVED_LINES_BIT_KHR, + eInterlacedSeparatePlanes = VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_SEPARATE_PLANES_BIT_KHR + }; + + using VideoDecodeH264PictureLayoutFlagsKHR 
= Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoDecodeH264PictureLayoutFlagsKHR allFlags = VideoDecodeH264PictureLayoutFlagBitsKHR::eProgressive | + VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedInterleavedLines | + VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedSeparatePlanes; + }; + + //=== VK_AMD_shader_info === + + enum class ShaderInfoTypeAMD + { + eStatistics = VK_SHADER_INFO_TYPE_STATISTICS_AMD, + eBinary = VK_SHADER_INFO_TYPE_BINARY_AMD, + eDisassembly = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD + }; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + enum class StreamDescriptorSurfaceCreateFlagBitsGGP : VkStreamDescriptorSurfaceCreateFlagsGGP + { + }; + + using StreamDescriptorSurfaceCreateFlagsGGP = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StreamDescriptorSurfaceCreateFlagsGGP allFlags = {}; + }; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + + enum class ExternalMemoryHandleTypeFlagBitsNV : VkExternalMemoryHandleTypeFlagsNV + { + eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV, eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV, eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV, eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV }; + using ExternalMemoryHandleTypeFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalMemoryHandleTypeFlagsNV allFlags = + ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 | ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt | ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image | + ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt; + }; + enum class ExternalMemoryFeatureFlagBitsNV : VkExternalMemoryFeatureFlagsNV { eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV, @@ -3340,6 +5709,16 @@ namespace VULKAN_HPP_NAMESPACE eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV }; + using ExternalMemoryFeatureFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalMemoryFeatureFlagsNV allFlags = + ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly | ExternalMemoryFeatureFlagBitsNV::eExportable | ExternalMemoryFeatureFlagBitsNV::eImportable; + }; + //=== VK_EXT_validation_flags === enum class ValidationCheckEXT @@ -3354,25 +5733,16 @@ namespace VULKAN_HPP_NAMESPACE enum class ViSurfaceCreateFlagBitsNN : VkViSurfaceCreateFlagsNN { }; -#endif /*VK_USE_PLATFORM_VI_NN*/ - //=== VK_EXT_pipeline_robustness === - - enum class PipelineRobustnessBufferBehaviorEXT - { - eDeviceDefault = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT_EXT, - eDisabled = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED_EXT, - eRobustBufferAccess = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT, - eRobustBufferAccess2 = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT - }; + using ViSurfaceCreateFlagsNN = Flags; - enum class PipelineRobustnessImageBehaviorEXT + template <> + struct FlagTraits { - eDeviceDefault = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT_EXT, - eDisabled = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED_EXT, - eRobustImageAccess = 
VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_EXT, - eRobustImageAccess2 = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2_EXT + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ViSurfaceCreateFlagsNN allFlags = {}; }; +#endif /*VK_USE_PLATFORM_VI_NN*/ //=== VK_EXT_conditional_rendering === @@ -3381,6 +5751,15 @@ namespace VULKAN_HPP_NAMESPACE eInverted = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT }; + using ConditionalRenderingFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ConditionalRenderingFlagsEXT allFlags = ConditionalRenderingFlagBitsEXT::eInverted; + }; + //=== VK_EXT_display_surface_counter === enum class SurfaceCounterFlagBitsEXT : VkSurfaceCounterFlagsEXT @@ -3388,6 +5767,15 @@ namespace VULKAN_HPP_NAMESPACE eVblank = VK_SURFACE_COUNTER_VBLANK_BIT_EXT }; + using SurfaceCounterFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR SurfaceCounterFlagsEXT allFlags = SurfaceCounterFlagBitsEXT::eVblank; + }; + //=== VK_EXT_display_control === enum class DisplayPowerStateEXT @@ -3425,6 +5813,15 @@ namespace VULKAN_HPP_NAMESPACE { }; + using PipelineViewportSwizzleStateCreateFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineViewportSwizzleStateCreateFlagsNV allFlags = {}; + }; + //=== VK_EXT_discard_rectangles === enum class DiscardRectangleModeEXT @@ -3437,6 +5834,15 @@ namespace VULKAN_HPP_NAMESPACE { }; + using PipelineDiscardRectangleStateCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineDiscardRectangleStateCreateFlagsEXT allFlags = {}; + }; + //=== VK_EXT_conservative_rasterization === enum class ConservativeRasterizationModeEXT @@ -3450,12 +5856,30 @@ namespace VULKAN_HPP_NAMESPACE { }; + using PipelineRasterizationConservativeStateCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineRasterizationConservativeStateCreateFlagsEXT allFlags = {}; + }; + //=== VK_EXT_depth_clip_enable === enum class PipelineRasterizationDepthClipStateCreateFlagBitsEXT : VkPipelineRasterizationDepthClipStateCreateFlagsEXT { }; + using PipelineRasterizationDepthClipStateCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineRasterizationDepthClipStateCreateFlagsEXT allFlags = {}; + }; + //=== VK_KHR_performance_query === enum class PerformanceCounterDescriptionFlagBitsKHR : VkPerformanceCounterDescriptionFlagsKHR @@ -3464,14 +5888,24 @@ namespace VULKAN_HPP_NAMESPACE eConcurrentlyImpacted = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR }; + using PerformanceCounterDescriptionFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PerformanceCounterDescriptionFlagsKHR allFlags = + PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting | PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted; + }; + enum class 
PerformanceCounterScopeKHR { eCommandBuffer = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR, + eVkQueryScopeCommandBuffer = VK_QUERY_SCOPE_COMMAND_BUFFER_KHR, eRenderPass = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR, + eVkQueryScopeRenderPass = VK_QUERY_SCOPE_RENDER_PASS_KHR, eCommand = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR, - eVkQueryScopeCommandBuffer = VK_QUERY_SCOPE_COMMAND_BUFFER_KHR, - eVkQueryScopeCommand = VK_QUERY_SCOPE_COMMAND_KHR, - eVkQueryScopeRenderPass = VK_QUERY_SCOPE_RENDER_PASS_KHR + eVkQueryScopeCommand = VK_QUERY_SCOPE_COMMAND_KHR }; enum class PerformanceCounterStorageKHR @@ -3503,12 +5937,30 @@ namespace VULKAN_HPP_NAMESPACE { }; + using AcquireProfilingLockFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR AcquireProfilingLockFlagsKHR allFlags = {}; + }; + #if defined( VK_USE_PLATFORM_IOS_MVK ) //=== VK_MVK_ios_surface === enum class IOSSurfaceCreateFlagBitsMVK : VkIOSSurfaceCreateFlagsMVK { }; + + using IOSSurfaceCreateFlagsMVK = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR IOSSurfaceCreateFlagsMVK allFlags = {}; + }; #endif /*VK_USE_PLATFORM_IOS_MVK*/ #if defined( VK_USE_PLATFORM_MACOS_MVK ) @@ -3517,6 +5969,15 @@ namespace VULKAN_HPP_NAMESPACE enum class MacOSSurfaceCreateFlagBitsMVK : VkMacOSSurfaceCreateFlagsMVK { }; + + using MacOSSurfaceCreateFlagsMVK = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MacOSSurfaceCreateFlagsMVK allFlags = {}; + }; #endif /*VK_USE_PLATFORM_MACOS_MVK*/ //=== VK_EXT_debug_utils === @@ -3529,21 +5990,62 @@ namespace VULKAN_HPP_NAMESPACE eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT }; + using DebugUtilsMessageSeverityFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT allFlags = + DebugUtilsMessageSeverityFlagBitsEXT::eVerbose | DebugUtilsMessageSeverityFlagBitsEXT::eInfo | DebugUtilsMessageSeverityFlagBitsEXT::eWarning | + DebugUtilsMessageSeverityFlagBitsEXT::eError; + }; + enum class DebugUtilsMessageTypeFlagBitsEXT : VkDebugUtilsMessageTypeFlagsEXT { - eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT, - eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT, - ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT + eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT, + eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT, + ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT, + eDeviceAddressBinding = VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT + }; + + using DebugUtilsMessageTypeFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DebugUtilsMessageTypeFlagsEXT allFlags = + DebugUtilsMessageTypeFlagBitsEXT::eGeneral | DebugUtilsMessageTypeFlagBitsEXT::eValidation | DebugUtilsMessageTypeFlagBitsEXT::ePerformance | + DebugUtilsMessageTypeFlagBitsEXT::eDeviceAddressBinding; }; enum class DebugUtilsMessengerCallbackDataFlagBitsEXT : VkDebugUtilsMessengerCallbackDataFlagsEXT { }; + using DebugUtilsMessengerCallbackDataFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR 
bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DebugUtilsMessengerCallbackDataFlagsEXT allFlags = {}; + }; + enum class DebugUtilsMessengerCreateFlagBitsEXT : VkDebugUtilsMessengerCreateFlagsEXT { }; + using DebugUtilsMessengerCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DebugUtilsMessengerCreateFlagsEXT allFlags = {}; + }; + //=== VK_EXT_blend_operation_advanced === enum class BlendOverlapEXT @@ -3559,6 +6061,15 @@ namespace VULKAN_HPP_NAMESPACE { }; + using PipelineCoverageToColorStateCreateFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCoverageToColorStateCreateFlagsNV allFlags = {}; + }; + //=== VK_KHR_acceleration_structure === enum class AccelerationStructureTypeKHR @@ -3583,29 +6094,79 @@ namespace VULKAN_HPP_NAMESPACE }; using GeometryFlagBitsNV = GeometryFlagBitsKHR; + using GeometryFlagsKHR = Flags; + using GeometryFlagsNV = GeometryFlagsKHR; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR GeometryFlagsKHR allFlags = GeometryFlagBitsKHR::eOpaque | GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation; + }; + enum class GeometryInstanceFlagBitsKHR : VkGeometryInstanceFlagsKHR { - eTriangleFacingCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR, - eTriangleFlipFacing = VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR, - eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR, - eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR, - eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV, - eTriangleFrontCounterclockwiseKHR = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR, - eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV + eTriangleFacingCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR, + eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV, + eTriangleFlipFacing = VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR, + eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR, + eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR, + eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR, + eForceOpacityMicromap2StateEXT = VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT, + eDisableOpacityMicromapsEXT = VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT }; using GeometryInstanceFlagBitsNV = GeometryInstanceFlagBitsKHR; + using GeometryInstanceFlagsKHR = Flags; + using GeometryInstanceFlagsNV = GeometryInstanceFlagsKHR; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR GeometryInstanceFlagsKHR allFlags = + GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable | GeometryInstanceFlagBitsKHR::eTriangleFlipFacing | GeometryInstanceFlagBitsKHR::eForceOpaque | + GeometryInstanceFlagBitsKHR::eForceNoOpaque | GeometryInstanceFlagBitsKHR::eForceOpacityMicromap2StateEXT | + GeometryInstanceFlagBitsKHR::eDisableOpacityMicromapsEXT; + }; + enum class BuildAccelerationStructureFlagBitsKHR : VkBuildAccelerationStructureFlagsKHR { - eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR, - eAllowCompaction = 
VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR, - ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR, - ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR, - eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR, - eMotionNV = VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV + eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR, + eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR, + ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR, + ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR, + eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR, + eMotionNV = VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV, + eAllowOpacityMicromapUpdateEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT, + eAllowDisableOpacityMicromapsEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT, + eAllowOpacityMicromapDataUpdateEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eAllowDisplacementMicromapUpdateNV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISPLACEMENT_MICROMAP_UPDATE_NV, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eAllowDataAccess = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DATA_ACCESS_KHR }; using BuildAccelerationStructureFlagBitsNV = BuildAccelerationStructureFlagBitsKHR; + using BuildAccelerationStructureFlagsKHR = Flags; + using BuildAccelerationStructureFlagsNV = BuildAccelerationStructureFlagsKHR; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR BuildAccelerationStructureFlagsKHR allFlags = + BuildAccelerationStructureFlagBitsKHR::eAllowUpdate | BuildAccelerationStructureFlagBitsKHR::eAllowCompaction | + BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace | BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild | + BuildAccelerationStructureFlagBitsKHR::eLowMemory | BuildAccelerationStructureFlagBitsKHR::eMotionNV | + BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapUpdateEXT | BuildAccelerationStructureFlagBitsKHR::eAllowDisableOpacityMicromapsEXT | + BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapDataUpdateEXT +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | BuildAccelerationStructureFlagBitsKHR::eAllowDisplacementMicromapUpdateNV +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | BuildAccelerationStructureFlagBitsKHR::eAllowDataAccess; + }; + enum class CopyAccelerationStructureModeKHR { eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR, @@ -3617,9 +6178,11 @@ namespace VULKAN_HPP_NAMESPACE enum class GeometryTypeKHR { - eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_KHR, - eAabbs = VK_GEOMETRY_TYPE_AABBS_KHR, - eInstances = VK_GEOMETRY_TYPE_INSTANCES_KHR + eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_KHR, + eAabbs = VK_GEOMETRY_TYPE_AABBS_KHR, + eInstances = VK_GEOMETRY_TYPE_INSTANCES_KHR, + eSpheresNV = VK_GEOMETRY_TYPE_SPHERES_NV, + eLinearSweptSpheresNV = VK_GEOMETRY_TYPE_LINEAR_SWEPT_SPHERES_NV }; using GeometryTypeNV = GeometryTypeKHR; @@ -3631,8 +6194,20 @@ namespace VULKAN_HPP_NAMESPACE enum class AccelerationStructureCreateFlagBitsKHR : VkAccelerationStructureCreateFlagsKHR { - eDeviceAddressCaptureReplay = VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR, - eMotionNV = VK_ACCELERATION_STRUCTURE_CREATE_MOTION_BIT_NV + eDeviceAddressCaptureReplay = 
VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR, + eDescriptorBufferCaptureReplayEXT = VK_ACCELERATION_STRUCTURE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT, + eMotionNV = VK_ACCELERATION_STRUCTURE_CREATE_MOTION_BIT_NV + }; + + using AccelerationStructureCreateFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR AccelerationStructureCreateFlagsKHR allFlags = + AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay | AccelerationStructureCreateFlagBitsKHR::eDescriptorBufferCaptureReplayEXT | + AccelerationStructureCreateFlagBitsKHR::eMotionNV; }; enum class BuildAccelerationStructureModeKHR @@ -3641,6 +6216,24 @@ namespace VULKAN_HPP_NAMESPACE eUpdate = VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR }; + //=== VK_KHR_ray_tracing_pipeline === + + enum class RayTracingShaderGroupTypeKHR + { + eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR, + eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR, + eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR + }; + using RayTracingShaderGroupTypeNV = RayTracingShaderGroupTypeKHR; + + enum class ShaderGroupShaderKHR + { + eGeneral = VK_SHADER_GROUP_SHADER_GENERAL_KHR, + eClosestHit = VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR, + eAnyHit = VK_SHADER_GROUP_SHADER_ANY_HIT_KHR, + eIntersection = VK_SHADER_GROUP_SHADER_INTERSECTION_KHR + }; + //=== VK_NV_framebuffer_mixed_samples === enum class CoverageModulationModeNV @@ -3655,6 +6248,15 @@ namespace VULKAN_HPP_NAMESPACE { }; + using PipelineCoverageModulationStateCreateFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCoverageModulationStateCreateFlagsNV allFlags = {}; + }; + //=== VK_EXT_validation_cache === enum class ValidationCacheHeaderVersionEXT @@ -3666,6 +6268,15 @@ namespace VULKAN_HPP_NAMESPACE { }; + using ValidationCacheCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ValidationCacheCreateFlagsEXT allFlags = {}; + }; + //=== VK_NV_shading_rate_image === enum class ShadingRatePaletteEntryNV @@ -3707,26 +6318,14 @@ namespace VULKAN_HPP_NAMESPACE { }; - //=== VK_EXT_calibrated_timestamps === - - enum class TimeDomainEXT - { - eDevice = VK_TIME_DOMAIN_DEVICE_EXT, - eClockMonotonic = VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT, - eClockMonotonicRaw = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT, - eQueryPerformanceCounter = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT - }; - - //=== VK_KHR_global_priority === + using PipelineCompilerControlFlagsAMD = Flags; - enum class QueueGlobalPriorityKHR + template <> + struct FlagTraits { - eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR, - eMedium = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR, - eHigh = VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR, - eRealtime = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCompilerControlFlagsAMD allFlags = {}; }; - using QueueGlobalPriorityEXT = QueueGlobalPriorityKHR; //=== VK_AMD_memory_overallocation_behavior === @@ -3776,6 +6375,15 @@ namespace VULKAN_HPP_NAMESPACE enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA : VkImagePipeSurfaceCreateFlagsFUCHSIA { }; + + using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags; + + template <> + struct 
FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ImagePipeSurfaceCreateFlagsFUCHSIA allFlags = {}; + }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) @@ -3784,6 +6392,15 @@ namespace VULKAN_HPP_NAMESPACE enum class MetalSurfaceCreateFlagBitsEXT : VkMetalSurfaceCreateFlagsEXT { }; + + using MetalSurfaceCreateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MetalSurfaceCreateFlagsEXT allFlags = {}; + }; #endif /*VK_USE_PLATFORM_METAL_EXT*/ //=== VK_KHR_fragment_shading_rate === @@ -3803,6 +6420,15 @@ namespace VULKAN_HPP_NAMESPACE { }; + using ShaderCorePropertiesFlagsAMD = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ShaderCorePropertiesFlagsAMD allFlags = {}; + }; + //=== VK_EXT_validation_features === enum class ValidationFeatureEnableEXT @@ -3826,31 +6452,6 @@ namespace VULKAN_HPP_NAMESPACE eShaderValidationCache = VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT }; - //=== VK_NV_cooperative_matrix === - - enum class ScopeNV - { - eDevice = VK_SCOPE_DEVICE_NV, - eWorkgroup = VK_SCOPE_WORKGROUP_NV, - eSubgroup = VK_SCOPE_SUBGROUP_NV, - eQueueFamily = VK_SCOPE_QUEUE_FAMILY_NV - }; - - enum class ComponentTypeNV - { - eFloat16 = VK_COMPONENT_TYPE_FLOAT16_NV, - eFloat32 = VK_COMPONENT_TYPE_FLOAT32_NV, - eFloat64 = VK_COMPONENT_TYPE_FLOAT64_NV, - eSint8 = VK_COMPONENT_TYPE_SINT8_NV, - eSint16 = VK_COMPONENT_TYPE_SINT16_NV, - eSint32 = VK_COMPONENT_TYPE_SINT32_NV, - eSint64 = VK_COMPONENT_TYPE_SINT64_NV, - eUint8 = VK_COMPONENT_TYPE_UINT8_NV, - eUint16 = VK_COMPONENT_TYPE_UINT16_NV, - eUint32 = VK_COMPONENT_TYPE_UINT32_NV, - eUint64 = VK_COMPONENT_TYPE_UINT64_NV - }; - //=== VK_NV_coverage_reduction_mode === enum class CoverageReductionModeNV @@ -3863,6 +6464,15 @@ namespace VULKAN_HPP_NAMESPACE { }; + using PipelineCoverageReductionStateCreateFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCoverageReductionStateCreateFlagsNV allFlags = {}; + }; + //=== VK_EXT_provoking_vertex === enum class ProvokingVertexModeEXT @@ -3889,14 +6499,13 @@ namespace VULKAN_HPP_NAMESPACE { }; - //=== VK_EXT_line_rasterization === + using HeadlessSurfaceCreateFlagsEXT = Flags; - enum class LineRasterizationModeEXT + template <> + struct FlagTraits { - eDefault = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT, - eRectangular = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT, - eBresenham = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT, - eRectangularSmooth = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR HeadlessSurfaceCreateFlagsEXT allFlags = {}; }; //=== VK_KHR_pipeline_executable_properties === @@ -3909,13 +6518,58 @@ namespace VULKAN_HPP_NAMESPACE eFloat64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR }; - //=== VK_NV_device_generated_commands === + //=== VK_EXT_surface_maintenance1 === - enum class IndirectStateFlagBitsNV : VkIndirectStateFlagsNV + enum class PresentScalingFlagBitsEXT : VkPresentScalingFlagsEXT + { + eOneToOne = VK_PRESENT_SCALING_ONE_TO_ONE_BIT_EXT, + eAspectRatioStretch = VK_PRESENT_SCALING_ASPECT_RATIO_STRETCH_BIT_EXT, + eStretch = VK_PRESENT_SCALING_STRETCH_BIT_EXT + 
}; + + using PresentScalingFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PresentScalingFlagsEXT allFlags = + PresentScalingFlagBitsEXT::eOneToOne | PresentScalingFlagBitsEXT::eAspectRatioStretch | PresentScalingFlagBitsEXT::eStretch; + }; + + enum class PresentGravityFlagBitsEXT : VkPresentGravityFlagsEXT + { + eMin = VK_PRESENT_GRAVITY_MIN_BIT_EXT, + eMax = VK_PRESENT_GRAVITY_MAX_BIT_EXT, + eCentered = VK_PRESENT_GRAVITY_CENTERED_BIT_EXT + }; + + using PresentGravityFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PresentGravityFlagsEXT allFlags = + PresentGravityFlagBitsEXT::eMin | PresentGravityFlagBitsEXT::eMax | PresentGravityFlagBitsEXT::eCentered; + }; + + //=== VK_NV_device_generated_commands === + + enum class IndirectStateFlagBitsNV : VkIndirectStateFlagsNV { eFlagFrontface = VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV }; + using IndirectStateFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR IndirectStateFlagsNV allFlags = IndirectStateFlagBitsNV::eFlagFrontface; + }; + enum class IndirectCommandsTokenTypeNV { eShaderGroup = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV, @@ -3926,7 +6580,9 @@ namespace VULKAN_HPP_NAMESPACE eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV, eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV, eDrawTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV, - eDrawMeshTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV + eDrawMeshTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV, + ePipeline = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NV, + eDispatch = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NV }; enum class IndirectCommandsLayoutUsageFlagBitsNV : VkIndirectCommandsLayoutUsageFlagsNV @@ -3936,6 +6592,26 @@ namespace VULKAN_HPP_NAMESPACE eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV }; + using IndirectCommandsLayoutUsageFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV allFlags = IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess | + IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences | + IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences; + }; + + //=== VK_EXT_depth_bias_control === + + enum class DepthBiasRepresentationEXT + { + eLeastRepresentableValueFormat = VK_DEPTH_BIAS_REPRESENTATION_LEAST_REPRESENTABLE_VALUE_FORMAT_EXT, + eLeastRepresentableValueForceUnorm = VK_DEPTH_BIAS_REPRESENTATION_LEAST_REPRESENTABLE_VALUE_FORCE_UNORM_EXT, + eFloat = VK_DEPTH_BIAS_REPRESENTATION_FLOAT_EXT + }; + //=== VK_EXT_device_memory_report === enum class DeviceMemoryReportEventTypeEXT @@ -3951,20 +6627,52 @@ namespace VULKAN_HPP_NAMESPACE { }; - //=== VK_EXT_pipeline_creation_cache_control === + using DeviceMemoryReportFlagsEXT = Flags; - enum class PipelineCacheCreateFlagBits : VkPipelineCacheCreateFlags + template <> + struct FlagTraits { - eExternallySynchronized = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT, - eExternallySynchronizedEXT = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
DeviceMemoryReportFlagsEXT allFlags = {}; }; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_encode_queue === enum class VideoEncodeCapabilityFlagBitsKHR : VkVideoEncodeCapabilityFlagsKHR { - ePrecedingExternallyEncodedBytes = VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR + ePrecedingExternallyEncodedBytes = VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR, + eInsufficientBitstreamBufferRangeDetection = VK_VIDEO_ENCODE_CAPABILITY_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_DETECTION_BIT_KHR, + eQuantizationDeltaMap = VK_VIDEO_ENCODE_CAPABILITY_QUANTIZATION_DELTA_MAP_BIT_KHR, + eEmphasisMap = VK_VIDEO_ENCODE_CAPABILITY_EMPHASIS_MAP_BIT_KHR + }; + + using VideoEncodeCapabilityFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeCapabilityFlagsKHR allFlags = + VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes | VideoEncodeCapabilityFlagBitsKHR::eInsufficientBitstreamBufferRangeDetection | + VideoEncodeCapabilityFlagBitsKHR::eQuantizationDeltaMap | VideoEncodeCapabilityFlagBitsKHR::eEmphasisMap; + }; + + enum class VideoEncodeFeedbackFlagBitsKHR : VkVideoEncodeFeedbackFlagsKHR + { + eBitstreamBufferOffset = VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BUFFER_OFFSET_BIT_KHR, + eBitstreamBytesWritten = VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BYTES_WRITTEN_BIT_KHR, + eBitstreamHasOverrides = VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_HAS_OVERRIDES_BIT_KHR + }; + + using VideoEncodeFeedbackFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeFeedbackFlagsKHR allFlags = VideoEncodeFeedbackFlagBitsKHR::eBitstreamBufferOffset | + VideoEncodeFeedbackFlagBitsKHR::eBitstreamBytesWritten | + VideoEncodeFeedbackFlagBitsKHR::eBitstreamHasOverrides; }; enum class VideoEncodeUsageFlagBitsKHR : VkVideoEncodeUsageFlagsKHR @@ -3976,6 +6684,17 @@ namespace VULKAN_HPP_NAMESPACE eConferencing = VK_VIDEO_ENCODE_USAGE_CONFERENCING_BIT_KHR }; + using VideoEncodeUsageFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeUsageFlagsKHR allFlags = VideoEncodeUsageFlagBitsKHR::eDefault | VideoEncodeUsageFlagBitsKHR::eTranscoding | + VideoEncodeUsageFlagBitsKHR::eStreaming | VideoEncodeUsageFlagBitsKHR::eRecording | + VideoEncodeUsageFlagBitsKHR::eConferencing; + }; + enum class VideoEncodeContentFlagBitsKHR : VkVideoEncodeContentFlagsKHR { eDefault = VK_VIDEO_ENCODE_CONTENT_DEFAULT_KHR, @@ -3984,6 +6703,17 @@ namespace VULKAN_HPP_NAMESPACE eRendered = VK_VIDEO_ENCODE_CONTENT_RENDERED_BIT_KHR }; + using VideoEncodeContentFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeContentFlagsKHR allFlags = + VideoEncodeContentFlagBitsKHR::eDefault | VideoEncodeContentFlagBitsKHR::eCamera | VideoEncodeContentFlagBitsKHR::eDesktop | + VideoEncodeContentFlagBitsKHR::eRendered; + }; + enum class VideoEncodeTuningModeKHR { eDefault = VK_VIDEO_ENCODE_TUNING_MODE_DEFAULT_KHR, @@ -3995,19 +6725,51 @@ namespace VULKAN_HPP_NAMESPACE enum class VideoEncodeRateControlModeFlagBitsKHR : VkVideoEncodeRateControlModeFlagsKHR { - eNone = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_NONE_BIT_KHR, - eCbr = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR, - 
eVbr = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR + eDefault = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DEFAULT_KHR, + eDisabled = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DISABLED_BIT_KHR, + eCbr = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR, + eVbr = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR + }; + + using VideoEncodeRateControlModeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeRateControlModeFlagsKHR allFlags = + VideoEncodeRateControlModeFlagBitsKHR::eDefault | VideoEncodeRateControlModeFlagBitsKHR::eDisabled | VideoEncodeRateControlModeFlagBitsKHR::eCbr | + VideoEncodeRateControlModeFlagBitsKHR::eVbr; }; enum class VideoEncodeFlagBitsKHR : VkVideoEncodeFlagsKHR { + eWithQuantizationDeltaMap = VK_VIDEO_ENCODE_WITH_QUANTIZATION_DELTA_MAP_BIT_KHR, + eWithEmphasisMap = VK_VIDEO_ENCODE_WITH_EMPHASIS_MAP_BIT_KHR + }; + + using VideoEncodeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeFlagsKHR allFlags = + VideoEncodeFlagBitsKHR::eWithQuantizationDeltaMap | VideoEncodeFlagBitsKHR::eWithEmphasisMap; }; enum class VideoEncodeRateControlFlagBitsKHR : VkVideoEncodeRateControlFlagsKHR { }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + using VideoEncodeRateControlFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeRateControlFlagsKHR allFlags = {}; + }; //=== VK_NV_device_diagnostics_config === @@ -4019,6 +6781,17 @@ namespace VULKAN_HPP_NAMESPACE eEnableShaderErrorReporting = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_ERROR_REPORTING_BIT_NV }; + using DeviceDiagnosticsConfigFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceDiagnosticsConfigFlagsNV allFlags = + DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo | DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking | + DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints | DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderErrorReporting; + }; + #if defined( VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_objects === @@ -4031,6 +6804,17 @@ namespace VULKAN_HPP_NAMESPACE eMetalIosurface = VK_EXPORT_METAL_OBJECT_TYPE_METAL_IOSURFACE_BIT_EXT, eMetalSharedEvent = VK_EXPORT_METAL_OBJECT_TYPE_METAL_SHARED_EVENT_BIT_EXT }; + + using ExportMetalObjectTypeFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ExportMetalObjectTypeFlagsEXT allFlags = + ExportMetalObjectTypeFlagBitsEXT::eMetalDevice | ExportMetalObjectTypeFlagBitsEXT::eMetalCommandQueue | ExportMetalObjectTypeFlagBitsEXT::eMetalBuffer | + ExportMetalObjectTypeFlagBitsEXT::eMetalTexture | ExportMetalObjectTypeFlagBitsEXT::eMetalIosurface | ExportMetalObjectTypeFlagBitsEXT::eMetalSharedEvent; + }; #endif /*VK_USE_PLATFORM_METAL_EXT*/ //=== VK_EXT_graphics_pipeline_library === @@ -4043,9 +6827,15 @@ namespace VULKAN_HPP_NAMESPACE eFragmentOutputInterface = VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT }; - enum class PipelineLayoutCreateFlagBits : VkPipelineLayoutCreateFlags + using GraphicsPipelineLibraryFlagsEXT = Flags; + + template <> + struct FlagTraits { - eIndependentSetsEXT = 
VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR GraphicsPipelineLibraryFlagsEXT allFlags = + GraphicsPipelineLibraryFlagBitsEXT::eVertexInputInterface | GraphicsPipelineLibraryFlagBitsEXT::ePreRasterizationShaders | + GraphicsPipelineLibraryFlagBitsEXT::eFragmentShader | GraphicsPipelineLibraryFlagBitsEXT::eFragmentOutputInterface; }; //=== VK_NV_fragment_shading_rate_enums === @@ -4085,10 +6875,28 @@ namespace VULKAN_HPP_NAMESPACE { }; + using AccelerationStructureMotionInfoFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR AccelerationStructureMotionInfoFlagsNV allFlags = {}; + }; + enum class AccelerationStructureMotionInstanceFlagBitsNV : VkAccelerationStructureMotionInstanceFlagsNV { }; + using AccelerationStructureMotionInstanceFlagsNV = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR AccelerationStructureMotionInstanceFlagsNV allFlags = {}; + }; + //=== VK_EXT_image_compression_control === enum class ImageCompressionFlagBitsEXT : VkImageCompressionFlagsEXT @@ -4099,6 +6907,17 @@ namespace VULKAN_HPP_NAMESPACE eDisabled = VK_IMAGE_COMPRESSION_DISABLED_EXT }; + using ImageCompressionFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ImageCompressionFlagsEXT allFlags = + ImageCompressionFlagBitsEXT::eDefault | ImageCompressionFlagBitsEXT::eFixedRateDefault | ImageCompressionFlagBitsEXT::eFixedRateExplicit | + ImageCompressionFlagBitsEXT::eDisabled; + }; + enum class ImageCompressionFixedRateFlagBitsEXT : VkImageCompressionFixedRateFlagsEXT { eNone = VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT, @@ -4128,30 +6947,79 @@ namespace VULKAN_HPP_NAMESPACE e24Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_24BPC_BIT_EXT }; + using ImageCompressionFixedRateFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ImageCompressionFixedRateFlagsEXT allFlags = + ImageCompressionFixedRateFlagBitsEXT::eNone | ImageCompressionFixedRateFlagBitsEXT::e1Bpc | ImageCompressionFixedRateFlagBitsEXT::e2Bpc | + ImageCompressionFixedRateFlagBitsEXT::e3Bpc | ImageCompressionFixedRateFlagBitsEXT::e4Bpc | ImageCompressionFixedRateFlagBitsEXT::e5Bpc | + ImageCompressionFixedRateFlagBitsEXT::e6Bpc | ImageCompressionFixedRateFlagBitsEXT::e7Bpc | ImageCompressionFixedRateFlagBitsEXT::e8Bpc | + ImageCompressionFixedRateFlagBitsEXT::e9Bpc | ImageCompressionFixedRateFlagBitsEXT::e10Bpc | ImageCompressionFixedRateFlagBitsEXT::e11Bpc | + ImageCompressionFixedRateFlagBitsEXT::e12Bpc | ImageCompressionFixedRateFlagBitsEXT::e13Bpc | ImageCompressionFixedRateFlagBitsEXT::e14Bpc | + ImageCompressionFixedRateFlagBitsEXT::e15Bpc | ImageCompressionFixedRateFlagBitsEXT::e16Bpc | ImageCompressionFixedRateFlagBitsEXT::e17Bpc | + ImageCompressionFixedRateFlagBitsEXT::e18Bpc | ImageCompressionFixedRateFlagBitsEXT::e19Bpc | ImageCompressionFixedRateFlagBitsEXT::e20Bpc | + ImageCompressionFixedRateFlagBitsEXT::e21Bpc | ImageCompressionFixedRateFlagBitsEXT::e22Bpc | ImageCompressionFixedRateFlagBitsEXT::e23Bpc | + ImageCompressionFixedRateFlagBitsEXT::e24Bpc; + }; + + //=== VK_EXT_device_fault === + + enum class DeviceFaultAddressTypeEXT + { 
+    eNone                      = VK_DEVICE_FAULT_ADDRESS_TYPE_NONE_EXT,
+    eReadInvalid               = VK_DEVICE_FAULT_ADDRESS_TYPE_READ_INVALID_EXT,
+    eWriteInvalid              = VK_DEVICE_FAULT_ADDRESS_TYPE_WRITE_INVALID_EXT,
+    eExecuteInvalid            = VK_DEVICE_FAULT_ADDRESS_TYPE_EXECUTE_INVALID_EXT,
+    eInstructionPointerUnknown = VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_UNKNOWN_EXT,
+    eInstructionPointerInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_INVALID_EXT,
+    eInstructionPointerFault   = VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_FAULT_EXT
+  };
+
+  enum class DeviceFaultVendorBinaryHeaderVersionEXT
+  {
+    eOne = VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_ONE_EXT
+  };
+
 #if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
   //=== VK_EXT_directfb_surface ===

   enum class DirectFBSurfaceCreateFlagBitsEXT : VkDirectFBSurfaceCreateFlagsEXT
   {
   };
+
+  using DirectFBSurfaceCreateFlagsEXT = Flags<DirectFBSurfaceCreateFlagBitsEXT>;
+
+  template <>
+  struct FlagTraits<DirectFBSurfaceCreateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool                          isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DirectFBSurfaceCreateFlagsEXT allFlags  = {};
+  };
 #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/

-  //=== VK_KHR_ray_tracing_pipeline ===
+  //=== VK_EXT_device_address_binding_report ===

-  enum class RayTracingShaderGroupTypeKHR
+  enum class DeviceAddressBindingFlagBitsEXT : VkDeviceAddressBindingFlagsEXT
   {
-    eGeneral            = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR,
-    eTrianglesHitGroup  = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR,
-    eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR
+    eInternalObject = VK_DEVICE_ADDRESS_BINDING_INTERNAL_OBJECT_BIT_EXT
   };
-  using RayTracingShaderGroupTypeNV = RayTracingShaderGroupTypeKHR;

-  enum class ShaderGroupShaderKHR
+  using DeviceAddressBindingFlagsEXT = Flags<DeviceAddressBindingFlagBitsEXT>;
+
+  template <>
+  struct FlagTraits<DeviceAddressBindingFlagBitsEXT>
   {
-    eGeneral      = VK_SHADER_GROUP_SHADER_GENERAL_KHR,
-    eClosestHit   = VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR,
-    eAnyHit       = VK_SHADER_GROUP_SHADER_ANY_HIT_KHR,
-    eIntersection = VK_SHADER_GROUP_SHADER_INTERSECTION_KHR
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool                         isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceAddressBindingFlagsEXT allFlags  = DeviceAddressBindingFlagBitsEXT::eInternalObject;
+  };
+
+  enum class DeviceAddressBindingTypeEXT
+  {
+    eBind   = VK_DEVICE_ADDRESS_BINDING_TYPE_BIND_EXT,
+    eUnbind = VK_DEVICE_ADDRESS_BINDING_TYPE_UNBIND_EXT
   };

 #if defined( VK_USE_PLATFORM_FUCHSIA )
@@ -4166,4378 +7034,1032 @@ namespace VULKAN_HPP_NAMESPACE
     eProtectedOptional = VK_IMAGE_CONSTRAINTS_INFO_PROTECTED_OPTIONAL_FUCHSIA
   };

+  using ImageConstraintsInfoFlagsFUCHSIA = Flags<ImageConstraintsInfoFlagBitsFUCHSIA>;
+
+  template <>
+  struct FlagTraits<ImageConstraintsInfoFlagBitsFUCHSIA>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool                             isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImageConstraintsInfoFlagsFUCHSIA allFlags  =
+      ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadRarely | ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadOften |
+      ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteRarely | ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteOften |
+      ImageConstraintsInfoFlagBitsFUCHSIA::eProtectedOptional;
+  };
+
   enum class ImageFormatConstraintsFlagBitsFUCHSIA : VkImageFormatConstraintsFlagsFUCHSIA
   {
   };
+
+  using ImageFormatConstraintsFlagsFUCHSIA = Flags<ImageFormatConstraintsFlagBitsFUCHSIA>;
+
+  template <>
+  struct FlagTraits<ImageFormatConstraintsFlagBitsFUCHSIA>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool                               isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImageFormatConstraintsFlagsFUCHSIA allFlags  = {};
+  };
 #endif /*VK_USE_PLATFORM_FUCHSIA*/

+  //=== VK_EXT_frame_boundary ===
+
+  enum class FrameBoundaryFlagBitsEXT : VkFrameBoundaryFlagsEXT
+  {
+    eFrameEnd = VK_FRAME_BOUNDARY_FRAME_END_BIT_EXT
+  };
+
+  using FrameBoundaryFlagsEXT = Flags<FrameBoundaryFlagBitsEXT>;
+
+  template <>
+  struct FlagTraits<FrameBoundaryFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool                  isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR FrameBoundaryFlagsEXT allFlags  = FrameBoundaryFlagBitsEXT::eFrameEnd;
+  };
+
 #if defined( VK_USE_PLATFORM_SCREEN_QNX )
   //=== VK_QNX_screen_surface ===

   enum class ScreenSurfaceCreateFlagBitsQNX : VkScreenSurfaceCreateFlagsQNX
   {
   };
-#endif /*VK_USE_PLATFORM_SCREEN_QNX*/

-  //=== VK_EXT_subpass_merge_feedback ===
+  using ScreenSurfaceCreateFlagsQNX = Flags<ScreenSurfaceCreateFlagBitsQNX>;

-  enum class SubpassMergeStatusEXT
+  template <>
+  struct FlagTraits<ScreenSurfaceCreateFlagBitsQNX>
   {
-    eMerged                               = VK_SUBPASS_MERGE_STATUS_MERGED_EXT,
-    eDisallowed                           = VK_SUBPASS_MERGE_STATUS_DISALLOWED_EXT,
-    eNotMergedSideEffects                 = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SIDE_EFFECTS_EXT,
-    eNotMergedSamplesMismatch             = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SAMPLES_MISMATCH_EXT,
-    eNotMergedViewsMismatch               = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_VIEWS_MISMATCH_EXT,
-    eNotMergedAliasing                    = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_ALIASING_EXT,
-    eNotMergedDependencies                = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPENDENCIES_EXT,
-    eNotMergedIncompatibleInputAttachment = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT_EXT,
-    eNotMergedTooManyAttachments          = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_TOO_MANY_ATTACHMENTS_EXT,
-    eNotMergedInsufficientStorage         = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INSUFFICIENT_STORAGE_EXT,
-    eNotMergedDepthStencilCount           = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPTH_STENCIL_COUNT_EXT,
-    eNotMergedResolveAttachmentReuse      = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_RESOLVE_ATTACHMENT_REUSE_EXT,
-    eNotMergedSingleSubpass               = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SINGLE_SUBPASS_EXT,
-    eNotMergedUnspecified                 = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_UNSPECIFIED_EXT
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool                        isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ScreenSurfaceCreateFlagsQNX allFlags  = {};
   };
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/

-  //=== VK_EXT_rasterization_order_attachment_access ===
+  //=== VK_EXT_opacity_micromap ===

-  enum class PipelineColorBlendStateCreateFlagBits : VkPipelineColorBlendStateCreateFlags
+  enum class MicromapTypeEXT
   {
-    eRasterizationOrderAttachmentAccessEXT = VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT,
-    eRasterizationOrderAttachmentAccessARM = VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_ARM
+    eOpacityMicromap = VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eDisplacementMicromapNV = VK_MICROMAP_TYPE_DISPLACEMENT_MICROMAP_NV
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
   };

-  enum class PipelineDepthStencilStateCreateFlagBits : VkPipelineDepthStencilStateCreateFlags
+  enum class BuildMicromapFlagBitsEXT : VkBuildMicromapFlagsEXT
   {
-    eRasterizationOrderAttachmentDepthAccessEXT   = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT,
-    eRasterizationOrderAttachmentStencilAccessEXT = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT,
-    eRasterizationOrderAttachmentDepthAccessARM   = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM,
-    eRasterizationOrderAttachmentStencilAccessARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM
+    ePreferFastTrace = VK_BUILD_MICROMAP_PREFER_FAST_TRACE_BIT_EXT,
+    ePreferFastBuild = VK_BUILD_MICROMAP_PREFER_FAST_BUILD_BIT_EXT,
+    eAllowCompaction
= VK_BUILD_MICROMAP_ALLOW_COMPACTION_BIT_EXT }; - template - struct IndexTypeValue + using BuildMicromapFlagsEXT = Flags; + + template <> + struct FlagTraits { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR BuildMicromapFlagsEXT allFlags = + BuildMicromapFlagBitsEXT::ePreferFastTrace | BuildMicromapFlagBitsEXT::ePreferFastBuild | BuildMicromapFlagBitsEXT::eAllowCompaction; }; - template <> - struct IndexTypeValue + enum class CopyMicromapModeEXT { - static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint16; + eClone = VK_COPY_MICROMAP_MODE_CLONE_EXT, + eSerialize = VK_COPY_MICROMAP_MODE_SERIALIZE_EXT, + eDeserialize = VK_COPY_MICROMAP_MODE_DESERIALIZE_EXT, + eCompact = VK_COPY_MICROMAP_MODE_COMPACT_EXT }; - template <> - struct CppType + enum class MicromapCreateFlagBitsEXT : VkMicromapCreateFlagsEXT { - using Type = uint16_t; + eDeviceAddressCaptureReplay = VK_MICROMAP_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT }; + using MicromapCreateFlagsEXT = Flags; + template <> - struct IndexTypeValue + struct FlagTraits { - static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint32; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MicromapCreateFlagsEXT allFlags = MicromapCreateFlagBitsEXT::eDeviceAddressCaptureReplay; }; - template <> - struct CppType + enum class BuildMicromapModeEXT { - using Type = uint32_t; + eBuild = VK_BUILD_MICROMAP_MODE_BUILD_EXT }; - template <> - struct IndexTypeValue + enum class OpacityMicromapFormatEXT { - static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint8EXT; + e2State = VK_OPACITY_MICROMAP_FORMAT_2_STATE_EXT, + e4State = VK_OPACITY_MICROMAP_FORMAT_4_STATE_EXT }; - template <> - struct CppType + enum class OpacityMicromapSpecialIndexEXT { - using Type = uint8_t; + eFullyTransparent = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_TRANSPARENT_EXT, + eFullyOpaque = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_OPAQUE_EXT, + eFullyUnknownTransparent = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_TRANSPARENT_EXT, + eFullyUnknownOpaque = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_OPAQUE_EXT, + eClusterGeometryDisableOpacityMicromapNV = VK_OPACITY_MICROMAP_SPECIAL_INDEX_CLUSTER_GEOMETRY_DISABLE_OPACITY_MICROMAP_NV }; - //================ - //=== BITMASKs === - //================ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_displacement_micromap === - //=== VK_VERSION_1_0 === + enum class DisplacementMicromapFormatNV + { + e64Triangles64Bytes = VK_DISPLACEMENT_MICROMAP_FORMAT_64_TRIANGLES_64_BYTES_NV, + e256Triangles128Bytes = VK_DISPLACEMENT_MICROMAP_FORMAT_256_TRIANGLES_128_BYTES_NV, + e1024Triangles128Bytes = VK_DISPLACEMENT_MICROMAP_FORMAT_1024_TRIANGLES_128_BYTES_NV + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - using FormatFeatureFlags = Flags; + //=== VK_ARM_scheduling_controls === - template <> - struct FlagTraits + enum class PhysicalDeviceSchedulingControlsFlagBitsARM : VkPhysicalDeviceSchedulingControlsFlagsARM { - enum : VkFlags - { - allFlags = - VkFlags( FormatFeatureFlagBits::eSampledImage ) | VkFlags( FormatFeatureFlagBits::eStorageImage ) | - VkFlags( FormatFeatureFlagBits::eStorageImageAtomic ) | VkFlags( FormatFeatureFlagBits::eUniformTexelBuffer ) | - VkFlags( FormatFeatureFlagBits::eStorageTexelBuffer ) | VkFlags( FormatFeatureFlagBits::eStorageTexelBufferAtomic ) | - VkFlags( FormatFeatureFlagBits::eVertexBuffer ) | VkFlags( FormatFeatureFlagBits::eColorAttachment ) | - VkFlags( 
FormatFeatureFlagBits::eColorAttachmentBlend ) | VkFlags( FormatFeatureFlagBits::eDepthStencilAttachment ) | - VkFlags( FormatFeatureFlagBits::eBlitSrc ) | VkFlags( FormatFeatureFlagBits::eBlitDst ) | VkFlags( FormatFeatureFlagBits::eSampledImageFilterLinear ) | - VkFlags( FormatFeatureFlagBits::eTransferSrc ) | VkFlags( FormatFeatureFlagBits::eTransferDst ) | - VkFlags( FormatFeatureFlagBits::eMidpointChromaSamples ) | VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter ) | - VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter ) | - VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit ) | - VkFlags( FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) | VkFlags( FormatFeatureFlagBits::eDisjoint ) | - VkFlags( FormatFeatureFlagBits::eCositedChromaSamples ) | VkFlags( FormatFeatureFlagBits::eSampledImageFilterMinmax ) -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - | VkFlags( FormatFeatureFlagBits::eVideoDecodeOutputKHR ) | VkFlags( FormatFeatureFlagBits::eVideoDecodeDpbKHR ) -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - | VkFlags( FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) | VkFlags( FormatFeatureFlagBits::eSampledImageFilterCubicEXT ) | - VkFlags( FormatFeatureFlagBits::eFragmentDensityMapEXT ) | VkFlags( FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR ) -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - | VkFlags( FormatFeatureFlagBits::eVideoEncodeInputKHR ) | VkFlags( FormatFeatureFlagBits::eVideoEncodeDpbKHR ) -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - }; + eShaderCoreCount = VK_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_SHADER_CORE_COUNT_ARM }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator|( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return FormatFeatureFlags( bit0 ) | bit1; - } + using PhysicalDeviceSchedulingControlsFlagsARM = Flags; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator&( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + template <> + struct FlagTraits { - return FormatFeatureFlags( bit0 ) & bit1; - } + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PhysicalDeviceSchedulingControlsFlagsARM allFlags = PhysicalDeviceSchedulingControlsFlagBitsARM::eShaderCoreCount; + }; + + //=== VK_NV_memory_decompression === - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator^( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + enum class MemoryDecompressionMethodFlagBitsNV : VkMemoryDecompressionMethodFlagsNV { - return FormatFeatureFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator~( FormatFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( FormatFeatureFlags( bits ) ); - } + eGdeflate10 = VK_MEMORY_DECOMPRESSION_METHOD_GDEFLATE_1_0_BIT_NV + }; - using ImageCreateFlags = Flags; + using MemoryDecompressionMethodFlagsNV = Flags; template <> - struct FlagTraits + struct FlagTraits { - enum : VkFlags - { - allFlags = VkFlags( ImageCreateFlagBits::eSparseBinding ) | VkFlags( ImageCreateFlagBits::eSparseResidency ) | - VkFlags( ImageCreateFlagBits::eSparseAliased ) | VkFlags( ImageCreateFlagBits::eMutableFormat ) | - VkFlags( ImageCreateFlagBits::eCubeCompatible ) | VkFlags( ImageCreateFlagBits::eAlias ) | - VkFlags( ImageCreateFlagBits::eSplitInstanceBindRegions ) | VkFlags( 
ImageCreateFlagBits::e2DArrayCompatible ) | - VkFlags( ImageCreateFlagBits::eBlockTexelViewCompatible ) | VkFlags( ImageCreateFlagBits::eExtendedUsage ) | - VkFlags( ImageCreateFlagBits::eProtected ) | VkFlags( ImageCreateFlagBits::eDisjoint ) | VkFlags( ImageCreateFlagBits::eCornerSampledNV ) | - VkFlags( ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT ) | VkFlags( ImageCreateFlagBits::eSubsampledEXT ) | - VkFlags( ImageCreateFlagBits::eMultisampledRenderToSingleSampledEXT ) | VkFlags( ImageCreateFlagBits::e2DViewCompatibleEXT ) | - VkFlags( ImageCreateFlagBits::eFragmentDensityMapOffsetQCOM ) - }; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryDecompressionMethodFlagsNV allFlags = MemoryDecompressionMethodFlagBitsNV::eGdeflate10; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator|( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ImageCreateFlags( bit0 ) | bit1; - } + //=== VK_NV_ray_tracing_linear_swept_spheres === - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator&( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + enum class RayTracingLssIndexingModeNV { - return ImageCreateFlags( bit0 ) & bit1; - } + eList = VK_RAY_TRACING_LSS_INDEXING_MODE_LIST_NV, + eSuccessive = VK_RAY_TRACING_LSS_INDEXING_MODE_SUCCESSIVE_NV + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator^( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + enum class RayTracingLssPrimitiveEndCapsModeNV { - return ImageCreateFlags( bit0 ) ^ bit1; - } + eNone = VK_RAY_TRACING_LSS_PRIMITIVE_END_CAPS_MODE_NONE_NV, + eChained = VK_RAY_TRACING_LSS_PRIMITIVE_END_CAPS_MODE_CHAINED_NV + }; + + //=== VK_EXT_subpass_merge_feedback === - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator~( ImageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + enum class SubpassMergeStatusEXT { - return ~( ImageCreateFlags( bits ) ); - } + eMerged = VK_SUBPASS_MERGE_STATUS_MERGED_EXT, + eDisallowed = VK_SUBPASS_MERGE_STATUS_DISALLOWED_EXT, + eNotMergedSideEffects = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SIDE_EFFECTS_EXT, + eNotMergedSamplesMismatch = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SAMPLES_MISMATCH_EXT, + eNotMergedViewsMismatch = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_VIEWS_MISMATCH_EXT, + eNotMergedAliasing = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_ALIASING_EXT, + eNotMergedDependencies = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPENDENCIES_EXT, + eNotMergedIncompatibleInputAttachment = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT_EXT, + eNotMergedTooManyAttachments = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_TOO_MANY_ATTACHMENTS_EXT, + eNotMergedInsufficientStorage = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INSUFFICIENT_STORAGE_EXT, + eNotMergedDepthStencilCount = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPTH_STENCIL_COUNT_EXT, + eNotMergedResolveAttachmentReuse = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_RESOLVE_ATTACHMENT_REUSE_EXT, + eNotMergedSingleSubpass = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SINGLE_SUBPASS_EXT, + eNotMergedUnspecified = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_UNSPECIFIED_EXT + }; - using ImageUsageFlags = Flags; + //=== VK_LUNARG_direct_driver_loading === - template <> - struct FlagTraits + enum class DirectDriverLoadingModeLUNARG { - enum : VkFlags - { - allFlags = VkFlags( ImageUsageFlagBits::eTransferSrc ) | VkFlags( ImageUsageFlagBits::eTransferDst ) | VkFlags( ImageUsageFlagBits::eSampled ) | - VkFlags( ImageUsageFlagBits::eStorage ) | 
VkFlags( ImageUsageFlagBits::eColorAttachment ) | - VkFlags( ImageUsageFlagBits::eDepthStencilAttachment ) | VkFlags( ImageUsageFlagBits::eTransientAttachment ) | - VkFlags( ImageUsageFlagBits::eInputAttachment ) -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - | VkFlags( ImageUsageFlagBits::eVideoDecodeDstKHR ) | VkFlags( ImageUsageFlagBits::eVideoDecodeSrcKHR ) | - VkFlags( ImageUsageFlagBits::eVideoDecodeDpbKHR ) -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - | VkFlags( ImageUsageFlagBits::eFragmentDensityMapEXT ) | VkFlags( ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR ) -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - | VkFlags( ImageUsageFlagBits::eVideoEncodeDstKHR ) | VkFlags( ImageUsageFlagBits::eVideoEncodeSrcKHR ) | - VkFlags( ImageUsageFlagBits::eVideoEncodeDpbKHR ) -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - | VkFlags( ImageUsageFlagBits::eAttachmentFeedbackLoopEXT ) | VkFlags( ImageUsageFlagBits::eInvocationMaskHUAWEI ) | - VkFlags( ImageUsageFlagBits::eSampleWeightQCOM ) | VkFlags( ImageUsageFlagBits::eSampleBlockMatchQCOM ) - }; + eExclusive = VK_DIRECT_DRIVER_LOADING_MODE_EXCLUSIVE_LUNARG, + eInclusive = VK_DIRECT_DRIVER_LOADING_MODE_INCLUSIVE_LUNARG }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator|( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + enum class DirectDriverLoadingFlagBitsLUNARG : VkDirectDriverLoadingFlagsLUNARG { - return ImageUsageFlags( bit0 ) | bit1; - } + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator&( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ImageUsageFlags( bit0 ) & bit1; - } + using DirectDriverLoadingFlagsLUNARG = Flags; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator^( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + template <> + struct FlagTraits { - return ImageUsageFlags( bit0 ) ^ bit1; - } + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR DirectDriverLoadingFlagsLUNARG allFlags = {}; + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator~( ImageUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT + //=== VK_NV_optical_flow === + + enum class OpticalFlowUsageFlagBitsNV : VkOpticalFlowUsageFlagsNV { - return ~( ImageUsageFlags( bits ) ); - } + eUnknown = VK_OPTICAL_FLOW_USAGE_UNKNOWN_NV, + eInput = VK_OPTICAL_FLOW_USAGE_INPUT_BIT_NV, + eOutput = VK_OPTICAL_FLOW_USAGE_OUTPUT_BIT_NV, + eHint = VK_OPTICAL_FLOW_USAGE_HINT_BIT_NV, + eCost = VK_OPTICAL_FLOW_USAGE_COST_BIT_NV, + eGlobalFlow = VK_OPTICAL_FLOW_USAGE_GLOBAL_FLOW_BIT_NV + }; - using InstanceCreateFlags = Flags; + using OpticalFlowUsageFlagsNV = Flags; template <> - struct FlagTraits + struct FlagTraits { - enum : VkFlags - { - allFlags = VkFlags( InstanceCreateFlagBits::eEnumeratePortabilityKHR ) - }; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowUsageFlagsNV allFlags = OpticalFlowUsageFlagBitsNV::eUnknown | OpticalFlowUsageFlagBitsNV::eInput | + OpticalFlowUsageFlagBitsNV::eOutput | OpticalFlowUsageFlagBitsNV::eHint | + OpticalFlowUsageFlagBitsNV::eCost | OpticalFlowUsageFlagBitsNV::eGlobalFlow; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR InstanceCreateFlags operator|( InstanceCreateFlagBits bit0, InstanceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + enum class OpticalFlowGridSizeFlagBitsNV : VkOpticalFlowGridSizeFlagsNV { - return InstanceCreateFlags( bit0 ) | bit1; - } + eUnknown = VK_OPTICAL_FLOW_GRID_SIZE_UNKNOWN_NV, + e1X1 = 
VK_OPTICAL_FLOW_GRID_SIZE_1X1_BIT_NV, + e2X2 = VK_OPTICAL_FLOW_GRID_SIZE_2X2_BIT_NV, + e4X4 = VK_OPTICAL_FLOW_GRID_SIZE_4X4_BIT_NV, + e8X8 = VK_OPTICAL_FLOW_GRID_SIZE_8X8_BIT_NV + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR InstanceCreateFlags operator&( InstanceCreateFlagBits bit0, InstanceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return InstanceCreateFlags( bit0 ) & bit1; - } + using OpticalFlowGridSizeFlagsNV = Flags; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR InstanceCreateFlags operator^( InstanceCreateFlagBits bit0, InstanceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + template <> + struct FlagTraits { - return InstanceCreateFlags( bit0 ) ^ bit1; - } + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowGridSizeFlagsNV allFlags = OpticalFlowGridSizeFlagBitsNV::eUnknown | OpticalFlowGridSizeFlagBitsNV::e1X1 | + OpticalFlowGridSizeFlagBitsNV::e2X2 | OpticalFlowGridSizeFlagBitsNV::e4X4 | + OpticalFlowGridSizeFlagBitsNV::e8X8; + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR InstanceCreateFlags operator~( InstanceCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + enum class OpticalFlowPerformanceLevelNV { - return ~( InstanceCreateFlags( bits ) ); - } - - using MemoryHeapFlags = Flags; + eUnknown = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_UNKNOWN_NV, + eSlow = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_SLOW_NV, + eMedium = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MEDIUM_NV, + eFast = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_FAST_NV + }; - template <> - struct FlagTraits + enum class OpticalFlowSessionBindingPointNV { - enum : VkFlags - { - allFlags = VkFlags( MemoryHeapFlagBits::eDeviceLocal ) | VkFlags( MemoryHeapFlagBits::eMultiInstance ) - }; + eUnknown = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_UNKNOWN_NV, + eInput = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_INPUT_NV, + eReference = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_REFERENCE_NV, + eHint = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_HINT_NV, + eFlowVector = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_FLOW_VECTOR_NV, + eBackwardFlowVector = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_FLOW_VECTOR_NV, + eCost = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_COST_NV, + eBackwardCost = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_COST_NV, + eGlobalFlow = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_GLOBAL_FLOW_NV }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator|( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + enum class OpticalFlowSessionCreateFlagBitsNV : VkOpticalFlowSessionCreateFlagsNV { - return MemoryHeapFlags( bit0 ) | bit1; - } + eEnableHint = VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_HINT_BIT_NV, + eEnableCost = VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_COST_BIT_NV, + eEnableGlobalFlow = VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_GLOBAL_FLOW_BIT_NV, + eAllowRegions = VK_OPTICAL_FLOW_SESSION_CREATE_ALLOW_REGIONS_BIT_NV, + eBothDirections = VK_OPTICAL_FLOW_SESSION_CREATE_BOTH_DIRECTIONS_BIT_NV + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator&( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return MemoryHeapFlags( bit0 ) & bit1; - } + using OpticalFlowSessionCreateFlagsNV = Flags; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator^( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + template <> + struct FlagTraits { - return MemoryHeapFlags( bit0 ) ^ bit1; - } + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowSessionCreateFlagsNV allFlags = + 
OpticalFlowSessionCreateFlagBitsNV::eEnableHint | OpticalFlowSessionCreateFlagBitsNV::eEnableCost | + OpticalFlowSessionCreateFlagBitsNV::eEnableGlobalFlow | OpticalFlowSessionCreateFlagBitsNV::eAllowRegions | + OpticalFlowSessionCreateFlagBitsNV::eBothDirections; + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator~( MemoryHeapFlagBits bits ) VULKAN_HPP_NOEXCEPT + enum class OpticalFlowExecuteFlagBitsNV : VkOpticalFlowExecuteFlagsNV { - return ~( MemoryHeapFlags( bits ) ); - } + eDisableTemporalHints = VK_OPTICAL_FLOW_EXECUTE_DISABLE_TEMPORAL_HINTS_BIT_NV + }; - using MemoryPropertyFlags = Flags; + using OpticalFlowExecuteFlagsNV = Flags; template <> - struct FlagTraits + struct FlagTraits { - enum : VkFlags - { - allFlags = VkFlags( MemoryPropertyFlagBits::eDeviceLocal ) | VkFlags( MemoryPropertyFlagBits::eHostVisible ) | - VkFlags( MemoryPropertyFlagBits::eHostCoherent ) | VkFlags( MemoryPropertyFlagBits::eHostCached ) | - VkFlags( MemoryPropertyFlagBits::eLazilyAllocated ) | VkFlags( MemoryPropertyFlagBits::eProtected ) | - VkFlags( MemoryPropertyFlagBits::eDeviceCoherentAMD ) | VkFlags( MemoryPropertyFlagBits::eDeviceUncachedAMD ) | - VkFlags( MemoryPropertyFlagBits::eRdmaCapableNV ) - }; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowExecuteFlagsNV allFlags = OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return MemoryPropertyFlags( bit0 ) | bit1; - } + //=== VK_AMD_anti_lag === - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator&( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + enum class AntiLagModeAMD { - return MemoryPropertyFlags( bit0 ) & bit1; - } + eDriverControl = VK_ANTI_LAG_MODE_DRIVER_CONTROL_AMD, + eOn = VK_ANTI_LAG_MODE_ON_AMD, + eOff = VK_ANTI_LAG_MODE_OFF_AMD + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator^( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + enum class AntiLagStageAMD { - return MemoryPropertyFlags( bit0 ) ^ bit1; - } + eInput = VK_ANTI_LAG_STAGE_INPUT_AMD, + ePresent = VK_ANTI_LAG_STAGE_PRESENT_AMD + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits ) VULKAN_HPP_NOEXCEPT + //=== VK_EXT_shader_object === + + enum class ShaderCreateFlagBitsEXT : VkShaderCreateFlagsEXT { - return ~( MemoryPropertyFlags( bits ) ); - } + eLinkStage = VK_SHADER_CREATE_LINK_STAGE_BIT_EXT, + eAllowVaryingSubgroupSize = VK_SHADER_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT, + eRequireFullSubgroups = VK_SHADER_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT, + eNoTaskShader = VK_SHADER_CREATE_NO_TASK_SHADER_BIT_EXT, + eDispatchBase = VK_SHADER_CREATE_DISPATCH_BASE_BIT_EXT, + eFragmentShadingRateAttachment = VK_SHADER_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_EXT, + eFragmentDensityMapAttachment = VK_SHADER_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + eIndirectBindable = VK_SHADER_CREATE_INDIRECT_BINDABLE_BIT_EXT + }; - using QueueFlags = Flags; + using ShaderCreateFlagsEXT = Flags; template <> - struct FlagTraits + struct FlagTraits { - enum : VkFlags - { - allFlags = VkFlags( QueueFlagBits::eGraphics ) | VkFlags( QueueFlagBits::eCompute ) | VkFlags( QueueFlagBits::eTransfer ) | - VkFlags( QueueFlagBits::eSparseBinding ) | VkFlags( QueueFlagBits::eProtected ) -#if defined( 
VK_ENABLE_BETA_EXTENSIONS ) - | VkFlags( QueueFlagBits::eVideoDecodeKHR ) | VkFlags( QueueFlagBits::eVideoEncodeKHR ) -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - }; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ShaderCreateFlagsEXT allFlags = + ShaderCreateFlagBitsEXT::eLinkStage | ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize | ShaderCreateFlagBitsEXT::eRequireFullSubgroups | + ShaderCreateFlagBitsEXT::eNoTaskShader | ShaderCreateFlagBitsEXT::eDispatchBase | ShaderCreateFlagBitsEXT::eFragmentShadingRateAttachment | + ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment | ShaderCreateFlagBitsEXT::eIndirectBindable; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator|( QueueFlagBits bit0, QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + enum class ShaderCodeTypeEXT { - return QueueFlags( bit0 ) | bit1; - } + eBinary = VK_SHADER_CODE_TYPE_BINARY_EXT, + eSpirv = VK_SHADER_CODE_TYPE_SPIRV_EXT + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator&( QueueFlagBits bit0, QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + //=== VK_NV_ray_tracing_invocation_reorder === + + enum class RayTracingInvocationReorderModeNV { - return QueueFlags( bit0 ) & bit1; - } + eNone = VK_RAY_TRACING_INVOCATION_REORDER_MODE_NONE_NV, + eReorder = VK_RAY_TRACING_INVOCATION_REORDER_MODE_REORDER_NV + }; + + //=== VK_NV_cooperative_vector === - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator^( QueueFlagBits bit0, QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + enum class CooperativeVectorMatrixLayoutNV { - return QueueFlags( bit0 ) ^ bit1; - } + eRowMajor = VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_ROW_MAJOR_NV, + eColumnMajor = VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_COLUMN_MAJOR_NV, + eInferencingOptimal = VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_INFERENCING_OPTIMAL_NV, + eTrainingOptimal = VK_COOPERATIVE_VECTOR_MATRIX_LAYOUT_TRAINING_OPTIMAL_NV + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator~( QueueFlagBits bits ) VULKAN_HPP_NOEXCEPT + enum class ComponentTypeKHR { - return ~( QueueFlags( bits ) ); - } + eFloat16 = VK_COMPONENT_TYPE_FLOAT16_KHR, + eFloat32 = VK_COMPONENT_TYPE_FLOAT32_KHR, + eFloat64 = VK_COMPONENT_TYPE_FLOAT64_KHR, + eSint8 = VK_COMPONENT_TYPE_SINT8_KHR, + eSint16 = VK_COMPONENT_TYPE_SINT16_KHR, + eSint32 = VK_COMPONENT_TYPE_SINT32_KHR, + eSint64 = VK_COMPONENT_TYPE_SINT64_KHR, + eUint8 = VK_COMPONENT_TYPE_UINT8_KHR, + eUint16 = VK_COMPONENT_TYPE_UINT16_KHR, + eUint32 = VK_COMPONENT_TYPE_UINT32_KHR, + eUint64 = VK_COMPONENT_TYPE_UINT64_KHR, + eSint8PackedNV = VK_COMPONENT_TYPE_SINT8_PACKED_NV, + eUint8PackedNV = VK_COMPONENT_TYPE_UINT8_PACKED_NV, + eFloatE4M3NV = VK_COMPONENT_TYPE_FLOAT_E4M3_NV, + eFloatE5M2NV = VK_COMPONENT_TYPE_FLOAT_E5M2_NV + }; + using ComponentTypeNV = ComponentTypeKHR; - using SampleCountFlags = Flags; + //=== VK_EXT_layer_settings === - template <> - struct FlagTraits + enum class LayerSettingTypeEXT { - enum : VkFlags - { - allFlags = VkFlags( SampleCountFlagBits::e1 ) | VkFlags( SampleCountFlagBits::e2 ) | VkFlags( SampleCountFlagBits::e4 ) | - VkFlags( SampleCountFlagBits::e8 ) | VkFlags( SampleCountFlagBits::e16 ) | VkFlags( SampleCountFlagBits::e32 ) | - VkFlags( SampleCountFlagBits::e64 ) - }; + eBool32 = VK_LAYER_SETTING_TYPE_BOOL32_EXT, + eInt32 = VK_LAYER_SETTING_TYPE_INT32_EXT, + eInt64 = VK_LAYER_SETTING_TYPE_INT64_EXT, + eUint32 = VK_LAYER_SETTING_TYPE_UINT32_EXT, + eUint64 = VK_LAYER_SETTING_TYPE_UINT64_EXT, + eFloat32 = VK_LAYER_SETTING_TYPE_FLOAT32_EXT, + eFloat64 = 
VK_LAYER_SETTING_TYPE_FLOAT64_EXT, + eString = VK_LAYER_SETTING_TYPE_STRING_EXT }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator|( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SampleCountFlags( bit0 ) | bit1; - } + //================================= + //=== Layer Setting Type Traits === + //================================= - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator&( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + template <> + struct CppType { - return SampleCountFlags( bit0 ) & bit1; - } + using Type = vk::Bool32; + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator^( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + template <> + struct CppType { - return SampleCountFlags( bit0 ) ^ bit1; - } + using Type = int32_t; + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator~( SampleCountFlagBits bits ) VULKAN_HPP_NOEXCEPT + template <> + struct CppType { - return ~( SampleCountFlags( bits ) ); - } - - using DeviceCreateFlags = Flags; - - using DeviceQueueCreateFlags = Flags; + using Type = int64_t; + }; template <> - struct FlagTraits + struct CppType { - enum : VkFlags - { - allFlags = VkFlags( DeviceQueueCreateFlagBits::eProtected ) - }; + using Type = uint32_t; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + template <> + struct CppType { - return DeviceQueueCreateFlags( bit0 ) | bit1; - } + using Type = uint64_t; + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator&( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + template <> + struct CppType { - return DeviceQueueCreateFlags( bit0 ) & bit1; - } + using Type = float; + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator^( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + template <> + struct CppType { - return DeviceQueueCreateFlags( bit0 ) ^ bit1; - } + using Type = double; + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator~( DeviceQueueCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + template <> + struct CppType { - return ~( DeviceQueueCreateFlags( bits ) ); - } - - using PipelineStageFlags = Flags; + using Type = char *; + }; - template <> - struct FlagTraits + template + bool isSameType( LayerSettingTypeEXT layerSettingType ) { - enum : VkFlags + switch ( layerSettingType ) { - allFlags = VkFlags( PipelineStageFlagBits::eTopOfPipe ) | VkFlags( PipelineStageFlagBits::eDrawIndirect ) | - VkFlags( PipelineStageFlagBits::eVertexInput ) | VkFlags( PipelineStageFlagBits::eVertexShader ) | - VkFlags( PipelineStageFlagBits::eTessellationControlShader ) | VkFlags( PipelineStageFlagBits::eTessellationEvaluationShader ) | - VkFlags( PipelineStageFlagBits::eGeometryShader ) | VkFlags( PipelineStageFlagBits::eFragmentShader ) | - VkFlags( PipelineStageFlagBits::eEarlyFragmentTests ) | VkFlags( PipelineStageFlagBits::eLateFragmentTests ) | - VkFlags( PipelineStageFlagBits::eColorAttachmentOutput ) | VkFlags( PipelineStageFlagBits::eComputeShader ) | - VkFlags( PipelineStageFlagBits::eTransfer ) | VkFlags( PipelineStageFlagBits::eBottomOfPipe ) | VkFlags( PipelineStageFlagBits::eHost ) | - VkFlags( PipelineStageFlagBits::eAllGraphics ) | VkFlags( PipelineStageFlagBits::eAllCommands ) | VkFlags( 
PipelineStageFlagBits::eNone ) | - VkFlags( PipelineStageFlagBits::eTransformFeedbackEXT ) | VkFlags( PipelineStageFlagBits::eConditionalRenderingEXT ) | - VkFlags( PipelineStageFlagBits::eAccelerationStructureBuildKHR ) | VkFlags( PipelineStageFlagBits::eRayTracingShaderKHR ) | - VkFlags( PipelineStageFlagBits::eFragmentDensityProcessEXT ) | VkFlags( PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR ) | - VkFlags( PipelineStageFlagBits::eCommandPreprocessNV ) | VkFlags( PipelineStageFlagBits::eTaskShaderEXT ) | - VkFlags( PipelineStageFlagBits::eMeshShaderEXT ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator|( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineStageFlags( bit0 ) | bit1; + case LayerSettingTypeEXT::eBool32: return std::is_same::value; + case LayerSettingTypeEXT::eInt32: return std::is_same::value; + case LayerSettingTypeEXT::eInt64: return std::is_same::value; + case LayerSettingTypeEXT::eUint32: return std::is_same::value; + case LayerSettingTypeEXT::eUint64: return std::is_same::value; + case LayerSettingTypeEXT::eFloat32: return std::is_same::value; + case LayerSettingTypeEXT::eFloat64: return std::is_same::value; + case LayerSettingTypeEXT::eString: return std::is_same::value; + default: return false; + } } - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator&( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineStageFlags( bit0 ) & bit1; - } + //=== VK_NV_low_latency2 === - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator^( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + enum class LatencyMarkerNV { - return PipelineStageFlags( bit0 ) ^ bit1; - } + eSimulationStart = VK_LATENCY_MARKER_SIMULATION_START_NV, + eSimulationEnd = VK_LATENCY_MARKER_SIMULATION_END_NV, + eRendersubmitStart = VK_LATENCY_MARKER_RENDERSUBMIT_START_NV, + eRendersubmitEnd = VK_LATENCY_MARKER_RENDERSUBMIT_END_NV, + ePresentStart = VK_LATENCY_MARKER_PRESENT_START_NV, + ePresentEnd = VK_LATENCY_MARKER_PRESENT_END_NV, + eInputSample = VK_LATENCY_MARKER_INPUT_SAMPLE_NV, + eTriggerFlash = VK_LATENCY_MARKER_TRIGGER_FLASH_NV, + eOutOfBandRendersubmitStart = VK_LATENCY_MARKER_OUT_OF_BAND_RENDERSUBMIT_START_NV, + eOutOfBandRendersubmitEnd = VK_LATENCY_MARKER_OUT_OF_BAND_RENDERSUBMIT_END_NV, + eOutOfBandPresentStart = VK_LATENCY_MARKER_OUT_OF_BAND_PRESENT_START_NV, + eOutOfBandPresentEnd = VK_LATENCY_MARKER_OUT_OF_BAND_PRESENT_END_NV + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator~( PipelineStageFlagBits bits ) VULKAN_HPP_NOEXCEPT + enum class OutOfBandQueueTypeNV { - return ~( PipelineStageFlags( bits ) ); - } + eRender = VK_OUT_OF_BAND_QUEUE_TYPE_RENDER_NV, + ePresent = VK_OUT_OF_BAND_QUEUE_TYPE_PRESENT_NV + }; - using MemoryMapFlags = Flags; + //=== VK_KHR_cooperative_matrix === - using ImageAspectFlags = Flags; + enum class ScopeKHR + { + eDevice = VK_SCOPE_DEVICE_KHR, + eWorkgroup = VK_SCOPE_WORKGROUP_KHR, + eSubgroup = VK_SCOPE_SUBGROUP_KHR, + eQueueFamily = VK_SCOPE_QUEUE_FAMILY_KHR + }; + using ScopeNV = ScopeKHR; - template <> - struct FlagTraits + //=== VK_KHR_video_encode_av1 === + + enum class VideoEncodeAV1PredictionModeKHR { - enum : VkFlags - { - allFlags = VkFlags( ImageAspectFlagBits::eColor ) | VkFlags( ImageAspectFlagBits::eDepth ) | VkFlags( ImageAspectFlagBits::eStencil ) | - VkFlags( ImageAspectFlagBits::eMetadata ) | VkFlags( ImageAspectFlagBits::ePlane0 ) | VkFlags( 
ImageAspectFlagBits::ePlane1 ) | - VkFlags( ImageAspectFlagBits::ePlane2 ) | VkFlags( ImageAspectFlagBits::eNone ) | VkFlags( ImageAspectFlagBits::eMemoryPlane0EXT ) | - VkFlags( ImageAspectFlagBits::eMemoryPlane1EXT ) | VkFlags( ImageAspectFlagBits::eMemoryPlane2EXT ) | - VkFlags( ImageAspectFlagBits::eMemoryPlane3EXT ) - }; + eIntraOnly = VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_INTRA_ONLY_KHR, + eSingleReference = VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_SINGLE_REFERENCE_KHR, + eUnidirectionalCompound = VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_UNIDIRECTIONAL_COMPOUND_KHR, + eBidirectionalCompound = VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_BIDIRECTIONAL_COMPOUND_KHR }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator|( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + enum class VideoEncodeAV1RateControlGroupKHR { - return ImageAspectFlags( bit0 ) | bit1; - } + eIntra = VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_INTRA_KHR, + ePredictive = VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_PREDICTIVE_KHR, + eBipredictive = VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_BIPREDICTIVE_KHR + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator&( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + enum class VideoEncodeAV1CapabilityFlagBitsKHR : VkVideoEncodeAV1CapabilityFlagsKHR { - return ImageAspectFlags( bit0 ) & bit1; - } + ePerRateControlGroupMinMaxQIndex = VK_VIDEO_ENCODE_AV1_CAPABILITY_PER_RATE_CONTROL_GROUP_MIN_MAX_Q_INDEX_BIT_KHR, + eGenerateObuExtensionHeader = VK_VIDEO_ENCODE_AV1_CAPABILITY_GENERATE_OBU_EXTENSION_HEADER_BIT_KHR, + ePrimaryReferenceCdfOnly = VK_VIDEO_ENCODE_AV1_CAPABILITY_PRIMARY_REFERENCE_CDF_ONLY_BIT_KHR, + eFrameSizeOverride = VK_VIDEO_ENCODE_AV1_CAPABILITY_FRAME_SIZE_OVERRIDE_BIT_KHR, + eMotionVectorScaling = VK_VIDEO_ENCODE_AV1_CAPABILITY_MOTION_VECTOR_SCALING_BIT_KHR + }; + + using VideoEncodeAV1CapabilityFlagsKHR = Flags; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator^( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + template <> + struct FlagTraits { - return ImageAspectFlags( bit0 ) ^ bit1; - } + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeAV1CapabilityFlagsKHR allFlags = + VideoEncodeAV1CapabilityFlagBitsKHR::ePerRateControlGroupMinMaxQIndex | VideoEncodeAV1CapabilityFlagBitsKHR::eGenerateObuExtensionHeader | + VideoEncodeAV1CapabilityFlagBitsKHR::ePrimaryReferenceCdfOnly | VideoEncodeAV1CapabilityFlagBitsKHR::eFrameSizeOverride | + VideoEncodeAV1CapabilityFlagBitsKHR::eMotionVectorScaling; + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator~( ImageAspectFlagBits bits ) VULKAN_HPP_NOEXCEPT + enum class VideoEncodeAV1StdFlagBitsKHR : VkVideoEncodeAV1StdFlagsKHR { - return ~( ImageAspectFlags( bits ) ); - } + eUniformTileSpacingFlagSet = VK_VIDEO_ENCODE_AV1_STD_UNIFORM_TILE_SPACING_FLAG_SET_BIT_KHR, + eSkipModePresentUnset = VK_VIDEO_ENCODE_AV1_STD_SKIP_MODE_PRESENT_UNSET_BIT_KHR, + ePrimaryRefFrame = VK_VIDEO_ENCODE_AV1_STD_PRIMARY_REF_FRAME_BIT_KHR, + eDeltaQ = VK_VIDEO_ENCODE_AV1_STD_DELTA_Q_BIT_KHR + }; - using SparseImageFormatFlags = Flags; + using VideoEncodeAV1StdFlagsKHR = Flags; template <> - struct FlagTraits + struct FlagTraits { - enum : VkFlags - { - allFlags = VkFlags( SparseImageFormatFlagBits::eSingleMiptail ) | VkFlags( SparseImageFormatFlagBits::eAlignedMipSize ) | - VkFlags( SparseImageFormatFlagBits::eNonstandardBlockSize ) - }; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeAV1StdFlagsKHR allFlags = + VideoEncodeAV1StdFlagBitsKHR::eUniformTileSpacingFlagSet | VideoEncodeAV1StdFlagBitsKHR::eSkipModePresentUnset | + VideoEncodeAV1StdFlagBitsKHR::ePrimaryRefFrame | VideoEncodeAV1StdFlagBitsKHR::eDeltaQ; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + enum class VideoEncodeAV1SuperblockSizeFlagBitsKHR : VkVideoEncodeAV1SuperblockSizeFlagsKHR { - return SparseImageFormatFlags( bit0 ) | bit1; - } + e64 = VK_VIDEO_ENCODE_AV1_SUPERBLOCK_SIZE_64_BIT_KHR, + e128 = VK_VIDEO_ENCODE_AV1_SUPERBLOCK_SIZE_128_BIT_KHR + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator&( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SparseImageFormatFlags( bit0 ) & bit1; - } + using VideoEncodeAV1SuperblockSizeFlagsKHR = Flags; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator^( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + template <> + struct FlagTraits { - return SparseImageFormatFlags( bit0 ) ^ bit1; - } + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeAV1SuperblockSizeFlagsKHR allFlags = + VideoEncodeAV1SuperblockSizeFlagBitsKHR::e64 | VideoEncodeAV1SuperblockSizeFlagBitsKHR::e128; + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits ) VULKAN_HPP_NOEXCEPT + enum class VideoEncodeAV1RateControlFlagBitsKHR : VkVideoEncodeAV1RateControlFlagsKHR { - return ~( SparseImageFormatFlags( bits ) ); - } + eRegularGop = VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REGULAR_GOP_BIT_KHR, + eTemporalLayerPatternDyadic = VK_VIDEO_ENCODE_AV1_RATE_CONTROL_TEMPORAL_LAYER_PATTERN_DYADIC_BIT_KHR, + eReferencePatternFlat = VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR, + eReferencePatternDyadic = VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR + }; - using SparseMemoryBindFlags = Flags; + using VideoEncodeAV1RateControlFlagsKHR = Flags; template <> - struct FlagTraits + struct FlagTraits { - enum : VkFlags - { - allFlags = VkFlags( SparseMemoryBindFlagBits::eMetadata ) - }; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeAV1RateControlFlagsKHR allFlags = + VideoEncodeAV1RateControlFlagBitsKHR::eRegularGop | VideoEncodeAV1RateControlFlagBitsKHR::eTemporalLayerPatternDyadic | + VideoEncodeAV1RateControlFlagBitsKHR::eReferencePatternFlat | VideoEncodeAV1RateControlFlagBitsKHR::eReferencePatternDyadic; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SparseMemoryBindFlags( bit0 ) | bit1; - } + //=== VK_QCOM_image_processing2 === - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator&( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + enum class BlockMatchWindowCompareModeQCOM { - return SparseMemoryBindFlags( bit0 ) & bit1; - } + eMin = VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MIN_QCOM, + eMax = VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MAX_QCOM + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator^( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + //=== 
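
// Standalone illustration of the pattern used by the added FlagTraits specializations above (a
// simplified mock-up, not vulkan.hpp's own definitions): once a specialization provides isBitmask
// and allFlags, one generic set of bitwise operators serves every flag type, which is what lets the
// per-type operator|/&/^/~ overloads on the removed lines be dropped.
#include <cstdint>
#include <type_traits>

template <typename BitType>
struct FlagTraits
{
  static constexpr bool isBitmask = false;  // default: plain enums get no bitwise operators
};

template <typename BitType>
class Flags
{
public:
  using MaskType = typename std::underlying_type<BitType>::type;

  constexpr Flags() : m_mask( 0 ) {}
  constexpr Flags( BitType bit ) : m_mask( static_cast<MaskType>( bit ) ) {}

  constexpr Flags operator|( Flags rhs ) const { return Flags( static_cast<MaskType>( m_mask | rhs.m_mask ) ); }
  constexpr Flags operator&( Flags rhs ) const { return Flags( static_cast<MaskType>( m_mask & rhs.m_mask ) ); }
  constexpr explicit operator bool() const { return m_mask != 0; }

private:
  constexpr explicit Flags( MaskType mask ) : m_mask( mask ) {}
  MaskType m_mask;
};

// One generic operator, enabled only for bit types whose traits opt in via isBitmask.
template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true>
constexpr Flags<BitType> operator|( BitType lhs, BitType rhs )
{
  return Flags<BitType>( lhs ) | rhs;
}

// A hypothetical bit type opting in, mirroring the shape of the generated specializations.
enum class SampleFlagBits : uint32_t
{
  eA = 0x1,
  eB = 0x2
};

template <>
struct FlagTraits<SampleFlagBits>
{
  static constexpr bool isBitmask = true;
  static constexpr Flags<SampleFlagBits> allFlags = Flags<SampleFlagBits>( SampleFlagBits::eA ) | SampleFlagBits::eB;
};

// The generic operator now applies to SampleFlagBits without any per-type overloads.
constexpr Flags<SampleFlagBits> both = SampleFlagBits::eA | SampleFlagBits::eB;
static_assert( static_cast<bool>( both & SampleFlagBits::eB ), "eB is set" );
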
VK_QCOM_filter_cubic_weights === + + enum class CubicFilterWeightsQCOM { - return SparseMemoryBindFlags( bit0 ) ^ bit1; - } + eCatmullRom = VK_CUBIC_FILTER_WEIGHTS_CATMULL_ROM_QCOM, + eZeroTangentCardinal = VK_CUBIC_FILTER_WEIGHTS_ZERO_TANGENT_CARDINAL_QCOM, + eBSpline = VK_CUBIC_FILTER_WEIGHTS_B_SPLINE_QCOM, + eMitchellNetravali = VK_CUBIC_FILTER_WEIGHTS_MITCHELL_NETRAVALI_QCOM + }; + + //=== VK_MSFT_layered_driver === - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits ) VULKAN_HPP_NOEXCEPT + enum class LayeredDriverUnderlyingApiMSFT { - return ~( SparseMemoryBindFlags( bits ) ); - } + eNone = VK_LAYERED_DRIVER_UNDERLYING_API_NONE_MSFT, + eD3D12 = VK_LAYERED_DRIVER_UNDERLYING_API_D3D12_MSFT + }; - using FenceCreateFlags = Flags; + //=== VK_KHR_calibrated_timestamps === - template <> - struct FlagTraits + enum class TimeDomainKHR { - enum : VkFlags - { - allFlags = VkFlags( FenceCreateFlagBits::eSignaled ) - }; + eDevice = VK_TIME_DOMAIN_DEVICE_KHR, + eClockMonotonic = VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR, + eClockMonotonicRaw = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_KHR, + eQueryPerformanceCounter = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_KHR }; + using TimeDomainEXT = TimeDomainKHR; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator|( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + //=== VK_NV_display_stereo === + + enum class DisplaySurfaceStereoTypeNV { - return FenceCreateFlags( bit0 ) | bit1; - } + eNone = VK_DISPLAY_SURFACE_STEREO_TYPE_NONE_NV, + eOnboardDin = VK_DISPLAY_SURFACE_STEREO_TYPE_ONBOARD_DIN_NV, + eHdmi3D = VK_DISPLAY_SURFACE_STEREO_TYPE_HDMI_3D_NV, + eInbandDisplayport = VK_DISPLAY_SURFACE_STEREO_TYPE_INBAND_DISPLAYPORT_NV + }; + + //=== VK_KHR_maintenance7 === - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator&( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + enum class PhysicalDeviceLayeredApiKHR { - return FenceCreateFlags( bit0 ) & bit1; - } + eVulkan = VK_PHYSICAL_DEVICE_LAYERED_API_VULKAN_KHR, + eD3D12 = VK_PHYSICAL_DEVICE_LAYERED_API_D3D12_KHR, + eMetal = VK_PHYSICAL_DEVICE_LAYERED_API_METAL_KHR, + eOpengl = VK_PHYSICAL_DEVICE_LAYERED_API_OPENGL_KHR, + eOpengles = VK_PHYSICAL_DEVICE_LAYERED_API_OPENGLES_KHR + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator^( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + //=== VK_NV_cluster_acceleration_structure === + + enum class ClusterAccelerationStructureClusterFlagBitsNV : VkClusterAccelerationStructureClusterFlagsNV { - return FenceCreateFlags( bit0 ) ^ bit1; - } + eAllowDisableOpacityMicromaps = VK_CLUSTER_ACCELERATION_STRUCTURE_CLUSTER_ALLOW_DISABLE_OPACITY_MICROMAPS_NV + }; + + using ClusterAccelerationStructureClusterFlagsNV = Flags; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator~( FenceCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + template <> + struct FlagTraits { - return ~( FenceCreateFlags( bits ) ); - } + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ClusterAccelerationStructureClusterFlagsNV allFlags = + ClusterAccelerationStructureClusterFlagBitsNV::eAllowDisableOpacityMicromaps; + }; - using SemaphoreCreateFlags = Flags; + enum class ClusterAccelerationStructureGeometryFlagBitsNV : VkClusterAccelerationStructureGeometryFlagsNV + { + eCullDisable = VK_CLUSTER_ACCELERATION_STRUCTURE_GEOMETRY_CULL_DISABLE_BIT_NV, + eNoDuplicateAnyhitInvocation = 
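
// Minimal sketch for the TimeDomainEXT alias introduced above: code written against the older
// VK_EXT_calibrated_timestamps spelling keeps compiling, because the EXT name is just an alias of
// the KHR enum type (the same pattern as the ScopeNV = ScopeKHR alias earlier in this hunk).
#include <type_traits>
#include <vulkan/vulkan.hpp>

static_assert( std::is_same<vk::TimeDomainEXT, vk::TimeDomainKHR>::value, "TimeDomainEXT is only an alias" );

constexpr vk::TimeDomainEXT legacySpelling  = vk::TimeDomainEXT::eClockMonotonic;  // pre-existing code
constexpr vk::TimeDomainKHR currentSpelling = legacySpelling;                      // same type, no conversion
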
VK_CLUSTER_ACCELERATION_STRUCTURE_GEOMETRY_NO_DUPLICATE_ANYHIT_INVOCATION_BIT_NV, + eOpaque = VK_CLUSTER_ACCELERATION_STRUCTURE_GEOMETRY_OPAQUE_BIT_NV + }; - using EventCreateFlags = Flags; + using ClusterAccelerationStructureGeometryFlagsNV = Flags; template <> - struct FlagTraits + struct FlagTraits { - enum : VkFlags - { - allFlags = VkFlags( EventCreateFlagBits::eDeviceOnly ) - }; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ClusterAccelerationStructureGeometryFlagsNV allFlags = + ClusterAccelerationStructureGeometryFlagBitsNV::eCullDisable | ClusterAccelerationStructureGeometryFlagBitsNV::eNoDuplicateAnyhitInvocation | + ClusterAccelerationStructureGeometryFlagBitsNV::eOpaque; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator|( EventCreateFlagBits bit0, EventCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + enum class ClusterAccelerationStructureAddressResolutionFlagBitsNV : VkClusterAccelerationStructureAddressResolutionFlagsNV { - return EventCreateFlags( bit0 ) | bit1; - } + eIndirectedDstImplicitData = VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_DST_IMPLICIT_DATA_BIT_NV, + eIndirectedScratchData = VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_SCRATCH_DATA_BIT_NV, + eIndirectedDstAddressArray = VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_DST_ADDRESS_ARRAY_BIT_NV, + eIndirectedDstSizesArray = VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_DST_SIZES_ARRAY_BIT_NV, + eIndirectedSrcInfosArray = VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_SRC_INFOS_ARRAY_BIT_NV, + eIndirectedSrcInfosCount = VK_CLUSTER_ACCELERATION_STRUCTURE_ADDRESS_RESOLUTION_INDIRECTED_SRC_INFOS_COUNT_BIT_NV + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator&( EventCreateFlagBits bit0, EventCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return EventCreateFlags( bit0 ) & bit1; - } + using ClusterAccelerationStructureAddressResolutionFlagsNV = Flags; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator^( EventCreateFlagBits bit0, EventCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + template <> + struct FlagTraits { - return EventCreateFlags( bit0 ) ^ bit1; - } + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ClusterAccelerationStructureAddressResolutionFlagsNV allFlags = + ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedDstImplicitData | + ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedScratchData | + ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedDstAddressArray | + ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedDstSizesArray | + ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedSrcInfosArray | + ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedSrcInfosCount; + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR EventCreateFlags operator~( EventCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + enum class ClusterAccelerationStructureIndexFormatFlagBitsNV : VkClusterAccelerationStructureIndexFormatFlagsNV { - return ~( EventCreateFlags( bits ) ); - } + e8 = VK_CLUSTER_ACCELERATION_STRUCTURE_INDEX_FORMAT_8BIT_NV, + e16 = VK_CLUSTER_ACCELERATION_STRUCTURE_INDEX_FORMAT_16BIT_NV, + e32 = VK_CLUSTER_ACCELERATION_STRUCTURE_INDEX_FORMAT_32BIT_NV + }; - using QueryPipelineStatisticFlags = Flags; + using ClusterAccelerationStructureIndexFormatFlagsNV = Flags; template <> - struct 
FlagTraits + struct FlagTraits { - enum : VkFlags - { - allFlags = VkFlags( QueryPipelineStatisticFlagBits::eInputAssemblyVertices ) | VkFlags( QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives ) | - VkFlags( QueryPipelineStatisticFlagBits::eVertexShaderInvocations ) | VkFlags( QueryPipelineStatisticFlagBits::eGeometryShaderInvocations ) | - VkFlags( QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives ) | VkFlags( QueryPipelineStatisticFlagBits::eClippingInvocations ) | - VkFlags( QueryPipelineStatisticFlagBits::eClippingPrimitives ) | VkFlags( QueryPipelineStatisticFlagBits::eFragmentShaderInvocations ) | - VkFlags( QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches ) | - VkFlags( QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations ) | - VkFlags( QueryPipelineStatisticFlagBits::eComputeShaderInvocations ) | VkFlags( QueryPipelineStatisticFlagBits::eTaskShaderInvocationsEXT ) | - VkFlags( QueryPipelineStatisticFlagBits::eMeshShaderInvocationsEXT ) - }; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR ClusterAccelerationStructureIndexFormatFlagsNV allFlags = ClusterAccelerationStructureIndexFormatFlagBitsNV::e8 | + ClusterAccelerationStructureIndexFormatFlagBitsNV::e16 | + ClusterAccelerationStructureIndexFormatFlagBitsNV::e32; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator|( QueryPipelineStatisticFlagBits bit0, - QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + enum class ClusterAccelerationStructureTypeNV { - return QueryPipelineStatisticFlags( bit0 ) | bit1; - } + eClustersBottomLevel = VK_CLUSTER_ACCELERATION_STRUCTURE_TYPE_CLUSTERS_BOTTOM_LEVEL_NV, + eTriangleCluster = VK_CLUSTER_ACCELERATION_STRUCTURE_TYPE_TRIANGLE_CLUSTER_NV, + eTriangleClusterTemplate = VK_CLUSTER_ACCELERATION_STRUCTURE_TYPE_TRIANGLE_CLUSTER_TEMPLATE_NV + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator&( QueryPipelineStatisticFlagBits bit0, - QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + enum class ClusterAccelerationStructureOpTypeNV { - return QueryPipelineStatisticFlags( bit0 ) & bit1; - } + eMoveObjects = VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_MOVE_OBJECTS_NV, + eBuildClustersBottomLevel = VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_BUILD_CLUSTERS_BOTTOM_LEVEL_NV, + eBuildTriangleCluster = VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_BUILD_TRIANGLE_CLUSTER_NV, + eBuildTriangleClusterTemplate = VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_BUILD_TRIANGLE_CLUSTER_TEMPLATE_NV, + eInstantiateTriangleCluster = VK_CLUSTER_ACCELERATION_STRUCTURE_OP_TYPE_INSTANTIATE_TRIANGLE_CLUSTER_NV + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator^( QueryPipelineStatisticFlagBits bit0, - QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + enum class ClusterAccelerationStructureOpModeNV { - return QueryPipelineStatisticFlags( bit0 ) ^ bit1; - } + eImplicitDestinations = VK_CLUSTER_ACCELERATION_STRUCTURE_OP_MODE_IMPLICIT_DESTINATIONS_NV, + eExplicitDestinations = VK_CLUSTER_ACCELERATION_STRUCTURE_OP_MODE_EXPLICIT_DESTINATIONS_NV, + eComputeSizes = VK_CLUSTER_ACCELERATION_STRUCTURE_OP_MODE_COMPUTE_SIZES_NV + }; + + //=== VK_NV_partitioned_acceleration_structure === - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits ) VULKAN_HPP_NOEXCEPT + enum class PartitionedAccelerationStructureOpTypeNV { - return ~( QueryPipelineStatisticFlags( bits ) ); - } + 
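
// Small helper sketch for the ClusterAccelerationStructureIndexFormatFlagBitsNV values defined
// above, assuming headers that ship VK_NV_cluster_acceleration_structure: the bits name 8-, 16- and
// 32-bit index formats, so the corresponding index stride in bytes is 1, 2 or 4.
#include <cstdint>
#include <vulkan/vulkan.hpp>

inline uint32_t indexStrideBytes( vk::ClusterAccelerationStructureIndexFormatFlagBitsNV format )
{
  switch ( format )
  {
    case vk::ClusterAccelerationStructureIndexFormatFlagBitsNV::e8:  return 1;
    case vk::ClusterAccelerationStructureIndexFormatFlagBitsNV::e16: return 2;
    case vk::ClusterAccelerationStructureIndexFormatFlagBitsNV::e32: return 4;
    default:                                                         return 0;  // unknown / invalid
  }
}
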
eWriteInstance = VK_PARTITIONED_ACCELERATION_STRUCTURE_OP_TYPE_WRITE_INSTANCE_NV, + eUpdateInstance = VK_PARTITIONED_ACCELERATION_STRUCTURE_OP_TYPE_UPDATE_INSTANCE_NV, + eWritePartitionTranslation = VK_PARTITIONED_ACCELERATION_STRUCTURE_OP_TYPE_WRITE_PARTITION_TRANSLATION_NV + }; - using QueryPoolCreateFlags = Flags; + enum class PartitionedAccelerationStructureInstanceFlagBitsNV : VkPartitionedAccelerationStructureInstanceFlagsNV + { + eFlagTriangleFacingCullDisable = VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_TRIANGLE_FACING_CULL_DISABLE_BIT_NV, + eFlagTriangleFlipFacing = VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_TRIANGLE_FLIP_FACING_BIT_NV, + eFlagForceOpaque = VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_FORCE_OPAQUE_BIT_NV, + eFlagForceNoOpaque = VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_FORCE_NO_OPAQUE_BIT_NV, + eFlagEnableExplicitBoundingBox = VK_PARTITIONED_ACCELERATION_STRUCTURE_INSTANCE_FLAG_ENABLE_EXPLICIT_BOUNDING_BOX_NV + }; - using QueryResultFlags = Flags; + using PartitionedAccelerationStructureInstanceFlagsNV = Flags; template <> - struct FlagTraits + struct FlagTraits { - enum : VkFlags - { - allFlags = VkFlags( QueryResultFlagBits::e64 ) | VkFlags( QueryResultFlagBits::eWait ) | VkFlags( QueryResultFlagBits::eWithAvailability ) | - VkFlags( QueryResultFlagBits::ePartial ) -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - | VkFlags( QueryResultFlagBits::eWithStatusKHR ) -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - }; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PartitionedAccelerationStructureInstanceFlagsNV allFlags = + PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagTriangleFacingCullDisable | + PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagTriangleFlipFacing | PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagForceOpaque | + PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagForceNoOpaque | + PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagEnableExplicitBoundingBox; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator|( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return QueryResultFlags( bit0 ) | bit1; - } + //=== VK_EXT_device_generated_commands === - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator&( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + enum class IndirectCommandsTokenTypeEXT { - return QueryResultFlags( bit0 ) & bit1; - } + eExecutionSet = VK_INDIRECT_COMMANDS_TOKEN_TYPE_EXECUTION_SET_EXT, + ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_EXT, + eSequenceIndex = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SEQUENCE_INDEX_EXT, + eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_EXT, + eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_EXT, + eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_EXT, + eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_EXT, + eDrawIndexedCount = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_COUNT_EXT, + eDrawCount = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_COUNT_EXT, + eDispatch = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_EXT, + eDrawMeshTasksNV = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV_EXT, + eDrawMeshTasksCountNV = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_NV_EXT, + eDrawMeshTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_EXT, + eDrawMeshTasksCount = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_EXT, + eTraceRays2 = 
VK_INDIRECT_COMMANDS_TOKEN_TYPE_TRACE_RAYS2_EXT + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator^( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + enum class IndirectExecutionSetInfoTypeEXT { - return QueryResultFlags( bit0 ) ^ bit1; - } + ePipelines = VK_INDIRECT_EXECUTION_SET_INFO_TYPE_PIPELINES_EXT, + eShaderObjects = VK_INDIRECT_EXECUTION_SET_INFO_TYPE_SHADER_OBJECTS_EXT + }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator~( QueryResultFlagBits bits ) VULKAN_HPP_NOEXCEPT + enum class IndirectCommandsLayoutUsageFlagBitsEXT : VkIndirectCommandsLayoutUsageFlagsEXT { - return ~( QueryResultFlags( bits ) ); - } + eExplicitPreprocess = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_EXT, + eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_EXT + }; - using BufferCreateFlags = Flags; + using IndirectCommandsLayoutUsageFlagsEXT = Flags; template <> - struct FlagTraits + struct FlagTraits { - enum : VkFlags - { - allFlags = VkFlags( BufferCreateFlagBits::eSparseBinding ) | VkFlags( BufferCreateFlagBits::eSparseResidency ) | - VkFlags( BufferCreateFlagBits::eSparseAliased ) | VkFlags( BufferCreateFlagBits::eProtected ) | - VkFlags( BufferCreateFlagBits::eDeviceAddressCaptureReplay ) - }; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR IndirectCommandsLayoutUsageFlagsEXT allFlags = + IndirectCommandsLayoutUsageFlagBitsEXT::eExplicitPreprocess | IndirectCommandsLayoutUsageFlagBitsEXT::eUnorderedSequences; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator|( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + enum class IndirectCommandsInputModeFlagBitsEXT : VkIndirectCommandsInputModeFlagsEXT { - return BufferCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator&( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return BufferCreateFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator^( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return BufferCreateFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator~( BufferCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( BufferCreateFlags( bits ) ); - } - - using BufferUsageFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( BufferUsageFlagBits::eTransferSrc ) | VkFlags( BufferUsageFlagBits::eTransferDst ) | - VkFlags( BufferUsageFlagBits::eUniformTexelBuffer ) | VkFlags( BufferUsageFlagBits::eStorageTexelBuffer ) | - VkFlags( BufferUsageFlagBits::eUniformBuffer ) | VkFlags( BufferUsageFlagBits::eStorageBuffer ) | - VkFlags( BufferUsageFlagBits::eIndexBuffer ) | VkFlags( BufferUsageFlagBits::eVertexBuffer ) | - VkFlags( BufferUsageFlagBits::eIndirectBuffer ) | VkFlags( BufferUsageFlagBits::eShaderDeviceAddress ) -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - | VkFlags( BufferUsageFlagBits::eVideoDecodeSrcKHR ) | VkFlags( BufferUsageFlagBits::eVideoDecodeDstKHR ) -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - | VkFlags( BufferUsageFlagBits::eTransformFeedbackBufferEXT ) | VkFlags( BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) | - VkFlags( BufferUsageFlagBits::eConditionalRenderingEXT ) | VkFlags( BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR ) | - VkFlags( 
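
// Hedged sketch for the VK_EXT_device_generated_commands flags defined above: because the
// FlagTraits specialization marks the type as a bitmask, the generic constexpr operators compose and
// test the bits without extension-specific overloads (a C++14-or-newer build is assumed for the
// compile-time checks).
#include <vulkan/vulkan.hpp>

constexpr vk::IndirectCommandsLayoutUsageFlagsEXT layoutUsage =
  vk::IndirectCommandsLayoutUsageFlagBitsEXT::eExplicitPreprocess |
  vk::IndirectCommandsLayoutUsageFlagBitsEXT::eUnorderedSequences;

static_assert( static_cast<bool>( layoutUsage & vk::IndirectCommandsLayoutUsageFlagBitsEXT::eUnorderedSequences ),
               "unordered sequences requested" );
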
BufferUsageFlagBits::eAccelerationStructureStorageKHR ) | VkFlags( BufferUsageFlagBits::eShaderBindingTableKHR ) -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - | VkFlags( BufferUsageFlagBits::eVideoEncodeDstKHR ) | VkFlags( BufferUsageFlagBits::eVideoEncodeSrcKHR ) -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator|( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return BufferUsageFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator&( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return BufferUsageFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator^( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return BufferUsageFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator~( BufferUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( BufferUsageFlags( bits ) ); - } - - using BufferViewCreateFlags = Flags; - - using ImageViewCreateFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) | VkFlags( ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator|( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ImageViewCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator&( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ImageViewCreateFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator^( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ImageViewCreateFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator~( ImageViewCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( ImageViewCreateFlags( bits ) ); - } - - using ShaderModuleCreateFlags = Flags; - - using PipelineCacheCreateFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( PipelineCacheCreateFlagBits::eExternallySynchronized ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator|( PipelineCacheCreateFlagBits bit0, - PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineCacheCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator&( PipelineCacheCreateFlagBits bit0, - PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineCacheCreateFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator^( PipelineCacheCreateFlagBits bit0, - PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineCacheCreateFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator~( PipelineCacheCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( PipelineCacheCreateFlags( bits ) ); - } - - using ColorComponentFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( ColorComponentFlagBits::eR ) | VkFlags( ColorComponentFlagBits::eG ) | VkFlags( ColorComponentFlagBits::eB ) | - VkFlags( ColorComponentFlagBits::eA ) - }; 
- }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ColorComponentFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator&( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ColorComponentFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator^( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ColorComponentFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator~( ColorComponentFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( ColorComponentFlags( bits ) ); - } - - using CullModeFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( CullModeFlagBits::eNone ) | VkFlags( CullModeFlagBits::eFront ) | VkFlags( CullModeFlagBits::eBack ) | - VkFlags( CullModeFlagBits::eFrontAndBack ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator|( CullModeFlagBits bit0, CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return CullModeFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator&( CullModeFlagBits bit0, CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return CullModeFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator^( CullModeFlagBits bit0, CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return CullModeFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator~( CullModeFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( CullModeFlags( bits ) ); - } - - using PipelineColorBlendStateCreateFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( PipelineColorBlendStateCreateFlagBits::eRasterizationOrderAttachmentAccessEXT ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineColorBlendStateCreateFlags operator|( PipelineColorBlendStateCreateFlagBits bit0, - PipelineColorBlendStateCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineColorBlendStateCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineColorBlendStateCreateFlags operator&( PipelineColorBlendStateCreateFlagBits bit0, - PipelineColorBlendStateCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineColorBlendStateCreateFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineColorBlendStateCreateFlags operator^( PipelineColorBlendStateCreateFlagBits bit0, - PipelineColorBlendStateCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineColorBlendStateCreateFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineColorBlendStateCreateFlags operator~( PipelineColorBlendStateCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( PipelineColorBlendStateCreateFlags( bits ) ); - } - - using PipelineCreateFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( PipelineCreateFlagBits::eDisableOptimization ) | VkFlags( PipelineCreateFlagBits::eAllowDerivatives ) | - VkFlags( PipelineCreateFlagBits::eDerivative ) | VkFlags( PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) | - VkFlags( PipelineCreateFlagBits::eDispatchBase ) | VkFlags( PipelineCreateFlagBits::eFailOnPipelineCompileRequired ) | - VkFlags( PipelineCreateFlagBits::eEarlyReturnOnFailure ) | 
VkFlags( PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR ) | - VkFlags( PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT ) | - VkFlags( PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) | - VkFlags( PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) | VkFlags( PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR ) | - VkFlags( PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR ) | VkFlags( PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR ) | - VkFlags( PipelineCreateFlagBits::eRayTracingSkipAabbsKHR ) | VkFlags( PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR ) | - VkFlags( PipelineCreateFlagBits::eDeferCompileNV ) | VkFlags( PipelineCreateFlagBits::eCaptureStatisticsKHR ) | - VkFlags( PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) | VkFlags( PipelineCreateFlagBits::eIndirectBindableNV ) | - VkFlags( PipelineCreateFlagBits::eLibraryKHR ) | VkFlags( PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT ) | - VkFlags( PipelineCreateFlagBits::eLinkTimeOptimizationEXT ) | VkFlags( PipelineCreateFlagBits::eRayTracingAllowMotionNV ) | - VkFlags( PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT ) | VkFlags( PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator&( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineCreateFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator^( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineCreateFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator~( PipelineCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( PipelineCreateFlags( bits ) ); - } - - using PipelineDepthStencilStateCreateFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentDepthAccessEXT ) | - VkFlags( PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentStencilAccessEXT ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateFlags operator|( PipelineDepthStencilStateCreateFlagBits bit0, - PipelineDepthStencilStateCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineDepthStencilStateCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateFlags operator&( PipelineDepthStencilStateCreateFlagBits bit0, - PipelineDepthStencilStateCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineDepthStencilStateCreateFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateFlags operator^( PipelineDepthStencilStateCreateFlagBits bit0, - PipelineDepthStencilStateCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineDepthStencilStateCreateFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateFlags operator~( PipelineDepthStencilStateCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( PipelineDepthStencilStateCreateFlags( bits ) ); - } - - using PipelineDynamicStateCreateFlags = Flags; 
- - using PipelineInputAssemblyStateCreateFlags = Flags; - - using PipelineLayoutCreateFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( PipelineLayoutCreateFlagBits::eIndependentSetsEXT ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineLayoutCreateFlags operator|( PipelineLayoutCreateFlagBits bit0, - PipelineLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineLayoutCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineLayoutCreateFlags operator&( PipelineLayoutCreateFlagBits bit0, - PipelineLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineLayoutCreateFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineLayoutCreateFlags operator^( PipelineLayoutCreateFlagBits bit0, - PipelineLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineLayoutCreateFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineLayoutCreateFlags operator~( PipelineLayoutCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( PipelineLayoutCreateFlags( bits ) ); - } - - using PipelineMultisampleStateCreateFlags = Flags; - - using PipelineRasterizationStateCreateFlags = Flags; - - using PipelineShaderStageCreateFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSize ) | VkFlags( PipelineShaderStageCreateFlagBits::eRequireFullSubgroups ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator|( PipelineShaderStageCreateFlagBits bit0, - PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineShaderStageCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator&( PipelineShaderStageCreateFlagBits bit0, - PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineShaderStageCreateFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator^( PipelineShaderStageCreateFlagBits bit0, - PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineShaderStageCreateFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator~( PipelineShaderStageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( PipelineShaderStageCreateFlags( bits ) ); - } - - using PipelineTessellationStateCreateFlags = Flags; - - using PipelineVertexInputStateCreateFlags = Flags; - - using PipelineViewportStateCreateFlags = Flags; - - using ShaderStageFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( ShaderStageFlagBits::eVertex ) | VkFlags( ShaderStageFlagBits::eTessellationControl ) | - VkFlags( ShaderStageFlagBits::eTessellationEvaluation ) | VkFlags( ShaderStageFlagBits::eGeometry ) | - VkFlags( ShaderStageFlagBits::eFragment ) | VkFlags( ShaderStageFlagBits::eCompute ) | VkFlags( ShaderStageFlagBits::eAllGraphics ) | - VkFlags( ShaderStageFlagBits::eAll ) | VkFlags( ShaderStageFlagBits::eRaygenKHR ) | VkFlags( ShaderStageFlagBits::eAnyHitKHR ) | - VkFlags( ShaderStageFlagBits::eClosestHitKHR ) | VkFlags( ShaderStageFlagBits::eMissKHR ) | VkFlags( ShaderStageFlagBits::eIntersectionKHR ) | - VkFlags( ShaderStageFlagBits::eCallableKHR ) | VkFlags( ShaderStageFlagBits::eTaskEXT ) | VkFlags( ShaderStageFlagBits::eMeshEXT ) | - VkFlags( 
ShaderStageFlagBits::eSubpassShadingHUAWEI ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator|( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ShaderStageFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator&( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ShaderStageFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator^( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ShaderStageFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator~( ShaderStageFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( ShaderStageFlags( bits ) ); - } - - using SamplerCreateFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( SamplerCreateFlagBits::eSubsampledEXT ) | VkFlags( SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT ) | - VkFlags( SamplerCreateFlagBits::eNonSeamlessCubeMapEXT ) | VkFlags( SamplerCreateFlagBits::eImageProcessingQCOM ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator|( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SamplerCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator&( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SamplerCreateFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator^( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SamplerCreateFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator~( SamplerCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( SamplerCreateFlags( bits ) ); - } - - using DescriptorPoolCreateFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( DescriptorPoolCreateFlagBits::eFreeDescriptorSet ) | VkFlags( DescriptorPoolCreateFlagBits::eUpdateAfterBind ) | - VkFlags( DescriptorPoolCreateFlagBits::eHostOnlyEXT ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator|( DescriptorPoolCreateFlagBits bit0, - DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return DescriptorPoolCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator&( DescriptorPoolCreateFlagBits bit0, - DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return DescriptorPoolCreateFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator^( DescriptorPoolCreateFlagBits bit0, - DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return DescriptorPoolCreateFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( DescriptorPoolCreateFlags( bits ) ); - } - - using DescriptorPoolResetFlags = Flags; - - using DescriptorSetLayoutCreateFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool ) | VkFlags( DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) | - VkFlags( DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT ) - }; - }; - - VULKAN_HPP_INLINE 
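
// Sketch showing that call sites are unaffected by dropping the hand-written operators removed in
// this hunk: composing core flag types still goes through the generic Flags operators keyed on
// FlagTraits (again assuming a C++14-or-newer build for the constexpr checks).
#include <vulkan/vulkan.hpp>

constexpr vk::ShaderStageFlags stages =
  vk::ShaderStageFlagBits::eVertex | vk::ShaderStageFlagBits::eFragment;

constexpr vk::BufferUsageFlags bufferUsage =
  vk::BufferUsageFlagBits::eVertexBuffer | vk::BufferUsageFlagBits::eTransferDst;

static_assert( static_cast<bool>( stages & vk::ShaderStageFlagBits::eFragment ), "fragment stage requested" );
static_assert( !static_cast<bool>( bufferUsage & vk::BufferUsageFlagBits::eIndexBuffer ), "no index usage requested" );
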
VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator|( DescriptorSetLayoutCreateFlagBits bit0, - DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return DescriptorSetLayoutCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator&( DescriptorSetLayoutCreateFlagBits bit0, - DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return DescriptorSetLayoutCreateFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator^( DescriptorSetLayoutCreateFlagBits bit0, - DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return DescriptorSetLayoutCreateFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator~( DescriptorSetLayoutCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( DescriptorSetLayoutCreateFlags( bits ) ); - } - - using AccessFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( AccessFlagBits::eIndirectCommandRead ) | VkFlags( AccessFlagBits::eIndexRead ) | VkFlags( AccessFlagBits::eVertexAttributeRead ) | - VkFlags( AccessFlagBits::eUniformRead ) | VkFlags( AccessFlagBits::eInputAttachmentRead ) | VkFlags( AccessFlagBits::eShaderRead ) | - VkFlags( AccessFlagBits::eShaderWrite ) | VkFlags( AccessFlagBits::eColorAttachmentRead ) | VkFlags( AccessFlagBits::eColorAttachmentWrite ) | - VkFlags( AccessFlagBits::eDepthStencilAttachmentRead ) | VkFlags( AccessFlagBits::eDepthStencilAttachmentWrite ) | - VkFlags( AccessFlagBits::eTransferRead ) | VkFlags( AccessFlagBits::eTransferWrite ) | VkFlags( AccessFlagBits::eHostRead ) | - VkFlags( AccessFlagBits::eHostWrite ) | VkFlags( AccessFlagBits::eMemoryRead ) | VkFlags( AccessFlagBits::eMemoryWrite ) | - VkFlags( AccessFlagBits::eNone ) | VkFlags( AccessFlagBits::eTransformFeedbackWriteEXT ) | - VkFlags( AccessFlagBits::eTransformFeedbackCounterReadEXT ) | VkFlags( AccessFlagBits::eTransformFeedbackCounterWriteEXT ) | - VkFlags( AccessFlagBits::eConditionalRenderingReadEXT ) | VkFlags( AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) | - VkFlags( AccessFlagBits::eAccelerationStructureReadKHR ) | VkFlags( AccessFlagBits::eAccelerationStructureWriteKHR ) | - VkFlags( AccessFlagBits::eFragmentDensityMapReadEXT ) | VkFlags( AccessFlagBits::eFragmentShadingRateAttachmentReadKHR ) | - VkFlags( AccessFlagBits::eCommandPreprocessReadNV ) | VkFlags( AccessFlagBits::eCommandPreprocessWriteNV ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator|( AccessFlagBits bit0, AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return AccessFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator&( AccessFlagBits bit0, AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return AccessFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator^( AccessFlagBits bit0, AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return AccessFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator~( AccessFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( AccessFlags( bits ) ); - } - - using AttachmentDescriptionFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( AttachmentDescriptionFlagBits::eMayAlias ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator|( AttachmentDescriptionFlagBits bit0, - AttachmentDescriptionFlagBits bit1 
) VULKAN_HPP_NOEXCEPT - { - return AttachmentDescriptionFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator&( AttachmentDescriptionFlagBits bit0, - AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return AttachmentDescriptionFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator^( AttachmentDescriptionFlagBits bit0, - AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return AttachmentDescriptionFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator~( AttachmentDescriptionFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( AttachmentDescriptionFlags( bits ) ); - } - - using DependencyFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( DependencyFlagBits::eByRegion ) | VkFlags( DependencyFlagBits::eDeviceGroup ) | VkFlags( DependencyFlagBits::eViewLocal ) | - VkFlags( DependencyFlagBits::eFeedbackLoopEXT ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator|( DependencyFlagBits bit0, DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return DependencyFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator&( DependencyFlagBits bit0, DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return DependencyFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator^( DependencyFlagBits bit0, DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return DependencyFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator~( DependencyFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( DependencyFlags( bits ) ); - } - - using FramebufferCreateFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( FramebufferCreateFlagBits::eImageless ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator|( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return FramebufferCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator&( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return FramebufferCreateFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator^( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return FramebufferCreateFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator~( FramebufferCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( FramebufferCreateFlags( bits ) ); - } - - using RenderPassCreateFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( RenderPassCreateFlagBits::eTransformQCOM ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator|( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return RenderPassCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator&( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return RenderPassCreateFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator^( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) 
VULKAN_HPP_NOEXCEPT - { - return RenderPassCreateFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator~( RenderPassCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( RenderPassCreateFlags( bits ) ); - } - - using SubpassDescriptionFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( SubpassDescriptionFlagBits::ePerViewAttributesNVX ) | VkFlags( SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) | - VkFlags( SubpassDescriptionFlagBits::eFragmentRegionQCOM ) | VkFlags( SubpassDescriptionFlagBits::eShaderResolveQCOM ) | - VkFlags( SubpassDescriptionFlagBits::eRasterizationOrderAttachmentColorAccessEXT ) | - VkFlags( SubpassDescriptionFlagBits::eRasterizationOrderAttachmentDepthAccessEXT ) | - VkFlags( SubpassDescriptionFlagBits::eRasterizationOrderAttachmentStencilAccessEXT ) | - VkFlags( SubpassDescriptionFlagBits::eEnableLegacyDitheringEXT ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator|( SubpassDescriptionFlagBits bit0, - SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SubpassDescriptionFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator&( SubpassDescriptionFlagBits bit0, - SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SubpassDescriptionFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator^( SubpassDescriptionFlagBits bit0, - SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SubpassDescriptionFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator~( SubpassDescriptionFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( SubpassDescriptionFlags( bits ) ); - } - - using CommandPoolCreateFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( CommandPoolCreateFlagBits::eTransient ) | VkFlags( CommandPoolCreateFlagBits::eResetCommandBuffer ) | - VkFlags( CommandPoolCreateFlagBits::eProtected ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return CommandPoolCreateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator&( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return CommandPoolCreateFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator^( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return CommandPoolCreateFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( CommandPoolCreateFlags( bits ) ); - } - - using CommandPoolResetFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( CommandPoolResetFlagBits::eReleaseResources ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return CommandPoolResetFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator&( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return CommandPoolResetFlags( bit0 ) & bit1; 
- } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator^( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return CommandPoolResetFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator~( CommandPoolResetFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( CommandPoolResetFlags( bits ) ); - } - - using CommandBufferResetFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( CommandBufferResetFlagBits::eReleaseResources ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator|( CommandBufferResetFlagBits bit0, - CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return CommandBufferResetFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator&( CommandBufferResetFlagBits bit0, - CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return CommandBufferResetFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator^( CommandBufferResetFlagBits bit0, - CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return CommandBufferResetFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( CommandBufferResetFlags( bits ) ); - } - - using CommandBufferUsageFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( CommandBufferUsageFlagBits::eOneTimeSubmit ) | VkFlags( CommandBufferUsageFlagBits::eRenderPassContinue ) | - VkFlags( CommandBufferUsageFlagBits::eSimultaneousUse ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator|( CommandBufferUsageFlagBits bit0, - CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return CommandBufferUsageFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator&( CommandBufferUsageFlagBits bit0, - CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return CommandBufferUsageFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator^( CommandBufferUsageFlagBits bit0, - CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return CommandBufferUsageFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( CommandBufferUsageFlags( bits ) ); - } - - using QueryControlFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( QueryControlFlagBits::ePrecise ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator|( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return QueryControlFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator&( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return QueryControlFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator^( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return QueryControlFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator~( QueryControlFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( QueryControlFlags( bits ) ); - } - - using 
StencilFaceFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( StencilFaceFlagBits::eFront ) | VkFlags( StencilFaceFlagBits::eBack ) | VkFlags( StencilFaceFlagBits::eFrontAndBack ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator|( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return StencilFaceFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator&( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return StencilFaceFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator^( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return StencilFaceFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator~( StencilFaceFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( StencilFaceFlags( bits ) ); - } - - //=== VK_VERSION_1_1 === - - using SubgroupFeatureFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( SubgroupFeatureFlagBits::eBasic ) | VkFlags( SubgroupFeatureFlagBits::eVote ) | VkFlags( SubgroupFeatureFlagBits::eArithmetic ) | - VkFlags( SubgroupFeatureFlagBits::eBallot ) | VkFlags( SubgroupFeatureFlagBits::eShuffle ) | - VkFlags( SubgroupFeatureFlagBits::eShuffleRelative ) | VkFlags( SubgroupFeatureFlagBits::eClustered ) | - VkFlags( SubgroupFeatureFlagBits::eQuad ) | VkFlags( SubgroupFeatureFlagBits::ePartitionedNV ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator|( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SubgroupFeatureFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator&( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SubgroupFeatureFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator^( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SubgroupFeatureFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator~( SubgroupFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( SubgroupFeatureFlags( bits ) ); - } - - using PeerMemoryFeatureFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( PeerMemoryFeatureFlagBits::eCopySrc ) | VkFlags( PeerMemoryFeatureFlagBits::eCopyDst ) | - VkFlags( PeerMemoryFeatureFlagBits::eGenericSrc ) | VkFlags( PeerMemoryFeatureFlagBits::eGenericDst ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator|( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PeerMemoryFeatureFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator&( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PeerMemoryFeatureFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator^( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PeerMemoryFeatureFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator~( PeerMemoryFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( PeerMemoryFeatureFlags( bits ) ); - } - - 
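
// Hedged sketch: SubgroupFeatureFlags (whose hand-written operators are removed above) is typically
// consumed from VkPhysicalDeviceSubgroupProperties::supportedOperations; the generic bitwise
// operators cover the query result just as before.
#include <vulkan/vulkan.hpp>

bool supportsSubgroupBallot( vk::PhysicalDevice physicalDevice )
{
  vk::PhysicalDeviceSubgroupProperties subgroupProps;
  vk::PhysicalDeviceProperties2        props2;
  props2.pNext = &subgroupProps;           // chain the subgroup query behind the base properties struct
  physicalDevice.getProperties2( &props2 );
  return static_cast<bool>( subgroupProps.supportedOperations & vk::SubgroupFeatureFlagBits::eBallot );
}
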
using PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags; - - using MemoryAllocateFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( MemoryAllocateFlagBits::eDeviceMask ) | VkFlags( MemoryAllocateFlagBits::eDeviceAddress ) | - VkFlags( MemoryAllocateFlagBits::eDeviceAddressCaptureReplay ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator|( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return MemoryAllocateFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator&( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return MemoryAllocateFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator^( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return MemoryAllocateFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator~( MemoryAllocateFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( MemoryAllocateFlags( bits ) ); - } - - using MemoryAllocateFlagsKHR = MemoryAllocateFlags; - - using CommandPoolTrimFlags = Flags; - - using CommandPoolTrimFlagsKHR = CommandPoolTrimFlags; - - using DescriptorUpdateTemplateCreateFlags = Flags; - - using DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags; - - using ExternalMemoryHandleTypeFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) | VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 ) | - VkFlags( ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt ) | VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D11Texture ) | - VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt ) | VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D12Heap ) | - VkFlags( ExternalMemoryHandleTypeFlagBits::eD3D12Resource ) | VkFlags( ExternalMemoryHandleTypeFlagBits::eDmaBufEXT ) -#if defined( VK_USE_PLATFORM_ANDROID_KHR ) - | VkFlags( ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID ) -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - | VkFlags( ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT ) | VkFlags( ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT ) -#if defined( VK_USE_PLATFORM_FUCHSIA ) - | VkFlags( ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA ) -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - | VkFlags( ExternalMemoryHandleTypeFlagBits::eRdmaAddressNV ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator|( ExternalMemoryHandleTypeFlagBits bit0, - ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalMemoryHandleTypeFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator&( ExternalMemoryHandleTypeFlagBits bit0, - ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalMemoryHandleTypeFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator^( ExternalMemoryHandleTypeFlagBits bit0, - ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalMemoryHandleTypeFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator~( ExternalMemoryHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( ExternalMemoryHandleTypeFlags( bits ) ); - } - - using 
ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags; - - using ExternalMemoryFeatureFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( ExternalMemoryFeatureFlagBits::eDedicatedOnly ) | VkFlags( ExternalMemoryFeatureFlagBits::eExportable ) | - VkFlags( ExternalMemoryFeatureFlagBits::eImportable ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator|( ExternalMemoryFeatureFlagBits bit0, - ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalMemoryFeatureFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator&( ExternalMemoryFeatureFlagBits bit0, - ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalMemoryFeatureFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator^( ExternalMemoryFeatureFlagBits bit0, - ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalMemoryFeatureFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator~( ExternalMemoryFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( ExternalMemoryFeatureFlags( bits ) ); - } - - using ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags; - - using ExternalFenceHandleTypeFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueFd ) | VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueWin32 ) | - VkFlags( ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt ) | VkFlags( ExternalFenceHandleTypeFlagBits::eSyncFd ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator|( ExternalFenceHandleTypeFlagBits bit0, - ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalFenceHandleTypeFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator&( ExternalFenceHandleTypeFlagBits bit0, - ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalFenceHandleTypeFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator^( ExternalFenceHandleTypeFlagBits bit0, - ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalFenceHandleTypeFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator~( ExternalFenceHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( ExternalFenceHandleTypeFlags( bits ) ); - } - - using ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags; - - using ExternalFenceFeatureFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( ExternalFenceFeatureFlagBits::eExportable ) | VkFlags( ExternalFenceFeatureFlagBits::eImportable ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator|( ExternalFenceFeatureFlagBits bit0, - ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalFenceFeatureFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator&( ExternalFenceFeatureFlagBits bit0, - ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalFenceFeatureFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator^( ExternalFenceFeatureFlagBits bit0, - 
ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalFenceFeatureFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator~( ExternalFenceFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( ExternalFenceFeatureFlags( bits ) ); - } - - using ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags; - - using FenceImportFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( FenceImportFlagBits::eTemporary ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator|( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return FenceImportFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator&( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return FenceImportFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator^( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return FenceImportFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator~( FenceImportFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( FenceImportFlags( bits ) ); - } - - using FenceImportFlagsKHR = FenceImportFlags; - - using SemaphoreImportFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( SemaphoreImportFlagBits::eTemporary ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator|( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SemaphoreImportFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator&( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SemaphoreImportFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator^( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SemaphoreImportFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator~( SemaphoreImportFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( SemaphoreImportFlags( bits ) ); - } - - using SemaphoreImportFlagsKHR = SemaphoreImportFlags; - - using ExternalSemaphoreHandleTypeFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) | VkFlags( ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 ) | - VkFlags( ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt ) | VkFlags( ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence ) | - VkFlags( ExternalSemaphoreHandleTypeFlagBits::eSyncFd ) -#if defined( VK_USE_PLATFORM_FUCHSIA ) - | VkFlags( ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA ) -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator|( ExternalSemaphoreHandleTypeFlagBits bit0, - ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalSemaphoreHandleTypeFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator&( ExternalSemaphoreHandleTypeFlagBits bit0, - ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalSemaphoreHandleTypeFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR 
ExternalSemaphoreHandleTypeFlags operator^( ExternalSemaphoreHandleTypeFlagBits bit0, - ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalSemaphoreHandleTypeFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator~( ExternalSemaphoreHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( ExternalSemaphoreHandleTypeFlags( bits ) ); - } - - using ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags; - - using ExternalSemaphoreFeatureFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( ExternalSemaphoreFeatureFlagBits::eExportable ) | VkFlags( ExternalSemaphoreFeatureFlagBits::eImportable ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator|( ExternalSemaphoreFeatureFlagBits bit0, - ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalSemaphoreFeatureFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator&( ExternalSemaphoreFeatureFlagBits bit0, - ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalSemaphoreFeatureFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator^( ExternalSemaphoreFeatureFlagBits bit0, - ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalSemaphoreFeatureFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator~( ExternalSemaphoreFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( ExternalSemaphoreFeatureFlags( bits ) ); - } - - using ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags; - - //=== VK_VERSION_1_2 === - - using DescriptorBindingFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( DescriptorBindingFlagBits::eUpdateAfterBind ) | VkFlags( DescriptorBindingFlagBits::eUpdateUnusedWhilePending ) | - VkFlags( DescriptorBindingFlagBits::ePartiallyBound ) | VkFlags( DescriptorBindingFlagBits::eVariableDescriptorCount ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator|( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return DescriptorBindingFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator&( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return DescriptorBindingFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator^( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return DescriptorBindingFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator~( DescriptorBindingFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( DescriptorBindingFlags( bits ) ); - } - - using DescriptorBindingFlagsEXT = DescriptorBindingFlags; - - using ResolveModeFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( ResolveModeFlagBits::eNone ) | VkFlags( ResolveModeFlagBits::eSampleZero ) | VkFlags( ResolveModeFlagBits::eAverage ) | - VkFlags( ResolveModeFlagBits::eMin ) | VkFlags( ResolveModeFlagBits::eMax ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator|( ResolveModeFlagBits bit0, ResolveModeFlagBits bit1 ) 
VULKAN_HPP_NOEXCEPT - { - return ResolveModeFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator&( ResolveModeFlagBits bit0, ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ResolveModeFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator^( ResolveModeFlagBits bit0, ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ResolveModeFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator~( ResolveModeFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( ResolveModeFlags( bits ) ); - } - - using ResolveModeFlagsKHR = ResolveModeFlags; - - using SemaphoreWaitFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( SemaphoreWaitFlagBits::eAny ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator|( SemaphoreWaitFlagBits bit0, SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SemaphoreWaitFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator&( SemaphoreWaitFlagBits bit0, SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SemaphoreWaitFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator^( SemaphoreWaitFlagBits bit0, SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SemaphoreWaitFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator~( SemaphoreWaitFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( SemaphoreWaitFlags( bits ) ); - } - - using SemaphoreWaitFlagsKHR = SemaphoreWaitFlags; - - //=== VK_VERSION_1_3 === - - using PipelineCreationFeedbackFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( PipelineCreationFeedbackFlagBits::eValid ) | VkFlags( PipelineCreationFeedbackFlagBits::eApplicationPipelineCacheHit ) | - VkFlags( PipelineCreationFeedbackFlagBits::eBasePipelineAcceleration ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlags operator|( PipelineCreationFeedbackFlagBits bit0, - PipelineCreationFeedbackFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineCreationFeedbackFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlags operator&( PipelineCreationFeedbackFlagBits bit0, - PipelineCreationFeedbackFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineCreationFeedbackFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlags operator^( PipelineCreationFeedbackFlagBits bit0, - PipelineCreationFeedbackFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineCreationFeedbackFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlags operator~( PipelineCreationFeedbackFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( PipelineCreationFeedbackFlags( bits ) ); - } - - using PipelineCreationFeedbackFlagsEXT = PipelineCreationFeedbackFlags; - - using ToolPurposeFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( ToolPurposeFlagBits::eValidation ) | VkFlags( ToolPurposeFlagBits::eProfiling ) | VkFlags( ToolPurposeFlagBits::eTracing ) | - VkFlags( ToolPurposeFlagBits::eAdditionalFeatures ) | VkFlags( ToolPurposeFlagBits::eModifyingFeatures ) | - VkFlags( ToolPurposeFlagBits::eDebugReportingEXT ) | VkFlags( ToolPurposeFlagBits::eDebugMarkersEXT ) - }; - }; - - VULKAN_HPP_INLINE 
VULKAN_HPP_CONSTEXPR ToolPurposeFlags operator|( ToolPurposeFlagBits bit0, ToolPurposeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ToolPurposeFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlags operator&( ToolPurposeFlagBits bit0, ToolPurposeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ToolPurposeFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlags operator^( ToolPurposeFlagBits bit0, ToolPurposeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return ToolPurposeFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlags operator~( ToolPurposeFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( ToolPurposeFlags( bits ) ); - } - - using ToolPurposeFlagsEXT = ToolPurposeFlags; - - using PrivateDataSlotCreateFlags = Flags; - - using PrivateDataSlotCreateFlagsEXT = PrivateDataSlotCreateFlags; - - using PipelineStageFlags2 = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags64 - { - allFlags = VkFlags64( PipelineStageFlagBits2::eNone ) | VkFlags64( PipelineStageFlagBits2::eTopOfPipe ) | - VkFlags64( PipelineStageFlagBits2::eDrawIndirect ) | VkFlags64( PipelineStageFlagBits2::eVertexInput ) | - VkFlags64( PipelineStageFlagBits2::eVertexShader ) | VkFlags64( PipelineStageFlagBits2::eTessellationControlShader ) | - VkFlags64( PipelineStageFlagBits2::eTessellationEvaluationShader ) | VkFlags64( PipelineStageFlagBits2::eGeometryShader ) | - VkFlags64( PipelineStageFlagBits2::eFragmentShader ) | VkFlags64( PipelineStageFlagBits2::eEarlyFragmentTests ) | - VkFlags64( PipelineStageFlagBits2::eLateFragmentTests ) | VkFlags64( PipelineStageFlagBits2::eColorAttachmentOutput ) | - VkFlags64( PipelineStageFlagBits2::eComputeShader ) | VkFlags64( PipelineStageFlagBits2::eAllTransfer ) | - VkFlags64( PipelineStageFlagBits2::eBottomOfPipe ) | VkFlags64( PipelineStageFlagBits2::eHost ) | - VkFlags64( PipelineStageFlagBits2::eAllGraphics ) | VkFlags64( PipelineStageFlagBits2::eAllCommands ) | - VkFlags64( PipelineStageFlagBits2::eCopy ) | VkFlags64( PipelineStageFlagBits2::eResolve ) | VkFlags64( PipelineStageFlagBits2::eBlit ) | - VkFlags64( PipelineStageFlagBits2::eClear ) | VkFlags64( PipelineStageFlagBits2::eIndexInput ) | - VkFlags64( PipelineStageFlagBits2::eVertexAttributeInput ) | VkFlags64( PipelineStageFlagBits2::ePreRasterizationShaders ) -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - | VkFlags64( PipelineStageFlagBits2::eVideoDecodeKHR ) | VkFlags64( PipelineStageFlagBits2::eVideoEncodeKHR ) -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - | VkFlags64( PipelineStageFlagBits2::eTransformFeedbackEXT ) | VkFlags64( PipelineStageFlagBits2::eConditionalRenderingEXT ) | - VkFlags64( PipelineStageFlagBits2::eCommandPreprocessNV ) | VkFlags64( PipelineStageFlagBits2::eFragmentShadingRateAttachmentKHR ) | - VkFlags64( PipelineStageFlagBits2::eAccelerationStructureBuildKHR ) | VkFlags64( PipelineStageFlagBits2::eRayTracingShaderKHR ) | - VkFlags64( PipelineStageFlagBits2::eFragmentDensityProcessEXT ) | VkFlags64( PipelineStageFlagBits2::eTaskShaderEXT ) | - VkFlags64( PipelineStageFlagBits2::eMeshShaderEXT ) | VkFlags64( PipelineStageFlagBits2::eSubpassShadingHUAWEI ) | - VkFlags64( PipelineStageFlagBits2::eInvocationMaskHUAWEI ) | VkFlags64( PipelineStageFlagBits2::eAccelerationStructureCopyKHR ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2 operator|( PipelineStageFlagBits2 bit0, PipelineStageFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineStageFlags2( bit0 ) | bit1; - } - - 
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2 operator&( PipelineStageFlagBits2 bit0, PipelineStageFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineStageFlags2( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2 operator^( PipelineStageFlagBits2 bit0, PipelineStageFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT - { - return PipelineStageFlags2( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags2 operator~( PipelineStageFlagBits2 bits ) VULKAN_HPP_NOEXCEPT - { - return ~( PipelineStageFlags2( bits ) ); - } - - using PipelineStageFlags2KHR = PipelineStageFlags2; - - using AccessFlags2 = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags64 - { - allFlags = VkFlags64( AccessFlagBits2::eNone ) | VkFlags64( AccessFlagBits2::eIndirectCommandRead ) | VkFlags64( AccessFlagBits2::eIndexRead ) | - VkFlags64( AccessFlagBits2::eVertexAttributeRead ) | VkFlags64( AccessFlagBits2::eUniformRead ) | - VkFlags64( AccessFlagBits2::eInputAttachmentRead ) | VkFlags64( AccessFlagBits2::eShaderRead ) | VkFlags64( AccessFlagBits2::eShaderWrite ) | - VkFlags64( AccessFlagBits2::eColorAttachmentRead ) | VkFlags64( AccessFlagBits2::eColorAttachmentWrite ) | - VkFlags64( AccessFlagBits2::eDepthStencilAttachmentRead ) | VkFlags64( AccessFlagBits2::eDepthStencilAttachmentWrite ) | - VkFlags64( AccessFlagBits2::eTransferRead ) | VkFlags64( AccessFlagBits2::eTransferWrite ) | VkFlags64( AccessFlagBits2::eHostRead ) | - VkFlags64( AccessFlagBits2::eHostWrite ) | VkFlags64( AccessFlagBits2::eMemoryRead ) | VkFlags64( AccessFlagBits2::eMemoryWrite ) | - VkFlags64( AccessFlagBits2::eShaderSampledRead ) | VkFlags64( AccessFlagBits2::eShaderStorageRead ) | - VkFlags64( AccessFlagBits2::eShaderStorageWrite ) -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - | VkFlags64( AccessFlagBits2::eVideoDecodeReadKHR ) | VkFlags64( AccessFlagBits2::eVideoDecodeWriteKHR ) | - VkFlags64( AccessFlagBits2::eVideoEncodeReadKHR ) | VkFlags64( AccessFlagBits2::eVideoEncodeWriteKHR ) -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - | VkFlags64( AccessFlagBits2::eTransformFeedbackWriteEXT ) | VkFlags64( AccessFlagBits2::eTransformFeedbackCounterReadEXT ) | - VkFlags64( AccessFlagBits2::eTransformFeedbackCounterWriteEXT ) | VkFlags64( AccessFlagBits2::eConditionalRenderingReadEXT ) | - VkFlags64( AccessFlagBits2::eCommandPreprocessReadNV ) | VkFlags64( AccessFlagBits2::eCommandPreprocessWriteNV ) | - VkFlags64( AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR ) | VkFlags64( AccessFlagBits2::eAccelerationStructureReadKHR ) | - VkFlags64( AccessFlagBits2::eAccelerationStructureWriteKHR ) | VkFlags64( AccessFlagBits2::eFragmentDensityMapReadEXT ) | - VkFlags64( AccessFlagBits2::eColorAttachmentReadNoncoherentEXT ) | VkFlags64( AccessFlagBits2::eInvocationMaskReadHUAWEI ) | - VkFlags64( AccessFlagBits2::eShaderBindingTableReadKHR ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2 operator|( AccessFlagBits2 bit0, AccessFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT - { - return AccessFlags2( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2 operator&( AccessFlagBits2 bit0, AccessFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT - { - return AccessFlags2( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2 operator^( AccessFlagBits2 bit0, AccessFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT - { - return AccessFlags2( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags2 operator~( AccessFlagBits2 bits ) VULKAN_HPP_NOEXCEPT - { 
- return ~( AccessFlags2( bits ) ); - } - - using AccessFlags2KHR = AccessFlags2; - - using SubmitFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( SubmitFlagBits::eProtected ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlags operator|( SubmitFlagBits bit0, SubmitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SubmitFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlags operator&( SubmitFlagBits bit0, SubmitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SubmitFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlags operator^( SubmitFlagBits bit0, SubmitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return SubmitFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubmitFlags operator~( SubmitFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( SubmitFlags( bits ) ); - } - - using SubmitFlagsKHR = SubmitFlags; - - using RenderingFlags = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( RenderingFlagBits::eContentsSecondaryCommandBuffers ) | VkFlags( RenderingFlagBits::eSuspending ) | - VkFlags( RenderingFlagBits::eResuming ) | VkFlags( RenderingFlagBits::eEnableLegacyDitheringEXT ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderingFlags operator|( RenderingFlagBits bit0, RenderingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return RenderingFlags( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderingFlags operator&( RenderingFlagBits bit0, RenderingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return RenderingFlags( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderingFlags operator^( RenderingFlagBits bit0, RenderingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT - { - return RenderingFlags( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderingFlags operator~( RenderingFlagBits bits ) VULKAN_HPP_NOEXCEPT - { - return ~( RenderingFlags( bits ) ); - } - - using RenderingFlagsKHR = RenderingFlags; - - using FormatFeatureFlags2 = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags64 - { - allFlags = VkFlags64( FormatFeatureFlagBits2::eSampledImage ) | VkFlags64( FormatFeatureFlagBits2::eStorageImage ) | - VkFlags64( FormatFeatureFlagBits2::eStorageImageAtomic ) | VkFlags64( FormatFeatureFlagBits2::eUniformTexelBuffer ) | - VkFlags64( FormatFeatureFlagBits2::eStorageTexelBuffer ) | VkFlags64( FormatFeatureFlagBits2::eStorageTexelBufferAtomic ) | - VkFlags64( FormatFeatureFlagBits2::eVertexBuffer ) | VkFlags64( FormatFeatureFlagBits2::eColorAttachment ) | - VkFlags64( FormatFeatureFlagBits2::eColorAttachmentBlend ) | VkFlags64( FormatFeatureFlagBits2::eDepthStencilAttachment ) | - VkFlags64( FormatFeatureFlagBits2::eBlitSrc ) | VkFlags64( FormatFeatureFlagBits2::eBlitDst ) | - VkFlags64( FormatFeatureFlagBits2::eSampledImageFilterLinear ) | VkFlags64( FormatFeatureFlagBits2::eSampledImageFilterCubic ) | - VkFlags64( FormatFeatureFlagBits2::eTransferSrc ) | VkFlags64( FormatFeatureFlagBits2::eTransferDst ) | - VkFlags64( FormatFeatureFlagBits2::eSampledImageFilterMinmax ) | VkFlags64( FormatFeatureFlagBits2::eMidpointChromaSamples ) | - VkFlags64( FormatFeatureFlagBits2::eSampledImageYcbcrConversionLinearFilter ) | - VkFlags64( FormatFeatureFlagBits2::eSampledImageYcbcrConversionSeparateReconstructionFilter ) | - VkFlags64( FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicit ) | - VkFlags64( 
FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) | - VkFlags64( FormatFeatureFlagBits2::eDisjoint ) | VkFlags64( FormatFeatureFlagBits2::eCositedChromaSamples ) | - VkFlags64( FormatFeatureFlagBits2::eStorageReadWithoutFormat ) | VkFlags64( FormatFeatureFlagBits2::eStorageWriteWithoutFormat ) | - VkFlags64( FormatFeatureFlagBits2::eSampledImageDepthComparison ) -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - | VkFlags64( FormatFeatureFlagBits2::eVideoDecodeOutputKHR ) | VkFlags64( FormatFeatureFlagBits2::eVideoDecodeDpbKHR ) -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - | VkFlags64( FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR ) | VkFlags64( FormatFeatureFlagBits2::eFragmentDensityMapEXT ) | - VkFlags64( FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR ) -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - | VkFlags64( FormatFeatureFlagBits2::eVideoEncodeInputKHR ) | VkFlags64( FormatFeatureFlagBits2::eVideoEncodeDpbKHR ) -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - | VkFlags64( FormatFeatureFlagBits2::eLinearColorAttachmentNV ) | VkFlags64( FormatFeatureFlagBits2::eWeightImageQCOM ) | - VkFlags64( FormatFeatureFlagBits2::eWeightSampledImageQCOM ) | VkFlags64( FormatFeatureFlagBits2::eBlockMatchingQCOM ) | - VkFlags64( FormatFeatureFlagBits2::eBoxFilterSampledQCOM ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags2 operator|( FormatFeatureFlagBits2 bit0, FormatFeatureFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT - { - return FormatFeatureFlags2( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags2 operator&( FormatFeatureFlagBits2 bit0, FormatFeatureFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT - { - return FormatFeatureFlags2( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags2 operator^( FormatFeatureFlagBits2 bit0, FormatFeatureFlagBits2 bit1 ) VULKAN_HPP_NOEXCEPT - { - return FormatFeatureFlags2( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags2 operator~( FormatFeatureFlagBits2 bits ) VULKAN_HPP_NOEXCEPT - { - return ~( FormatFeatureFlags2( bits ) ); - } - - using FormatFeatureFlags2KHR = FormatFeatureFlags2; - - //=== VK_KHR_surface === - - using CompositeAlphaFlagsKHR = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( CompositeAlphaFlagBitsKHR::eOpaque ) | VkFlags( CompositeAlphaFlagBitsKHR::ePreMultiplied ) | - VkFlags( CompositeAlphaFlagBitsKHR::ePostMultiplied ) | VkFlags( CompositeAlphaFlagBitsKHR::eInherit ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return CompositeAlphaFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator&( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return CompositeAlphaFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator^( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return CompositeAlphaFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( CompositeAlphaFlagsKHR( bits ) ); - } - - //=== VK_KHR_swapchain === - - using SwapchainCreateFlagsKHR = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( 
SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions ) | VkFlags( SwapchainCreateFlagBitsKHR::eProtected ) | - VkFlags( SwapchainCreateFlagBitsKHR::eMutableFormat ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator|( SwapchainCreateFlagBitsKHR bit0, - SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return SwapchainCreateFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator&( SwapchainCreateFlagBitsKHR bit0, - SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return SwapchainCreateFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator^( SwapchainCreateFlagBitsKHR bit0, - SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return SwapchainCreateFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator~( SwapchainCreateFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( SwapchainCreateFlagsKHR( bits ) ); - } - - using DeviceGroupPresentModeFlagsKHR = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( DeviceGroupPresentModeFlagBitsKHR::eLocal ) | VkFlags( DeviceGroupPresentModeFlagBitsKHR::eRemote ) | - VkFlags( DeviceGroupPresentModeFlagBitsKHR::eSum ) | VkFlags( DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator|( DeviceGroupPresentModeFlagBitsKHR bit0, - DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return DeviceGroupPresentModeFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator&( DeviceGroupPresentModeFlagBitsKHR bit0, - DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return DeviceGroupPresentModeFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator^( DeviceGroupPresentModeFlagBitsKHR bit0, - DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return DeviceGroupPresentModeFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator~( DeviceGroupPresentModeFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( DeviceGroupPresentModeFlagsKHR( bits ) ); - } - - //=== VK_KHR_display === - - using DisplayModeCreateFlagsKHR = Flags; - - using DisplayPlaneAlphaFlagsKHR = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( DisplayPlaneAlphaFlagBitsKHR::eOpaque ) | VkFlags( DisplayPlaneAlphaFlagBitsKHR::eGlobal ) | - VkFlags( DisplayPlaneAlphaFlagBitsKHR::ePerPixel ) | VkFlags( DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator|( DisplayPlaneAlphaFlagBitsKHR bit0, - DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator&( DisplayPlaneAlphaFlagBitsKHR bit0, - DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return DisplayPlaneAlphaFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator^( DisplayPlaneAlphaFlagBitsKHR bit0, - DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return DisplayPlaneAlphaFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR 
operator~( DisplayPlaneAlphaFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( DisplayPlaneAlphaFlagsKHR( bits ) ); - } - - using DisplaySurfaceCreateFlagsKHR = Flags; - - using SurfaceTransformFlagsKHR = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( SurfaceTransformFlagBitsKHR::eIdentity ) | VkFlags( SurfaceTransformFlagBitsKHR::eRotate90 ) | - VkFlags( SurfaceTransformFlagBitsKHR::eRotate180 ) | VkFlags( SurfaceTransformFlagBitsKHR::eRotate270 ) | - VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirror ) | VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 ) | - VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 ) | VkFlags( SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 ) | - VkFlags( SurfaceTransformFlagBitsKHR::eInherit ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator|( SurfaceTransformFlagBitsKHR bit0, - SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return SurfaceTransformFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator&( SurfaceTransformFlagBitsKHR bit0, - SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return SurfaceTransformFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator^( SurfaceTransformFlagBitsKHR bit0, - SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return SurfaceTransformFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( SurfaceTransformFlagsKHR( bits ) ); - } - -#if defined( VK_USE_PLATFORM_XLIB_KHR ) - //=== VK_KHR_xlib_surface === - - using XlibSurfaceCreateFlagsKHR = Flags; - -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - -#if defined( VK_USE_PLATFORM_XCB_KHR ) - //=== VK_KHR_xcb_surface === - - using XcbSurfaceCreateFlagsKHR = Flags; - -#endif /*VK_USE_PLATFORM_XCB_KHR*/ - -#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) - //=== VK_KHR_wayland_surface === - - using WaylandSurfaceCreateFlagsKHR = Flags; - -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ - -#if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_KHR_android_surface === - - using AndroidSurfaceCreateFlagsKHR = Flags; - -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_win32_surface === - - using Win32SurfaceCreateFlagsKHR = Flags; - -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_EXT_debug_report === - - using DebugReportFlagsEXT = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( DebugReportFlagBitsEXT::eInformation ) | VkFlags( DebugReportFlagBitsEXT::eWarning ) | - VkFlags( DebugReportFlagBitsEXT::ePerformanceWarning ) | VkFlags( DebugReportFlagBitsEXT::eError ) | VkFlags( DebugReportFlagBitsEXT::eDebug ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return DebugReportFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator&( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return DebugReportFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator^( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return DebugReportFlagsEXT( bit0 ) ^ bit1; - } - - 
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT - { - return ~( DebugReportFlagsEXT( bits ) ); - } - -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_queue === - - using VideoCodecOperationFlagsKHR = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( VideoCodecOperationFlagBitsKHR::eNone ) -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - | VkFlags( VideoCodecOperationFlagBitsKHR::eEncodeH264EXT ) | VkFlags( VideoCodecOperationFlagBitsKHR::eEncodeH265EXT ) | - VkFlags( VideoCodecOperationFlagBitsKHR::eDecodeH264EXT ) | VkFlags( VideoCodecOperationFlagBitsKHR::eDecodeH265EXT ) -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR operator|( VideoCodecOperationFlagBitsKHR bit0, - VideoCodecOperationFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoCodecOperationFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR operator&( VideoCodecOperationFlagBitsKHR bit0, - VideoCodecOperationFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoCodecOperationFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR operator^( VideoCodecOperationFlagBitsKHR bit0, - VideoCodecOperationFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoCodecOperationFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodecOperationFlagsKHR operator~( VideoCodecOperationFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( VideoCodecOperationFlagsKHR( bits ) ); - } - - using VideoChromaSubsamplingFlagsKHR = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( VideoChromaSubsamplingFlagBitsKHR::eInvalid ) | VkFlags( VideoChromaSubsamplingFlagBitsKHR::eMonochrome ) | - VkFlags( VideoChromaSubsamplingFlagBitsKHR::e420 ) | VkFlags( VideoChromaSubsamplingFlagBitsKHR::e422 ) | - VkFlags( VideoChromaSubsamplingFlagBitsKHR::e444 ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR operator|( VideoChromaSubsamplingFlagBitsKHR bit0, - VideoChromaSubsamplingFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoChromaSubsamplingFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR operator&( VideoChromaSubsamplingFlagBitsKHR bit0, - VideoChromaSubsamplingFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoChromaSubsamplingFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR operator^( VideoChromaSubsamplingFlagBitsKHR bit0, - VideoChromaSubsamplingFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoChromaSubsamplingFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoChromaSubsamplingFlagsKHR operator~( VideoChromaSubsamplingFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( VideoChromaSubsamplingFlagsKHR( bits ) ); - } - - using VideoComponentBitDepthFlagsKHR = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( VideoComponentBitDepthFlagBitsKHR::eInvalid ) | VkFlags( VideoComponentBitDepthFlagBitsKHR::e8 ) | - VkFlags( VideoComponentBitDepthFlagBitsKHR::e10 ) | VkFlags( VideoComponentBitDepthFlagBitsKHR::e12 ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR operator|( VideoComponentBitDepthFlagBitsKHR bit0, - VideoComponentBitDepthFlagBitsKHR 
bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoComponentBitDepthFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR operator&( VideoComponentBitDepthFlagBitsKHR bit0, - VideoComponentBitDepthFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoComponentBitDepthFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR operator^( VideoComponentBitDepthFlagBitsKHR bit0, - VideoComponentBitDepthFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoComponentBitDepthFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoComponentBitDepthFlagsKHR operator~( VideoComponentBitDepthFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( VideoComponentBitDepthFlagsKHR( bits ) ); - } - - using VideoCapabilityFlagsKHR = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( VideoCapabilityFlagBitsKHR::eProtectedContent ) | VkFlags( VideoCapabilityFlagBitsKHR::eSeparateReferenceImages ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilityFlagsKHR operator|( VideoCapabilityFlagBitsKHR bit0, - VideoCapabilityFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoCapabilityFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilityFlagsKHR operator&( VideoCapabilityFlagBitsKHR bit0, - VideoCapabilityFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoCapabilityFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilityFlagsKHR operator^( VideoCapabilityFlagBitsKHR bit0, - VideoCapabilityFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoCapabilityFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCapabilityFlagsKHR operator~( VideoCapabilityFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( VideoCapabilityFlagsKHR( bits ) ); - } - - using VideoSessionCreateFlagsKHR = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( VideoSessionCreateFlagBitsKHR::eProtectedContent ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR operator|( VideoSessionCreateFlagBitsKHR bit0, - VideoSessionCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoSessionCreateFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR operator&( VideoSessionCreateFlagBitsKHR bit0, - VideoSessionCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoSessionCreateFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR operator^( VideoSessionCreateFlagBitsKHR bit0, - VideoSessionCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoSessionCreateFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoSessionCreateFlagsKHR operator~( VideoSessionCreateFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( VideoSessionCreateFlagsKHR( bits ) ); - } - - using VideoSessionParametersCreateFlagsKHR = Flags; - - using VideoBeginCodingFlagsKHR = Flags; - - using VideoEndCodingFlagsKHR = Flags; - - using VideoCodingControlFlagsKHR = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( VideoCodingControlFlagBitsKHR::eReset ) -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - | VkFlags( VideoCodingControlFlagBitsKHR::eEncodeRateControl ) | VkFlags( VideoCodingControlFlagBitsKHR::eEncodeRateControlLayer ) -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - }; - }; - - 
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR operator|( VideoCodingControlFlagBitsKHR bit0, - VideoCodingControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoCodingControlFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR operator&( VideoCodingControlFlagBitsKHR bit0, - VideoCodingControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoCodingControlFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR operator^( VideoCodingControlFlagBitsKHR bit0, - VideoCodingControlFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoCodingControlFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoCodingControlFlagsKHR operator~( VideoCodingControlFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( VideoCodingControlFlagsKHR( bits ) ); - } - -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_decode_queue === - - using VideoDecodeCapabilityFlagsKHR = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputCoincide ) | VkFlags( VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputDistinct ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeCapabilityFlagsKHR operator|( VideoDecodeCapabilityFlagBitsKHR bit0, - VideoDecodeCapabilityFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoDecodeCapabilityFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeCapabilityFlagsKHR operator&( VideoDecodeCapabilityFlagBitsKHR bit0, - VideoDecodeCapabilityFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoDecodeCapabilityFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeCapabilityFlagsKHR operator^( VideoDecodeCapabilityFlagBitsKHR bit0, - VideoDecodeCapabilityFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoDecodeCapabilityFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeCapabilityFlagsKHR operator~( VideoDecodeCapabilityFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( VideoDecodeCapabilityFlagsKHR( bits ) ); - } - - using VideoDecodeUsageFlagsKHR = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( VideoDecodeUsageFlagBitsKHR::eDefault ) | VkFlags( VideoDecodeUsageFlagBitsKHR::eTranscoding ) | - VkFlags( VideoDecodeUsageFlagBitsKHR::eOffline ) | VkFlags( VideoDecodeUsageFlagBitsKHR::eStreaming ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeUsageFlagsKHR operator|( VideoDecodeUsageFlagBitsKHR bit0, - VideoDecodeUsageFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoDecodeUsageFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeUsageFlagsKHR operator&( VideoDecodeUsageFlagBitsKHR bit0, - VideoDecodeUsageFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoDecodeUsageFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeUsageFlagsKHR operator^( VideoDecodeUsageFlagBitsKHR bit0, - VideoDecodeUsageFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoDecodeUsageFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeUsageFlagsKHR operator~( VideoDecodeUsageFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( VideoDecodeUsageFlagsKHR( bits ) ); - } - - using VideoDecodeFlagsKHR = Flags; - -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - //=== 
VK_EXT_transform_feedback === - - using PipelineRasterizationStateStreamCreateFlagsEXT = Flags; - -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_EXT_video_encode_h264 === - - using VideoEncodeH264CapabilityFlagsEXT = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = - VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eDirect8X8InferenceEnabled ) | - VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eDirect8X8InferenceDisabled ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eSeparateColourPlane ) | - VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eQpprimeYZeroTransformBypass ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eScalingLists ) | - VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eHrdCompliance ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eChromaQpOffset ) | - VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eSecondChromaQpOffset ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::ePicInitQpMinus26 ) | - VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eWeightedPred ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBipredExplicit ) | - VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBipredImplicit ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eWeightedPredNoTable ) | - VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eTransform8X8 ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eCabac ) | - VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eCavlc ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterDisabled ) | - VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterEnabled ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterPartial ) | - VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eDisableDirectSpatialMvPred ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eMultipleSlicePerFrame ) | - VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eSliceMbCount ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eRowUnalignedSlice ) | - VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eDifferentSliceType ) | VkFlags( VideoEncodeH264CapabilityFlagBitsEXT::eBFrameInL1List ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilityFlagsEXT operator|( VideoEncodeH264CapabilityFlagBitsEXT bit0, - VideoEncodeH264CapabilityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH264CapabilityFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilityFlagsEXT operator&( VideoEncodeH264CapabilityFlagBitsEXT bit0, - VideoEncodeH264CapabilityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH264CapabilityFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilityFlagsEXT operator^( VideoEncodeH264CapabilityFlagBitsEXT bit0, - VideoEncodeH264CapabilityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH264CapabilityFlagsEXT( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilityFlagsEXT operator~( VideoEncodeH264CapabilityFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT - { - return ~( VideoEncodeH264CapabilityFlagsEXT( bits ) ); - } - - using VideoEncodeH264InputModeFlagsEXT = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eFrame ) | VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eSlice ) | - VkFlags( VideoEncodeH264InputModeFlagBitsEXT::eNonVcl ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT operator|( VideoEncodeH264InputModeFlagBitsEXT 
bit0, - VideoEncodeH264InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH264InputModeFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT operator&( VideoEncodeH264InputModeFlagBitsEXT bit0, - VideoEncodeH264InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH264InputModeFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT operator^( VideoEncodeH264InputModeFlagBitsEXT bit0, - VideoEncodeH264InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH264InputModeFlagsEXT( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264InputModeFlagsEXT operator~( VideoEncodeH264InputModeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT - { - return ~( VideoEncodeH264InputModeFlagsEXT( bits ) ); - } - - using VideoEncodeH264OutputModeFlagsEXT = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eFrame ) | VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eSlice ) | - VkFlags( VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT operator|( VideoEncodeH264OutputModeFlagBitsEXT bit0, - VideoEncodeH264OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH264OutputModeFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT operator&( VideoEncodeH264OutputModeFlagBitsEXT bit0, - VideoEncodeH264OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH264OutputModeFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT operator^( VideoEncodeH264OutputModeFlagBitsEXT bit0, - VideoEncodeH264OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH264OutputModeFlagsEXT( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT operator~( VideoEncodeH264OutputModeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT - { - return ~( VideoEncodeH264OutputModeFlagsEXT( bits ) ); - } - -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_EXT_video_encode_h265 === - - using VideoEncodeH265CapabilityFlagsEXT = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = - VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eSeparateColourPlane ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eScalingLists ) | - VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eSampleAdaptiveOffsetEnabled ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::ePcmEnable ) | - VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eSpsTemporalMvpEnabled ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eHrdCompliance ) | - VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eInitQpMinus26 ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eLog2ParallelMergeLevelMinus2 ) | - VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eSignDataHidingEnabled ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eTransformSkipEnabled ) | - VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eTransformSkipDisabled ) | - VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::ePpsSliceChromaQpOffsetsPresent ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eWeightedPred ) | - VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eWeightedBipred ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eWeightedPredNoTable ) | - VkFlags( 
VideoEncodeH265CapabilityFlagBitsEXT::eTransquantBypassEnabled ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eEntropyCodingSyncEnabled ) | - VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eDeblockingFilterOverrideEnabled ) | - VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eMultipleTilePerFrame ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eMultipleSlicePerTile ) | - VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eMultipleTilePerSlice ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eSliceSegmentCtbCount ) | - VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eRowUnalignedSliceSegment ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eDependentSliceSegment ) | - VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eDifferentSliceType ) | VkFlags( VideoEncodeH265CapabilityFlagBitsEXT::eBFrameInL1List ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265CapabilityFlagsEXT operator|( VideoEncodeH265CapabilityFlagBitsEXT bit0, - VideoEncodeH265CapabilityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH265CapabilityFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265CapabilityFlagsEXT operator&( VideoEncodeH265CapabilityFlagBitsEXT bit0, - VideoEncodeH265CapabilityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH265CapabilityFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265CapabilityFlagsEXT operator^( VideoEncodeH265CapabilityFlagBitsEXT bit0, - VideoEncodeH265CapabilityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH265CapabilityFlagsEXT( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265CapabilityFlagsEXT operator~( VideoEncodeH265CapabilityFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT - { - return ~( VideoEncodeH265CapabilityFlagsEXT( bits ) ); - } - - using VideoEncodeH265InputModeFlagsEXT = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( VideoEncodeH265InputModeFlagBitsEXT::eFrame ) | VkFlags( VideoEncodeH265InputModeFlagBitsEXT::eSliceSegment ) | - VkFlags( VideoEncodeH265InputModeFlagBitsEXT::eNonVcl ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265InputModeFlagsEXT operator|( VideoEncodeH265InputModeFlagBitsEXT bit0, - VideoEncodeH265InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH265InputModeFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265InputModeFlagsEXT operator&( VideoEncodeH265InputModeFlagBitsEXT bit0, - VideoEncodeH265InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH265InputModeFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265InputModeFlagsEXT operator^( VideoEncodeH265InputModeFlagBitsEXT bit0, - VideoEncodeH265InputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH265InputModeFlagsEXT( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265InputModeFlagsEXT operator~( VideoEncodeH265InputModeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT - { - return ~( VideoEncodeH265InputModeFlagsEXT( bits ) ); - } - - using VideoEncodeH265OutputModeFlagsEXT = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( VideoEncodeH265OutputModeFlagBitsEXT::eFrame ) | VkFlags( VideoEncodeH265OutputModeFlagBitsEXT::eSliceSegment ) | - VkFlags( VideoEncodeH265OutputModeFlagBitsEXT::eNonVcl ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265OutputModeFlagsEXT operator|( 
VideoEncodeH265OutputModeFlagBitsEXT bit0, - VideoEncodeH265OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH265OutputModeFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265OutputModeFlagsEXT operator&( VideoEncodeH265OutputModeFlagBitsEXT bit0, - VideoEncodeH265OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH265OutputModeFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265OutputModeFlagsEXT operator^( VideoEncodeH265OutputModeFlagBitsEXT bit0, - VideoEncodeH265OutputModeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH265OutputModeFlagsEXT( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265OutputModeFlagsEXT operator~( VideoEncodeH265OutputModeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT - { - return ~( VideoEncodeH265OutputModeFlagsEXT( bits ) ); - } - - using VideoEncodeH265CtbSizeFlagsEXT = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( VideoEncodeH265CtbSizeFlagBitsEXT::e16 ) | VkFlags( VideoEncodeH265CtbSizeFlagBitsEXT::e32 ) | - VkFlags( VideoEncodeH265CtbSizeFlagBitsEXT::e64 ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265CtbSizeFlagsEXT operator|( VideoEncodeH265CtbSizeFlagBitsEXT bit0, - VideoEncodeH265CtbSizeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH265CtbSizeFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265CtbSizeFlagsEXT operator&( VideoEncodeH265CtbSizeFlagBitsEXT bit0, - VideoEncodeH265CtbSizeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH265CtbSizeFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265CtbSizeFlagsEXT operator^( VideoEncodeH265CtbSizeFlagBitsEXT bit0, - VideoEncodeH265CtbSizeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH265CtbSizeFlagsEXT( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265CtbSizeFlagsEXT operator~( VideoEncodeH265CtbSizeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT - { - return ~( VideoEncodeH265CtbSizeFlagsEXT( bits ) ); - } - - using VideoEncodeH265TransformBlockSizeFlagsEXT = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( VideoEncodeH265TransformBlockSizeFlagBitsEXT::e4 ) | VkFlags( VideoEncodeH265TransformBlockSizeFlagBitsEXT::e8 ) | - VkFlags( VideoEncodeH265TransformBlockSizeFlagBitsEXT::e16 ) | VkFlags( VideoEncodeH265TransformBlockSizeFlagBitsEXT::e32 ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265TransformBlockSizeFlagsEXT - operator|( VideoEncodeH265TransformBlockSizeFlagBitsEXT bit0, VideoEncodeH265TransformBlockSizeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH265TransformBlockSizeFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265TransformBlockSizeFlagsEXT - operator&( VideoEncodeH265TransformBlockSizeFlagBitsEXT bit0, VideoEncodeH265TransformBlockSizeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH265TransformBlockSizeFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265TransformBlockSizeFlagsEXT - operator^( VideoEncodeH265TransformBlockSizeFlagBitsEXT bit0, VideoEncodeH265TransformBlockSizeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeH265TransformBlockSizeFlagsEXT( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeH265TransformBlockSizeFlagsEXT operator~( 
VideoEncodeH265TransformBlockSizeFlagBitsEXT bits ) - VULKAN_HPP_NOEXCEPT - { - return ~( VideoEncodeH265TransformBlockSizeFlagsEXT( bits ) ); - } - -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_EXT_video_decode_h264 === - - using VideoDecodeH264PictureLayoutFlagsEXT = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( VideoDecodeH264PictureLayoutFlagBitsEXT::eProgressive ) | - VkFlags( VideoDecodeH264PictureLayoutFlagBitsEXT::eInterlacedInterleavedLines ) | - VkFlags( VideoDecodeH264PictureLayoutFlagBitsEXT::eInterlacedSeparatePlanes ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureLayoutFlagsEXT operator|( VideoDecodeH264PictureLayoutFlagBitsEXT bit0, - VideoDecodeH264PictureLayoutFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoDecodeH264PictureLayoutFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureLayoutFlagsEXT operator&( VideoDecodeH264PictureLayoutFlagBitsEXT bit0, - VideoDecodeH264PictureLayoutFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoDecodeH264PictureLayoutFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureLayoutFlagsEXT operator^( VideoDecodeH264PictureLayoutFlagBitsEXT bit0, - VideoDecodeH264PictureLayoutFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoDecodeH264PictureLayoutFlagsEXT( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureLayoutFlagsEXT operator~( VideoDecodeH264PictureLayoutFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT - { - return ~( VideoDecodeH264PictureLayoutFlagsEXT( bits ) ); - } - -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#if defined( VK_USE_PLATFORM_GGP ) - //=== VK_GGP_stream_descriptor_surface === - - using StreamDescriptorSurfaceCreateFlagsGGP = Flags; - -#endif /*VK_USE_PLATFORM_GGP*/ - - //=== VK_NV_external_memory_capabilities === - - using ExternalMemoryHandleTypeFlagsNV = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 ) | VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt ) | - VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image ) | VkFlags( ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, - ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator&( ExternalMemoryHandleTypeFlagBitsNV bit0, - ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalMemoryHandleTypeFlagsNV( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator^( ExternalMemoryHandleTypeFlagBitsNV bit0, - ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalMemoryHandleTypeFlagsNV( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator~( ExternalMemoryHandleTypeFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT - { - return ~( ExternalMemoryHandleTypeFlagsNV( bits ) ); - } - - using ExternalMemoryFeatureFlagsNV = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly ) | VkFlags( 
ExternalMemoryFeatureFlagBitsNV::eExportable ) | - VkFlags( ExternalMemoryFeatureFlagBitsNV::eImportable ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator|( ExternalMemoryFeatureFlagBitsNV bit0, - ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator&( ExternalMemoryFeatureFlagBitsNV bit0, - ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalMemoryFeatureFlagsNV( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator^( ExternalMemoryFeatureFlagBitsNV bit0, - ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExternalMemoryFeatureFlagsNV( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT - { - return ~( ExternalMemoryFeatureFlagsNV( bits ) ); - } - -#if defined( VK_USE_PLATFORM_VI_NN ) - //=== VK_NN_vi_surface === - - using ViSurfaceCreateFlagsNN = Flags; - -#endif /*VK_USE_PLATFORM_VI_NN*/ - - //=== VK_EXT_conditional_rendering === - - using ConditionalRenderingFlagsEXT = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( ConditionalRenderingFlagBitsEXT::eInverted ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator|( ConditionalRenderingFlagBitsEXT bit0, - ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return ConditionalRenderingFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator&( ConditionalRenderingFlagBitsEXT bit0, - ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return ConditionalRenderingFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator^( ConditionalRenderingFlagBitsEXT bit0, - ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return ConditionalRenderingFlagsEXT( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator~( ConditionalRenderingFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT - { - return ~( ConditionalRenderingFlagsEXT( bits ) ); - } - - //=== VK_EXT_display_surface_counter === - - using SurfaceCounterFlagsEXT = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( SurfaceCounterFlagBitsEXT::eVblank ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator|( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return SurfaceCounterFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator&( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return SurfaceCounterFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator^( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return SurfaceCounterFlagsEXT( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator~( SurfaceCounterFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT - { - return ~( SurfaceCounterFlagsEXT( bits ) ); - } - - //=== VK_NV_viewport_swizzle === - - using PipelineViewportSwizzleStateCreateFlagsNV = Flags; - - //=== VK_EXT_discard_rectangles === 
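// Illustrative usage sketch (a minimal example, not part of the generated diff): the blocks being
// removed here are the per-type operator|, operator&, operator^ and operator~ overloads that
// vulkan.hpp used to emit for every FlagBits enum. Judging from the new-style FlagTraits
// specializations later in this diff (isBitmask = true), they appear to be superseded by generic
// operators keyed on FlagTraits<BitType>::isBitmask, so user code composes flag bits exactly as
// before, e.g. with the NV external-memory handle types shown above:

#include <vulkan/vulkan.hpp>

inline bool wantsOpaqueWin32Handle()
{
  // Combine individual FlagBits values into a Flags value via operator| ...
  vk::ExternalMemoryHandleTypeFlagsNV handleTypes = vk::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 |
                                                    vk::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt;
  // ... and test individual bits via operator& (Flags converts explicitly to bool).
  return static_cast<bool>( handleTypes & vk::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 );
}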
- - using PipelineDiscardRectangleStateCreateFlagsEXT = Flags; - - //=== VK_EXT_conservative_rasterization === - - using PipelineRasterizationConservativeStateCreateFlagsEXT = Flags; - - //=== VK_EXT_depth_clip_enable === - - using PipelineRasterizationDepthClipStateCreateFlagsEXT = Flags; - - //=== VK_KHR_performance_query === - - using PerformanceCounterDescriptionFlagsKHR = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = - VkFlags( PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting ) | VkFlags( PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator|( PerformanceCounterDescriptionFlagBitsKHR bit0, - PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return PerformanceCounterDescriptionFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator&( PerformanceCounterDescriptionFlagBitsKHR bit0, - PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return PerformanceCounterDescriptionFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator^( PerformanceCounterDescriptionFlagBitsKHR bit0, - PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return PerformanceCounterDescriptionFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator~( PerformanceCounterDescriptionFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( PerformanceCounterDescriptionFlagsKHR( bits ) ); - } - - using AcquireProfilingLockFlagsKHR = Flags; - -#if defined( VK_USE_PLATFORM_IOS_MVK ) - //=== VK_MVK_ios_surface === - - using IOSSurfaceCreateFlagsMVK = Flags; - -#endif /*VK_USE_PLATFORM_IOS_MVK*/ - -#if defined( VK_USE_PLATFORM_MACOS_MVK ) - //=== VK_MVK_macos_surface === - - using MacOSSurfaceCreateFlagsMVK = Flags; - -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ - - //=== VK_EXT_debug_utils === - - using DebugUtilsMessageSeverityFlagsEXT = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eVerbose ) | VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eInfo ) | - VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eWarning ) | VkFlags( DebugUtilsMessageSeverityFlagBitsEXT::eError ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator|( DebugUtilsMessageSeverityFlagBitsEXT bit0, - DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return DebugUtilsMessageSeverityFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator&( DebugUtilsMessageSeverityFlagBitsEXT bit0, - DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return DebugUtilsMessageSeverityFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator^( DebugUtilsMessageSeverityFlagBitsEXT bit0, - DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return DebugUtilsMessageSeverityFlagsEXT( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator~( DebugUtilsMessageSeverityFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT - { - return ~( DebugUtilsMessageSeverityFlagsEXT( bits ) ); - } - - using DebugUtilsMessageTypeFlagsEXT = Flags; - - template <> - struct 
FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( DebugUtilsMessageTypeFlagBitsEXT::eGeneral ) | VkFlags( DebugUtilsMessageTypeFlagBitsEXT::eValidation ) | - VkFlags( DebugUtilsMessageTypeFlagBitsEXT::ePerformance ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator|( DebugUtilsMessageTypeFlagBitsEXT bit0, - DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return DebugUtilsMessageTypeFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator&( DebugUtilsMessageTypeFlagBitsEXT bit0, - DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return DebugUtilsMessageTypeFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator^( DebugUtilsMessageTypeFlagBitsEXT bit0, - DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return DebugUtilsMessageTypeFlagsEXT( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator~( DebugUtilsMessageTypeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT - { - return ~( DebugUtilsMessageTypeFlagsEXT( bits ) ); - } - - using DebugUtilsMessengerCallbackDataFlagsEXT = Flags; - - using DebugUtilsMessengerCreateFlagsEXT = Flags; - - //=== VK_NV_fragment_coverage_to_color === - - using PipelineCoverageToColorStateCreateFlagsNV = Flags; - - //=== VK_KHR_acceleration_structure === - - using GeometryFlagsKHR = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( GeometryFlagBitsKHR::eOpaque ) | VkFlags( GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator|( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return GeometryFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator&( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return GeometryFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator^( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return GeometryFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator~( GeometryFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( GeometryFlagsKHR( bits ) ); - } - - using GeometryFlagsNV = GeometryFlagsKHR; - - using GeometryInstanceFlagsKHR = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable ) | VkFlags( GeometryInstanceFlagBitsKHR::eTriangleFlipFacing ) | - VkFlags( GeometryInstanceFlagBitsKHR::eForceOpaque ) | VkFlags( GeometryInstanceFlagBitsKHR::eForceNoOpaque ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator|( GeometryInstanceFlagBitsKHR bit0, - GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return GeometryInstanceFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator&( GeometryInstanceFlagBitsKHR bit0, - GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return GeometryInstanceFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator^( GeometryInstanceFlagBitsKHR bit0, - GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return GeometryInstanceFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE 
VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator~( GeometryInstanceFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( GeometryInstanceFlagsKHR( bits ) ); - } - - using GeometryInstanceFlagsNV = GeometryInstanceFlagsKHR; - - using BuildAccelerationStructureFlagsKHR = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowUpdate ) | VkFlags( BuildAccelerationStructureFlagBitsKHR::eAllowCompaction ) | - VkFlags( BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace ) | VkFlags( BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild ) | - VkFlags( BuildAccelerationStructureFlagBitsKHR::eLowMemory ) | VkFlags( BuildAccelerationStructureFlagBitsKHR::eMotionNV ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator|( BuildAccelerationStructureFlagBitsKHR bit0, - BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return BuildAccelerationStructureFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator&( BuildAccelerationStructureFlagBitsKHR bit0, - BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return BuildAccelerationStructureFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator^( BuildAccelerationStructureFlagBitsKHR bit0, - BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return BuildAccelerationStructureFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator~( BuildAccelerationStructureFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( BuildAccelerationStructureFlagsKHR( bits ) ); - } - - using BuildAccelerationStructureFlagsNV = BuildAccelerationStructureFlagsKHR; - - using AccelerationStructureCreateFlagsKHR = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay ) | VkFlags( AccelerationStructureCreateFlagBitsKHR::eMotionNV ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR operator|( AccelerationStructureCreateFlagBitsKHR bit0, - AccelerationStructureCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return AccelerationStructureCreateFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR operator&( AccelerationStructureCreateFlagBitsKHR bit0, - AccelerationStructureCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return AccelerationStructureCreateFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR operator^( AccelerationStructureCreateFlagBitsKHR bit0, - AccelerationStructureCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return AccelerationStructureCreateFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR operator~( AccelerationStructureCreateFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( AccelerationStructureCreateFlagsKHR( bits ) ); - } - - //=== VK_NV_framebuffer_mixed_samples === - - using PipelineCoverageModulationStateCreateFlagsNV = Flags; - - //=== VK_EXT_validation_cache === - - using ValidationCacheCreateFlagsEXT = Flags; - - //=== VK_AMD_pipeline_compiler_control === - - using PipelineCompilerControlFlagsAMD = Flags; - -#if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== 
VK_FUCHSIA_imagepipe_surface === - - using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags; - -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - -#if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_surface === - - using MetalSurfaceCreateFlagsEXT = Flags; - -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - - //=== VK_AMD_shader_core_properties2 === - - using ShaderCorePropertiesFlagsAMD = Flags; - - //=== VK_NV_coverage_reduction_mode === - - using PipelineCoverageReductionStateCreateFlagsNV = Flags; - - //=== VK_EXT_headless_surface === - - using HeadlessSurfaceCreateFlagsEXT = Flags; - - //=== VK_NV_device_generated_commands === - - using IndirectStateFlagsNV = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( IndirectStateFlagBitsNV::eFlagFrontface ) - }; + eVulkanIndexBuffer = VK_INDIRECT_COMMANDS_INPUT_MODE_VULKAN_INDEX_BUFFER_EXT, + eDxgiIndexBuffer = VK_INDIRECT_COMMANDS_INPUT_MODE_DXGI_INDEX_BUFFER_EXT }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator|( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT - { - return IndirectStateFlagsNV( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator&( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT - { - return IndirectStateFlagsNV( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator^( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT - { - return IndirectStateFlagsNV( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator~( IndirectStateFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT - { - return ~( IndirectStateFlagsNV( bits ) ); - } - - using IndirectCommandsLayoutUsageFlagsNV = Flags; + using IndirectCommandsInputModeFlagsEXT = Flags; template <> - struct FlagTraits + struct FlagTraits { - enum : VkFlags - { - allFlags = VkFlags( IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess ) | VkFlags( IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences ) | - VkFlags( IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences ) - }; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR IndirectCommandsInputModeFlagsEXT allFlags = + IndirectCommandsInputModeFlagBitsEXT::eVulkanIndexBuffer | IndirectCommandsInputModeFlagBitsEXT::eDxgiIndexBuffer; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator|( IndirectCommandsLayoutUsageFlagBitsNV bit0, - IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT - { - return IndirectCommandsLayoutUsageFlagsNV( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator&( IndirectCommandsLayoutUsageFlagBitsNV bit0, - IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT - { - return IndirectCommandsLayoutUsageFlagsNV( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator^( IndirectCommandsLayoutUsageFlagBitsNV bit0, - IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT - { - return IndirectCommandsLayoutUsageFlagsNV( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator~( IndirectCommandsLayoutUsageFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT - { - return ~( IndirectCommandsLayoutUsageFlagsNV( bits ) ); - } - - //=== VK_EXT_device_memory_report === - - using DeviceMemoryReportFlagsEXT = Flags; - -#if 
defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_encode_queue === - - using VideoEncodeFlagsKHR = Flags; - - using VideoEncodeCapabilityFlagsKHR = Flags; + //=== VK_KHR_maintenance8 === - template <> - struct FlagTraits + enum class AccessFlagBits3KHR : VkAccessFlags3KHR { - enum : VkFlags - { - allFlags = VkFlags( VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes ) - }; + eNone = VK_ACCESS_3_NONE_KHR }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeCapabilityFlagsKHR operator|( VideoEncodeCapabilityFlagBitsKHR bit0, - VideoEncodeCapabilityFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeCapabilityFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeCapabilityFlagsKHR operator&( VideoEncodeCapabilityFlagBitsKHR bit0, - VideoEncodeCapabilityFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeCapabilityFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeCapabilityFlagsKHR operator^( VideoEncodeCapabilityFlagBitsKHR bit0, - VideoEncodeCapabilityFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeCapabilityFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeCapabilityFlagsKHR operator~( VideoEncodeCapabilityFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( VideoEncodeCapabilityFlagsKHR( bits ) ); - } - - using VideoEncodeUsageFlagsKHR = Flags; + using AccessFlags3KHR = Flags; template <> - struct FlagTraits + struct FlagTraits { - enum : VkFlags - { - allFlags = VkFlags( VideoEncodeUsageFlagBitsKHR::eDefault ) | VkFlags( VideoEncodeUsageFlagBitsKHR::eTranscoding ) | - VkFlags( VideoEncodeUsageFlagBitsKHR::eStreaming ) | VkFlags( VideoEncodeUsageFlagBitsKHR::eRecording ) | - VkFlags( VideoEncodeUsageFlagBitsKHR::eConferencing ) - }; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR AccessFlags3KHR allFlags = AccessFlagBits3KHR::eNone; }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeUsageFlagsKHR operator|( VideoEncodeUsageFlagBitsKHR bit0, - VideoEncodeUsageFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeUsageFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeUsageFlagsKHR operator&( VideoEncodeUsageFlagBitsKHR bit0, - VideoEncodeUsageFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeUsageFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeUsageFlagsKHR operator^( VideoEncodeUsageFlagBitsKHR bit0, - VideoEncodeUsageFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeUsageFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeUsageFlagsKHR operator~( VideoEncodeUsageFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( VideoEncodeUsageFlagsKHR( bits ) ); - } - - using VideoEncodeContentFlagsKHR = Flags; + //=== VK_EXT_depth_clamp_control === - template <> - struct FlagTraits + enum class DepthClampModeEXT { - enum : VkFlags - { - allFlags = VkFlags( VideoEncodeContentFlagBitsKHR::eDefault ) | VkFlags( VideoEncodeContentFlagBitsKHR::eCamera ) | - VkFlags( VideoEncodeContentFlagBitsKHR::eDesktop ) | VkFlags( VideoEncodeContentFlagBitsKHR::eRendered ) - }; + eViewportRange = VK_DEPTH_CLAMP_MODE_VIEWPORT_RANGE_EXT, + eUserDefinedRange = VK_DEPTH_CLAMP_MODE_USER_DEFINED_RANGE_EXT }; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeContentFlagsKHR operator|( VideoEncodeContentFlagBitsKHR bit0, - VideoEncodeContentFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT 
- { - return VideoEncodeContentFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeContentFlagsKHR operator&( VideoEncodeContentFlagBitsKHR bit0, - VideoEncodeContentFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeContentFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeContentFlagsKHR operator^( VideoEncodeContentFlagBitsKHR bit0, - VideoEncodeContentFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeContentFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeContentFlagsKHR operator~( VideoEncodeContentFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( VideoEncodeContentFlagsKHR( bits ) ); - } - - using VideoEncodeRateControlFlagsKHR = Flags; - - using VideoEncodeRateControlModeFlagsKHR = Flags; + //=========================================================== + //=== Mapping from ObjectType to DebugReportObjectTypeEXT === + //=========================================================== - template <> - struct FlagTraits + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType ) { - enum : VkFlags + switch ( objectType ) { - allFlags = VkFlags( VideoEncodeRateControlModeFlagBitsKHR::eNone ) | VkFlags( VideoEncodeRateControlModeFlagBitsKHR::eCbr ) | - VkFlags( VideoEncodeRateControlModeFlagBitsKHR::eVbr ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR operator|( VideoEncodeRateControlModeFlagBitsKHR bit0, - VideoEncodeRateControlModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeRateControlModeFlagsKHR( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR operator&( VideoEncodeRateControlModeFlagBitsKHR bit0, - VideoEncodeRateControlModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeRateControlModeFlagsKHR( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR operator^( VideoEncodeRateControlModeFlagBitsKHR bit0, - VideoEncodeRateControlModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT - { - return VideoEncodeRateControlModeFlagsKHR( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR VideoEncodeRateControlModeFlagsKHR operator~( VideoEncodeRateControlModeFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT - { - return ~( VideoEncodeRateControlModeFlagsKHR( bits ) ); - } + //=== VK_VERSION_1_0 === + case VULKAN_HPP_NAMESPACE::ObjectType::eInstance: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance; + case VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice; + case VULKAN_HPP_NAMESPACE::ObjectType::eDevice: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice; + case VULKAN_HPP_NAMESPACE::ObjectType::eQueue: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue; + case VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory; + case VULKAN_HPP_NAMESPACE::ObjectType::eFence: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence; + case VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore; + case VULKAN_HPP_NAMESPACE::ObjectType::eEvent: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent; + case VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool; + 
case VULKAN_HPP_NAMESPACE::ObjectType::eBuffer: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer; + case VULKAN_HPP_NAMESPACE::ObjectType::eBufferView: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView; + case VULKAN_HPP_NAMESPACE::ObjectType::eImage: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage; + case VULKAN_HPP_NAMESPACE::ObjectType::eImageView: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView; + case VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule; + case VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache; + case VULKAN_HPP_NAMESPACE::ObjectType::ePipeline: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline; + case VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout; + case VULKAN_HPP_NAMESPACE::ObjectType::eSampler: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler; + case VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool; + case VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet; + case VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout; + case VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer; + case VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass; + case VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool; + case VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer; + + //=== VK_VERSION_1_1 === + case VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion; + case VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate; + + //=== VK_VERSION_1_3 === + case VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + //=== VK_KHR_surface === + case VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR; + + //=== VK_KHR_swapchain === + case VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR; + + //=== VK_KHR_display === + case VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR; + case VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR; + + //=== VK_EXT_debug_report === + case VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT; + + //=== VK_KHR_video_queue === + case VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + case VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + //=== 
VK_NVX_binary_import === + case VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX; + case VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX; + + //=== VK_EXT_debug_utils === + case VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + //=== VK_KHR_acceleration_structure === + case VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR; + + //=== VK_EXT_validation_cache === + case VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT; + + //=== VK_NV_ray_tracing === + case VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV; + + //=== VK_INTEL_performance_query === + case VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + //=== VK_KHR_deferred_host_operations === + case VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + //=== VK_NV_device_generated_commands === + case VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + case VULKAN_HPP_NAMESPACE::ObjectType::eCudaModuleNV: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCudaModuleNV; + case VULKAN_HPP_NAMESPACE::ObjectType::eCudaFunctionNV: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCudaFunctionNV; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - //=== VK_NV_device_diagnostics_config === - - using DeviceDiagnosticsConfigFlagsNV = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo ) | VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking ) | - VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints ) | - VkFlags( DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderErrorReporting ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator|( DeviceDiagnosticsConfigFlagBitsNV bit0, - DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT - { - return DeviceDiagnosticsConfigFlagsNV( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator&( DeviceDiagnosticsConfigFlagBitsNV bit0, - DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT - { - return DeviceDiagnosticsConfigFlagsNV( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator^( DeviceDiagnosticsConfigFlagBitsNV bit0, - DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT - { - return DeviceDiagnosticsConfigFlagsNV( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator~( DeviceDiagnosticsConfigFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT - { - return ~( DeviceDiagnosticsConfigFlagsNV( bits ) ); - } - -#if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_objects === - - using ExportMetalObjectTypeFlagsEXT = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = 
VkFlags( ExportMetalObjectTypeFlagBitsEXT::eMetalDevice ) | VkFlags( ExportMetalObjectTypeFlagBitsEXT::eMetalCommandQueue ) | - VkFlags( ExportMetalObjectTypeFlagBitsEXT::eMetalBuffer ) | VkFlags( ExportMetalObjectTypeFlagBitsEXT::eMetalTexture ) | - VkFlags( ExportMetalObjectTypeFlagBitsEXT::eMetalIosurface ) | VkFlags( ExportMetalObjectTypeFlagBitsEXT::eMetalSharedEvent ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExportMetalObjectTypeFlagsEXT operator|( ExportMetalObjectTypeFlagBitsEXT bit0, - ExportMetalObjectTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExportMetalObjectTypeFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExportMetalObjectTypeFlagsEXT operator&( ExportMetalObjectTypeFlagBitsEXT bit0, - ExportMetalObjectTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExportMetalObjectTypeFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExportMetalObjectTypeFlagsEXT operator^( ExportMetalObjectTypeFlagBitsEXT bit0, - ExportMetalObjectTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return ExportMetalObjectTypeFlagsEXT( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExportMetalObjectTypeFlagsEXT operator~( ExportMetalObjectTypeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT - { - return ~( ExportMetalObjectTypeFlagsEXT( bits ) ); - } - -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - - //=== VK_EXT_graphics_pipeline_library === - - using GraphicsPipelineLibraryFlagsEXT = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( GraphicsPipelineLibraryFlagBitsEXT::eVertexInputInterface ) | - VkFlags( GraphicsPipelineLibraryFlagBitsEXT::ePreRasterizationShaders ) | VkFlags( GraphicsPipelineLibraryFlagBitsEXT::eFragmentShader ) | - VkFlags( GraphicsPipelineLibraryFlagBitsEXT::eFragmentOutputInterface ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryFlagsEXT operator|( GraphicsPipelineLibraryFlagBitsEXT bit0, - GraphicsPipelineLibraryFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return GraphicsPipelineLibraryFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryFlagsEXT operator&( GraphicsPipelineLibraryFlagBitsEXT bit0, - GraphicsPipelineLibraryFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return GraphicsPipelineLibraryFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryFlagsEXT operator^( GraphicsPipelineLibraryFlagBitsEXT bit0, - GraphicsPipelineLibraryFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return GraphicsPipelineLibraryFlagsEXT( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryFlagsEXT operator~( GraphicsPipelineLibraryFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT - { - return ~( GraphicsPipelineLibraryFlagsEXT( bits ) ); - } - - //=== VK_NV_ray_tracing_motion_blur === - - using AccelerationStructureMotionInfoFlagsNV = Flags; - - using AccelerationStructureMotionInstanceFlagsNV = Flags; - - //=== VK_EXT_image_compression_control === - - using ImageCompressionFlagsEXT = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( ImageCompressionFlagBitsEXT::eDefault ) | VkFlags( ImageCompressionFlagBitsEXT::eFixedRateDefault ) | - VkFlags( ImageCompressionFlagBitsEXT::eFixedRateExplicit ) | VkFlags( ImageCompressionFlagBitsEXT::eDisabled ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCompressionFlagsEXT operator|( ImageCompressionFlagBitsEXT bit0, - ImageCompressionFlagBitsEXT bit1 ) 
VULKAN_HPP_NOEXCEPT - { - return ImageCompressionFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCompressionFlagsEXT operator&( ImageCompressionFlagBitsEXT bit0, - ImageCompressionFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return ImageCompressionFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCompressionFlagsEXT operator^( ImageCompressionFlagBitsEXT bit0, - ImageCompressionFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return ImageCompressionFlagsEXT( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCompressionFlagsEXT operator~( ImageCompressionFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT - { - return ~( ImageCompressionFlagsEXT( bits ) ); - } - - using ImageCompressionFixedRateFlagsEXT = Flags; - - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( ImageCompressionFixedRateFlagBitsEXT::eNone ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e1Bpc ) | - VkFlags( ImageCompressionFixedRateFlagBitsEXT::e2Bpc ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e3Bpc ) | - VkFlags( ImageCompressionFixedRateFlagBitsEXT::e4Bpc ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e5Bpc ) | - VkFlags( ImageCompressionFixedRateFlagBitsEXT::e6Bpc ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e7Bpc ) | - VkFlags( ImageCompressionFixedRateFlagBitsEXT::e8Bpc ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e9Bpc ) | - VkFlags( ImageCompressionFixedRateFlagBitsEXT::e10Bpc ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e11Bpc ) | - VkFlags( ImageCompressionFixedRateFlagBitsEXT::e12Bpc ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e13Bpc ) | - VkFlags( ImageCompressionFixedRateFlagBitsEXT::e14Bpc ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e15Bpc ) | - VkFlags( ImageCompressionFixedRateFlagBitsEXT::e16Bpc ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e17Bpc ) | - VkFlags( ImageCompressionFixedRateFlagBitsEXT::e18Bpc ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e19Bpc ) | - VkFlags( ImageCompressionFixedRateFlagBitsEXT::e20Bpc ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e21Bpc ) | - VkFlags( ImageCompressionFixedRateFlagBitsEXT::e22Bpc ) | VkFlags( ImageCompressionFixedRateFlagBitsEXT::e23Bpc ) | - VkFlags( ImageCompressionFixedRateFlagBitsEXT::e24Bpc ) - }; - }; - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCompressionFixedRateFlagsEXT operator|( ImageCompressionFixedRateFlagBitsEXT bit0, - ImageCompressionFixedRateFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return ImageCompressionFixedRateFlagsEXT( bit0 ) | bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCompressionFixedRateFlagsEXT operator&( ImageCompressionFixedRateFlagBitsEXT bit0, - ImageCompressionFixedRateFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return ImageCompressionFixedRateFlagsEXT( bit0 ) & bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCompressionFixedRateFlagsEXT operator^( ImageCompressionFixedRateFlagBitsEXT bit0, - ImageCompressionFixedRateFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT - { - return ImageCompressionFixedRateFlagsEXT( bit0 ) ^ bit1; - } - - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCompressionFixedRateFlagsEXT operator~( ImageCompressionFixedRateFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT - { - return ~( ImageCompressionFixedRateFlagsEXT( bits ) ); - } - -#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) - //=== VK_EXT_directfb_surface === - - using DirectFBSurfaceCreateFlagsEXT = Flags; - -#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ - #if defined( VK_USE_PLATFORM_FUCHSIA ) - 
//=== VK_FUCHSIA_buffer_collection === - - using ImageFormatConstraintsFlagsFUCHSIA = Flags; + //=== VK_FUCHSIA_buffer_collection === + case VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - using ImageConstraintsInfoFlagsFUCHSIA = Flags; + //=== VK_EXT_opacity_micromap === + case VULKAN_HPP_NAMESPACE::ObjectType::eMicromapEXT: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; - template <> - struct FlagTraits - { - enum : VkFlags - { - allFlags = VkFlags( ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadRarely ) | VkFlags( ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadOften ) | - VkFlags( ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteRarely ) | VkFlags( ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteOften ) | - VkFlags( ImageConstraintsInfoFlagBitsFUCHSIA::eProtectedOptional ) - }; - }; + //=== VK_NV_optical_flow === + case VULKAN_HPP_NAMESPACE::ObjectType::eOpticalFlowSessionNV: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFlagsFUCHSIA operator|( ImageConstraintsInfoFlagBitsFUCHSIA bit0, - ImageConstraintsInfoFlagBitsFUCHSIA bit1 ) VULKAN_HPP_NOEXCEPT - { - return ImageConstraintsInfoFlagsFUCHSIA( bit0 ) | bit1; - } + //=== VK_EXT_shader_object === + case VULKAN_HPP_NAMESPACE::ObjectType::eShaderEXT: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFlagsFUCHSIA operator&( ImageConstraintsInfoFlagBitsFUCHSIA bit0, - ImageConstraintsInfoFlagBitsFUCHSIA bit1 ) VULKAN_HPP_NOEXCEPT - { - return ImageConstraintsInfoFlagsFUCHSIA( bit0 ) & bit1; - } + //=== VK_KHR_pipeline_binary === + case VULKAN_HPP_NAMESPACE::ObjectType::ePipelineBinaryKHR: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFlagsFUCHSIA operator^( ImageConstraintsInfoFlagBitsFUCHSIA bit0, - ImageConstraintsInfoFlagBitsFUCHSIA bit1 ) VULKAN_HPP_NOEXCEPT - { - return ImageConstraintsInfoFlagsFUCHSIA( bit0 ) ^ bit1; - } + //=== VK_EXT_device_generated_commands === + case VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutEXT: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + case VULKAN_HPP_NAMESPACE::ObjectType::eIndirectExecutionSetEXT: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFlagsFUCHSIA operator~( ImageConstraintsInfoFlagBitsFUCHSIA bits ) VULKAN_HPP_NOEXCEPT - { - return ~( ImageConstraintsInfoFlagsFUCHSIA( bits ) ); + default: VULKAN_HPP_ASSERT( false && "unknown ObjectType" ); return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + } } -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - -#if defined( VK_USE_PLATFORM_SCREEN_QNX ) - //=== VK_QNX_screen_surface === - - using ScreenSurfaceCreateFlagsQNX = Flags; - -#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - } // namespace VULKAN_HPP_NAMESPACE #endif diff --git a/src/libraries/vulkanheaders/vulkan_extension_inspection.hpp b/src/libraries/vulkanheaders/vulkan_extension_inspection.hpp new file mode 100644 index 000000000..d64a4aaa7 --- /dev/null +++ b/src/libraries/vulkanheaders/vulkan_extension_inspection.hpp @@ -0,0 +1,3452 @@ +// Copyright 2015-2025 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
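// Illustrative usage sketch for the extension-inspection helpers this new header declares below
// (a minimal example under the assumption that vulkan.hpp and this header are on the include path
// and the default vk namespace is used; not part of the generated file itself):

#include <iostream>
#include <string>
#include <vulkan/vulkan_extension_inspection.hpp>

inline void reportExtension( std::string const & name )
{
  // Query whether the extension is a device-level extension ...
  if ( vk::isDeviceExtension( name ) )
  {
    std::cout << name << " is a device extension\n";
  }
  // ... and whether it has been promoted to a core version or to another extension.
  if ( vk::isPromotedExtension( name ) )
  {
    std::cout << name << " was promoted to " << vk::getExtensionPromotedTo( name ) << "\n";
  }
}

// For example, reportExtension( "VK_KHR_dynamic_rendering" ) would report promotion to VK_VERSION_1_3.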
+ +#ifndef VULKAN_EXTENSION_INSPECTION_HPP +#define VULKAN_EXTENSION_INSPECTION_HPP + +#if defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) +import VULKAN_HPP_STD_MODULE; +#else +# include +# include +# include +# include +# include +#endif + +namespace VULKAN_HPP_NAMESPACE +{ + //====================================== + //=== Extension inspection functions === + //====================================== + + std::set const & getDeviceExtensions(); + std::set const & getInstanceExtensions(); + std::map const & getDeprecatedExtensions(); + std::map>> const & getExtensionDepends( std::string const & extension ); + std::pair> const &> getExtensionDepends( std::string const & version, std::string const & extension ); + std::map const & getObsoletedExtensions(); + std::map const & getPromotedExtensions(); + VULKAN_HPP_CONSTEXPR_20 std::string getExtensionDeprecatedBy( std::string const & extension ); + VULKAN_HPP_CONSTEXPR_20 std::string getExtensionObsoletedBy( std::string const & extension ); + VULKAN_HPP_CONSTEXPR_20 std::string getExtensionPromotedTo( std::string const & extension ); + VULKAN_HPP_CONSTEXPR_20 bool isDeprecatedExtension( std::string const & extension ); + VULKAN_HPP_CONSTEXPR_20 bool isDeviceExtension( std::string const & extension ); + VULKAN_HPP_CONSTEXPR_20 bool isInstanceExtension( std::string const & extension ); + VULKAN_HPP_CONSTEXPR_20 bool isObsoletedExtension( std::string const & extension ); + VULKAN_HPP_CONSTEXPR_20 bool isPromotedExtension( std::string const & extension ); + + //===================================================== + //=== Extension inspection function implementations === + //===================================================== + + VULKAN_HPP_INLINE std::map const & getDeprecatedExtensions() + { + static const std::map deprecatedExtensions = { + { "VK_EXT_debug_report", "VK_EXT_debug_utils" }, + { "VK_NV_glsl_shader", "" }, + { "VK_NV_dedicated_allocation", "VK_KHR_dedicated_allocation" }, + { "VK_AMD_gpu_shader_half_float", "VK_KHR_shader_float16_int8" }, + { "VK_IMG_format_pvrtc", "" }, + { "VK_NV_external_memory_capabilities", "VK_KHR_external_memory_capabilities" }, + { "VK_NV_external_memory", "VK_KHR_external_memory" }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_NV_external_memory_win32", "VK_KHR_external_memory_win32" }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + { "VK_EXT_validation_flags", "VK_EXT_layer_settings" }, + { "VK_EXT_shader_subgroup_ballot", "VK_VERSION_1_2" }, + { "VK_EXT_shader_subgroup_vote", "VK_VERSION_1_1" }, +#if defined( VK_USE_PLATFORM_IOS_MVK ) + { "VK_MVK_ios_surface", "VK_EXT_metal_surface" }, +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + { "VK_MVK_macos_surface", "VK_EXT_metal_surface" }, +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + { "VK_AMD_gpu_shader_int16", "VK_KHR_shader_float16_int8" }, + { "VK_NV_ray_tracing", "VK_KHR_ray_tracing_pipeline" }, + { "VK_EXT_buffer_device_address", "VK_KHR_buffer_device_address" }, + { "VK_EXT_validation_features", "VK_EXT_layer_settings" } + }; + return deprecatedExtensions; + } + + VULKAN_HPP_INLINE std::set const & getDeviceExtensions() + { + static const std::set deviceExtensions = { + "VK_KHR_swapchain", + "VK_KHR_display_swapchain", + "VK_NV_glsl_shader", + "VK_EXT_depth_range_unrestricted", + "VK_KHR_sampler_mirror_clamp_to_edge", + "VK_IMG_filter_cubic", + "VK_AMD_rasterization_order", + "VK_AMD_shader_trinary_minmax", + "VK_AMD_shader_explicit_vertex_parameter", + "VK_EXT_debug_marker", + "VK_KHR_video_queue", 
+ "VK_KHR_video_decode_queue", + "VK_AMD_gcn_shader", + "VK_NV_dedicated_allocation", + "VK_EXT_transform_feedback", + "VK_NVX_binary_import", + "VK_NVX_image_view_handle", + "VK_AMD_draw_indirect_count", + "VK_AMD_negative_viewport_height", + "VK_AMD_gpu_shader_half_float", + "VK_AMD_shader_ballot", + "VK_KHR_video_encode_h264", + "VK_KHR_video_encode_h265", + "VK_KHR_video_decode_h264", + "VK_AMD_texture_gather_bias_lod", + "VK_AMD_shader_info", + "VK_KHR_dynamic_rendering", + "VK_AMD_shader_image_load_store_lod", + "VK_NV_corner_sampled_image", + "VK_KHR_multiview", + "VK_IMG_format_pvrtc", + "VK_NV_external_memory", +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_NV_external_memory_win32", + "VK_NV_win32_keyed_mutex", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_KHR_device_group", + "VK_KHR_shader_draw_parameters", + "VK_EXT_shader_subgroup_ballot", + "VK_EXT_shader_subgroup_vote", + "VK_EXT_texture_compression_astc_hdr", + "VK_EXT_astc_decode_mode", + "VK_EXT_pipeline_robustness", + "VK_KHR_maintenance1", + "VK_KHR_external_memory", +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_KHR_external_memory_win32", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_KHR_external_memory_fd", +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_KHR_win32_keyed_mutex", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_KHR_external_semaphore", +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_KHR_external_semaphore_win32", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_KHR_external_semaphore_fd", + "VK_KHR_push_descriptor", + "VK_EXT_conditional_rendering", + "VK_KHR_shader_float16_int8", + "VK_KHR_16bit_storage", + "VK_KHR_incremental_present", + "VK_KHR_descriptor_update_template", + "VK_NV_clip_space_w_scaling", + "VK_EXT_display_control", + "VK_GOOGLE_display_timing", + "VK_NV_sample_mask_override_coverage", + "VK_NV_geometry_shader_passthrough", + "VK_NV_viewport_array2", + "VK_NVX_multiview_per_view_attributes", + "VK_NV_viewport_swizzle", + "VK_EXT_discard_rectangles", + "VK_EXT_conservative_rasterization", + "VK_EXT_depth_clip_enable", + "VK_EXT_hdr_metadata", + "VK_KHR_imageless_framebuffer", + "VK_KHR_create_renderpass2", + "VK_IMG_relaxed_line_rasterization", + "VK_KHR_shared_presentable_image", + "VK_KHR_external_fence", +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_KHR_external_fence_win32", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_KHR_external_fence_fd", + "VK_KHR_performance_query", + "VK_KHR_maintenance2", + "VK_KHR_variable_pointers", + "VK_EXT_external_memory_dma_buf", + "VK_EXT_queue_family_foreign", + "VK_KHR_dedicated_allocation", +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + "VK_ANDROID_external_memory_android_hardware_buffer", +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + "VK_EXT_sampler_filter_minmax", + "VK_KHR_storage_buffer_storage_class", + "VK_AMD_gpu_shader_int16", +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + "VK_AMDX_shader_enqueue", +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + "VK_AMD_mixed_attachment_samples", + "VK_AMD_shader_fragment_mask", + "VK_EXT_inline_uniform_block", + "VK_EXT_shader_stencil_export", + "VK_EXT_sample_locations", + "VK_KHR_relaxed_block_layout", + "VK_KHR_get_memory_requirements2", + "VK_KHR_image_format_list", + "VK_EXT_blend_operation_advanced", + "VK_NV_fragment_coverage_to_color", + "VK_KHR_acceleration_structure", + "VK_KHR_ray_tracing_pipeline", + "VK_KHR_ray_query", + "VK_NV_framebuffer_mixed_samples", + "VK_NV_fill_rectangle", + "VK_NV_shader_sm_builtins", + "VK_EXT_post_depth_coverage", + "VK_KHR_sampler_ycbcr_conversion", + "VK_KHR_bind_memory2", + 
"VK_EXT_image_drm_format_modifier", + "VK_EXT_validation_cache", + "VK_EXT_descriptor_indexing", + "VK_EXT_shader_viewport_index_layer", +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + "VK_KHR_portability_subset", +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + "VK_NV_shading_rate_image", + "VK_NV_ray_tracing", + "VK_NV_representative_fragment_test", + "VK_KHR_maintenance3", + "VK_KHR_draw_indirect_count", + "VK_EXT_filter_cubic", + "VK_QCOM_render_pass_shader_resolve", + "VK_EXT_global_priority", + "VK_KHR_shader_subgroup_extended_types", + "VK_KHR_8bit_storage", + "VK_EXT_external_memory_host", + "VK_AMD_buffer_marker", + "VK_KHR_shader_atomic_int64", + "VK_KHR_shader_clock", + "VK_AMD_pipeline_compiler_control", + "VK_EXT_calibrated_timestamps", + "VK_AMD_shader_core_properties", + "VK_KHR_video_decode_h265", + "VK_KHR_global_priority", + "VK_AMD_memory_overallocation_behavior", + "VK_EXT_vertex_attribute_divisor", +#if defined( VK_USE_PLATFORM_GGP ) + "VK_GGP_frame_token", +#endif /*VK_USE_PLATFORM_GGP*/ + "VK_EXT_pipeline_creation_feedback", + "VK_KHR_driver_properties", + "VK_KHR_shader_float_controls", + "VK_NV_shader_subgroup_partitioned", + "VK_KHR_depth_stencil_resolve", + "VK_KHR_swapchain_mutable_format", + "VK_NV_compute_shader_derivatives", + "VK_NV_mesh_shader", + "VK_NV_fragment_shader_barycentric", + "VK_NV_shader_image_footprint", + "VK_NV_scissor_exclusive", + "VK_NV_device_diagnostic_checkpoints", + "VK_KHR_timeline_semaphore", + "VK_INTEL_shader_integer_functions2", + "VK_INTEL_performance_query", + "VK_KHR_vulkan_memory_model", + "VK_EXT_pci_bus_info", + "VK_AMD_display_native_hdr", + "VK_KHR_shader_terminate_invocation", + "VK_EXT_fragment_density_map", + "VK_EXT_scalar_block_layout", + "VK_GOOGLE_hlsl_functionality1", + "VK_GOOGLE_decorate_string", + "VK_EXT_subgroup_size_control", + "VK_KHR_fragment_shading_rate", + "VK_AMD_shader_core_properties2", + "VK_AMD_device_coherent_memory", + "VK_KHR_dynamic_rendering_local_read", + "VK_EXT_shader_image_atomic_int64", + "VK_KHR_shader_quad_control", + "VK_KHR_spirv_1_4", + "VK_EXT_memory_budget", + "VK_EXT_memory_priority", + "VK_NV_dedicated_allocation_image_aliasing", + "VK_KHR_separate_depth_stencil_layouts", + "VK_EXT_buffer_device_address", + "VK_EXT_tooling_info", + "VK_EXT_separate_stencil_usage", + "VK_KHR_present_wait", + "VK_NV_cooperative_matrix", + "VK_NV_coverage_reduction_mode", + "VK_EXT_fragment_shader_interlock", + "VK_EXT_ycbcr_image_arrays", + "VK_KHR_uniform_buffer_standard_layout", + "VK_EXT_provoking_vertex", +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_EXT_full_screen_exclusive", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_KHR_buffer_device_address", + "VK_EXT_line_rasterization", + "VK_EXT_shader_atomic_float", + "VK_EXT_host_query_reset", + "VK_EXT_index_type_uint8", + "VK_EXT_extended_dynamic_state", + "VK_KHR_deferred_host_operations", + "VK_KHR_pipeline_executable_properties", + "VK_EXT_host_image_copy", + "VK_KHR_map_memory2", + "VK_EXT_map_memory_placed", + "VK_EXT_shader_atomic_float2", + "VK_EXT_swapchain_maintenance1", + "VK_EXT_shader_demote_to_helper_invocation", + "VK_NV_device_generated_commands", + "VK_NV_inherited_viewport_scissor", + "VK_KHR_shader_integer_dot_product", + "VK_EXT_texel_buffer_alignment", + "VK_QCOM_render_pass_transform", + "VK_EXT_depth_bias_control", + "VK_EXT_device_memory_report", + "VK_EXT_robustness2", + "VK_EXT_custom_border_color", + "VK_GOOGLE_user_type", + "VK_KHR_pipeline_library", + "VK_NV_present_barrier", + "VK_KHR_shader_non_semantic_info", + 
"VK_KHR_present_id", + "VK_EXT_private_data", + "VK_EXT_pipeline_creation_cache_control", + "VK_KHR_video_encode_queue", + "VK_NV_device_diagnostics_config", + "VK_QCOM_render_pass_store_ops", +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + "VK_NV_cuda_kernel_launch", +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + "VK_NV_low_latency", +#if defined( VK_USE_PLATFORM_METAL_EXT ) + "VK_EXT_metal_objects", +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + "VK_KHR_synchronization2", + "VK_EXT_descriptor_buffer", + "VK_EXT_graphics_pipeline_library", + "VK_AMD_shader_early_and_late_fragment_tests", + "VK_KHR_fragment_shader_barycentric", + "VK_KHR_shader_subgroup_uniform_control_flow", + "VK_KHR_zero_initialize_workgroup_memory", + "VK_NV_fragment_shading_rate_enums", + "VK_NV_ray_tracing_motion_blur", + "VK_EXT_mesh_shader", + "VK_EXT_ycbcr_2plane_444_formats", + "VK_EXT_fragment_density_map2", + "VK_QCOM_rotated_copy_commands", + "VK_EXT_image_robustness", + "VK_KHR_workgroup_memory_explicit_layout", + "VK_KHR_copy_commands2", + "VK_EXT_image_compression_control", + "VK_EXT_attachment_feedback_loop_layout", + "VK_EXT_4444_formats", + "VK_EXT_device_fault", + "VK_ARM_rasterization_order_attachment_access", + "VK_EXT_rgba10x6_formats", +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_NV_acquire_winrt_display", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_VALVE_mutable_descriptor_type", + "VK_EXT_vertex_input_dynamic_state", + "VK_EXT_physical_device_drm", + "VK_EXT_device_address_binding_report", + "VK_EXT_depth_clip_control", + "VK_EXT_primitive_topology_list_restart", + "VK_KHR_format_feature_flags2", + "VK_EXT_present_mode_fifo_latest_ready", +#if defined( VK_USE_PLATFORM_FUCHSIA ) + "VK_FUCHSIA_external_memory", + "VK_FUCHSIA_external_semaphore", + "VK_FUCHSIA_buffer_collection", +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + "VK_HUAWEI_subpass_shading", + "VK_HUAWEI_invocation_mask", + "VK_NV_external_memory_rdma", + "VK_EXT_pipeline_properties", + "VK_EXT_frame_boundary", + "VK_EXT_multisampled_render_to_single_sampled", + "VK_EXT_extended_dynamic_state2", + "VK_EXT_color_write_enable", + "VK_EXT_primitives_generated_query", + "VK_KHR_ray_tracing_maintenance1", + "VK_EXT_global_priority_query", + "VK_EXT_image_view_min_lod", + "VK_EXT_multi_draw", + "VK_EXT_image_2d_view_of_3d", + "VK_EXT_shader_tile_image", + "VK_EXT_opacity_micromap", +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + "VK_NV_displacement_micromap", +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + "VK_EXT_load_store_op_none", + "VK_HUAWEI_cluster_culling_shader", + "VK_EXT_border_color_swizzle", + "VK_EXT_pageable_device_local_memory", + "VK_KHR_maintenance4", + "VK_ARM_shader_core_properties", + "VK_KHR_shader_subgroup_rotate", + "VK_ARM_scheduling_controls", + "VK_EXT_image_sliced_view_of_3d", + "VK_VALVE_descriptor_set_host_mapping", + "VK_EXT_depth_clamp_zero_one", + "VK_EXT_non_seamless_cube_map", + "VK_ARM_render_pass_striped", + "VK_QCOM_fragment_density_map_offset", + "VK_NV_copy_memory_indirect", + "VK_NV_memory_decompression", + "VK_NV_device_generated_commands_compute", + "VK_NV_ray_tracing_linear_swept_spheres", + "VK_NV_linear_color_attachment", + "VK_KHR_shader_maximal_reconvergence", + "VK_EXT_image_compression_control_swapchain", + "VK_QCOM_image_processing", + "VK_EXT_nested_command_buffer", + "VK_EXT_external_memory_acquire_unmodified", + "VK_EXT_extended_dynamic_state3", + "VK_EXT_subpass_merge_feedback", + "VK_EXT_shader_module_identifier", + "VK_EXT_rasterization_order_attachment_access", + "VK_NV_optical_flow", + "VK_EXT_legacy_dithering", + 
"VK_EXT_pipeline_protected_access", +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + "VK_ANDROID_external_format_resolve", +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + "VK_KHR_maintenance5", + "VK_AMD_anti_lag", + "VK_KHR_ray_tracing_position_fetch", + "VK_EXT_shader_object", + "VK_KHR_pipeline_binary", + "VK_QCOM_tile_properties", + "VK_SEC_amigo_profiling", + "VK_QCOM_multiview_per_view_viewports", + "VK_NV_ray_tracing_invocation_reorder", + "VK_NV_cooperative_vector", + "VK_NV_extended_sparse_address_space", + "VK_EXT_mutable_descriptor_type", + "VK_EXT_legacy_vertex_attributes", + "VK_ARM_shader_core_builtins", + "VK_EXT_pipeline_library_group_handles", + "VK_EXT_dynamic_rendering_unused_attachments", + "VK_NV_low_latency2", + "VK_KHR_cooperative_matrix", + "VK_QCOM_multiview_per_view_render_areas", + "VK_KHR_compute_shader_derivatives", + "VK_KHR_video_decode_av1", + "VK_KHR_video_encode_av1", + "VK_KHR_video_maintenance1", + "VK_NV_per_stage_descriptor_set", + "VK_QCOM_image_processing2", + "VK_QCOM_filter_cubic_weights", + "VK_QCOM_ycbcr_degamma", + "VK_QCOM_filter_cubic_clamp", + "VK_EXT_attachment_feedback_loop_dynamic_state", + "VK_KHR_vertex_attribute_divisor", + "VK_KHR_load_store_op_none", + "VK_KHR_shader_float_controls2", +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + "VK_QNX_external_memory_screen_buffer", +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + "VK_MSFT_layered_driver", + "VK_KHR_index_type_uint8", + "VK_KHR_line_rasterization", + "VK_KHR_calibrated_timestamps", + "VK_KHR_shader_expect_assume", + "VK_KHR_maintenance6", + "VK_NV_descriptor_pool_overallocation", + "VK_KHR_video_encode_quantization_map", + "VK_NV_raw_access_chains", + "VK_KHR_shader_relaxed_extended_instruction", + "VK_NV_command_buffer_inheritance", + "VK_KHR_maintenance7", + "VK_NV_shader_atomic_float16_vector", + "VK_EXT_shader_replicated_composites", + "VK_NV_ray_tracing_validation", + "VK_NV_cluster_acceleration_structure", + "VK_NV_partitioned_acceleration_structure", + "VK_EXT_device_generated_commands", + "VK_KHR_maintenance8", + "VK_MESA_image_alignment_control", + "VK_EXT_depth_clamp_control", + "VK_KHR_video_maintenance2", + "VK_HUAWEI_hdr_vivid", + "VK_NV_cooperative_matrix2", + "VK_ARM_pipeline_opacity_micromap", +#if defined( VK_USE_PLATFORM_METAL_EXT ) + "VK_EXT_external_memory_metal", +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + "VK_KHR_depth_clamp_zero_one", + "VK_EXT_vertex_attribute_robustness" + }; + return deviceExtensions; + } + + VULKAN_HPP_INLINE std::set const & getInstanceExtensions() + { + static const std::set instanceExtensions = { + "VK_KHR_surface", + "VK_KHR_display", +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + "VK_KHR_xlib_surface", +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + "VK_KHR_xcb_surface", +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + "VK_KHR_wayland_surface", +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + "VK_KHR_android_surface", +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + "VK_KHR_win32_surface", +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + "VK_EXT_debug_report", +#if defined( VK_USE_PLATFORM_GGP ) + "VK_GGP_stream_descriptor_surface", +#endif /*VK_USE_PLATFORM_GGP*/ + "VK_NV_external_memory_capabilities", + "VK_KHR_get_physical_device_properties2", + "VK_EXT_validation_flags", +#if defined( VK_USE_PLATFORM_VI_NN ) + "VK_NN_vi_surface", +#endif /*VK_USE_PLATFORM_VI_NN*/ + "VK_KHR_device_group_creation", + 
"VK_KHR_external_memory_capabilities", + "VK_KHR_external_semaphore_capabilities", + "VK_EXT_direct_mode_display", +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + "VK_EXT_acquire_xlib_display", +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + "VK_EXT_display_surface_counter", + "VK_EXT_swapchain_colorspace", + "VK_KHR_external_fence_capabilities", + "VK_KHR_get_surface_capabilities2", + "VK_KHR_get_display_properties2", +#if defined( VK_USE_PLATFORM_IOS_MVK ) + "VK_MVK_ios_surface", +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + "VK_MVK_macos_surface", +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + "VK_EXT_debug_utils", +#if defined( VK_USE_PLATFORM_FUCHSIA ) + "VK_FUCHSIA_imagepipe_surface", +#endif /*VK_USE_PLATFORM_FUCHSIA*/ +#if defined( VK_USE_PLATFORM_METAL_EXT ) + "VK_EXT_metal_surface", +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + "VK_KHR_surface_protected_capabilities", + "VK_EXT_validation_features", + "VK_EXT_headless_surface", + "VK_EXT_surface_maintenance1", + "VK_EXT_acquire_drm_display", +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + "VK_EXT_directfb_surface", +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + "VK_QNX_screen_surface", +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + "VK_KHR_portability_enumeration", + "VK_GOOGLE_surfaceless_query", + "VK_LUNARG_direct_driver_loading", + "VK_EXT_layer_settings", + "VK_NV_display_stereo" + }; + return instanceExtensions; + } + + VULKAN_HPP_INLINE std::map>> const & getExtensionDepends( std::string const & extension ) + { + static const std::map>> noDependencies; + static const std::map>>> dependencies = { + { "VK_KHR_swapchain", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, + { "VK_KHR_display", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, + { "VK_KHR_display_swapchain", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + "VK_KHR_display", + } } } } }, +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + { "VK_KHR_xlib_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + { "VK_KHR_xcb_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + { "VK_KHR_wayland_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + { "VK_KHR_android_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_KHR_win32_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + { "VK_EXT_debug_marker", + { { "VK_VERSION_1_0", + { { + "VK_EXT_debug_report", + } } } } }, + { "VK_KHR_video_queue", + { { "VK_VERSION_1_1", + { { + "VK_KHR_synchronization2", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_KHR_video_decode_queue", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_queue", + "VK_KHR_synchronization2", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_EXT_transform_feedback", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_video_encode_h264", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_encode_queue", + } } } } }, + { "VK_KHR_video_encode_h265", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_encode_queue", + } } } } }, + { 
"VK_KHR_video_decode_h264", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_decode_queue", + } } } } }, + { "VK_AMD_texture_gather_bias_lod", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_dynamic_rendering", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_depth_stencil_resolve", + } } }, + { "VK_VERSION_1_2", { {} } } } }, +#if defined( VK_USE_PLATFORM_GGP ) + { "VK_GGP_stream_descriptor_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_GGP*/ + { "VK_NV_corner_sampled_image", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_multiview", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_external_memory", + { { "VK_VERSION_1_0", + { { + "VK_NV_external_memory_capabilities", + } } } } }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_NV_external_memory_win32", + { { "VK_VERSION_1_0", + { { + "VK_NV_external_memory", + } } } } }, + { "VK_NV_win32_keyed_mutex", + { { "VK_VERSION_1_0", + { { + "VK_NV_external_memory_win32", + } } } } }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + { "VK_KHR_device_group", + { { "VK_VERSION_1_0", + { { + "VK_KHR_device_group_creation", + } } } } }, +#if defined( VK_USE_PLATFORM_VI_NN ) + { "VK_NN_vi_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_VI_NN*/ + { "VK_EXT_texture_compression_astc_hdr", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_astc_decode_mode", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_pipeline_robustness", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_external_memory_capabilities", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_external_memory", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory_capabilities", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_KHR_external_memory_win32", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + { "VK_KHR_external_memory_fd", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_KHR_win32_keyed_mutex", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory_win32", + } } } } }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + { "VK_KHR_external_semaphore_capabilities", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_external_semaphore", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_semaphore_capabilities", + } } } } }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_KHR_external_semaphore_win32", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_semaphore", + } } } } }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + { "VK_KHR_external_semaphore_fd", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_semaphore", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + 
{ "VK_KHR_push_descriptor", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_conditional_rendering", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shader_float16_int8", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_16bit_storage", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_storage_buffer_storage_class", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_incremental_present", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + } } } } }, + { "VK_EXT_direct_mode_display", + { { "VK_VERSION_1_0", + { { + "VK_KHR_display", + } } } } }, +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + { "VK_EXT_acquire_xlib_display", + { { "VK_VERSION_1_0", + { { + "VK_EXT_direct_mode_display", + } } } } }, +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + { "VK_EXT_display_surface_counter", + { { "VK_VERSION_1_0", + { { + "VK_KHR_display", + } } } } }, + { "VK_EXT_display_control", + { { "VK_VERSION_1_0", + { { + "VK_EXT_display_surface_counter", + "VK_KHR_swapchain", + } } } } }, + { "VK_GOOGLE_display_timing", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + } } } } }, + { "VK_NVX_multiview_per_view_attributes", + { { "VK_VERSION_1_0", + { { + "VK_KHR_multiview", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_discard_rectangles", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_conservative_rasterization", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_depth_clip_enable", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_swapchain_colorspace", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, + { "VK_EXT_hdr_metadata", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + } } } } }, + { "VK_KHR_imageless_framebuffer", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_maintenance2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_image_format_list", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_KHR_create_renderpass2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_multiview", + "VK_KHR_maintenance2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_IMG_relaxed_line_rasterization", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shared_presentable_image", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + "VK_KHR_get_surface_capabilities2", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_swapchain", + "VK_KHR_get_surface_capabilities2", + } } } } }, + { "VK_KHR_external_fence_capabilities", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_external_fence", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_fence_capabilities", + } } } } }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_KHR_external_fence_win32", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_fence", + } } } } }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + { "VK_KHR_external_fence_fd", + { { "VK_VERSION_1_0", + { { + 
"VK_KHR_external_fence", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_performance_query", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_get_surface_capabilities2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, + { "VK_KHR_variable_pointers", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_storage_buffer_storage_class", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_get_display_properties2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_display", + } } } } }, +#if defined( VK_USE_PLATFORM_IOS_MVK ) + { "VK_MVK_ios_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + { "VK_MVK_macos_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + { "VK_EXT_external_memory_dma_buf", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory_fd", + } } } } }, + { "VK_EXT_queue_family_foreign", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_dedicated_allocation", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_memory_requirements2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + { "VK_ANDROID_external_memory_android_hardware_buffer", + { { "VK_VERSION_1_0", + { { + "VK_KHR_sampler_ycbcr_conversion", + "VK_KHR_external_memory", + "VK_KHR_dedicated_allocation", + } } }, + { "VK_VERSION_1_1", + { { + "VK_EXT_queue_family_foreign", + } } } } }, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + { "VK_EXT_sampler_filter_minmax", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + { "VK_AMDX_shader_enqueue", + { { "VK_VERSION_1_0", + { { + "VK_KHR_synchronization2", + "VK_KHR_spirv_1_4", + "VK_EXT_extended_dynamic_state", + } } }, + { "VK_VERSION_1_3", + { { + "VK_KHR_maintenance5", + "VK_KHR_pipeline_library", + } } } } }, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + { "VK_EXT_inline_uniform_block", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_maintenance1", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_sample_locations", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_blend_operation_advanced", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_acceleration_structure", + { { "VK_VERSION_1_1", + { { + "VK_EXT_descriptor_indexing", + "VK_KHR_buffer_device_address", + } } }, + { "VK_VERSION_1_2", + { { + "VK_KHR_deferred_host_operations", + } } } } }, + { "VK_KHR_ray_tracing_pipeline", + { { "VK_VERSION_1_0", + { { + "VK_KHR_spirv_1_4", + "VK_KHR_acceleration_structure", + } } } } }, + { "VK_KHR_ray_query", + { { "VK_VERSION_1_0", + { { + "VK_KHR_spirv_1_4", + "VK_KHR_acceleration_structure", + } } } } }, + { "VK_NV_shader_sm_builtins", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_sampler_ycbcr_conversion", + { { "VK_VERSION_1_0", + { { + "VK_KHR_maintenance1", + "VK_KHR_bind_memory2", + "VK_KHR_get_memory_requirements2", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_image_drm_format_modifier", + { { "VK_VERSION_1_0", + { { + 
"VK_KHR_bind_memory2", + "VK_KHR_get_physical_device_properties2", + "VK_KHR_sampler_ycbcr_conversion", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_image_format_list", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_EXT_descriptor_indexing", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_maintenance3", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + { "VK_KHR_portability_subset", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + { "VK_NV_shading_rate_image", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_ray_tracing", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_get_memory_requirements2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_representative_fragment_test", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_maintenance3", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shader_subgroup_extended_types", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_8bit_storage", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_storage_buffer_storage_class", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_external_memory_host", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shader_atomic_int64", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shader_clock", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_calibrated_timestamps", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_AMD_shader_core_properties", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_video_decode_h265", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_decode_queue", + } } } } }, + { "VK_KHR_global_priority", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_vertex_attribute_divisor", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_USE_PLATFORM_GGP ) + { "VK_GGP_frame_token", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + "VK_GGP_stream_descriptor_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_GGP*/ + { "VK_KHR_driver_properties", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shader_float_controls", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_shader_subgroup_partitioned", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_depth_stencil_resolve", + { { "VK_VERSION_1_0", + { { + "VK_KHR_create_renderpass2", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_KHR_swapchain_mutable_format", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + "VK_KHR_maintenance2", + 
"VK_KHR_image_format_list", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_swapchain", + "VK_KHR_image_format_list", + } } }, + { "VK_VERSION_1_2", + { { + "VK_KHR_swapchain", + } } } } }, + { "VK_NV_compute_shader_derivatives", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_mesh_shader", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_fragment_shader_barycentric", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_shader_image_footprint", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_scissor_exclusive", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_device_diagnostic_checkpoints", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_timeline_semaphore", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_INTEL_shader_integer_functions2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_vulkan_memory_model", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_pci_bus_info", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_AMD_display_native_hdr", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_get_surface_capabilities2", + "VK_KHR_swapchain", + } } } } }, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + { "VK_FUCHSIA_imagepipe_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + { "VK_KHR_shader_terminate_invocation", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_USE_PLATFORM_METAL_EXT ) + { "VK_EXT_metal_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + { "VK_EXT_fragment_density_map", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_scalar_block_layout", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_subgroup_size_control", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_fragment_shading_rate", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_create_renderpass2", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_AMD_shader_core_properties2", + { { "VK_VERSION_1_0", + { { + "VK_AMD_shader_core_properties", + } } } } }, + { "VK_AMD_device_coherent_memory", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_dynamic_rendering_local_read", + { { "VK_VERSION_1_0", + { { + "VK_KHR_dynamic_rendering", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_EXT_shader_image_atomic_int64", + { { "VK_VERSION_1_0", + { { + 
"VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shader_quad_control", + { { "VK_VERSION_1_1", + { { + "VK_KHR_vulkan_memory_model", + "VK_KHR_shader_maximal_reconvergence", + } } } } }, + { "VK_KHR_spirv_1_4", + { { "VK_VERSION_1_1", + { { + "VK_KHR_shader_float_controls", + } } } } }, + { "VK_EXT_memory_budget", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_memory_priority", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_surface_protected_capabilities", + { { "VK_VERSION_1_1", + { { + "VK_KHR_get_surface_capabilities2", + } } } } }, + { "VK_NV_dedicated_allocation_image_aliasing", + { { "VK_VERSION_1_0", + { { + "VK_KHR_dedicated_allocation", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_separate_depth_stencil_layouts", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_create_renderpass2", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_EXT_buffer_device_address", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_present_wait", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + "VK_KHR_present_id", + } } } } }, + { "VK_NV_cooperative_matrix", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_coverage_reduction_mode", + { { "VK_VERSION_1_0", + { { + "VK_NV_framebuffer_mixed_samples", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_fragment_shader_interlock", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_ycbcr_image_arrays", + { { "VK_VERSION_1_0", + { { + "VK_KHR_sampler_ycbcr_conversion", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_uniform_buffer_standard_layout", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_provoking_vertex", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_EXT_full_screen_exclusive", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_surface", + "VK_KHR_get_surface_capabilities2", + "VK_KHR_swapchain", + } } } } }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + { "VK_EXT_headless_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, + { "VK_KHR_buffer_device_address", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + "VK_KHR_device_group", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_line_rasterization", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_shader_atomic_float", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_host_query_reset", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_index_type_uint8", + { { "VK_VERSION_1_0", + { { + 
"VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_extended_dynamic_state", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_pipeline_executable_properties", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_host_image_copy", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_copy_commands2", + "VK_KHR_format_feature_flags2", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_EXT_map_memory_placed", + { { "VK_VERSION_1_0", + { { + "VK_KHR_map_memory2", + } } }, + { "VK_VERSION_1_4", { {} } } } }, + { "VK_EXT_shader_atomic_float2", + { { "VK_VERSION_1_0", + { { + "VK_EXT_shader_atomic_float", + } } } } }, + { "VK_EXT_surface_maintenance1", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + "VK_KHR_get_surface_capabilities2", + } } } } }, + { "VK_EXT_swapchain_maintenance1", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + "VK_EXT_surface_maintenance1", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_shader_demote_to_helper_invocation", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_device_generated_commands", + { { "VK_VERSION_1_1", + { { + "VK_KHR_buffer_device_address", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_NV_inherited_viewport_scissor", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shader_integer_dot_product", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_texel_buffer_alignment", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_depth_bias_control", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_device_memory_report", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_acquire_drm_display", + { { "VK_VERSION_1_0", + { { + "VK_EXT_direct_mode_display", + } } } } }, + { "VK_EXT_robustness2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_custom_border_color", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_present_barrier", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_surface", + "VK_KHR_get_surface_capabilities2", + "VK_KHR_swapchain", + } } } } }, + { "VK_KHR_present_id", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_private_data", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_pipeline_creation_cache_control", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_video_encode_queue", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_queue", + 
"VK_KHR_synchronization2", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_NV_device_diagnostics_config", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_synchronization2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_descriptor_buffer", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_buffer_device_address", + "VK_EXT_descriptor_indexing", + } } }, + { "VK_VERSION_1_2", + { { + "VK_KHR_synchronization2", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_EXT_graphics_pipeline_library", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_pipeline_library", + } } } } }, + { "VK_AMD_shader_early_and_late_fragment_tests", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_fragment_shader_barycentric", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shader_subgroup_uniform_control_flow", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_zero_initialize_workgroup_memory", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_fragment_shading_rate_enums", + { { "VK_VERSION_1_0", + { { + "VK_KHR_fragment_shading_rate", + } } } } }, + { "VK_NV_ray_tracing_motion_blur", + { { "VK_VERSION_1_0", + { { + "VK_KHR_ray_tracing_pipeline", + } } } } }, + { "VK_EXT_mesh_shader", + { { "VK_VERSION_1_0", + { { + "VK_KHR_spirv_1_4", + } } } } }, + { "VK_EXT_ycbcr_2plane_444_formats", + { { "VK_VERSION_1_0", + { { + "VK_KHR_sampler_ycbcr_conversion", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_fragment_density_map2", + { { "VK_VERSION_1_0", + { { + "VK_EXT_fragment_density_map", + } } } } }, + { "VK_QCOM_rotated_copy_commands", + { { "VK_VERSION_1_0", + { { + "VK_KHR_copy_commands2", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_EXT_image_robustness", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_workgroup_memory_explicit_layout", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_copy_commands2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_image_compression_control", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_attachment_feedback_loop_layout", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_4444_formats", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_device_fault", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_ARM_rasterization_order_attachment_access", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_rgba10x6_formats", + { { "VK_VERSION_1_0", + { { + "VK_KHR_sampler_ycbcr_conversion", + } 
} }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_NV_acquire_winrt_display", + { { "VK_VERSION_1_0", + { { + "VK_EXT_direct_mode_display", + } } } } }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + { "VK_EXT_directfb_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + { "VK_VALVE_mutable_descriptor_type", + { { "VK_VERSION_1_0", + { { + "VK_KHR_maintenance3", + } } } } }, + { "VK_EXT_vertex_input_dynamic_state", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_physical_device_drm", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_device_address_binding_report", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_EXT_debug_utils", + } } } } }, + { "VK_EXT_depth_clip_control", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_primitive_topology_list_restart", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_format_feature_flags2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_present_mode_fifo_latest_ready", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + } } } } }, +#if defined( VK_USE_PLATFORM_FUCHSIA ) + { "VK_FUCHSIA_external_memory", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory_capabilities", + "VK_KHR_external_memory", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_FUCHSIA_external_semaphore", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_semaphore_capabilities", + "VK_KHR_external_semaphore", + } } } } }, + { "VK_FUCHSIA_buffer_collection", + { { "VK_VERSION_1_0", + { { + "VK_FUCHSIA_external_memory", + "VK_KHR_sampler_ycbcr_conversion", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + { "VK_HUAWEI_subpass_shading", + { { "VK_VERSION_1_0", + { { + "VK_KHR_create_renderpass2", + } } }, + { "VK_VERSION_1_2", + { { + "VK_KHR_synchronization2", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_HUAWEI_invocation_mask", + { { "VK_VERSION_1_0", + { { + "VK_KHR_ray_tracing_pipeline", + "VK_KHR_synchronization2", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_NV_external_memory_rdma", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_pipeline_properties", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_multisampled_render_to_single_sampled", + { { "VK_VERSION_1_0", + { { + "VK_KHR_create_renderpass2", + "VK_KHR_depth_stencil_resolve", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_EXT_extended_dynamic_state2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + { "VK_QNX_screen_surface", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + { "VK_EXT_color_write_enable", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { 
"VK_EXT_primitives_generated_query", + { { "VK_VERSION_1_0", + { { + "VK_EXT_transform_feedback", + } } } } }, + { "VK_KHR_ray_tracing_maintenance1", + { { "VK_VERSION_1_0", + { { + "VK_KHR_acceleration_structure", + } } } } }, + { "VK_EXT_global_priority_query", + { { "VK_VERSION_1_0", + { { + "VK_EXT_global_priority", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_image_view_min_lod", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_multi_draw", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_image_2d_view_of_3d", + { { "VK_VERSION_1_0", + { { + "VK_KHR_maintenance1", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_shader_tile_image", { { "VK_VERSION_1_3", { {} } } } }, + { "VK_EXT_opacity_micromap", + { { "VK_VERSION_1_0", + { { + "VK_KHR_acceleration_structure", + "VK_KHR_synchronization2", + } } }, + { "VK_VERSION_1_3", { {} } } } }, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + { "VK_NV_displacement_micromap", + { { "VK_VERSION_1_0", + { { + "VK_EXT_opacity_micromap", + } } } } }, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + { "VK_HUAWEI_cluster_culling_shader", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_border_color_swizzle", + { { "VK_VERSION_1_0", + { { + "VK_EXT_custom_border_color", + } } } } }, + { "VK_EXT_pageable_device_local_memory", + { { "VK_VERSION_1_0", + { { + "VK_EXT_memory_priority", + } } } } }, + { "VK_KHR_maintenance4", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_ARM_shader_core_properties", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_ARM_scheduling_controls", + { { "VK_VERSION_1_0", + { { + "VK_ARM_shader_core_builtins", + } } } } }, + { "VK_EXT_image_sliced_view_of_3d", + { { "VK_VERSION_1_0", + { { + "VK_KHR_maintenance1", + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_VALVE_descriptor_set_host_mapping", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_depth_clamp_zero_one", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_non_seamless_cube_map", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_ARM_render_pass_striped", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_synchronization2", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_QCOM_fragment_density_map_offset", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_EXT_fragment_density_map", + } } } } }, + { "VK_NV_copy_memory_indirect", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_buffer_device_address", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_NV_memory_decompression", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_buffer_device_address", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_NV_device_generated_commands_compute", + { { "VK_VERSION_1_0", + { { + 
"VK_NV_device_generated_commands", + } } } } }, + { "VK_NV_ray_tracing_linear_swept_spheres", + { { "VK_VERSION_1_0", + { { + "VK_KHR_ray_tracing_pipeline", + } } } } }, + { "VK_NV_linear_color_attachment", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_GOOGLE_surfaceless_query", + { { "VK_VERSION_1_0", + { { + "VK_KHR_surface", + } } } } }, + { "VK_KHR_shader_maximal_reconvergence", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_image_compression_control_swapchain", + { { "VK_VERSION_1_0", + { { + "VK_EXT_image_compression_control", + } } } } }, + { "VK_QCOM_image_processing", + { { "VK_VERSION_1_0", + { { + "VK_KHR_format_feature_flags2", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_EXT_nested_command_buffer", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_external_memory_acquire_unmodified", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_extended_dynamic_state3", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_subpass_merge_feedback", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_shader_module_identifier", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_EXT_pipeline_creation_cache_control", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_EXT_rasterization_order_attachment_access", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_optical_flow", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_format_feature_flags2", + "VK_KHR_synchronization2", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_EXT_legacy_dithering", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_pipeline_protected_access", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + { "VK_ANDROID_external_format_resolve", + { { "VK_VERSION_1_0", + { { + "VK_ANDROID_external_memory_android_hardware_buffer", + } } } } }, +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + { "VK_KHR_maintenance5", + { { "VK_VERSION_1_1", + { { + "VK_KHR_dynamic_rendering", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_KHR_ray_tracing_position_fetch", + { { "VK_VERSION_1_0", + { { + "VK_KHR_acceleration_structure", + } } } } }, + { "VK_EXT_shader_object", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_dynamic_rendering", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_KHR_pipeline_binary", + { { "VK_VERSION_1_0", + { { + "VK_KHR_maintenance5", + } } } } }, + { "VK_QCOM_tile_properties", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_SEC_amigo_profiling", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_QCOM_multiview_per_view_viewports", + { { "VK_VERSION_1_0", + { { + 
"VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_ray_tracing_invocation_reorder", + { { "VK_VERSION_1_0", + { { + "VK_KHR_ray_tracing_pipeline", + } } } } }, + { "VK_EXT_mutable_descriptor_type", + { { "VK_VERSION_1_0", + { { + "VK_KHR_maintenance3", + } } } } }, + { "VK_EXT_legacy_vertex_attributes", + { { "VK_VERSION_1_0", + { { + "VK_EXT_vertex_input_dynamic_state", + } } } } }, + { "VK_ARM_shader_core_builtins", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_pipeline_library_group_handles", + { { "VK_VERSION_1_0", + { { + "VK_KHR_ray_tracing_pipeline", + "VK_KHR_pipeline_library", + } } } } }, + { "VK_EXT_dynamic_rendering_unused_attachments", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_dynamic_rendering", + } } }, + { "VK_VERSION_1_3", { {} } } } }, + { "VK_NV_low_latency2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_timeline_semaphore", + } } }, + { "VK_VERSION_1_2", { {} } } } }, + { "VK_KHR_cooperative_matrix", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_compute_shader_derivatives", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, + { "VK_KHR_video_decode_av1", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_decode_queue", + } } } } }, + { "VK_KHR_video_encode_av1", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_encode_queue", + } } } } }, + { "VK_KHR_video_maintenance1", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_queue", + } } } } }, + { "VK_NV_per_stage_descriptor_set", + { { "VK_VERSION_1_0", + { { + "VK_KHR_maintenance6", + } } }, + { "VK_VERSION_1_4", { {} } } } }, + { "VK_QCOM_image_processing2", + { { "VK_VERSION_1_0", + { { + "VK_QCOM_image_processing", + } } } } }, + { "VK_QCOM_filter_cubic_weights", + { { "VK_VERSION_1_0", + { { + "VK_EXT_filter_cubic", + } } } } }, + { "VK_QCOM_filter_cubic_clamp", + { { "VK_VERSION_1_0", + { { + "VK_EXT_filter_cubic", + "VK_EXT_sampler_filter_minmax", + } } }, + { "VK_VERSION_1_2", + { { + "VK_EXT_filter_cubic", + } } } } }, + { "VK_EXT_attachment_feedback_loop_dynamic_state", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_EXT_attachment_feedback_loop_layout", + } } } } }, + { "VK_KHR_vertex_attribute_divisor", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shader_float_controls2", + { { "VK_VERSION_1_1", + { { + "VK_KHR_shader_float_controls", + } } } } }, +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + { "VK_QNX_external_memory_screen_buffer", + { { "VK_VERSION_1_0", + { { + "VK_KHR_sampler_ycbcr_conversion", + "VK_KHR_external_memory", + "VK_KHR_dedicated_allocation", + } } }, + { "VK_VERSION_1_1", + { { + "VK_EXT_queue_family_foreign", + } } } } }, +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + { "VK_MSFT_layered_driver", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_index_type_uint8", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_line_rasterization", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { 
"VK_KHR_calibrated_timestamps", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_shader_expect_assume", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_maintenance6", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_descriptor_pool_overallocation", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_display_stereo", + { { "VK_VERSION_1_0", + { { + "VK_KHR_display", + "VK_KHR_get_display_properties2", + } } } } }, + { "VK_KHR_video_encode_quantization_map", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_encode_queue", + "VK_KHR_format_feature_flags2", + } } } } }, + { "VK_KHR_maintenance7", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_cluster_acceleration_structure", + { { "VK_VERSION_1_0", + { { + "VK_KHR_acceleration_structure", + } } } } }, + { "VK_NV_partitioned_acceleration_structure", + { { "VK_VERSION_1_0", + { { + "VK_KHR_acceleration_structure", + } } } } }, + { "VK_EXT_device_generated_commands", + { { "VK_VERSION_1_0", + { { + "VK_KHR_buffer_device_address", + "VK_KHR_maintenance5", + } } } } }, + { "VK_KHR_maintenance8", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_MESA_image_alignment_control", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_depth_clamp_control", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_video_maintenance2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_decode_queue", + "VK_KHR_video_encode_queue", + } } } } }, + { "VK_HUAWEI_hdr_vivid", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_swapchain", + "VK_EXT_hdr_metadata", + } } } } }, + { "VK_NV_cooperative_matrix2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_cooperative_matrix", + } } } } }, + { "VK_ARM_pipeline_opacity_micromap", + { { "VK_VERSION_1_0", + { { + "VK_EXT_opacity_micromap", + } } } } }, +#if defined( VK_USE_PLATFORM_METAL_EXT ) + { "VK_EXT_external_memory_metal", + { { "VK_VERSION_1_0", + { { + "VK_KHR_external_memory", + } } }, + { "VK_VERSION_1_1", { {} } } } }, +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + { "VK_KHR_depth_clamp_zero_one", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_vertex_attribute_robustness", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } } + }; + auto depIt = dependencies.find( extension ); + return ( depIt != dependencies.end() ) ? 
depIt->second : noDependencies; + } + + VULKAN_HPP_INLINE std::pair<bool, std::vector<std::vector<std::string>> const &> getExtensionDepends( std::string const & version, + std::string const & extension ) + { +#if !defined( NDEBUG ) + static std::set<std::string> versions = { "VK_VERSION_1_0", "VK_VERSION_1_1", "VK_VERSION_1_2", "VK_VERSION_1_3", "VK_VERSION_1_4" }; + assert( versions.find( version ) != versions.end() ); +#endif + static std::vector<std::vector<std::string>> noDependencies; + + std::map<std::string, std::vector<std::vector<std::string>>> const & dependencies = getExtensionDepends( extension ); + if ( dependencies.empty() ) + { + return { true, noDependencies }; + } + auto depIt = dependencies.lower_bound( version ); + if ( ( depIt == dependencies.end() ) || ( depIt->first != version ) ) + { + depIt = std::prev( depIt ); + } + if ( depIt == dependencies.end() ) + { + return { false, noDependencies }; + } + else + { + return { true, depIt->second }; + } + } + + VULKAN_HPP_INLINE std::map<std::string, std::string> const & getObsoletedExtensions() + { + static const std::map<std::string, std::string> obsoletedExtensions = { { "VK_AMD_negative_viewport_height", "VK_KHR_maintenance1" } }; + return obsoletedExtensions; + } + + VULKAN_HPP_INLINE std::map<std::string, std::string> const & getPromotedExtensions() + { + static const std::map<std::string, std::string> promotedExtensions = { + { "VK_KHR_sampler_mirror_clamp_to_edge", "VK_VERSION_1_2" }, + { "VK_EXT_debug_marker", "VK_EXT_debug_utils" }, + { "VK_AMD_draw_indirect_count", "VK_KHR_draw_indirect_count" }, + { "VK_KHR_dynamic_rendering", "VK_VERSION_1_3" }, + { "VK_KHR_multiview", "VK_VERSION_1_1" }, +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + { "VK_NV_win32_keyed_mutex", "VK_KHR_win32_keyed_mutex" }, +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + { "VK_KHR_get_physical_device_properties2", "VK_VERSION_1_1" }, + { "VK_KHR_device_group", "VK_VERSION_1_1" }, + { "VK_KHR_shader_draw_parameters", "VK_VERSION_1_1" }, + { "VK_EXT_texture_compression_astc_hdr", "VK_VERSION_1_3" }, + { "VK_EXT_pipeline_robustness", "VK_VERSION_1_4" }, + { "VK_KHR_maintenance1", "VK_VERSION_1_1" }, + { "VK_KHR_device_group_creation", "VK_VERSION_1_1" }, + { "VK_KHR_external_memory_capabilities", "VK_VERSION_1_1" }, + { "VK_KHR_external_memory", "VK_VERSION_1_1" }, + { "VK_KHR_external_semaphore_capabilities", "VK_VERSION_1_1" }, + { "VK_KHR_external_semaphore", "VK_VERSION_1_1" }, + { "VK_KHR_push_descriptor", "VK_VERSION_1_4" }, + { "VK_KHR_shader_float16_int8", "VK_VERSION_1_2" }, + { "VK_KHR_16bit_storage", "VK_VERSION_1_1" }, + { "VK_KHR_descriptor_update_template", "VK_VERSION_1_1" }, + { "VK_KHR_imageless_framebuffer", "VK_VERSION_1_2" }, + { "VK_KHR_create_renderpass2", "VK_VERSION_1_2" }, + { "VK_KHR_external_fence_capabilities", "VK_VERSION_1_1" }, + { "VK_KHR_external_fence", "VK_VERSION_1_1" }, + { "VK_KHR_maintenance2", "VK_VERSION_1_1" }, + { "VK_KHR_variable_pointers", "VK_VERSION_1_1" }, + { "VK_KHR_dedicated_allocation", "VK_VERSION_1_1" }, + { "VK_EXT_sampler_filter_minmax", "VK_VERSION_1_2" }, + { "VK_KHR_storage_buffer_storage_class", "VK_VERSION_1_1" }, + { "VK_EXT_inline_uniform_block", "VK_VERSION_1_3" }, + { "VK_KHR_relaxed_block_layout", "VK_VERSION_1_1" }, + { "VK_KHR_get_memory_requirements2", "VK_VERSION_1_1" }, + { "VK_KHR_image_format_list", "VK_VERSION_1_2" }, + { "VK_KHR_sampler_ycbcr_conversion", "VK_VERSION_1_1" }, + { "VK_KHR_bind_memory2", "VK_VERSION_1_1" }, + { "VK_EXT_descriptor_indexing", "VK_VERSION_1_2" }, + { "VK_EXT_shader_viewport_index_layer", "VK_VERSION_1_2" }, + { "VK_KHR_maintenance3", "VK_VERSION_1_1" }, + { "VK_KHR_draw_indirect_count", "VK_VERSION_1_2" }, + { "VK_EXT_global_priority", "VK_KHR_global_priority" }, + {
"VK_KHR_shader_subgroup_extended_types", "VK_VERSION_1_2" }, + { "VK_KHR_8bit_storage", "VK_VERSION_1_2" }, + { "VK_KHR_shader_atomic_int64", "VK_VERSION_1_2" }, + { "VK_EXT_calibrated_timestamps", "VK_KHR_calibrated_timestamps" }, + { "VK_KHR_global_priority", "VK_VERSION_1_4" }, + { "VK_EXT_vertex_attribute_divisor", "VK_KHR_vertex_attribute_divisor" }, + { "VK_EXT_pipeline_creation_feedback", "VK_VERSION_1_3" }, + { "VK_KHR_driver_properties", "VK_VERSION_1_2" }, + { "VK_KHR_shader_float_controls", "VK_VERSION_1_2" }, + { "VK_KHR_depth_stencil_resolve", "VK_VERSION_1_2" }, + { "VK_NV_compute_shader_derivatives", "VK_KHR_compute_shader_derivatives" }, + { "VK_NV_fragment_shader_barycentric", "VK_KHR_fragment_shader_barycentric" }, + { "VK_KHR_timeline_semaphore", "VK_VERSION_1_2" }, + { "VK_KHR_vulkan_memory_model", "VK_VERSION_1_2" }, + { "VK_KHR_shader_terminate_invocation", "VK_VERSION_1_3" }, + { "VK_EXT_scalar_block_layout", "VK_VERSION_1_2" }, + { "VK_EXT_subgroup_size_control", "VK_VERSION_1_3" }, + { "VK_KHR_dynamic_rendering_local_read", "VK_VERSION_1_4" }, + { "VK_KHR_spirv_1_4", "VK_VERSION_1_2" }, + { "VK_KHR_separate_depth_stencil_layouts", "VK_VERSION_1_2" }, + { "VK_EXT_tooling_info", "VK_VERSION_1_3" }, + { "VK_EXT_separate_stencil_usage", "VK_VERSION_1_2" }, + { "VK_KHR_uniform_buffer_standard_layout", "VK_VERSION_1_2" }, + { "VK_KHR_buffer_device_address", "VK_VERSION_1_2" }, + { "VK_EXT_line_rasterization", "VK_KHR_line_rasterization" }, + { "VK_EXT_host_query_reset", "VK_VERSION_1_2" }, + { "VK_EXT_index_type_uint8", "VK_KHR_index_type_uint8" }, + { "VK_EXT_extended_dynamic_state", "VK_VERSION_1_3" }, + { "VK_EXT_host_image_copy", "VK_VERSION_1_4" }, + { "VK_KHR_map_memory2", "VK_VERSION_1_4" }, + { "VK_EXT_shader_demote_to_helper_invocation", "VK_VERSION_1_3" }, + { "VK_KHR_shader_integer_dot_product", "VK_VERSION_1_3" }, + { "VK_EXT_texel_buffer_alignment", "VK_VERSION_1_3" }, + { "VK_KHR_shader_non_semantic_info", "VK_VERSION_1_3" }, + { "VK_EXT_private_data", "VK_VERSION_1_3" }, + { "VK_EXT_pipeline_creation_cache_control", "VK_VERSION_1_3" }, + { "VK_KHR_synchronization2", "VK_VERSION_1_3" }, + { "VK_KHR_zero_initialize_workgroup_memory", "VK_VERSION_1_3" }, + { "VK_EXT_ycbcr_2plane_444_formats", "VK_VERSION_1_3" }, + { "VK_EXT_image_robustness", "VK_VERSION_1_3" }, + { "VK_KHR_copy_commands2", "VK_VERSION_1_3" }, + { "VK_EXT_4444_formats", "VK_VERSION_1_3" }, + { "VK_ARM_rasterization_order_attachment_access", "VK_EXT_rasterization_order_attachment_access" }, + { "VK_VALVE_mutable_descriptor_type", "VK_EXT_mutable_descriptor_type" }, + { "VK_KHR_format_feature_flags2", "VK_VERSION_1_3" }, + { "VK_EXT_extended_dynamic_state2", "VK_VERSION_1_3" }, + { "VK_EXT_global_priority_query", "VK_KHR_global_priority" }, + { "VK_EXT_load_store_op_none", "VK_KHR_load_store_op_none" }, + { "VK_KHR_maintenance4", "VK_VERSION_1_3" }, + { "VK_KHR_shader_subgroup_rotate", "VK_VERSION_1_4" }, + { "VK_EXT_depth_clamp_zero_one", "VK_KHR_depth_clamp_zero_one" }, + { "VK_EXT_pipeline_protected_access", "VK_VERSION_1_4" }, + { "VK_KHR_maintenance5", "VK_VERSION_1_4" }, + { "VK_KHR_vertex_attribute_divisor", "VK_VERSION_1_4" }, + { "VK_KHR_load_store_op_none", "VK_VERSION_1_4" }, + { "VK_KHR_shader_float_controls2", "VK_VERSION_1_4" }, + { "VK_KHR_index_type_uint8", "VK_VERSION_1_4" }, + { "VK_KHR_line_rasterization", "VK_VERSION_1_4" }, + { "VK_KHR_shader_expect_assume", "VK_VERSION_1_4" }, + { "VK_KHR_maintenance6", "VK_VERSION_1_4" } + }; + return promotedExtensions; + } + + 
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string getExtensionDeprecatedBy( std::string const & extension ) + { + if ( extension == "VK_EXT_debug_report" ) + { + return "VK_EXT_debug_utils"; + } + if ( extension == "VK_NV_glsl_shader" ) + { + return ""; + } + if ( extension == "VK_NV_dedicated_allocation" ) + { + return "VK_KHR_dedicated_allocation"; + } + if ( extension == "VK_AMD_gpu_shader_half_float" ) + { + return "VK_KHR_shader_float16_int8"; + } + if ( extension == "VK_IMG_format_pvrtc" ) + { + return ""; + } + if ( extension == "VK_NV_external_memory_capabilities" ) + { + return "VK_KHR_external_memory_capabilities"; + } + if ( extension == "VK_NV_external_memory" ) + { + return "VK_KHR_external_memory"; + } +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + if ( extension == "VK_NV_external_memory_win32" ) + { + return "VK_KHR_external_memory_win32"; + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + if ( extension == "VK_EXT_validation_flags" ) + { + return "VK_EXT_layer_settings"; + } + if ( extension == "VK_EXT_shader_subgroup_ballot" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_shader_subgroup_vote" ) + { + return "VK_VERSION_1_1"; + } +#if defined( VK_USE_PLATFORM_IOS_MVK ) + if ( extension == "VK_MVK_ios_surface" ) + { + return "VK_EXT_metal_surface"; + } +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + if ( extension == "VK_MVK_macos_surface" ) + { + return "VK_EXT_metal_surface"; + } +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + if ( extension == "VK_AMD_gpu_shader_int16" ) + { + return "VK_KHR_shader_float16_int8"; + } + if ( extension == "VK_NV_ray_tracing" ) + { + return "VK_KHR_ray_tracing_pipeline"; + } + if ( extension == "VK_EXT_buffer_device_address" ) + { + return "VK_KHR_buffer_device_address"; + } + if ( extension == "VK_EXT_validation_features" ) + { + return "VK_EXT_layer_settings"; + } + return ""; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string getExtensionObsoletedBy( std::string const & extension ) + { + if ( extension == "VK_AMD_negative_viewport_height" ) + { + return "VK_KHR_maintenance1"; + } + return ""; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string getExtensionPromotedTo( std::string const & extension ) + { + if ( extension == "VK_KHR_sampler_mirror_clamp_to_edge" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_debug_marker" ) + { + return "VK_EXT_debug_utils"; + } + if ( extension == "VK_AMD_draw_indirect_count" ) + { + return "VK_KHR_draw_indirect_count"; + } + if ( extension == "VK_KHR_dynamic_rendering" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_multiview" ) + { + return "VK_VERSION_1_1"; + } +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + if ( extension == "VK_NV_win32_keyed_mutex" ) + { + return "VK_KHR_win32_keyed_mutex"; + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + if ( extension == "VK_KHR_get_physical_device_properties2" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_device_group" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_shader_draw_parameters" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_EXT_texture_compression_astc_hdr" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_pipeline_robustness" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_maintenance1" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_device_group_creation" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_external_memory_capabilities" ) + { + return 
"VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_external_memory" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_external_semaphore_capabilities" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_external_semaphore" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_push_descriptor" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_shader_float16_int8" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_16bit_storage" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_descriptor_update_template" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_imageless_framebuffer" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_create_renderpass2" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_external_fence_capabilities" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_external_fence" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_maintenance2" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_variable_pointers" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_dedicated_allocation" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_EXT_sampler_filter_minmax" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_storage_buffer_storage_class" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_EXT_inline_uniform_block" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_relaxed_block_layout" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_get_memory_requirements2" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_image_format_list" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_sampler_ycbcr_conversion" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_bind_memory2" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_EXT_descriptor_indexing" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_shader_viewport_index_layer" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_maintenance3" ) + { + return "VK_VERSION_1_1"; + } + if ( extension == "VK_KHR_draw_indirect_count" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_global_priority" ) + { + return "VK_KHR_global_priority"; + } + if ( extension == "VK_KHR_shader_subgroup_extended_types" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_8bit_storage" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_shader_atomic_int64" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_calibrated_timestamps" ) + { + return "VK_KHR_calibrated_timestamps"; + } + if ( extension == "VK_KHR_global_priority" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_EXT_vertex_attribute_divisor" ) + { + return "VK_KHR_vertex_attribute_divisor"; + } + if ( extension == "VK_EXT_pipeline_creation_feedback" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_driver_properties" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_shader_float_controls" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_depth_stencil_resolve" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_NV_compute_shader_derivatives" ) + { + return "VK_KHR_compute_shader_derivatives"; + } + if ( extension == "VK_NV_fragment_shader_barycentric" ) + { + return "VK_KHR_fragment_shader_barycentric"; + } + if ( extension == "VK_KHR_timeline_semaphore" ) + { + 
return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_vulkan_memory_model" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_shader_terminate_invocation" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_scalar_block_layout" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_subgroup_size_control" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_dynamic_rendering_local_read" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_spirv_1_4" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_separate_depth_stencil_layouts" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_tooling_info" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_separate_stencil_usage" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_uniform_buffer_standard_layout" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_KHR_buffer_device_address" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_line_rasterization" ) + { + return "VK_KHR_line_rasterization"; + } + if ( extension == "VK_EXT_host_query_reset" ) + { + return "VK_VERSION_1_2"; + } + if ( extension == "VK_EXT_index_type_uint8" ) + { + return "VK_KHR_index_type_uint8"; + } + if ( extension == "VK_EXT_extended_dynamic_state" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_host_image_copy" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_map_memory2" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_EXT_shader_demote_to_helper_invocation" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_shader_integer_dot_product" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_texel_buffer_alignment" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_shader_non_semantic_info" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_private_data" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_pipeline_creation_cache_control" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_synchronization2" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_zero_initialize_workgroup_memory" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_ycbcr_2plane_444_formats" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_image_robustness" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_copy_commands2" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_4444_formats" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_ARM_rasterization_order_attachment_access" ) + { + return "VK_EXT_rasterization_order_attachment_access"; + } + if ( extension == "VK_VALVE_mutable_descriptor_type" ) + { + return "VK_EXT_mutable_descriptor_type"; + } + if ( extension == "VK_KHR_format_feature_flags2" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_extended_dynamic_state2" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_EXT_global_priority_query" ) + { + return "VK_KHR_global_priority"; + } + if ( extension == "VK_EXT_load_store_op_none" ) + { + return "VK_KHR_load_store_op_none"; + } + if ( extension == "VK_KHR_maintenance4" ) + { + return "VK_VERSION_1_3"; + } + if ( extension == "VK_KHR_shader_subgroup_rotate" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_EXT_depth_clamp_zero_one" ) + { + return "VK_KHR_depth_clamp_zero_one"; + } + if ( extension == "VK_EXT_pipeline_protected_access" ) + { + return 
"VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_maintenance5" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_vertex_attribute_divisor" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_load_store_op_none" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_shader_float_controls2" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_index_type_uint8" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_line_rasterization" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_shader_expect_assume" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_maintenance6" ) + { + return "VK_VERSION_1_4"; + } + return ""; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isDeprecatedExtension( std::string const & extension ) + { + return ( extension == "VK_EXT_debug_report" ) || ( extension == "VK_NV_glsl_shader" ) || ( extension == "VK_NV_dedicated_allocation" ) || + ( extension == "VK_AMD_gpu_shader_half_float" ) || ( extension == "VK_IMG_format_pvrtc" ) || ( extension == "VK_NV_external_memory_capabilities" ) || + ( extension == "VK_NV_external_memory" ) || +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + ( extension == "VK_NV_external_memory_win32" ) || +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + ( extension == "VK_EXT_validation_flags" ) || ( extension == "VK_EXT_shader_subgroup_ballot" ) || ( extension == "VK_EXT_shader_subgroup_vote" ) || +#if defined( VK_USE_PLATFORM_IOS_MVK ) + ( extension == "VK_MVK_ios_surface" ) || +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + ( extension == "VK_MVK_macos_surface" ) || +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + ( extension == "VK_AMD_gpu_shader_int16" ) || ( extension == "VK_NV_ray_tracing" ) || ( extension == "VK_EXT_buffer_device_address" ) || + ( extension == "VK_EXT_validation_features" ); + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isDeviceExtension( std::string const & extension ) + { + return ( extension == "VK_KHR_swapchain" ) || ( extension == "VK_KHR_display_swapchain" ) || ( extension == "VK_NV_glsl_shader" ) || + ( extension == "VK_EXT_depth_range_unrestricted" ) || ( extension == "VK_KHR_sampler_mirror_clamp_to_edge" ) || + ( extension == "VK_IMG_filter_cubic" ) || ( extension == "VK_AMD_rasterization_order" ) || ( extension == "VK_AMD_shader_trinary_minmax" ) || + ( extension == "VK_AMD_shader_explicit_vertex_parameter" ) || ( extension == "VK_EXT_debug_marker" ) || ( extension == "VK_KHR_video_queue" ) || + ( extension == "VK_KHR_video_decode_queue" ) || ( extension == "VK_AMD_gcn_shader" ) || ( extension == "VK_NV_dedicated_allocation" ) || + ( extension == "VK_EXT_transform_feedback" ) || ( extension == "VK_NVX_binary_import" ) || ( extension == "VK_NVX_image_view_handle" ) || + ( extension == "VK_AMD_draw_indirect_count" ) || ( extension == "VK_AMD_negative_viewport_height" ) || + ( extension == "VK_AMD_gpu_shader_half_float" ) || ( extension == "VK_AMD_shader_ballot" ) || ( extension == "VK_KHR_video_encode_h264" ) || + ( extension == "VK_KHR_video_encode_h265" ) || ( extension == "VK_KHR_video_decode_h264" ) || ( extension == "VK_AMD_texture_gather_bias_lod" ) || + ( extension == "VK_AMD_shader_info" ) || ( extension == "VK_KHR_dynamic_rendering" ) || ( extension == "VK_AMD_shader_image_load_store_lod" ) || + ( extension == "VK_NV_corner_sampled_image" ) || ( extension == "VK_KHR_multiview" ) || ( extension == "VK_IMG_format_pvrtc" ) || + ( extension == "VK_NV_external_memory" ) +#if defined( 
VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_NV_external_memory_win32" ) || ( extension == "VK_NV_win32_keyed_mutex" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_KHR_device_group" ) || ( extension == "VK_KHR_shader_draw_parameters" ) || ( extension == "VK_EXT_shader_subgroup_ballot" ) || + ( extension == "VK_EXT_shader_subgroup_vote" ) || ( extension == "VK_EXT_texture_compression_astc_hdr" ) || + ( extension == "VK_EXT_astc_decode_mode" ) || ( extension == "VK_EXT_pipeline_robustness" ) || ( extension == "VK_KHR_maintenance1" ) || + ( extension == "VK_KHR_external_memory" ) +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_KHR_external_memory_win32" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_KHR_external_memory_fd" ) +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_KHR_win32_keyed_mutex" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_KHR_external_semaphore" ) +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_KHR_external_semaphore_win32" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_KHR_external_semaphore_fd" ) || ( extension == "VK_KHR_push_descriptor" ) || ( extension == "VK_EXT_conditional_rendering" ) || + ( extension == "VK_KHR_shader_float16_int8" ) || ( extension == "VK_KHR_16bit_storage" ) || ( extension == "VK_KHR_incremental_present" ) || + ( extension == "VK_KHR_descriptor_update_template" ) || ( extension == "VK_NV_clip_space_w_scaling" ) || ( extension == "VK_EXT_display_control" ) || + ( extension == "VK_GOOGLE_display_timing" ) || ( extension == "VK_NV_sample_mask_override_coverage" ) || + ( extension == "VK_NV_geometry_shader_passthrough" ) || ( extension == "VK_NV_viewport_array2" ) || + ( extension == "VK_NVX_multiview_per_view_attributes" ) || ( extension == "VK_NV_viewport_swizzle" ) || + ( extension == "VK_EXT_discard_rectangles" ) || ( extension == "VK_EXT_conservative_rasterization" ) || + ( extension == "VK_EXT_depth_clip_enable" ) || ( extension == "VK_EXT_hdr_metadata" ) || ( extension == "VK_KHR_imageless_framebuffer" ) || + ( extension == "VK_KHR_create_renderpass2" ) || ( extension == "VK_IMG_relaxed_line_rasterization" ) || + ( extension == "VK_KHR_shared_presentable_image" ) || ( extension == "VK_KHR_external_fence" ) +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_KHR_external_fence_win32" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_KHR_external_fence_fd" ) || ( extension == "VK_KHR_performance_query" ) || ( extension == "VK_KHR_maintenance2" ) || + ( extension == "VK_KHR_variable_pointers" ) || ( extension == "VK_EXT_external_memory_dma_buf" ) || ( extension == "VK_EXT_queue_family_foreign" ) || + ( extension == "VK_KHR_dedicated_allocation" ) +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + || ( extension == "VK_ANDROID_external_memory_android_hardware_buffer" ) +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + || ( extension == "VK_EXT_sampler_filter_minmax" ) || ( extension == "VK_KHR_storage_buffer_storage_class" ) || + ( extension == "VK_AMD_gpu_shader_int16" ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + || ( extension == "VK_AMDX_shader_enqueue" ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + || ( extension == "VK_AMD_mixed_attachment_samples" ) || ( extension == "VK_AMD_shader_fragment_mask" ) || + ( extension == "VK_EXT_inline_uniform_block" ) || ( extension == "VK_EXT_shader_stencil_export" ) || ( extension == "VK_EXT_sample_locations" ) || + ( extension == "VK_KHR_relaxed_block_layout" ) || ( 
extension == "VK_KHR_get_memory_requirements2" ) || + ( extension == "VK_KHR_image_format_list" ) || ( extension == "VK_EXT_blend_operation_advanced" ) || + ( extension == "VK_NV_fragment_coverage_to_color" ) || ( extension == "VK_KHR_acceleration_structure" ) || + ( extension == "VK_KHR_ray_tracing_pipeline" ) || ( extension == "VK_KHR_ray_query" ) || ( extension == "VK_NV_framebuffer_mixed_samples" ) || + ( extension == "VK_NV_fill_rectangle" ) || ( extension == "VK_NV_shader_sm_builtins" ) || ( extension == "VK_EXT_post_depth_coverage" ) || + ( extension == "VK_KHR_sampler_ycbcr_conversion" ) || ( extension == "VK_KHR_bind_memory2" ) || + ( extension == "VK_EXT_image_drm_format_modifier" ) || ( extension == "VK_EXT_validation_cache" ) || ( extension == "VK_EXT_descriptor_indexing" ) || + ( extension == "VK_EXT_shader_viewport_index_layer" ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + || ( extension == "VK_KHR_portability_subset" ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + || ( extension == "VK_NV_shading_rate_image" ) || ( extension == "VK_NV_ray_tracing" ) || ( extension == "VK_NV_representative_fragment_test" ) || + ( extension == "VK_KHR_maintenance3" ) || ( extension == "VK_KHR_draw_indirect_count" ) || ( extension == "VK_EXT_filter_cubic" ) || + ( extension == "VK_QCOM_render_pass_shader_resolve" ) || ( extension == "VK_EXT_global_priority" ) || + ( extension == "VK_KHR_shader_subgroup_extended_types" ) || ( extension == "VK_KHR_8bit_storage" ) || + ( extension == "VK_EXT_external_memory_host" ) || ( extension == "VK_AMD_buffer_marker" ) || ( extension == "VK_KHR_shader_atomic_int64" ) || + ( extension == "VK_KHR_shader_clock" ) || ( extension == "VK_AMD_pipeline_compiler_control" ) || ( extension == "VK_EXT_calibrated_timestamps" ) || + ( extension == "VK_AMD_shader_core_properties" ) || ( extension == "VK_KHR_video_decode_h265" ) || ( extension == "VK_KHR_global_priority" ) || + ( extension == "VK_AMD_memory_overallocation_behavior" ) || ( extension == "VK_EXT_vertex_attribute_divisor" ) +#if defined( VK_USE_PLATFORM_GGP ) + || ( extension == "VK_GGP_frame_token" ) +#endif /*VK_USE_PLATFORM_GGP*/ + || ( extension == "VK_EXT_pipeline_creation_feedback" ) || ( extension == "VK_KHR_driver_properties" ) || + ( extension == "VK_KHR_shader_float_controls" ) || ( extension == "VK_NV_shader_subgroup_partitioned" ) || + ( extension == "VK_KHR_depth_stencil_resolve" ) || ( extension == "VK_KHR_swapchain_mutable_format" ) || + ( extension == "VK_NV_compute_shader_derivatives" ) || ( extension == "VK_NV_mesh_shader" ) || + ( extension == "VK_NV_fragment_shader_barycentric" ) || ( extension == "VK_NV_shader_image_footprint" ) || + ( extension == "VK_NV_scissor_exclusive" ) || ( extension == "VK_NV_device_diagnostic_checkpoints" ) || + ( extension == "VK_KHR_timeline_semaphore" ) || ( extension == "VK_INTEL_shader_integer_functions2" ) || + ( extension == "VK_INTEL_performance_query" ) || ( extension == "VK_KHR_vulkan_memory_model" ) || ( extension == "VK_EXT_pci_bus_info" ) || + ( extension == "VK_AMD_display_native_hdr" ) || ( extension == "VK_KHR_shader_terminate_invocation" ) || + ( extension == "VK_EXT_fragment_density_map" ) || ( extension == "VK_EXT_scalar_block_layout" ) || + ( extension == "VK_GOOGLE_hlsl_functionality1" ) || ( extension == "VK_GOOGLE_decorate_string" ) || + ( extension == "VK_EXT_subgroup_size_control" ) || ( extension == "VK_KHR_fragment_shading_rate" ) || + ( extension == "VK_AMD_shader_core_properties2" ) || ( extension == "VK_AMD_device_coherent_memory" ) || + ( 
extension == "VK_KHR_dynamic_rendering_local_read" ) || ( extension == "VK_EXT_shader_image_atomic_int64" ) || + ( extension == "VK_KHR_shader_quad_control" ) || ( extension == "VK_KHR_spirv_1_4" ) || ( extension == "VK_EXT_memory_budget" ) || + ( extension == "VK_EXT_memory_priority" ) || ( extension == "VK_NV_dedicated_allocation_image_aliasing" ) || + ( extension == "VK_KHR_separate_depth_stencil_layouts" ) || ( extension == "VK_EXT_buffer_device_address" ) || + ( extension == "VK_EXT_tooling_info" ) || ( extension == "VK_EXT_separate_stencil_usage" ) || ( extension == "VK_KHR_present_wait" ) || + ( extension == "VK_NV_cooperative_matrix" ) || ( extension == "VK_NV_coverage_reduction_mode" ) || + ( extension == "VK_EXT_fragment_shader_interlock" ) || ( extension == "VK_EXT_ycbcr_image_arrays" ) || + ( extension == "VK_KHR_uniform_buffer_standard_layout" ) || ( extension == "VK_EXT_provoking_vertex" ) +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_EXT_full_screen_exclusive" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_KHR_buffer_device_address" ) || ( extension == "VK_EXT_line_rasterization" ) || ( extension == "VK_EXT_shader_atomic_float" ) || + ( extension == "VK_EXT_host_query_reset" ) || ( extension == "VK_EXT_index_type_uint8" ) || ( extension == "VK_EXT_extended_dynamic_state" ) || + ( extension == "VK_KHR_deferred_host_operations" ) || ( extension == "VK_KHR_pipeline_executable_properties" ) || + ( extension == "VK_EXT_host_image_copy" ) || ( extension == "VK_KHR_map_memory2" ) || ( extension == "VK_EXT_map_memory_placed" ) || + ( extension == "VK_EXT_shader_atomic_float2" ) || ( extension == "VK_EXT_swapchain_maintenance1" ) || + ( extension == "VK_EXT_shader_demote_to_helper_invocation" ) || ( extension == "VK_NV_device_generated_commands" ) || + ( extension == "VK_NV_inherited_viewport_scissor" ) || ( extension == "VK_KHR_shader_integer_dot_product" ) || + ( extension == "VK_EXT_texel_buffer_alignment" ) || ( extension == "VK_QCOM_render_pass_transform" ) || + ( extension == "VK_EXT_depth_bias_control" ) || ( extension == "VK_EXT_device_memory_report" ) || ( extension == "VK_EXT_robustness2" ) || + ( extension == "VK_EXT_custom_border_color" ) || ( extension == "VK_GOOGLE_user_type" ) || ( extension == "VK_KHR_pipeline_library" ) || + ( extension == "VK_NV_present_barrier" ) || ( extension == "VK_KHR_shader_non_semantic_info" ) || ( extension == "VK_KHR_present_id" ) || + ( extension == "VK_EXT_private_data" ) || ( extension == "VK_EXT_pipeline_creation_cache_control" ) || + ( extension == "VK_KHR_video_encode_queue" ) || ( extension == "VK_NV_device_diagnostics_config" ) || + ( extension == "VK_QCOM_render_pass_store_ops" ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + || ( extension == "VK_NV_cuda_kernel_launch" ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + || ( extension == "VK_NV_low_latency" ) +#if defined( VK_USE_PLATFORM_METAL_EXT ) + || ( extension == "VK_EXT_metal_objects" ) +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + || ( extension == "VK_KHR_synchronization2" ) || ( extension == "VK_EXT_descriptor_buffer" ) || ( extension == "VK_EXT_graphics_pipeline_library" ) || + ( extension == "VK_AMD_shader_early_and_late_fragment_tests" ) || ( extension == "VK_KHR_fragment_shader_barycentric" ) || + ( extension == "VK_KHR_shader_subgroup_uniform_control_flow" ) || ( extension == "VK_KHR_zero_initialize_workgroup_memory" ) || + ( extension == "VK_NV_fragment_shading_rate_enums" ) || ( extension == "VK_NV_ray_tracing_motion_blur" ) || ( extension == 
"VK_EXT_mesh_shader" ) || + ( extension == "VK_EXT_ycbcr_2plane_444_formats" ) || ( extension == "VK_EXT_fragment_density_map2" ) || + ( extension == "VK_QCOM_rotated_copy_commands" ) || ( extension == "VK_EXT_image_robustness" ) || + ( extension == "VK_KHR_workgroup_memory_explicit_layout" ) || ( extension == "VK_KHR_copy_commands2" ) || + ( extension == "VK_EXT_image_compression_control" ) || ( extension == "VK_EXT_attachment_feedback_loop_layout" ) || + ( extension == "VK_EXT_4444_formats" ) || ( extension == "VK_EXT_device_fault" ) || + ( extension == "VK_ARM_rasterization_order_attachment_access" ) || ( extension == "VK_EXT_rgba10x6_formats" ) +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_NV_acquire_winrt_display" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_VALVE_mutable_descriptor_type" ) || ( extension == "VK_EXT_vertex_input_dynamic_state" ) || + ( extension == "VK_EXT_physical_device_drm" ) || ( extension == "VK_EXT_device_address_binding_report" ) || + ( extension == "VK_EXT_depth_clip_control" ) || ( extension == "VK_EXT_primitive_topology_list_restart" ) || + ( extension == "VK_KHR_format_feature_flags2" ) || ( extension == "VK_EXT_present_mode_fifo_latest_ready" ) +#if defined( VK_USE_PLATFORM_FUCHSIA ) + || ( extension == "VK_FUCHSIA_external_memory" ) || ( extension == "VK_FUCHSIA_external_semaphore" ) || ( extension == "VK_FUCHSIA_buffer_collection" ) +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + || ( extension == "VK_HUAWEI_subpass_shading" ) || ( extension == "VK_HUAWEI_invocation_mask" ) || ( extension == "VK_NV_external_memory_rdma" ) || + ( extension == "VK_EXT_pipeline_properties" ) || ( extension == "VK_EXT_frame_boundary" ) || + ( extension == "VK_EXT_multisampled_render_to_single_sampled" ) || ( extension == "VK_EXT_extended_dynamic_state2" ) || + ( extension == "VK_EXT_color_write_enable" ) || ( extension == "VK_EXT_primitives_generated_query" ) || + ( extension == "VK_KHR_ray_tracing_maintenance1" ) || ( extension == "VK_EXT_global_priority_query" ) || + ( extension == "VK_EXT_image_view_min_lod" ) || ( extension == "VK_EXT_multi_draw" ) || ( extension == "VK_EXT_image_2d_view_of_3d" ) || + ( extension == "VK_EXT_shader_tile_image" ) || ( extension == "VK_EXT_opacity_micromap" ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + || ( extension == "VK_NV_displacement_micromap" ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + || ( extension == "VK_EXT_load_store_op_none" ) || ( extension == "VK_HUAWEI_cluster_culling_shader" ) || + ( extension == "VK_EXT_border_color_swizzle" ) || ( extension == "VK_EXT_pageable_device_local_memory" ) || ( extension == "VK_KHR_maintenance4" ) || + ( extension == "VK_ARM_shader_core_properties" ) || ( extension == "VK_KHR_shader_subgroup_rotate" ) || + ( extension == "VK_ARM_scheduling_controls" ) || ( extension == "VK_EXT_image_sliced_view_of_3d" ) || + ( extension == "VK_VALVE_descriptor_set_host_mapping" ) || ( extension == "VK_EXT_depth_clamp_zero_one" ) || + ( extension == "VK_EXT_non_seamless_cube_map" ) || ( extension == "VK_ARM_render_pass_striped" ) || + ( extension == "VK_QCOM_fragment_density_map_offset" ) || ( extension == "VK_NV_copy_memory_indirect" ) || + ( extension == "VK_NV_memory_decompression" ) || ( extension == "VK_NV_device_generated_commands_compute" ) || + ( extension == "VK_NV_ray_tracing_linear_swept_spheres" ) || ( extension == "VK_NV_linear_color_attachment" ) || + ( extension == "VK_KHR_shader_maximal_reconvergence" ) || ( extension == "VK_EXT_image_compression_control_swapchain" ) || 
+ ( extension == "VK_QCOM_image_processing" ) || ( extension == "VK_EXT_nested_command_buffer" ) || + ( extension == "VK_EXT_external_memory_acquire_unmodified" ) || ( extension == "VK_EXT_extended_dynamic_state3" ) || + ( extension == "VK_EXT_subpass_merge_feedback" ) || ( extension == "VK_EXT_shader_module_identifier" ) || + ( extension == "VK_EXT_rasterization_order_attachment_access" ) || ( extension == "VK_NV_optical_flow" ) || + ( extension == "VK_EXT_legacy_dithering" ) || ( extension == "VK_EXT_pipeline_protected_access" ) +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + || ( extension == "VK_ANDROID_external_format_resolve" ) +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + || ( extension == "VK_KHR_maintenance5" ) || ( extension == "VK_AMD_anti_lag" ) || ( extension == "VK_KHR_ray_tracing_position_fetch" ) || + ( extension == "VK_EXT_shader_object" ) || ( extension == "VK_KHR_pipeline_binary" ) || ( extension == "VK_QCOM_tile_properties" ) || + ( extension == "VK_SEC_amigo_profiling" ) || ( extension == "VK_QCOM_multiview_per_view_viewports" ) || + ( extension == "VK_NV_ray_tracing_invocation_reorder" ) || ( extension == "VK_NV_cooperative_vector" ) || + ( extension == "VK_NV_extended_sparse_address_space" ) || ( extension == "VK_EXT_mutable_descriptor_type" ) || + ( extension == "VK_EXT_legacy_vertex_attributes" ) || ( extension == "VK_ARM_shader_core_builtins" ) || + ( extension == "VK_EXT_pipeline_library_group_handles" ) || ( extension == "VK_EXT_dynamic_rendering_unused_attachments" ) || + ( extension == "VK_NV_low_latency2" ) || ( extension == "VK_KHR_cooperative_matrix" ) || + ( extension == "VK_QCOM_multiview_per_view_render_areas" ) || ( extension == "VK_KHR_compute_shader_derivatives" ) || + ( extension == "VK_KHR_video_decode_av1" ) || ( extension == "VK_KHR_video_encode_av1" ) || ( extension == "VK_KHR_video_maintenance1" ) || + ( extension == "VK_NV_per_stage_descriptor_set" ) || ( extension == "VK_QCOM_image_processing2" ) || + ( extension == "VK_QCOM_filter_cubic_weights" ) || ( extension == "VK_QCOM_ycbcr_degamma" ) || ( extension == "VK_QCOM_filter_cubic_clamp" ) || + ( extension == "VK_EXT_attachment_feedback_loop_dynamic_state" ) || ( extension == "VK_KHR_vertex_attribute_divisor" ) || + ( extension == "VK_KHR_load_store_op_none" ) || ( extension == "VK_KHR_shader_float_controls2" ) +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + || ( extension == "VK_QNX_external_memory_screen_buffer" ) +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + || ( extension == "VK_MSFT_layered_driver" ) || ( extension == "VK_KHR_index_type_uint8" ) || ( extension == "VK_KHR_line_rasterization" ) || + ( extension == "VK_KHR_calibrated_timestamps" ) || ( extension == "VK_KHR_shader_expect_assume" ) || ( extension == "VK_KHR_maintenance6" ) || + ( extension == "VK_NV_descriptor_pool_overallocation" ) || ( extension == "VK_KHR_video_encode_quantization_map" ) || + ( extension == "VK_NV_raw_access_chains" ) || ( extension == "VK_KHR_shader_relaxed_extended_instruction" ) || + ( extension == "VK_NV_command_buffer_inheritance" ) || ( extension == "VK_KHR_maintenance7" ) || + ( extension == "VK_NV_shader_atomic_float16_vector" ) || ( extension == "VK_EXT_shader_replicated_composites" ) || + ( extension == "VK_NV_ray_tracing_validation" ) || ( extension == "VK_NV_cluster_acceleration_structure" ) || + ( extension == "VK_NV_partitioned_acceleration_structure" ) || ( extension == "VK_EXT_device_generated_commands" ) || + ( extension == "VK_KHR_maintenance8" ) || ( extension == "VK_MESA_image_alignment_control" ) || 
( extension == "VK_EXT_depth_clamp_control" ) || + ( extension == "VK_KHR_video_maintenance2" ) || ( extension == "VK_HUAWEI_hdr_vivid" ) || ( extension == "VK_NV_cooperative_matrix2" ) || + ( extension == "VK_ARM_pipeline_opacity_micromap" ) +#if defined( VK_USE_PLATFORM_METAL_EXT ) + || ( extension == "VK_EXT_external_memory_metal" ) +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + || ( extension == "VK_KHR_depth_clamp_zero_one" ) || ( extension == "VK_EXT_vertex_attribute_robustness" ); + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isInstanceExtension( std::string const & extension ) + { + return ( extension == "VK_KHR_surface" ) || ( extension == "VK_KHR_display" ) +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + || ( extension == "VK_KHR_xlib_surface" ) +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +#if defined( VK_USE_PLATFORM_XCB_KHR ) + || ( extension == "VK_KHR_xcb_surface" ) +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + || ( extension == "VK_KHR_wayland_surface" ) +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + || ( extension == "VK_KHR_android_surface" ) +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + || ( extension == "VK_KHR_win32_surface" ) +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + || ( extension == "VK_EXT_debug_report" ) +#if defined( VK_USE_PLATFORM_GGP ) + || ( extension == "VK_GGP_stream_descriptor_surface" ) +#endif /*VK_USE_PLATFORM_GGP*/ + || ( extension == "VK_NV_external_memory_capabilities" ) || ( extension == "VK_KHR_get_physical_device_properties2" ) || + ( extension == "VK_EXT_validation_flags" ) +#if defined( VK_USE_PLATFORM_VI_NN ) + || ( extension == "VK_NN_vi_surface" ) +#endif /*VK_USE_PLATFORM_VI_NN*/ + || ( extension == "VK_KHR_device_group_creation" ) || ( extension == "VK_KHR_external_memory_capabilities" ) || + ( extension == "VK_KHR_external_semaphore_capabilities" ) || ( extension == "VK_EXT_direct_mode_display" ) +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + || ( extension == "VK_EXT_acquire_xlib_display" ) +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + || ( extension == "VK_EXT_display_surface_counter" ) || ( extension == "VK_EXT_swapchain_colorspace" ) || + ( extension == "VK_KHR_external_fence_capabilities" ) || ( extension == "VK_KHR_get_surface_capabilities2" ) || + ( extension == "VK_KHR_get_display_properties2" ) +#if defined( VK_USE_PLATFORM_IOS_MVK ) + || ( extension == "VK_MVK_ios_surface" ) +#endif /*VK_USE_PLATFORM_IOS_MVK*/ +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + || ( extension == "VK_MVK_macos_surface" ) +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + || ( extension == "VK_EXT_debug_utils" ) +#if defined( VK_USE_PLATFORM_FUCHSIA ) + || ( extension == "VK_FUCHSIA_imagepipe_surface" ) +#endif /*VK_USE_PLATFORM_FUCHSIA*/ +#if defined( VK_USE_PLATFORM_METAL_EXT ) + || ( extension == "VK_EXT_metal_surface" ) +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + || ( extension == "VK_KHR_surface_protected_capabilities" ) || ( extension == "VK_EXT_validation_features" ) || + ( extension == "VK_EXT_headless_surface" ) || ( extension == "VK_EXT_surface_maintenance1" ) || ( extension == "VK_EXT_acquire_drm_display" ) +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + || ( extension == "VK_EXT_directfb_surface" ) +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + || ( extension == "VK_QNX_screen_surface" ) +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + || ( extension == "VK_KHR_portability_enumeration" ) || ( extension == 
"VK_GOOGLE_surfaceless_query" ) || + ( extension == "VK_LUNARG_direct_driver_loading" ) || ( extension == "VK_EXT_layer_settings" ) || ( extension == "VK_NV_display_stereo" ); + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isObsoletedExtension( std::string const & extension ) + { + return ( extension == "VK_AMD_negative_viewport_height" ); + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isPromotedExtension( std::string const & extension ) + { + return ( extension == "VK_KHR_sampler_mirror_clamp_to_edge" ) || ( extension == "VK_EXT_debug_marker" ) || ( extension == "VK_AMD_draw_indirect_count" ) || + ( extension == "VK_KHR_dynamic_rendering" ) || ( extension == "VK_KHR_multiview" ) || +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + ( extension == "VK_NV_win32_keyed_mutex" ) || +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + ( extension == "VK_KHR_get_physical_device_properties2" ) || ( extension == "VK_KHR_device_group" ) || + ( extension == "VK_KHR_shader_draw_parameters" ) || ( extension == "VK_EXT_texture_compression_astc_hdr" ) || + ( extension == "VK_EXT_pipeline_robustness" ) || ( extension == "VK_KHR_maintenance1" ) || ( extension == "VK_KHR_device_group_creation" ) || + ( extension == "VK_KHR_external_memory_capabilities" ) || ( extension == "VK_KHR_external_memory" ) || + ( extension == "VK_KHR_external_semaphore_capabilities" ) || ( extension == "VK_KHR_external_semaphore" ) || + ( extension == "VK_KHR_push_descriptor" ) || ( extension == "VK_KHR_shader_float16_int8" ) || ( extension == "VK_KHR_16bit_storage" ) || + ( extension == "VK_KHR_descriptor_update_template" ) || ( extension == "VK_KHR_imageless_framebuffer" ) || + ( extension == "VK_KHR_create_renderpass2" ) || ( extension == "VK_KHR_external_fence_capabilities" ) || ( extension == "VK_KHR_external_fence" ) || + ( extension == "VK_KHR_maintenance2" ) || ( extension == "VK_KHR_variable_pointers" ) || ( extension == "VK_KHR_dedicated_allocation" ) || + ( extension == "VK_EXT_sampler_filter_minmax" ) || ( extension == "VK_KHR_storage_buffer_storage_class" ) || + ( extension == "VK_EXT_inline_uniform_block" ) || ( extension == "VK_KHR_relaxed_block_layout" ) || + ( extension == "VK_KHR_get_memory_requirements2" ) || ( extension == "VK_KHR_image_format_list" ) || + ( extension == "VK_KHR_sampler_ycbcr_conversion" ) || ( extension == "VK_KHR_bind_memory2" ) || ( extension == "VK_EXT_descriptor_indexing" ) || + ( extension == "VK_EXT_shader_viewport_index_layer" ) || ( extension == "VK_KHR_maintenance3" ) || ( extension == "VK_KHR_draw_indirect_count" ) || + ( extension == "VK_EXT_global_priority" ) || ( extension == "VK_KHR_shader_subgroup_extended_types" ) || ( extension == "VK_KHR_8bit_storage" ) || + ( extension == "VK_KHR_shader_atomic_int64" ) || ( extension == "VK_EXT_calibrated_timestamps" ) || ( extension == "VK_KHR_global_priority" ) || + ( extension == "VK_EXT_vertex_attribute_divisor" ) || ( extension == "VK_EXT_pipeline_creation_feedback" ) || + ( extension == "VK_KHR_driver_properties" ) || ( extension == "VK_KHR_shader_float_controls" ) || ( extension == "VK_KHR_depth_stencil_resolve" ) || + ( extension == "VK_NV_compute_shader_derivatives" ) || ( extension == "VK_NV_fragment_shader_barycentric" ) || + ( extension == "VK_KHR_timeline_semaphore" ) || ( extension == "VK_KHR_vulkan_memory_model" ) || + ( extension == "VK_KHR_shader_terminate_invocation" ) || ( extension == "VK_EXT_scalar_block_layout" ) || + ( extension == "VK_EXT_subgroup_size_control" ) || ( extension == "VK_KHR_dynamic_rendering_local_read" ) 
|| ( extension == "VK_KHR_spirv_1_4" ) || + ( extension == "VK_KHR_separate_depth_stencil_layouts" ) || ( extension == "VK_EXT_tooling_info" ) || + ( extension == "VK_EXT_separate_stencil_usage" ) || ( extension == "VK_KHR_uniform_buffer_standard_layout" ) || + ( extension == "VK_KHR_buffer_device_address" ) || ( extension == "VK_EXT_line_rasterization" ) || ( extension == "VK_EXT_host_query_reset" ) || + ( extension == "VK_EXT_index_type_uint8" ) || ( extension == "VK_EXT_extended_dynamic_state" ) || ( extension == "VK_EXT_host_image_copy" ) || + ( extension == "VK_KHR_map_memory2" ) || ( extension == "VK_EXT_shader_demote_to_helper_invocation" ) || + ( extension == "VK_KHR_shader_integer_dot_product" ) || ( extension == "VK_EXT_texel_buffer_alignment" ) || + ( extension == "VK_KHR_shader_non_semantic_info" ) || ( extension == "VK_EXT_private_data" ) || + ( extension == "VK_EXT_pipeline_creation_cache_control" ) || ( extension == "VK_KHR_synchronization2" ) || + ( extension == "VK_KHR_zero_initialize_workgroup_memory" ) || ( extension == "VK_EXT_ycbcr_2plane_444_formats" ) || + ( extension == "VK_EXT_image_robustness" ) || ( extension == "VK_KHR_copy_commands2" ) || ( extension == "VK_EXT_4444_formats" ) || + ( extension == "VK_ARM_rasterization_order_attachment_access" ) || ( extension == "VK_VALVE_mutable_descriptor_type" ) || + ( extension == "VK_KHR_format_feature_flags2" ) || ( extension == "VK_EXT_extended_dynamic_state2" ) || + ( extension == "VK_EXT_global_priority_query" ) || ( extension == "VK_EXT_load_store_op_none" ) || ( extension == "VK_KHR_maintenance4" ) || + ( extension == "VK_KHR_shader_subgroup_rotate" ) || ( extension == "VK_EXT_depth_clamp_zero_one" ) || + ( extension == "VK_EXT_pipeline_protected_access" ) || ( extension == "VK_KHR_maintenance5" ) || + ( extension == "VK_KHR_vertex_attribute_divisor" ) || ( extension == "VK_KHR_load_store_op_none" ) || + ( extension == "VK_KHR_shader_float_controls2" ) || ( extension == "VK_KHR_index_type_uint8" ) || ( extension == "VK_KHR_line_rasterization" ) || + ( extension == "VK_KHR_shader_expect_assume" ) || ( extension == "VK_KHR_maintenance6" ); + } +} // namespace VULKAN_HPP_NAMESPACE + +#endif diff --git a/src/libraries/vulkanheaders/vulkan_format_traits.hpp b/src/libraries/vulkanheaders/vulkan_format_traits.hpp index ac4250a6a..5224e6c34 100644 --- a/src/libraries/vulkanheaders/vulkan_format_traits.hpp +++ b/src/libraries/vulkanheaders/vulkan_format_traits.hpp @@ -1,4 +1,4 @@ -// Copyright 2015-2022 The Khronos Group Inc. +// Copyright 2015-2025 The Khronos Group Inc. // // SPDX-License-Identifier: Apache-2.0 OR MIT // @@ -12,10 +12,105 @@ namespace VULKAN_HPP_NAMESPACE { + //===================== //=== Format Traits === //===================== + // The three-dimensional extent of a texel block. 
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 std::array<uint8_t, 3> blockExtent( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return { { 5, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return { { 5, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return { { 5, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return { { 5, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return { { 6, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return { { 6, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return { { 6, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return { { 6, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return { { 8, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return { { 8, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return { { 8, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return { { 8, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return { { 8, 8, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return { { 8, 8, 1 } }; + case
VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return { { 10, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return { { 10, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return { { 10, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return { { 10, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return { { 10, 8, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return { { 10, 8, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return { { 10, 10, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return { { 10, 10, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return { { 12, 10, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return { { 12, 10, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return { { 12, 12, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return { { 12, 12, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return { { 2, 1, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return { { 5, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return { { 5, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return { { 6, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return { { 6, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return { { 8, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return { { 8, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return { { 8, 8, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return { { 10, 5, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return { { 10, 6, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return { { 10, 8, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return { { 10, 10, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return { { 12, 10, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return { { 12, 12, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return { { 8, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return { { 8, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return { { 8, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return { { 4, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return { { 8, 4, 1 } }; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: 
return { { 4, 4, 1 } }; + + default: return { { 1, 1, 1 } }; + } + } + // The texel block size in bytes. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t blockSize( VULKAN_HPP_NAMESPACE::Format format ) { @@ -259,6 +354,8 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 16; case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 16; case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eA8Unorm: return 1; case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 8; case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 8; case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 8; @@ -267,596 +364,268 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 8; case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 8; case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: return 4; default: VULKAN_HPP_ASSERT( false ); return 0; } } - // The number of texels in a texel block. - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t texelsPerBlock( VULKAN_HPP_NAMESPACE::Format format ) + // The class of the format (can't be just named "class"!) + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * compatibilityClass( VULKAN_HPP_NAMESPACE::Format format ) { switch ( format ) { - case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return 1; - case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return 1; - case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: 
return 1; - case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return 1; - case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return 1; - case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return 1; - case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return 1; - case 
VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return 1; - case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 1; - case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 1; - case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 1; - case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return 1; - case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return 1; - case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: 
return 16; - case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return 20; - case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return 20; - case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return 25; - case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return 25; - case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return 30; - case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return 30; - case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return 36; - case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return 36; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return 40; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return 40; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return 48; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return 48; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return 64; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return 64; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return 50; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return 50; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return 60; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return 60; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return 80; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return 80; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return 100; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return 100; - case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return 120; - case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return 120; - case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return 144; - case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return 144; - case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 1; - case 
VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 1; - case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return 16; - case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return 20; - case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return 25; - case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return 30; - case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return 36; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return 40; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return 48; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return 64; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return 50; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return 60; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return 80; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 100; - case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 120; - case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 144; - case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 1; - case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 1; - case 
VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 1; - case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return 1; - case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return 1; - case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 1; - case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 1; - case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 1; - - default: VULKAN_HPP_ASSERT( false ); return 0; - } - } - - // The three-dimensional extent of a texel block. - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 std::array<uint8_t, 3> blockExtent( VULKAN_HPP_NAMESPACE::Format format ) - { - switch ( format ) - { - case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return { { 5, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return { { 5, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return { { 5, 5, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return { { 5, 5, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return { { 6, 5, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return { { 6, 5, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return { { 6, 6, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return { { 6, 6, 1
} }; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return { { 8, 5, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return { { 8, 5, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return { { 8, 6, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return { { 8, 6, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return { { 8, 8, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return { { 8, 8, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return { { 10, 5, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return { { 10, 5, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return { { 10, 6, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return { { 10, 6, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return { { 10, 8, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return { { 10, 8, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return { { 10, 10, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return { { 10, 10, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return { { 12, 10, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return { { 12, 10, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return { { 12, 12, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return { { 12, 12, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return { { 2, 1, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return { { 2, 1, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return { { 2, 1, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return { { 2, 1, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return { { 2, 1, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return { { 2, 1, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return { { 2, 1, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return { { 2, 1, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return { { 5, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return { { 5, 5, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return { { 6, 5, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return { { 6, 6, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return { { 8, 5, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return { { 8, 6, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return { { 8, 8, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return { { 10, 5, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return { { 10, 6, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return { { 10, 8, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return { { 10, 10, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return { { 12, 10, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return { { 12, 12, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return { { 8, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return { { 4, 4, 1 } }; 
- case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return { { 8, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return { { 8, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return { { 4, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return { { 8, 4, 1 } }; - case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return { { 4, 4, 1 } }; - - default: return { { 1, 1, 1 } }; - } - } - - // A textual description of the compression scheme, or an empty string if it is not compressed - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * compressionScheme( VULKAN_HPP_NAMESPACE::Format format ) - { - switch ( format ) - { - case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return "BC"; - case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return "BC"; - case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return "BC"; - case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return "BC"; - case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return "BC"; - case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return "BC"; - case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return "BC"; - case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return "BC"; - case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return "BC"; - case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return "BC"; - case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return "BC"; - case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return "BC"; - case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return "BC"; - case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return "BC"; - case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return "BC"; - case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return "BC"; - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return "ETC2"; - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return "ETC2"; - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return "ETC2"; - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return "ETC2"; - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return "ETC2"; - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return "ETC2"; - case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return "EAC"; - case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return "EAC"; - case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return "EAC"; - case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return "EAC"; - case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return "ASTC LDR"; - case 
VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return "ASTC LDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return "ASTC HDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return "ASTC HDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return "ASTC HDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return "ASTC HDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return "ASTC HDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return "ASTC HDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return "ASTC HDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return "ASTC HDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return "ASTC HDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return "ASTC HDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return "ASTC HDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return "ASTC HDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return "ASTC HDR"; - case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return "ASTC HDR"; - case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return "PVRTC"; - case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return "PVRTC"; - case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return "PVRTC"; - case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return "PVRTC"; - case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return "PVRTC"; - case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return "PVRTC"; - case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return "PVRTC"; - case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return "PVRTC"; - - default: return ""; - } - } - - // True, if this format is a compressed one. - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 bool isCompressed( VULKAN_HPP_NAMESPACE::Format format ) - { - return ( *VULKAN_HPP_NAMESPACE::compressionScheme( format ) != 0 ); - } - - // The number of bits into which the format is packed. A single image element in this format - // can be stored in the same space as a scalar type of this bit width. 
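A texel-block extent is most useful together with the per-block byte size, so here is a minimal sizing sketch. It assumes the companion vk::blockSize trait (bytes per texel block, defined elsewhere in this header), assumes the traits are consumed via <vulkan/vulkan_format_traits.hpp> as in stock Vulkan-Hpp, and assumes tightly packed rows; the helper name is hypothetical.

#include <vulkan/vulkan_format_traits.hpp>

// Rough byte size of one tightly packed 2D subresource of `format`.
// Hypothetical helper; vk::blockExtent / vk::blockSize are the traits from this header.
inline vk::DeviceSize estimateLevelSize( vk::Format format, uint32_t width, uint32_t height )
{
  std::array<uint8_t, 3> const extent = vk::blockExtent( format );              // texel-block footprint, e.g. { 4, 4, 1 } for BC7
  uint32_t const blocksX = ( width + extent[0] - 1 ) / extent[0];               // round up to whole blocks
  uint32_t const blocksY = ( height + extent[1] - 1 ) / extent[1];
  return vk::DeviceSize( blocksX ) * blocksY * vk::blockSize( format );         // bytes per texel block
}

For uncompressed formats the extent defaults to { 1, 1, 1 }, so the same expression degenerates to width * height * bytes-per-texel.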
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t packed( VULKAN_HPP_NAMESPACE::Format format ) - { - switch ( format ) - { - case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 8; - case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 32; - case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 32; - case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 32; - case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 32; - case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 32; - case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 32; - case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 32; - case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 32; - case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 32; - case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 32; - case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 32; - case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 32; - case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 32; - case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 32; - case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 32; - case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 32; - case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 32; - case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 32; - case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 32; - case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 32; - case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 32; - case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 32; - case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 16; - case 
VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return "8-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return "24-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return "32-bit"; + 
case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return "48-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return "48-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return "48-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return "48-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return "48-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return "48-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return "48-bit"; + 
case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return "96-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return "96-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return "96-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return "128-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return "128-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return "128-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return "64-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return "128-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return "128-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return "128-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return "192-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return "192-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return "192-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return "256-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return "256-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return "256-bit"; + case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return "D16"; + case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return "D24"; + case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return "D32"; + case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return "S8"; + case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return "D16S8"; + case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return "D24S8"; + case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return "D32S8"; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return "BC1_RGB"; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return "BC1_RGB"; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return "BC1_RGBA"; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return "BC1_RGBA"; + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return "BC2"; + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return "BC2"; + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return "BC3"; + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return "BC3"; + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return "BC4"; + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return "BC4"; + case 
VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return "BC5"; + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return "BC5"; + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return "BC6H"; + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return "BC6H"; + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return "BC7"; + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return "BC7"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return "ETC2_RGB"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return "ETC2_RGB"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return "ETC2_RGBA"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return "ETC2_RGBA"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return "ETC2_EAC_RGBA"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return "ETC2_EAC_RGBA"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return "EAC_R"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return "EAC_R"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return "EAC_RG"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return "EAC_RG"; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return "ASTC_4x4"; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return "ASTC_4x4"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return "ASTC_5x4"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return "ASTC_5x4"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return "ASTC_5x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return "ASTC_5x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return "ASTC_6x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return "ASTC_6x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return "ASTC_6x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return "ASTC_6x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return "ASTC_8x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return "ASTC_8x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return "ASTC_8x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return "ASTC_8x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return "ASTC_8x8"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return "ASTC_8x8"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return "ASTC_10x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return "ASTC_10x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return "ASTC_10x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return "ASTC_10x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return "ASTC_10x8"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return "ASTC_10x8"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return "ASTC_10x10"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return "ASTC_10x10"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return "ASTC_12x10"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return "ASTC_12x10"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return "ASTC_12x12"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return "ASTC_12x12"; + case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return "32-bit G8B8G8R8"; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return "32-bit B8G8R8G8"; + case 
VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return "8-bit 3-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return "8-bit 2-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return "8-bit 3-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return "8-bit 2-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return "8-bit 3-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return "64-bit R10G10B10A10"; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return "64-bit G10B10G10R10"; + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return "64-bit B10G10R10G10"; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return "10-bit 3-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return "10-bit 2-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return "10-bit 3-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return "10-bit 2-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return "10-bit 3-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return "64-bit R12G12B12A12"; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return "64-bit G12B12G12R12"; + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return "64-bit B12G12R12G12"; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return "12-bit 3-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return "12-bit 2-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return "12-bit 3-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return "12-bit 2-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return "12-bit 3-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return "64-bit G16B16G16R16"; + case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return "64-bit B16G16R16G16"; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return "16-bit 3-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return "16-bit 2-plane 420"; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return "16-bit 3-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return "16-bit 2-plane 422"; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return "16-bit 3-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return "8-bit 2-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return "10-bit 2-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return "12-bit 2-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return "16-bit 2-plane 444"; + case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return "16-bit"; + case 
VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return "ASTC_4x4"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return "ASTC_5x4"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return "ASTC_5x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return "ASTC_6x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return "ASTC_6x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return "ASTC_8x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return "ASTC_8x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return "ASTC_8x8"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return "ASTC_10x5"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return "ASTC_10x6"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return "ASTC_10x8"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return "ASTC_10x10"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return "ASTC_12x10"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return "ASTC_12x12"; + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8Unorm: return "8-bit alpha"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return "PVRTC1_2BPP"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return "PVRTC1_4BPP"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return "PVRTC2_2BPP"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return "PVRTC2_4BPP"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return "PVRTC1_2BPP"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return "PVRTC1_4BPP"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return "PVRTC2_2BPP"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return "PVRTC2_4BPP"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: return "32-bit"; - default: return 0; - } - } - - // True, if the components of this format are compressed, otherwise false. 
- VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 bool componentsAreCompressed( VULKAN_HPP_NAMESPACE::Format format ) - { - switch ( format ) - { - case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: - case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: - case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: - case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: - case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: - case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: - case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: - case 
VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: - case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: - case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: - case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: - case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: - case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: - case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: - case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: - case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: - case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return true; - default: return false; + default: VULKAN_HPP_ASSERT( false ); return ""; } } @@ -1828,7 +1597,7 @@ namespace VULKAN_HPP_NAMESPACE { case 0: return 10; case 1: return 11; - case 2: return 10; + case 2: return 11; default: VULKAN_HPP_ASSERT( false ); return 0; } case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: @@ -2236,6 +2005,28 @@ namespace VULKAN_HPP_NAMESPACE case 3: return 4; default: VULKAN_HPP_ASSERT( false ); return 0; } + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: + switch ( component ) + { + case 0: return 1; + case 1: return 5; + case 2: return 5; + case 3: return 5; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8Unorm: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } default: return 0; } @@ -2484,6 +2275,8 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 4; case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 4; case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8Unorm: return 1; case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 4; case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 4; case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 4; @@ -2492,6 +2285,7 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 4; case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 4; case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: return 2; default: return 0; } @@ -2556,8 +2350,8 @@ namespace VULKAN_HPP_NAMESPACE switch ( component ) { case 0: return "B"; - case 1: return "R"; - case 2: return "G"; + case 1: return "G"; + case 2: return "R"; case 3: return "A"; default: VULKAN_HPP_ASSERT( false ); return ""; } @@ -3392,21 +3186,21 @@ namespace VULKAN_HPP_NAMESPACE switch ( component ) { case 0: return "R"; - case 1: return "B"; + case 1: return "G"; default: VULKAN_HPP_ASSERT( false ); return ""; } case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: switch ( component ) { case 0: return "R"; - case 1: return "B"; + case 1: return "G"; default: VULKAN_HPP_ASSERT( false ); return ""; } case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: switch ( component ) { case 0: return "R"; - case 1: return "B"; + case 1: return "G"; default: VULKAN_HPP_ASSERT( false ); return ""; } case 
VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: @@ -4433,6 +4227,21 @@ namespace VULKAN_HPP_NAMESPACE case 3: return "A"; default: VULKAN_HPP_ASSERT( false ); return ""; } + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8Unorm: + switch ( component ) + { + case 0: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: switch ( component ) { @@ -4505,6 +4314,13 @@ namespace VULKAN_HPP_NAMESPACE case 3: return "A"; default: VULKAN_HPP_ASSERT( false ); return ""; } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: + switch ( component ) + { + case 0: return "R"; + case 1: return "G"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } default: return ""; } @@ -5613,7 +5429,7 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: switch ( component ) { - case 0: return "SRGB"; + case 0: return "SNORM"; default: VULKAN_HPP_ASSERT( false ); return ""; } case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: @@ -5626,8 +5442,8 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: switch ( component ) { - case 0: return "SRGB"; - case 1: return "SRGB"; + case 0: return "SNORM"; + case 1: return "SNORM"; default: VULKAN_HPP_ASSERT( false ); return ""; } case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: @@ -6446,6 +6262,21 @@ namespace VULKAN_HPP_NAMESPACE case 3: return "SFLOAT"; default: VULKAN_HPP_ASSERT( false ); return ""; } + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8Unorm: + switch ( component ) + { + case 0: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: switch ( component ) { @@ -6518,6 +6349,13 @@ namespace VULKAN_HPP_NAMESPACE case 3: return "SRGB"; default: VULKAN_HPP_ASSERT( false ); return ""; } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: + switch ( component ) + { + case 0: return "SFIXED5"; + case 1: return "SFIXED5"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } default: return ""; } @@ -6721,41 +6559,246 @@ namespace VULKAN_HPP_NAMESPACE default: VULKAN_HPP_ASSERT( false ); return 0; } - default: return 0; + default: return 0; + } + } + + // True, if the components of this format are compressed, otherwise false. 
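The component hunks above adjust bit widths, names and numeric formats; a quick way to eyeball them is a dump loop like the following sketch, which assumes the usual vk::componentCount, vk::componentName, vk::componentBits and vk::componentNumericFormat helpers from this header.

#include <cstdio>
#include <vulkan/vulkan_format_traits.hpp>

// Print "name:bits numericFormat" for every component of `format`.
inline void dumpComponents( vk::Format format )
{
  for ( uint8_t c = 0; c < vk::componentCount( format ); ++c )
    std::printf( "%s:%u %s  ",
                 vk::componentName( format, c ),
                 unsigned( vk::componentBits( format, c ) ),
                 vk::componentNumericFormat( format, c ) );
  std::printf( "\n" );
}

With the bit-width hunk above applied, dumpComponents( vk::Format::eB10G11R11UfloatPack32 ) should report 11 bits for its third component instead of the old 10.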
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 bool componentsAreCompressed( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: + case 
VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return true; + default: return false; + } + } + + // A textual description of the compression scheme, or an empty string if it is not compressed + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * compressionScheme( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return "BC"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return "ETC2"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return "EAC"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return "EAC"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return "EAC"; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return "EAC"; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return "ASTC LDR"; + case 
VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return "ASTC LDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return "ASTC HDR"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return "PVRTC"; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return "PVRTC"; + + default: return ""; } } - // The number of image planes of this format. - VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeCount( VULKAN_HPP_NAMESPACE::Format format ) + // True, if this format is a compressed one. 
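For illustration, compressionScheme above and the isCompressed helper defined next are typically consumed together when picking an upload or transcode path. A minimal sketch, not part of the generated header, assuming these traits are exposed via vulkan/vulkan_format_traits.hpp and the default vk namespace alias for VULKAN_HPP_NAMESPACE:

  #include <cstdio>
  #include <vulkan/vulkan_format_traits.hpp>   // assumed install path for these traits

  // Illustrative only, not part of the generated header.
  void describeCompression( vk::Format format )
  {
    if ( vk::isCompressed( format ) )          // true for e.g. vk::Format::eBc7SrgbBlock
    {
      std::printf( "compressed (%s)\n", vk::compressionScheme( format ) );   // prints "BC"
    }
    else
    {
      std::printf( "uncompressed\n" );         // e.g. vk::Format::eR8G8B8A8Unorm
    }
  }
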
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 bool isCompressed( VULKAN_HPP_NAMESPACE::Format format ) + { + return ( *VULKAN_HPP_NAMESPACE::compressionScheme( format ) != 0 ); + } + + // The number of bits into which the format is packed. A single image element in this format + // can be stored in the same space as a scalar type of this bit width. + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t packed( VULKAN_HPP_NAMESPACE::Format format ) { switch ( format ) { - case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 3; - case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 2; - case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 3; - case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 2; - case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 3; - case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 3; - case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 2; - case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 3; - case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 2; - case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 3; - case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 3; - case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 2; - case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 3; - case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 2; - case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 3; - case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 3; - case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 2; - case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 3; - case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 2; - case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 3; - case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 2; - case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 2; - case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 2; - case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 8; + case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 32; + case 
VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 32; + case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: return 16; - default: return 1; + default: return 0; } } @@ -6949,6 +6992,40 @@ namespace VULKAN_HPP_NAMESPACE } } + // The number of image planes of this format. 
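As an illustration of how the planeCount helper that follows is typically used: each plane of a multi-planar format is addressed through its own image aspect when binding memory or creating per-plane views. A minimal sketch, not part of the generated header, assuming the default vk namespace:

  #include <cstdint>
  #include <vulkan/vulkan.hpp>

  // Illustrative only: map a plane index to the corresponding image aspect.
  vk::ImageAspectFlagBits planeAspect( uint8_t plane )
  {
    switch ( plane )
    {
      case 0: return vk::ImageAspectFlagBits::ePlane0;
      case 1: return vk::ImageAspectFlagBits::ePlane1;
      default: return vk::ImageAspectFlagBits::ePlane2;
    }
  }

  // e.g. vk::planeCount( vk::Format::eG8B8R83Plane420Unorm ) == 3, so planes 0..2 are valid;
  // formats not listed in the switch below fall through to the default of 1 (single-plane).
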
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeCount( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 3; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 2; + + default: return 1; + } + } + // The relative height of this plane. A value of k means that this plane is 1/k the height of the overall format. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeHeightDivisor( VULKAN_HPP_NAMESPACE::Format format, uint8_t plane ) { @@ -7329,5 +7406,264 @@ namespace VULKAN_HPP_NAMESPACE } } + // The number of texels in a texel block. 
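To make the block arithmetic concrete before the long switch that follows: texelsPerBlock is the product of the block's width and height, so block counts per mip level are obtained by rounding each dimension up to the block size. A minimal sketch, not part of the generated header, assuming a 4x4 BC7 block and C++14 constexpr evaluation:

  #include <cstdint>
  #include <vulkan/vulkan_format_traits.hpp>   // assumed install path for these traits

  // Illustrative only, not part of the generated header.
  static_assert( vk::texelsPerBlock( vk::Format::eBc7UnormBlock ) == 16, "BC7 uses 4x4 blocks" );

  // Blocks needed to cover a 1920x1080 mip level in BC7:
  constexpr uint32_t blocksX    = ( 1920 + 3 ) / 4;      // 480
  constexpr uint32_t blocksY    = ( 1080 + 3 ) / 4;      // 270
  constexpr uint32_t blockCount = blocksX * blocksY;     // 129600
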
+ VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t texelsPerBlock( VULKAN_HPP_NAMESPACE::Format format ) + { + switch ( format ) + { + case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 1; + case 
VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return 1; + case 
VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return 1; + case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return 1; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return 20; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return 20; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return 25; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return 25; + case 
VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return 30; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return 30; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return 36; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return 36; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return 40; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return 40; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return 48; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return 48; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return 64; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return 64; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return 50; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return 50; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return 60; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return 60; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return 80; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return 80; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return 100; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return 100; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return 120; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return 120; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return 144; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return 144; + case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 1; + case 
VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return 20; + case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return 25; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return 30; + case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return 36; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return 40; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return 48; + case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return 64; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return 50; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return 60; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return 80; + case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 100; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 120; + case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 144; + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8Unorm: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: return 1; + + default: VULKAN_HPP_ASSERT( false ); return 0; + } + } + } // namespace VULKAN_HPP_NAMESPACE #endif diff --git a/src/libraries/vulkanheaders/vulkan_fuchsia.h b/src/libraries/vulkanheaders/vulkan_fuchsia.h index 61774ff9c..0af61bda2 100644 --- a/src/libraries/vulkanheaders/vulkan_fuchsia.h +++ b/src/libraries/vulkanheaders/vulkan_fuchsia.h @@ -2,7 +2,7 @@ #define VULKAN_FUCHSIA_H_ 1 /* -** Copyright 2015-2022 The Khronos Group Inc. +** Copyright 2015-2025 The Khronos Group Inc. ** ** SPDX-License-Identifier: Apache-2.0 */ @@ -19,6 +19,7 @@ extern "C" { +// VK_FUCHSIA_imagepipe_surface is a preprocessor guard. Do not pass it to API calls. 
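The guard comments added throughout this platform header flag a common misuse: the macro only means the declarations are compiled in, while actual support must still be confirmed against the instance extension list and enabled at instance creation. A minimal sketch, not part of the header, assuming VK_USE_PLATFORM_FUCHSIA is defined so vulkan_fuchsia.h is pulled in, and using a hypothetical haveImagePipeSurface helper name:

  #include <cstring>
  #include <vector>
  #include <vulkan/vulkan.h>

  // Illustrative only: the guard gates compilation, not runtime availability.
  #ifdef VK_FUCHSIA_imagepipe_surface
  bool haveImagePipeSurface( std::vector<VkExtensionProperties> const & available )
  {
    for ( auto const & e : available )
      if ( std::strcmp( e.extensionName, VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME ) == 0 )
        return true;   // enable it on the VkInstance before calling vkCreateImagePipeSurfaceFUCHSIA
    return false;
  }
  #endif
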
#define VK_FUCHSIA_imagepipe_surface 1 #define VK_FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION 1 #define VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME "VK_FUCHSIA_imagepipe_surface" @@ -41,6 +42,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateImagePipeSurfaceFUCHSIA( #endif +// VK_FUCHSIA_external_memory is a preprocessor guard. Do not pass it to API calls. #define VK_FUCHSIA_external_memory 1 #define VK_FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION 1 #define VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME "VK_FUCHSIA_external_memory" @@ -81,6 +83,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandlePropertiesFUCHSIA( #endif +// VK_FUCHSIA_external_semaphore is a preprocessor guard. Do not pass it to API calls. #define VK_FUCHSIA_external_semaphore 1 #define VK_FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 #define VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_FUCHSIA_external_semaphore" @@ -115,6 +118,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreZirconHandleFUCHSIA( #endif +// VK_FUCHSIA_buffer_collection is a preprocessor guard. Do not pass it to API calls. #define VK_FUCHSIA_buffer_collection 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferCollectionFUCHSIA) #define VK_FUCHSIA_BUFFER_COLLECTION_SPEC_VERSION 2 diff --git a/src/libraries/vulkanheaders/vulkan_funcs.hpp b/src/libraries/vulkanheaders/vulkan_funcs.hpp index d4ec6de4d..eb18dbac6 100644 --- a/src/libraries/vulkanheaders/vulkan_funcs.hpp +++ b/src/libraries/vulkanheaders/vulkan_funcs.hpp @@ -1,4 +1,4 @@ -// Copyright 2015-2022 The Khronos Group Inc. +// Copyright 2015-2025 The Khronos Group Inc. // // SPDX-License-Identifier: Apache-2.0 OR MIT // @@ -8,8 +8,12 @@ #ifndef VULKAN_FUNCS_HPP #define VULKAN_FUNCS_HPP +// include-what-you-use: make sure, vulkan.hpp is used by code-completers +// IWYU pragma: private; include "vulkan.hpp" + namespace VULKAN_HPP_NAMESPACE { + //=========================== //=== COMMAND Definitions === //=========================== @@ -34,15 +38,18 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional allocator, Dispatch const & d ) { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateInstance && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::Instance instance; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateInstance( reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &instance ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::createInstance" ); + reinterpret_cast( &instance ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstance" ); - return createResultValueType( static_cast( result ), instance ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( instance ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -51,25 +58,28 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional allocator, Dispatch const & d ) { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateInstance && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::Instance instance; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateInstance( reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &instance ) ); - 
resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" ); + reinterpret_cast( &instance ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" ); - return createResultValueType( static_cast( result ), - UniqueHandle( instance, ObjectDestroy( allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( instance, detail::ObjectDestroy( allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyInstance( m_instance, reinterpret_cast( pAllocator ) ); + d.vkDestroyInstance( static_cast( m_instance ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -77,11 +87,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void Instance::destroy( Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyInstance && "Function requires " ); +# endif d.vkDestroyInstance( m_instance, reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount, @@ -89,73 +102,83 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast( pPhysicalDevices ) ) ); + return static_cast( + d.vkEnumeratePhysicalDevices( static_cast( m_instance ), pPhysicalDeviceCount, reinterpret_cast( pPhysicalDevices ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDevices( Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDevices && "Function requires " ); +# endif std::vector physicalDevices; uint32_t physicalDeviceCount; - VkResult result; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ); - if ( ( result == VK_SUCCESS ) && physicalDeviceCount ) + result = static_cast( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceCount ) { physicalDevices.resize( physicalDeviceCount ); - result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ); + result = static_cast( + d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); + } while ( result == 
VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); if ( physicalDeviceCount < physicalDevices.size() ) { physicalDevices.resize( physicalDeviceCount ); } - return createResultValueType( static_cast( result ), physicalDevices ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDevices ) ); } template ::value, int>::type> + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDevices && "Function requires " ); +# endif std::vector physicalDevices( physicalDeviceAllocator ); uint32_t physicalDeviceCount; - VkResult result; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ); - if ( ( result == VK_SUCCESS ) && physicalDeviceCount ) + result = static_cast( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceCount ) { physicalDevices.resize( physicalDeviceCount ); - result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ); + result = static_cast( + d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); if ( physicalDeviceCount < physicalDevices.size() ) { physicalDevices.resize( physicalDeviceCount ); } - return createResultValueType( static_cast( result ), physicalDevices ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDevices ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast( pFeatures ) ); + d.vkGetPhysicalDeviceFeatures( static_cast( m_physicalDevice ), reinterpret_cast( pFeatures ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -164,13 +187,16 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features; d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast( &features ) ); return features; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, @@ -178,7 +204,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); + d.vkGetPhysicalDeviceFormatProperties( + static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( pFormatProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -187,13 +214,16 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::FormatProperties formatProperties; d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); return formatProperties; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, @@ -205,7 +235,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties( static_cast( m_physicalDevice ), static_cast( format ), static_cast( type ), static_cast( tiling ), @@ -225,27 +255,31 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties; - VkResult result = d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, - static_cast( format ), - static_cast( type ), - static_cast( tiling ), - static_cast( usage ), - static_cast( flags ), - reinterpret_cast( &imageFormatProperties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + reinterpret_cast( &imageFormatProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); - return createResultValueType( static_cast( result ), imageFormatProperties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceProperties( 
m_physicalDevice, reinterpret_cast( pProperties ) ); + d.vkGetPhysicalDeviceProperties( static_cast( m_physicalDevice ), reinterpret_cast( pProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -254,13 +288,16 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties; d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast( &properties ) ); return properties; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, @@ -269,15 +306,21 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetPhysicalDeviceQueueFamilyProperties( - m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); + static_cast( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename QueueFamilyPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties && "Function requires " ); +# endif std::vector queueFamilyProperties; uint32_t queueFamilyPropertyCount; @@ -294,14 +337,17 @@ namespace VULKAN_HPP_NAMESPACE return queueFamilyProperties; } - template ::value, int>::type> + template < + typename QueueFamilyPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties && "Function requires " ); +# endif std::vector queueFamilyProperties( queueFamilyPropertiesAllocator ); uint32_t queueFamilyPropertyCount; @@ -317,14 +363,15 @@ namespace VULKAN_HPP_NAMESPACE } return queueFamilyProperties; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast( pMemoryProperties ) ); + d.vkGetPhysicalDeviceMemoryProperties( static_cast( m_physicalDevice ), + reinterpret_cast( pMemoryProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -333,51 +380,60 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 
) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties; d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast( &memoryProperties ) ); return memoryProperties; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return d.vkGetInstanceProcAddr( m_instance, pName ); + return d.vkGetInstanceProcAddr( static_cast( m_instance ), pName ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PFN_VoidFunction Instance::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetInstanceProcAddr && "Function requires " ); +# endif PFN_vkVoidFunction result = d.vkGetInstanceProcAddr( m_instance, name.c_str() ); return result; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return d.vkGetDeviceProcAddr( m_device, pName ); + return d.vkGetDeviceProcAddr( static_cast( m_device ), pName ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PFN_VoidFunction Device::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceProcAddr && "Function requires " ); +# endif PFN_vkVoidFunction result = d.vkGetDeviceProcAddr( m_device, name.c_str() ); return result; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo, @@ -386,7 +442,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDevice( m_physicalDevice, + return static_cast( d.vkCreateDevice( static_cast( m_physicalDevice ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDevice ) ) ); @@ -398,16 +454,19 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDevice && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::Device device; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDevice( m_physicalDevice, reinterpret_cast( &createInfo ), 
reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &device ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" ); + reinterpret_cast( &device ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" ); - return createResultValueType( static_cast( result ), device ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( device ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -418,26 +477,29 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDevice && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::Device device; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDevice( m_physicalDevice, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &device ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" ); + reinterpret_cast( &device ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" ); - return createResultValueType( static_cast( result ), - UniqueHandle( device, ObjectDestroy( allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( device, detail::ObjectDestroy( allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyDevice( m_device, reinterpret_cast( pAllocator ) ); + d.vkDestroyDevice( static_cast( m_device ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -445,11 +507,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void Device::destroy( Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDevice && "Function requires " ); +# endif d.vkDestroyDevice( m_device, reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char * pLayerName, @@ -463,67 +528,78 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename ExtensionPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, Dispatch const & d ) { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateInstanceExtensionProperties && "Function requires " ); +# endif std::vector properties; uint32_t propertyCount; - VkResult result; + VULKAN_HPP_NAMESPACE::Result result; do { - result = 
d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) + result = static_cast( + d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = d.vkEnumerateInstanceExtensionProperties( - layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ); + result = static_cast( d.vkEnumerateInstanceExtensionProperties( + layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( static_cast( result ), properties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } - template ::value, int>::type> + template < + typename ExtensionPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, ExtensionPropertiesAllocator & extensionPropertiesAllocator, Dispatch const & d ) { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateInstanceExtensionProperties && "Function requires " ); +# endif std::vector properties( extensionPropertiesAllocator ); uint32_t propertyCount; - VkResult result; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) + result = static_cast( + d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = d.vkEnumerateInstanceExtensionProperties( - layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ); + result = static_cast( d.vkEnumerateInstanceExtensionProperties( + layerName ? 
layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( static_cast( result ), properties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char * pLayerName, @@ -532,72 +608,83 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkEnumerateDeviceExtensionProperties( + static_cast( m_physicalDevice ), pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename ExtensionPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateDeviceExtensionProperties && "Function requires " ); +# endif std::vector properties; uint32_t propertyCount; - VkResult result; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) + result = static_cast( + d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = d.vkEnumerateDeviceExtensionProperties( - m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ); + result = static_cast( d.vkEnumerateDeviceExtensionProperties( + m_physicalDevice, layerName ? 
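The enumerateInstanceExtensionProperties / enumerateDeviceExtensionProperties hunks above all implement the same two-call idiom: query the count, size the vector, fetch the data, and retry while the driver reports vk::Result::eIncomplete, so the caller never sees a partially filled vector. A small usage sketch (illustrative only; VK_KHR_surface is just an example extension to search for):

```cpp
// Illustrative use of the count/fetch/eIncomplete loop shown above. When the dynamic
// dispatch loader is in use, the default dispatcher must have been initialized first --
// that is exactly the precondition the newly added VULKAN_HPP_ASSERT guards check.
#include <cstring>
#include <vulkan/vulkan.hpp>

bool instanceSupportsSurfaceExtension()
{
  // Throws vk::SystemError on real failures; eIncomplete is retried inside the wrapper.
  std::vector<vk::ExtensionProperties> properties = vk::enumerateInstanceExtensionProperties();

  for ( vk::ExtensionProperties const & prop : properties )
  {
    if ( std::strcmp( prop.extensionName.data(), VK_KHR_SURFACE_EXTENSION_NAME ) == 0 )
    {
      return true;
    }
  }
  return false;
}
```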
layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( static_cast( result ), properties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } - template ::value, int>::type> + template < + typename ExtensionPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, ExtensionPropertiesAllocator & extensionPropertiesAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateDeviceExtensionProperties && "Function requires " ); +# endif std::vector properties( extensionPropertiesAllocator ); uint32_t propertyCount; - VkResult result; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) + result = static_cast( + d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = d.vkEnumerateDeviceExtensionProperties( - m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ); + result = static_cast( d.vkEnumerateDeviceExtensionProperties( + m_physicalDevice, layerName ? 
layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( static_cast( result ), properties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t * pPropertyCount, @@ -609,63 +696,72 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceLayerProperties( Dispatch const & d ) { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateInstanceLayerProperties && "Function requires " ); +# endif std::vector properties; uint32_t propertyCount; - VkResult result; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) + result = static_cast( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast( properties.data() ) ); + result = static_cast( + d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast( properties.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( static_cast( result ), properties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template ::value, int>::type> + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateInstanceLayerProperties && "Function requires " ); +# endif std::vector properties( layerPropertiesAllocator ); uint32_t propertyCount; - VkResult result; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) + result = static_cast( 
d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast( properties.data() ) ); + result = static_cast( + d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast( properties.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( static_cast( result ), properties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t * pPropertyCount, @@ -673,74 +769,84 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkEnumerateDeviceLayerProperties( + static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateDeviceLayerProperties && "Function requires " ); +# endif std::vector properties; uint32_t propertyCount; - VkResult result; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) + result = static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ); + result = static_cast( + d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( static_cast( result ), properties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( 
properties ) ); } template ::value, int>::type> + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateDeviceLayerProperties && "Function requires " ); +# endif std::vector properties( layerPropertiesAllocator ); uint32_t propertyCount; - VkResult result; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) + result = static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) { properties.resize( propertyCount ); - result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ); + result = static_cast( + d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( static_cast( result ), properties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast( pQueue ) ); + d.vkGetDeviceQueue( static_cast( m_device ), queueFamilyIndex, queueIndex, reinterpret_cast( pQueue ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -749,13 +855,16 @@ namespace VULKAN_HPP_NAMESPACE Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceQueue && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::Queue queue; d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast( &queue ) ); return queue; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, @@ -764,7 +873,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); + return static_cast( + d.vkQueueSubmit( static_cast( m_queue 
), submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -773,31 +883,38 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueueSubmit && "Function requires " ); +# endif - VkResult result = d.vkQueueSubmit( m_queue, submits.size(), reinterpret_cast( submits.data() ), static_cast( fence ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkQueueSubmit( m_queue, submits.size(), reinterpret_cast( submits.data() ), static_cast( fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkQueueWaitIdle( m_queue ) ); + return static_cast( d.vkQueueWaitIdle( static_cast( m_queue ) ) ); } #else template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Queue::waitIdle( Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueueWaitIdle && "Function requires " ); +# endif - VkResult result = d.vkQueueWaitIdle( m_queue ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkQueueWaitIdle( m_queue ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -806,18 +923,21 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkDeviceWaitIdle( m_device ) ); + return static_cast( d.vkDeviceWaitIdle( static_cast( m_device ) ) ); } #else template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::waitIdle( Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDeviceWaitIdle && "Function requires " ); +# endif - VkResult result = d.vkDeviceWaitIdle( m_device ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkDeviceWaitIdle( m_device ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif 
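Queue::submit, Queue::waitIdle and Device::waitIdle above follow the same conversion: the raw VkResult is cast to vk::Result, validated by detail::resultCheck, and the void-returning createResultValueType overload is used because eSuccess is the only accepted code. A hedged usage sketch (illustrative only; the command buffer is assumed to be fully recorded already):

```cpp
// Caller-side sketch of the enhanced-mode Queue wrappers patched above: ArrayProxy lets a
// single SubmitInfo be passed directly, and errors surface as vk::SystemError exceptions.
#include <vulkan/vulkan.hpp>

void submitAndWait( vk::Queue queue, vk::CommandBuffer commandBuffer, vk::Fence fence )
{
  vk::SubmitInfo submitInfo( {}, {}, commandBuffer );  // no wait/signal semaphores in this sketch

  queue.submit( submitInfo, fence );  // throws on failure instead of returning a VkResult
  queue.waitIdle();                   // same resultCheck pattern as Queue::waitIdle above
}
```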
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -828,7 +948,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkAllocateMemory( m_device, + return static_cast( d.vkAllocateMemory( static_cast( m_device ), reinterpret_cast( pAllocateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pMemory ) ) ); @@ -842,16 +962,19 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateMemory && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::DeviceMemory memory; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateMemory( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &memory ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" ); + reinterpret_cast( &memory ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" ); - return createResultValueType( static_cast( result ), memory ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memory ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -862,20 +985,23 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateMemory && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::DeviceMemory memory; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateMemory( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &memory ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" ); + reinterpret_cast( &memory ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" ); - return createResultValueType( static_cast( result ), - UniqueHandle( memory, ObjectFree( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( memory, detail::ObjectFree( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, @@ -883,7 +1009,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkFreeMemory( m_device, static_cast( memory ), reinterpret_cast( pAllocator ) ); + d.vkFreeMemory( static_cast( m_device ), static_cast( memory ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -893,12 +1019,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkFreeMemory && "Function requires " ); +# endif d.vkFreeMemory( m_device, static_cast( memory ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory, @@ -906,7 +1035,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkFreeMemory( m_device, static_cast( memory ), reinterpret_cast( pAllocator ) ); + d.vkFreeMemory( static_cast( m_device ), static_cast( memory ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -916,12 +1045,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkFreeMemory && "Function requires " ); +# endif d.vkFreeMemory( m_device, static_cast( memory ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, @@ -932,7 +1064,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkMapMemory( m_device, + return static_cast( d.vkMapMemory( static_cast( m_device ), static_cast( memory ), static_cast( offset ), static_cast( size ), @@ -949,25 +1081,28 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkMapMemory && "Function requires " ); +# endif - void * pData; - VkResult result = d.vkMapMemory( m_device, - static_cast( memory ), - static_cast( offset ), - static_cast( size ), - static_cast( flags ), - &pData ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" ); + void * pData; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkMapMemory( m_device, + static_cast( memory ), + static_cast( offset ), + static_cast( size ), + static_cast( flags ), + &pData ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" ); - return createResultValueType( static_cast( result ), pData ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pData ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkUnmapMemory( m_device, static_cast( memory ) ); + d.vkUnmapMemory( static_cast( m_device ), static_cast( memory ) ); } template @@ -976,7 +1111,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); + return static_cast( + d.vkFlushMappedMemoryRanges( static_cast( m_device ), memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -986,13 +1122,17 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkFlushMappedMemoryRanges && "Function requires " ); +# endif - VkResult result = d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, @@ -1001,7 +1141,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); + d.vkInvalidateMappedMemoryRanges( static_cast( m_device ), memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1011,13 +1151,17 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkInvalidateMappedMemoryRanges && "Function requires " ); +# endif - VkResult result = d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, @@ -1025,7 +1169,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceMemoryCommitment( m_device, static_cast( memory ), reinterpret_cast( pCommittedMemoryInBytes ) ); + d.vkGetDeviceMemoryCommitment( + static_cast( m_device ), static_cast( memory ), reinterpret_cast( pCommittedMemoryInBytes ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1034,13 +1179,16 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceMemoryCommitment && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes; d.vkGetDeviceMemoryCommitment( m_device, static_cast( memory ), reinterpret_cast( &committedMemoryInBytes ) ); return committedMemoryInBytes; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* 
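The allocateMemory / mapMemory / flushMappedMemoryRanges / freeMemory hunks above apply the same pattern to device memory. A small host-visible upload sketch, assuming memoryTypeIndex refers to a HOST_VISIBLE (and, for the flush to matter, non-coherent) memory type chosen by the caller; it is not part of the patch:

```cpp
// Illustrative upload path exercising the wrappers patched above. memoryTypeIndex is a
// placeholder; a real caller derives it from getMemoryProperties() and the resource's
// MemoryRequirements.
#include <cstring>
#include <vulkan/vulkan.hpp>

void uploadBytes( vk::Device device, uint32_t memoryTypeIndex, const void * src, vk::DeviceSize size )
{
  vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex );
  vk::DeviceMemory       memory = device.allocateMemory( allocInfo );  // throws on failure

  void * mapped = device.mapMemory( memory, 0, size );                 // enhanced mode returns void*
  std::memcpy( mapped, src, static_cast<size_t>( size ) );

  vk::MappedMemoryRange range( memory, 0, size );
  device.flushMappedMemoryRanges( range );                             // ArrayProxy accepts a single range
  device.unmapMemory( memory );

  device.freeMemory( memory );
}
```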
VULKAN_HPP_DISABLE_ENHANCED_MODE */ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template @@ -1050,8 +1198,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkBindBufferMemory( m_device, static_cast( buffer ), static_cast( memory ), static_cast( memoryOffset ) ) ); + return static_cast( d.vkBindBufferMemory( + static_cast( m_device ), static_cast( buffer ), static_cast( memory ), static_cast( memoryOffset ) ) ); } #else template @@ -1059,12 +1207,15 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindBufferMemory && "Function requires " ); +# endif - VkResult result = - d.vkBindBufferMemory( m_device, static_cast( buffer ), static_cast( memory ), static_cast( memoryOffset ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkBindBufferMemory( m_device, static_cast( buffer ), static_cast( memory ), static_cast( memoryOffset ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -1076,8 +1227,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkBindImageMemory( m_device, static_cast( image ), static_cast( memory ), static_cast( memoryOffset ) ) ); + return static_cast( d.vkBindImageMemory( + static_cast( m_device ), static_cast( image ), static_cast( memory ), static_cast( memoryOffset ) ) ); } #else template @@ -1085,12 +1236,15 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindImageMemory && "Function requires " ); +# endif - VkResult result = - d.vkBindImageMemory( m_device, static_cast( image ), static_cast( memory ), static_cast( memoryOffset ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkBindImageMemory( m_device, static_cast( image ), static_cast( memory ), static_cast( memoryOffset ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -1100,7 +1254,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetBufferMemoryRequirements( m_device, static_cast( buffer ), reinterpret_cast( pMemoryRequirements ) ); + d.vkGetBufferMemoryRequirements( + static_cast( m_device ), static_cast( buffer ), 
reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1109,13 +1264,16 @@ namespace VULKAN_HPP_NAMESPACE Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; d.vkGetBufferMemoryRequirements( m_device, static_cast( buffer ), reinterpret_cast( &memoryRequirements ) ); return memoryRequirements; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, @@ -1123,7 +1281,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageMemoryRequirements( m_device, static_cast( image ), reinterpret_cast( pMemoryRequirements ) ); + d.vkGetImageMemoryRequirements( + static_cast( m_device ), static_cast( image ), reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1132,13 +1291,16 @@ namespace VULKAN_HPP_NAMESPACE Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; d.vkGetImageMemoryRequirements( m_device, static_cast( image ), reinterpret_cast( &memoryRequirements ) ); return memoryRequirements; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, @@ -1147,18 +1309,25 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageSparseMemoryRequirements( m_device, + d.vkGetImageSparseMemoryRequirements( static_cast( m_device ), static_cast( image ), pSparseMemoryRequirementCount, reinterpret_cast( pSparseMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements && "Function requires " ); +# endif std::vector sparseMemoryRequirements; uint32_t sparseMemoryRequirementCount; @@ -1179,14 +1348,18 @@ namespace VULKAN_HPP_NAMESPACE template ::value, int>::type> + typename std::enable_if< + std::is_same::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( 
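getBufferMemoryRequirements and bindBufferMemory above feed into the usual allocation flow. A sketch of that flow for context (illustrative; findMemoryType is a local helper written here for completeness, not something this patch provides):

```cpp
#include <stdexcept>
#include <vulkan/vulkan.hpp>

// Hypothetical helper (not part of the patched header): picks a memory type index that
// satisfies both the requirements bitmask and the requested property flags.
uint32_t findMemoryType( vk::PhysicalDevice physicalDevice, uint32_t typeBits, vk::MemoryPropertyFlags flags )
{
  vk::PhysicalDeviceMemoryProperties memoryProperties = physicalDevice.getMemoryProperties();
  for ( uint32_t i = 0; i < memoryProperties.memoryTypeCount; ++i )
  {
    if ( ( typeBits & ( 1u << i ) ) && ( ( memoryProperties.memoryTypes[i].propertyFlags & flags ) == flags ) )
    {
      return i;
    }
  }
  throw std::runtime_error( "no suitable memory type" );
}

vk::DeviceMemory allocateAndBind( vk::PhysicalDevice physicalDevice, vk::Device device, vk::Buffer buffer )
{
  vk::MemoryRequirements requirements = device.getBufferMemoryRequirements( buffer );

  vk::MemoryAllocateInfo allocInfo(
    requirements.size, findMemoryType( physicalDevice, requirements.memoryTypeBits, vk::MemoryPropertyFlagBits::eDeviceLocal ) );

  vk::DeviceMemory memory = device.allocateMemory( allocInfo );

  // In enhanced mode bindBufferMemory returns void and throws vk::SystemError on failure,
  // mirroring the detail::resultCheck call added in the hunk above.
  device.bindBufferMemory( buffer, memory, 0 );
  return memory;
}
```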
d.vkGetImageSparseMemoryRequirements && "Function requires " ); +# endif std::vector sparseMemoryRequirements( sparseImageMemoryRequirementsAllocator ); @@ -1205,7 +1378,7 @@ namespace VULKAN_HPP_NAMESPACE } return sparseMemoryRequirements; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, @@ -1218,7 +1391,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, + d.vkGetPhysicalDeviceSparseImageFormatProperties( static_cast( m_physicalDevice ), static_cast( format ), static_cast( type ), static_cast( samples ), @@ -1229,7 +1402,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename SparseImageFormatPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, @@ -1239,6 +1416,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties && + "Function requires " ); +# endif std::vector properties; uint32_t propertyCount; @@ -1268,10 +1449,11 @@ namespace VULKAN_HPP_NAMESPACE return properties; } - template ::value, int>::type> + template < + typename SparseImageFormatPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, @@ -1282,6 +1464,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties && + "Function requires " ); +# endif std::vector properties( sparseImageFormatPropertiesAllocator ); uint32_t propertyCount; @@ -1310,7 +1496,7 @@ namespace VULKAN_HPP_NAMESPACE } return properties; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, @@ -1319,8 +1505,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast( pBindInfo ), static_cast( fence ) ) ); + return static_cast( d.vkQueueBindSparse( + static_cast( m_queue ), bindInfoCount, reinterpret_cast( pBindInfo ), static_cast( fence ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1329,14 +1515,17 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueueBindSparse && "Function requires " ); +# endif - VkResult result = - 
d.vkQueueBindSparse( m_queue, bindInfo.size(), reinterpret_cast( bindInfo.data() ), static_cast( fence ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkQueueBindSparse( m_queue, bindInfo.size(), reinterpret_cast( bindInfo.data() ), static_cast( fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo, @@ -1345,7 +1534,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateFence( m_device, + return static_cast( d.vkCreateFence( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFence ) ) ); @@ -1357,16 +1546,19 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateFence && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::Fence fence; - VkResult result = + VULKAN_HPP_NAMESPACE::Fence fence; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateFence( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &fence ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" ); + reinterpret_cast( &fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" ); - return createResultValueType( static_cast( result ), fence ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -1375,20 +1567,23 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateFence && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::Fence fence; - VkResult result = + VULKAN_HPP_NAMESPACE::Fence fence; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateFence( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &fence ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" ); + reinterpret_cast( &fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" ); - return createResultValueType( static_cast( result ), - UniqueHandle( fence, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( fence, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif 
/* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, @@ -1396,7 +1591,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( pAllocator ) ); + d.vkDestroyFence( static_cast( m_device ), static_cast( fence ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1406,12 +1601,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyFence && "Function requires " ); +# endif d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, @@ -1419,7 +1617,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( pAllocator ) ); + d.vkDestroyFence( static_cast( m_device ), static_cast( fence ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1429,12 +1627,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyFence && "Function requires " ); +# endif d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, @@ -1442,7 +1643,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkResetFences( m_device, fenceCount, reinterpret_cast( pFences ) ) ); + return static_cast( d.vkResetFences( static_cast( m_device ), fenceCount, reinterpret_cast( pFences ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1451,31 +1652,37 @@ namespace VULKAN_HPP_NAMESPACE Device::resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy const & fences, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkResetFences && "Function requires " ); +# endif - VkResult result = d.vkResetFences( m_device, fences.size(), reinterpret_cast( fences.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkResetFences( m_device, fences.size(), reinterpret_cast( fences.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetFenceStatus( m_device, static_cast( fence ) ) ); + return static_cast( d.vkGetFenceStatus( static_cast( m_device ), static_cast( fence ) ) ); } #else template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetFenceStatus && "Function requires " ); +# endif - VkResult result = d.vkGetFenceStatus( m_device, static_cast( fence ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetFenceStatus( m_device, static_cast( fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); return static_cast( result ); } @@ -1489,8 +1696,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkWaitForFences( m_device, fenceCount, reinterpret_cast( pFences ), static_cast( waitAll ), timeout ) ); + return static_cast( d.vkWaitForFences( + static_cast( m_device ), fenceCount, reinterpret_cast( pFences ), static_cast( waitAll ), timeout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1502,16 +1709,18 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWaitForFences && "Function requires " ); +# endif - VkResult result = - d.vkWaitForFences( m_device, fences.size(), reinterpret_cast( fences.data() ), static_cast( waitAll ), timeout ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkWaitForFences( m_device, fences.size(), reinterpret_cast( fences.data() ), static_cast( waitAll ), timeout ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); return static_cast( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo, @@ -1520,7 +1729,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateSemaphore( m_device, + return static_cast( d.vkCreateSemaphore( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSemaphore ) ) ); @@ -1534,16 +1743,19 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { 
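Device::waitForFences and getFenceStatus above are the cases where more than one success code is legal, so detail::resultCheck is handed the allowed set and the vk::Result itself is returned instead of void. A short sketch of the polling pattern this enables (illustrative only):

```cpp
// waitForFences may legitimately return eTimeout, which the patched wrapper passes back
// to the caller rather than throwing.
#include <vulkan/vulkan.hpp>

bool waitForWork( vk::Device device, vk::Fence fence, uint64_t timeoutNs )
{
  vk::Result result = device.waitForFences( fence, VK_TRUE, timeoutNs );
  if ( result == vk::Result::eTimeout )
  {
    return false;  // still executing; caller may retry later
  }
  device.resetFences( fence );  // ArrayProxy again lets a single fence be passed
  return true;
}
```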
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSemaphore && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::Semaphore semaphore; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateSemaphore( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &semaphore ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" ); + reinterpret_cast( &semaphore ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" ); - return createResultValueType( static_cast( result ), semaphore ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( semaphore ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -1554,21 +1766,23 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSemaphore && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::Semaphore semaphore; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateSemaphore( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &semaphore ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" ); + reinterpret_cast( &semaphore ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" ); - return createResultValueType( - static_cast( result ), - UniqueHandle( semaphore, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( semaphore, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, @@ -1576,7 +1790,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); + d.vkDestroySemaphore( + static_cast( m_device ), static_cast( semaphore ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1586,12 +1801,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySemaphore && "Function requires " ); +# endif d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, @@ -1599,7 +1817,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); + d.vkDestroySemaphore( + 
static_cast( m_device ), static_cast( semaphore ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1609,12 +1828,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySemaphore && "Function requires " ); +# endif d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo, @@ -1623,7 +1845,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateEvent( m_device, + return static_cast( d.vkCreateEvent( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pEvent ) ) ); @@ -1635,16 +1857,19 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateEvent && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::Event event; - VkResult result = + VULKAN_HPP_NAMESPACE::Event event; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateEvent( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &event ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" ); + reinterpret_cast( &event ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" ); - return createResultValueType( static_cast( result ), event ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( event ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -1653,20 +1878,23 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateEvent && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::Event event; - VkResult result = + VULKAN_HPP_NAMESPACE::Event event; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateEvent( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &event ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique" ); + reinterpret_cast( &event ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique" ); - return createResultValueType( static_cast( result ), - UniqueHandle( event, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( event, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE 
void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, @@ -1674,7 +1902,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( pAllocator ) ); + d.vkDestroyEvent( static_cast( m_device ), static_cast( event ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1684,12 +1912,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyEvent && "Function requires " ); +# endif d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, @@ -1697,7 +1928,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( pAllocator ) ); + d.vkDestroyEvent( static_cast( m_device ), static_cast( event ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1707,30 +1938,35 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyEvent && "Function requires " ); +# endif d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetEventStatus( m_device, static_cast( event ) ) ); + return static_cast( d.vkGetEventStatus( static_cast( m_device ), static_cast( event ) ) ); } #else template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetEventStatus && "Function requires " ); +# endif - VkResult result = d.vkGetEventStatus( m_device, static_cast( event ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus", - { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetEventStatus( m_device, static_cast( event ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus", { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } ); return static_cast( result ); } @@ -1741,7 +1977,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); + return static_cast( d.vkSetEvent( static_cast( m_device ), static_cast( event ) ) ); } #else template @@ -1749,11 +1985,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetEvent && "Function requires " ); +# endif - VkResult result = d.vkSetEvent( m_device, static_cast( event ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -1762,18 +2001,21 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkResetEvent( m_device, static_cast( event ) ) ); + return static_cast( d.vkResetEvent( static_cast( m_device ), static_cast( event ) ) ); } #else template VULKAN_HPP_INLINE typename ResultValueType::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkResetEvent && "Function requires " ); +# endif - VkResult result = d.vkResetEvent( m_device, static_cast( event ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkResetEvent( m_device, static_cast( event ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -1784,7 +2026,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateQueryPool( m_device, + return static_cast( d.vkCreateQueryPool( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pQueryPool ) ) ); @@ -1798,16 +2040,19 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateQueryPool && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::QueryPool queryPool; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateQueryPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &queryPool ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" ); + reinterpret_cast( &queryPool ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" ); - return 
createResultValueType( static_cast( result ), queryPool ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( queryPool ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -1818,21 +2063,23 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateQueryPool && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::QueryPool queryPool; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateQueryPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &queryPool ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" ); + reinterpret_cast( &queryPool ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" ); - return createResultValueType( - static_cast( result ), - UniqueHandle( queryPool, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( queryPool, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, @@ -1840,7 +2087,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( pAllocator ) ); + d.vkDestroyQueryPool( + static_cast( m_device ), static_cast( queryPool ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1850,12 +2098,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyQueryPool && "Function requires " ); +# endif d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, @@ -1863,7 +2114,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( pAllocator ) ); + d.vkDestroyQueryPool( + static_cast( m_device ), static_cast( queryPool ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1873,12 +2125,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyQueryPool && "Function requires " ); +# endif d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( 
VULKAN_HPP_NAMESPACE::QueryPool queryPool, @@ -1891,7 +2146,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetQueryPoolResults( m_device, + return static_cast( d.vkGetQueryPoolResults( static_cast( m_device ), static_cast( queryPool ), firstQuery, queryCount, @@ -1902,7 +2157,10 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, @@ -1913,22 +2171,25 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetQueryPoolResults && "Function requires " ); +# endif VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); std::vector data( dataSize / sizeof( DataType ) ); - VkResult result = d.vkGetQueryPoolResults( m_device, - static_cast( queryPool ), - firstQuery, - queryCount, - data.size() * sizeof( DataType ), - reinterpret_cast( data.data() ), - static_cast( stride ), - static_cast( flags ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetQueryPoolResults( m_device, + static_cast( queryPool ), + firstQuery, + queryCount, + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ), + static_cast( stride ), + static_cast( flags ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); - return ResultValue>( static_cast( result ), data ); + return ResultValue>( result, std::move( data ) ); } template @@ -1940,23 +2201,25 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetQueryPoolResults && "Function requires " ); +# endif - DataType data; - VkResult result = d.vkGetQueryPoolResults( m_device, - static_cast( queryPool ), - firstQuery, - queryCount, - sizeof( DataType ), - reinterpret_cast( &data ), - static_cast( stride ), - static_cast( flags ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetQueryPoolResults( m_device, + static_cast( queryPool ), + firstQuery, + queryCount, + sizeof( DataType ), + reinterpret_cast( &data ), + static_cast( stride ), + static_cast( flags ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); - return ResultValue( static_cast( result ), data ); + return ResultValue( result, std::move( data ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBuffer( const 
VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo, @@ -1965,7 +2228,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateBuffer( m_device, + return static_cast( d.vkCreateBuffer( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pBuffer ) ) ); @@ -1977,16 +2240,19 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateBuffer && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::Buffer buffer; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateBuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &buffer ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" ); + reinterpret_cast( &buffer ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" ); - return createResultValueType( static_cast( result ), buffer ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( buffer ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -1995,20 +2261,23 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateBuffer && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::Buffer buffer; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateBuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &buffer ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" ); + reinterpret_cast( &buffer ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" ); - return createResultValueType( static_cast( result ), - UniqueHandle( buffer, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( buffer, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, @@ -2016,7 +2285,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( pAllocator ) ); + d.vkDestroyBuffer( static_cast( m_device ), static_cast( buffer ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2026,12 +2295,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyBuffer && "Function requires " ); +# endif 
d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, @@ -2039,7 +2311,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( pAllocator ) ); + d.vkDestroyBuffer( static_cast( m_device ), static_cast( buffer ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2049,12 +2321,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyBuffer && "Function requires " ); +# endif d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo, @@ -2063,7 +2338,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateBufferView( m_device, + return static_cast( d.vkCreateBufferView( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pView ) ) ); @@ -2077,16 +2352,19 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateBufferView && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::BufferView view; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateBufferView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &view ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" ); + reinterpret_cast( &view ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" ); - return createResultValueType( static_cast( result ), view ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( view ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -2097,20 +2375,23 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateBufferView && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::BufferView view; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateBufferView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &view ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" ); + reinterpret_cast( &view ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" ); - return createResultValueType( static_cast( result ), - UniqueHandle( view, ObjectDestroy( 
*this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( view, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, @@ -2118,7 +2399,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( pAllocator ) ); + d.vkDestroyBufferView( + static_cast( m_device ), static_cast( bufferView ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2128,12 +2410,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyBufferView && "Function requires " ); +# endif d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, @@ -2141,7 +2426,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( pAllocator ) ); + d.vkDestroyBufferView( + static_cast( m_device ), static_cast( bufferView ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2151,12 +2437,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyBufferView && "Function requires " ); +# endif d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo, @@ -2165,7 +2454,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateImage( m_device, + return static_cast( d.vkCreateImage( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pImage ) ) ); @@ -2177,16 +2466,19 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateImage && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::Image image; - VkResult result = + VULKAN_HPP_NAMESPACE::Image image; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateImage( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &image ) ); - resultCheck( 
static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" ); + reinterpret_cast( &image ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" ); - return createResultValueType( static_cast( result ), image ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( image ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -2195,20 +2487,23 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateImage && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::Image image; - VkResult result = + VULKAN_HPP_NAMESPACE::Image image; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateImage( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &image ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" ); + reinterpret_cast( &image ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" ); - return createResultValueType( static_cast( result ), - UniqueHandle( image, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( image, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, @@ -2216,7 +2511,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); + d.vkDestroyImage( static_cast( m_device ), static_cast( image ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2226,12 +2521,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyImage && "Function requires " ); +# endif d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, @@ -2239,7 +2537,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); + d.vkDestroyImage( static_cast( m_device ), static_cast( image ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2249,12 +2547,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyImage && "Function requires " ); +# endif d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( 
static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, @@ -2263,7 +2564,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageSubresourceLayout( m_device, + d.vkGetImageSubresourceLayout( static_cast( m_device ), static_cast( image ), reinterpret_cast( pSubresource ), reinterpret_cast( pLayout ) ); @@ -2275,6 +2576,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageSubresourceLayout && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::SubresourceLayout layout; d.vkGetImageSubresourceLayout( m_device, @@ -2284,7 +2588,7 @@ namespace VULKAN_HPP_NAMESPACE return layout; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo, @@ -2293,7 +2597,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateImageView( m_device, + return static_cast( d.vkCreateImageView( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pView ) ) ); @@ -2307,16 +2611,19 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateImageView && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::ImageView view; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateImageView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &view ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" ); + reinterpret_cast( &view ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" ); - return createResultValueType( static_cast( result ), view ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( view ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -2327,20 +2634,23 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateImageView && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::ImageView view; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateImageView( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &view ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique" ); + reinterpret_cast( &view ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique" ); - return createResultValueType( static_cast( 
result ), - UniqueHandle( view, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( view, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, @@ -2348,7 +2658,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( pAllocator ) ); + d.vkDestroyImageView( + static_cast( m_device ), static_cast( imageView ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2358,12 +2669,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyImageView && "Function requires " ); +# endif d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, @@ -2371,7 +2685,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( pAllocator ) ); + d.vkDestroyImageView( + static_cast( m_device ), static_cast( imageView ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2381,12 +2696,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyImageView && "Function requires " ); +# endif d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, @@ -2395,7 +2713,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateShaderModule( m_device, + return static_cast( d.vkCreateShaderModule( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pShaderModule ) ) ); @@ -2409,16 +2727,19 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateShaderModule && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateShaderModule( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &shaderModule ) ); - resultCheck( static_cast( result ), 
VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" ); + reinterpret_cast( &shaderModule ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" ); - return createResultValueType( static_cast( result ), shaderModule ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( shaderModule ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -2429,21 +2750,23 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateShaderModule && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateShaderModule( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &shaderModule ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique" ); + reinterpret_cast( &shaderModule ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique" ); - return createResultValueType( - static_cast( result ), - UniqueHandle( shaderModule, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( shaderModule, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, @@ -2451,7 +2774,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); + d.vkDestroyShaderModule( + static_cast( m_device ), static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2461,12 +2785,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyShaderModule && "Function requires " ); +# endif d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, @@ -2474,7 +2801,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); + d.vkDestroyShaderModule( + static_cast( m_device ), static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2484,12 +2812,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyShaderModule && "Function 
requires " ); +# endif d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo, @@ -2498,7 +2829,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreatePipelineCache( m_device, + return static_cast( d.vkCreatePipelineCache( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelineCache ) ) ); @@ -2512,16 +2843,19 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineCache && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreatePipelineCache( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &pipelineCache ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" ); + reinterpret_cast( &pipelineCache ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" ); - return createResultValueType( static_cast( result ), pipelineCache ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineCache ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -2532,21 +2866,23 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineCache && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreatePipelineCache( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &pipelineCache ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique" ); + reinterpret_cast( &pipelineCache ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique" ); - return createResultValueType( - static_cast( result ), - UniqueHandle( pipelineCache, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( pipelineCache, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, @@ -2554,7 +2890,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipelineCache( + static_cast( m_device ), static_cast( 
pipelineCache ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2564,12 +2901,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipelineCache && "Function requires " ); +# endif d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, @@ -2577,7 +2917,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipelineCache( + static_cast( m_device ), static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2587,12 +2928,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipelineCache && "Function requires " ); +# endif d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, @@ -2601,67 +2945,79 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), pDataSize, pData ) ); + return static_cast( + d.vkGetPipelineCacheData( static_cast( m_device ), static_cast( pipelineCache ), pDataSize, pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineCacheData && "Function requires " ); +# endif std::vector data; size_t dataSize; - VkResult result; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, nullptr ); - if ( ( result == VK_SUCCESS ) && dataSize ) + result = + static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) { data.resize( dataSize ); - result = d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, reinterpret_cast( data.data() ) ); + result = static_cast( + d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, reinterpret_cast( data.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); + } while ( result == 
VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); VULKAN_HPP_ASSERT( dataSize <= data.size() ); if ( dataSize < data.size() ) { data.resize( dataSize ); } - return createResultValueType( static_cast( result ), data ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } template ::value, int>::type> + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineCacheData && "Function requires " ); +# endif std::vector data( uint8_tAllocator ); size_t dataSize; - VkResult result; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, nullptr ); - if ( ( result == VK_SUCCESS ) && dataSize ) + result = + static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) { data.resize( dataSize ); - result = d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, reinterpret_cast( data.data() ) ); + result = static_cast( + d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, reinterpret_cast( data.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); VULKAN_HPP_ASSERT( dataSize <= data.size() ); if ( dataSize < data.size() ) { data.resize( dataSize ); } - return createResultValueType( static_cast( result ), data ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, @@ -2670,8 +3026,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkMergePipelineCaches( m_device, static_cast( dstCache ), srcCacheCount, reinterpret_cast( pSrcCaches ) ) ); + return static_cast( d.vkMergePipelineCaches( + static_cast( m_device ), static_cast( dstCache ), srcCacheCount, reinterpret_cast( pSrcCaches ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2682,14 +3038,17 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkMergePipelineCaches && "Function requires " ); +# endif - VkResult result = d.vkMergePipelineCaches( - m_device, static_cast( dstCache ), srcCaches.size(), reinterpret_cast( srcCaches.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( 
d.vkMergePipelineCaches( + m_device, static_cast( dstCache ), srcCaches.size(), reinterpret_cast( srcCaches.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, @@ -2700,7 +3059,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateGraphicsPipelines( m_device, + return static_cast( d.vkCreateGraphicsPipelines( static_cast( m_device ), static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), @@ -2709,7 +3068,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -2717,26 +3078,28 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function requires " ); +# endif std::vector pipelines( createInfos.size() ); - VkResult result = d.vkCreateGraphicsPipelines( + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size(), reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( pipelines.data() ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - return ResultValue>( static_cast( result ), pipelines ); + return ResultValue>( result, std::move( pipelines ) ); } template ::value, int>::type> + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -2745,20 +3108,23 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function requires " ); +# endif std::vector pipelines( createInfos.size(), pipelineAllocator ); - VkResult result = d.vkCreateGraphicsPipelines( + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size(), reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( pipelines.data() ) ); - resultCheck( static_cast( result ), - 
VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - return ResultValue>( static_cast( result ), pipelines ); + return ResultValue>( result, std::move( pipelines ) ); } template @@ -2769,24 +3135,30 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::Pipeline pipeline; - VkResult result = d.vkCreateGraphicsPipelines( + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), 1, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &pipeline ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + reinterpret_cast( &pipeline ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - return ResultValue( static_cast( result ), pipeline ); + return ResultValue( result, std::move( pipeline ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -2794,33 +3166,35 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function requires " ); +# endif std::vector pipelines( createInfos.size() ); - VkResult result = d.vkCreateGraphicsPipelines( + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size(), reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( pipelines.data() ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines; uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { 
uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); } - return ResultValue, PipelineAllocator>>( - static_cast( result ), std::move( uniquePipelines ) ); + return ResultValue, PipelineAllocator>>( result, std::move( uniquePipelines ) ); } - template >::value, int>::type> + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -2829,27 +3203,29 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function requires " ); +# endif std::vector pipelines( createInfos.size() ); - VkResult result = d.vkCreateGraphicsPipelines( + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), createInfos.size(), reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( pipelines.data() ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); } - return ResultValue, PipelineAllocator>>( - static_cast( result ), std::move( uniquePipelines ) ); + return ResultValue, PipelineAllocator>>( result, std::move( uniquePipelines ) ); } template @@ -2860,25 +3236,27 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateGraphicsPipelines && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::Pipeline pipeline; - VkResult result = d.vkCreateGraphicsPipelines( + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateGraphicsPipelines( m_device, static_cast( pipelineCache ), 1, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &pipeline ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + reinterpret_cast( &pipeline ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( - static_cast( result ), - UniqueHandle( pipeline, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( pipeline, detail::ObjectDestroy( 
*this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, @@ -2889,7 +3267,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateComputePipelines( m_device, + return static_cast( d.vkCreateComputePipelines( static_cast( m_device ), static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), @@ -2898,7 +3276,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -2906,26 +3286,28 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function requires " ); +# endif std::vector pipelines( createInfos.size() ); - VkResult result = d.vkCreateComputePipelines( + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size(), reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( pipelines.data() ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - return ResultValue>( static_cast( result ), pipelines ); + return ResultValue>( result, std::move( pipelines ) ); } template ::value, int>::type> + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -2934,20 +3316,23 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function requires " ); +# endif std::vector pipelines( createInfos.size(), pipelineAllocator ); - VkResult result = d.vkCreateComputePipelines( + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size(), reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( pipelines.data() ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + 
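  // --------------------------------------------------------------------------
  // Usage sketch (illustrative only, not part of the generated header): the
  // enhanced pipeline-creation wrappers above return a ResultValue instead of
  // throwing, because ePipelineCompileRequiredEXT is a legitimate non-error
  // outcome. The names `device`, `cache`, `info` and this helper are
  // assumptions, created elsewhere; <vulkan/vulkan.hpp> is assumed included.
  inline vk::UniquePipeline makeGraphicsPipelineSketch( vk::Device device, vk::PipelineCache cache, vk::GraphicsPipelineCreateInfo const & info )
  {
    auto rv = device.createGraphicsPipelineUnique( cache, info );  // vk::ResultValue<vk::UniquePipeline>
    if ( ( rv.result != vk::Result::eSuccess ) && ( rv.result != vk::Result::ePipelineCompileRequiredEXT ) )
    {
      // With VULKAN_HPP_NO_EXCEPTIONS hard errors surface here; with exceptions
      // enabled they have already thrown vk::SystemError before this point.
      return {};
    }
    return std::move( rv.value );  // destroyed through detail::ObjectDestroy when it leaves scope
  }
  // --------------------------------------------------------------------------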
VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - return ResultValue>( static_cast( result ), pipelines ); + return ResultValue>( result, std::move( pipelines ) ); } template @@ -2958,24 +3343,30 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::Pipeline pipeline; - VkResult result = d.vkCreateComputePipelines( + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), 1, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &pipeline ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + reinterpret_cast( &pipeline ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - return ResultValue( static_cast( result ), pipeline ); + return ResultValue( result, std::move( pipeline ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -2983,33 +3374,35 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function requires " ); +# endif std::vector pipelines( createInfos.size() ); - VkResult result = d.vkCreateComputePipelines( + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size(), reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( pipelines.data() ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines; uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); } - return ResultValue, PipelineAllocator>>( - static_cast( result ), std::move( uniquePipelines ) ); + return ResultValue, PipelineAllocator>>( result, std::move( uniquePipelines ) ); } - template >::value, int>::type> + template < + 
typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -3018,27 +3411,29 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function requires " ); +# endif std::vector pipelines( createInfos.size() ); - VkResult result = d.vkCreateComputePipelines( + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), createInfos.size(), reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( pipelines.data() ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); } - return ResultValue, PipelineAllocator>>( - static_cast( result ), std::move( uniquePipelines ) ); + return ResultValue, PipelineAllocator>>( result, std::move( uniquePipelines ) ); } template @@ -3049,25 +3444,27 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateComputePipelines && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::Pipeline pipeline; - VkResult result = d.vkCreateComputePipelines( + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateComputePipelines( m_device, static_cast( pipelineCache ), 1, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &pipeline ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + reinterpret_cast( &pipeline ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( - static_cast( result ), - UniqueHandle( pipeline, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( pipeline, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, @@ -3075,7 
+3472,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipeline( + static_cast( m_device ), static_cast( pipeline ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3085,12 +3483,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipeline && "Function requires " ); +# endif d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, @@ -3098,7 +3499,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipeline( + static_cast( m_device ), static_cast( pipeline ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3108,12 +3510,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipeline && "Function requires " ); +# endif d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo, @@ -3122,7 +3527,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreatePipelineLayout( m_device, + return static_cast( d.vkCreatePipelineLayout( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelineLayout ) ) ); @@ -3136,16 +3541,19 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineLayout && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreatePipelineLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &pipelineLayout ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" ); + reinterpret_cast( &pipelineLayout ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" ); - return createResultValueType( static_cast( result ), pipelineLayout ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineLayout ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -3156,21 +3564,23 @@ namespace VULKAN_HPP_NAMESPACE Dispatch 
const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineLayout && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreatePipelineLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &pipelineLayout ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" ); + reinterpret_cast( &pipelineLayout ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" ); - return createResultValueType( - static_cast( result ), - UniqueHandle( pipelineLayout, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( pipelineLayout, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, @@ -3178,7 +3588,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipelineLayout( + static_cast( m_device ), static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3188,12 +3599,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipelineLayout && "Function requires " ); +# endif d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, @@ -3201,7 +3615,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipelineLayout( + static_cast( m_device ), static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3211,12 +3626,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipelineLayout && "Function requires " ); +# endif d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo, @@ -3225,7 +3643,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateSampler( m_device, + return static_cast( d.vkCreateSampler( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSampler ) ) ); @@ -3237,16 +3655,19 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSampler && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::Sampler sampler; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateSampler( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &sampler ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" ); + reinterpret_cast( &sampler ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" ); - return createResultValueType( static_cast( result ), sampler ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( sampler ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -3255,20 +3676,23 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional allocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSampler && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::Sampler sampler; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateSampler( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &sampler ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" ); + reinterpret_cast( &sampler ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" ); - return createResultValueType( static_cast( result ), - UniqueHandle( sampler, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( sampler, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, @@ -3276,7 +3700,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); + d.vkDestroySampler( static_cast( m_device ), static_cast( sampler ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3286,12 +3710,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySampler && "Function requires " ); +# endif d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( 
static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, @@ -3299,7 +3726,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); + d.vkDestroySampler( static_cast( m_device ), static_cast( sampler ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3309,12 +3736,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySampler && "Function requires " ); +# endif d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, @@ -3323,7 +3753,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDescriptorSetLayout( m_device, + return static_cast( d.vkCreateDescriptorSetLayout( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSetLayout ) ) ); @@ -3337,16 +3767,19 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorSetLayout && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; - VkResult result = d.vkCreateDescriptorSetLayout( + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &setLayout ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" ); + reinterpret_cast( &setLayout ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" ); - return createResultValueType( static_cast( result ), setLayout ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( setLayout ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -3357,21 +3790,23 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorSetLayout && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; - VkResult result = d.vkCreateDescriptorSetLayout( + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &setLayout ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" ); + reinterpret_cast( &setLayout ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( 
result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" ); - return createResultValueType( - static_cast( result ), - UniqueHandle( setLayout, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( setLayout, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, @@ -3379,8 +3814,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyDescriptorSetLayout( - m_device, static_cast( descriptorSetLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDescriptorSetLayout( static_cast( m_device ), + static_cast( descriptorSetLayout ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3390,13 +3826,16 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDescriptorSetLayout && "Function requires " ); +# endif d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, @@ -3404,8 +3843,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyDescriptorSetLayout( - m_device, static_cast( descriptorSetLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDescriptorSetLayout( static_cast( m_device ), + static_cast( descriptorSetLayout ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3415,13 +3855,16 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDescriptorSetLayout && "Function requires " ); +# endif d.vkDestroyDescriptorSetLayout( m_device, static_cast( descriptorSetLayout ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo, @@ -3430,7 +3873,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDescriptorPool( m_device, + return static_cast( d.vkCreateDescriptorPool( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDescriptorPool ) ) ); @@ -3444,16 +3887,19 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorPool 
&& "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDescriptorPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &descriptorPool ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" ); + reinterpret_cast( &descriptorPool ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" ); - return createResultValueType( static_cast( result ), descriptorPool ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorPool ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -3464,21 +3910,23 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorPool && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDescriptorPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &descriptorPool ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique" ); + reinterpret_cast( &descriptorPool ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique" ); - return createResultValueType( - static_cast( result ), - UniqueHandle( descriptorPool, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( descriptorPool, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, @@ -3486,7 +3934,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDescriptorPool( + static_cast( m_device ), static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3496,12 +3945,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDescriptorPool && "Function requires " ); +# endif d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, @@ -3509,7 +3961,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); + 
d.vkDestroyDescriptorPool( + static_cast( m_device ), static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3519,12 +3972,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDescriptorPool && "Function requires " ); +# endif d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template @@ -3533,8 +3989,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkResetDescriptorPool( m_device, static_cast( descriptorPool ), static_cast( flags ) ) ); + return static_cast( d.vkResetDescriptorPool( + static_cast( m_device ), static_cast( descriptorPool ), static_cast( flags ) ) ); } #else template @@ -3543,6 +3999,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkResetDescriptorPool && "Function requires " ); +# endif d.vkResetDescriptorPool( m_device, static_cast( descriptorPool ), static_cast( flags ) ); } @@ -3554,70 +4013,86 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkAllocateDescriptorSets( - m_device, reinterpret_cast( pAllocateInfo ), reinterpret_cast( pDescriptorSets ) ) ); + return static_cast( d.vkAllocateDescriptorSets( static_cast( m_device ), + reinterpret_cast( pAllocateInfo ), + reinterpret_cast( pDescriptorSets ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function requires " ); +# endif std::vector descriptorSets( allocateInfo.descriptorSetCount ); - VkResult result = d.vkAllocateDescriptorSets( - m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateDescriptorSets( + m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); - return createResultValueType( static_cast( result ), descriptorSets ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorSets ) ); } template ::value, int>::type> + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, DescriptorSetAllocator & 
descriptorSetAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function requires " ); +# endif std::vector descriptorSets( allocateInfo.descriptorSetCount, descriptorSetAllocator ); - VkResult result = d.vkAllocateDescriptorSets( - m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateDescriptorSets( + m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); - return createResultValueType( static_cast( result ), descriptorSets ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorSets ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template + template < + typename Dispatch, + typename DescriptorSetAllocator, + typename std::enable_if>::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, DescriptorSetAllocator>>::type Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function requires " ); +# endif std::vector descriptorSets( allocateInfo.descriptorSetCount ); - VkResult result = d.vkAllocateDescriptorSets( - m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateDescriptorSets( + m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); std::vector, DescriptorSetAllocator> uniqueDescriptorSets; uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); - PoolFree deleter( *this, allocateInfo.descriptorPool, d ); + detail::PoolFree deleter( *this, allocateInfo.descriptorPool, d ); for ( auto const & descriptorSet : descriptorSets ) { uniqueDescriptorSets.push_back( UniqueHandle( descriptorSet, deleter ) ); } - return createResultValueType( static_cast( result ), std::move( uniqueDescriptorSets ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueDescriptorSets ) ); } - template >::value, int>::type> + template < + typename Dispatch, + typename DescriptorSetAllocator, + typename std::enable_if>::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, DescriptorSetAllocator>>::type Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, @@ -3625,22 +4100,25 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateDescriptorSets && "Function requires " ); +# endif std::vector descriptorSets( allocateInfo.descriptorSetCount ); - 
VkResult result = d.vkAllocateDescriptorSets( - m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateDescriptorSets( + m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); std::vector, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator ); uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); - PoolFree deleter( *this, allocateInfo.descriptorPool, d ); + detail::PoolFree deleter( *this, allocateInfo.descriptorPool, d ); for ( auto const & descriptorSet : descriptorSets ) { uniqueDescriptorSets.push_back( UniqueHandle( descriptorSet, deleter ) ); } - return createResultValueType( static_cast( result ), std::move( uniqueDescriptorSets ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueDescriptorSets ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, @@ -3649,8 +4127,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkFreeDescriptorSets( - m_device, static_cast( descriptorPool ), descriptorSetCount, reinterpret_cast( pDescriptorSets ) ) ); + return static_cast( d.vkFreeDescriptorSets( static_cast( m_device ), + static_cast( descriptorPool ), + descriptorSetCount, + reinterpret_cast( pDescriptorSets ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3660,11 +4140,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkFreeDescriptorSets && "Function requires " ); +# endif d.vkFreeDescriptorSets( m_device, static_cast( descriptorPool ), descriptorSets.size(), reinterpret_cast( descriptorSets.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE Result( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, @@ -3673,8 +4156,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkFreeDescriptorSets( - m_device, static_cast( descriptorPool ), descriptorSetCount, reinterpret_cast( pDescriptorSets ) ) ); + return static_cast( d.vkFreeDescriptorSets( static_cast( m_device ), + static_cast( descriptorPool ), + descriptorSetCount, + reinterpret_cast( pDescriptorSets ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3684,11 +4169,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkFreeDescriptorSets && "Function requires " ); +# endif d.vkFreeDescriptorSets( m_device, static_cast( descriptorPool ), descriptorSets.size(), reinterpret_cast( 
descriptorSets.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, @@ -3698,7 +4186,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkUpdateDescriptorSets( m_device, + d.vkUpdateDescriptorSets( static_cast( m_device ), descriptorWriteCount, reinterpret_cast( pDescriptorWrites ), descriptorCopyCount, @@ -3713,6 +4201,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUpdateDescriptorSets && "Function requires " ); +# endif d.vkUpdateDescriptorSets( m_device, descriptorWrites.size(), @@ -3720,7 +4211,7 @@ namespace VULKAN_HPP_NAMESPACE descriptorCopies.size(), reinterpret_cast( descriptorCopies.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo, @@ -3729,7 +4220,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateFramebuffer( m_device, + return static_cast( d.vkCreateFramebuffer( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFramebuffer ) ) ); @@ -3743,16 +4234,19 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateFramebuffer && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateFramebuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &framebuffer ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" ); + reinterpret_cast( &framebuffer ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" ); - return createResultValueType( static_cast( result ), framebuffer ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( framebuffer ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -3763,21 +4257,23 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateFramebuffer && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateFramebuffer( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &framebuffer ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique" ); + reinterpret_cast( &framebuffer ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique" ); - return createResultValueType( - static_cast( result 
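  // --------------------------------------------------------------------------
  // Usage sketch (illustrative only, not part of the generated header): typical
  // use of the enhanced allocateDescriptorSetsUnique / updateDescriptorSets
  // wrappers above, assuming the default exceptions-enabled configuration.
  // `device`, `pool`, `layout`, `buffer`, `range` and this helper are assumptions.
  inline void writeUniformDescriptorSketch(
    vk::Device device, vk::DescriptorPool pool, vk::DescriptorSetLayout layout, vk::Buffer buffer, vk::DeviceSize range )
  {
    vk::DescriptorSetAllocateInfo allocInfo( pool, layout );
    // UniqueDescriptorSet uses the PoolFree deleter, so each set is handed back
    // to `pool` automatically when the vector is destroyed.
    std::vector<vk::UniqueDescriptorSet> sets = device.allocateDescriptorSetsUnique( allocInfo );

    vk::DescriptorBufferInfo bufferInfo( buffer, 0, range );
    vk::WriteDescriptorSet   write( *sets.front(), 0, 0, vk::DescriptorType::eUniformBuffer, {}, bufferInfo );
    device.updateDescriptorSets( write, nullptr );  // one write, no copies
  }
  // --------------------------------------------------------------------------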
), - UniqueHandle( framebuffer, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( framebuffer, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, @@ -3785,7 +4281,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( pAllocator ) ); + d.vkDestroyFramebuffer( + static_cast( m_device ), static_cast( framebuffer ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3795,12 +4292,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyFramebuffer && "Function requires " ); +# endif d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, @@ -3808,7 +4308,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( pAllocator ) ); + d.vkDestroyFramebuffer( + static_cast( m_device ), static_cast( framebuffer ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3818,12 +4319,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyFramebuffer && "Function requires " ); +# endif d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo, @@ -3832,7 +4336,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateRenderPass( m_device, + return static_cast( d.vkCreateRenderPass( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pRenderPass ) ) ); @@ -3846,16 +4350,19 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRenderPass && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::RenderPass renderPass; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRenderPass( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &renderPass ) ); - resultCheck( 
static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" ); + reinterpret_cast( &renderPass ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" ); - return createResultValueType( static_cast( result ), renderPass ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -3866,21 +4373,23 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRenderPass && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::RenderPass renderPass; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRenderPass( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &renderPass ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique" ); + reinterpret_cast( &renderPass ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique" ); - return createResultValueType( - static_cast( result ), - UniqueHandle( renderPass, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( renderPass, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, @@ -3888,7 +4397,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( pAllocator ) ); + d.vkDestroyRenderPass( + static_cast( m_device ), static_cast( renderPass ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3898,12 +4408,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyRenderPass && "Function requires " ); +# endif d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, @@ -3911,7 +4424,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( pAllocator ) ); + d.vkDestroyRenderPass( + static_cast( m_device ), static_cast( renderPass ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3921,12 +4435,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyRenderPass && "Function requires " ); +# endif 
d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, @@ -3934,7 +4451,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetRenderAreaGranularity( m_device, static_cast( renderPass ), reinterpret_cast( pGranularity ) ); + d.vkGetRenderAreaGranularity( static_cast( m_device ), static_cast( renderPass ), reinterpret_cast( pGranularity ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3943,13 +4460,16 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRenderAreaGranularity && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::Extent2D granularity; d.vkGetRenderAreaGranularity( m_device, static_cast( renderPass ), reinterpret_cast( &granularity ) ); return granularity; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo, @@ -3958,7 +4478,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateCommandPool( m_device, + return static_cast( d.vkCreateCommandPool( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pCommandPool ) ) ); @@ -3972,16 +4492,19 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCommandPool && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::CommandPool commandPool; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateCommandPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &commandPool ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" ); + reinterpret_cast( &commandPool ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" ); - return createResultValueType( static_cast( result ), commandPool ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandPool ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -3992,21 +4515,23 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCommandPool && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::CommandPool commandPool; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateCommandPool( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &commandPool ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" ); + reinterpret_cast( &commandPool 
) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" ); - return createResultValueType( - static_cast( result ), - UniqueHandle( commandPool, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( commandPool, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, @@ -4014,7 +4539,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( pAllocator ) ); + d.vkDestroyCommandPool( + static_cast( m_device ), static_cast( commandPool ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4024,12 +4550,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCommandPool && "Function requires " ); +# endif d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, @@ -4037,7 +4566,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( pAllocator ) ); + d.vkDestroyCommandPool( + static_cast( m_device ), static_cast( commandPool ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4047,12 +4577,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCommandPool && "Function requires " ); +# endif d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template @@ -4061,7 +4594,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkResetCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ) ); + return static_cast( + d.vkResetCommandPool( static_cast( m_device ), static_cast( commandPool ), static_cast( flags ) ) ); } #else template @@ -4069,11 +4603,15 @@ namespace VULKAN_HPP_NAMESPACE Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkResetCommandPool && "Function requires " ); +# endif - VkResult result = d.vkResetCommandPool( 
m_device, static_cast( commandPool ), static_cast( flags ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkResetCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -4083,70 +4621,86 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkAllocateCommandBuffers( - m_device, reinterpret_cast( pAllocateInfo ), reinterpret_cast( pCommandBuffers ) ) ); + return static_cast( d.vkAllocateCommandBuffers( static_cast( m_device ), + reinterpret_cast( pAllocateInfo ), + reinterpret_cast( pCommandBuffers ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function requires " ); +# endif std::vector commandBuffers( allocateInfo.commandBufferCount ); - VkResult result = d.vkAllocateCommandBuffers( - m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateCommandBuffers( + m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); - return createResultValueType( static_cast( result ), commandBuffers ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandBuffers ) ); } template ::value, int>::type> + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, CommandBufferAllocator & commandBufferAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function requires " ); +# endif std::vector commandBuffers( allocateInfo.commandBufferCount, commandBufferAllocator ); - VkResult result = d.vkAllocateCommandBuffers( - m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateCommandBuffers( + m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); - return createResultValueType( static_cast( result ), commandBuffers ); + return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandBuffers ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template + template < + typename Dispatch, + typename CommandBufferAllocator, + typename std::enable_if>::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, CommandBufferAllocator>>::type Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function requires " ); +# endif std::vector commandBuffers( allocateInfo.commandBufferCount ); - VkResult result = d.vkAllocateCommandBuffers( - m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateCommandBuffers( + m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); std::vector, CommandBufferAllocator> uniqueCommandBuffers; uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); - PoolFree deleter( *this, allocateInfo.commandPool, d ); + detail::PoolFree deleter( *this, allocateInfo.commandPool, d ); for ( auto const & commandBuffer : commandBuffers ) { uniqueCommandBuffers.push_back( UniqueHandle( commandBuffer, deleter ) ); } - return createResultValueType( static_cast( result ), std::move( uniqueCommandBuffers ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueCommandBuffers ) ); } - template >::value, int>::type> + template < + typename Dispatch, + typename CommandBufferAllocator, + typename std::enable_if>::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, CommandBufferAllocator>>::type Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, @@ -4154,22 +4708,25 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAllocateCommandBuffers && "Function requires " ); +# endif std::vector commandBuffers( allocateInfo.commandBufferCount ); - VkResult result = d.vkAllocateCommandBuffers( - m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateCommandBuffers( + m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); std::vector, CommandBufferAllocator> uniqueCommandBuffers( commandBufferAllocator ); uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); - PoolFree deleter( *this, allocateInfo.commandPool, d ); + detail::PoolFree deleter( *this, allocateInfo.commandPool, d ); for ( auto const & commandBuffer : commandBuffers ) { uniqueCommandBuffers.push_back( UniqueHandle( commandBuffer, deleter ) ); } - return 
createResultValueType( static_cast( result ), std::move( uniqueCommandBuffers ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueCommandBuffers ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, @@ -4178,8 +4735,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkFreeCommandBuffers( - m_device, static_cast( commandPool ), commandBufferCount, reinterpret_cast( pCommandBuffers ) ); + d.vkFreeCommandBuffers( static_cast( m_device ), + static_cast( commandPool ), + commandBufferCount, + reinterpret_cast( pCommandBuffers ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4189,11 +4748,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkFreeCommandBuffers && "Function requires " ); +# endif d.vkFreeCommandBuffers( m_device, static_cast( commandPool ), commandBuffers.size(), reinterpret_cast( commandBuffers.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, @@ -4202,8 +4764,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkFreeCommandBuffers( - m_device, static_cast( commandPool ), commandBufferCount, reinterpret_cast( pCommandBuffers ) ); + d.vkFreeCommandBuffers( static_cast( m_device ), + static_cast( commandPool ), + commandBufferCount, + reinterpret_cast( pCommandBuffers ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4213,18 +4777,22 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkFreeCommandBuffers && "Function requires " ); +# endif d.vkFreeCommandBuffers( m_device, static_cast( commandPool ), commandBuffers.size(), reinterpret_cast( commandBuffers.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( pBeginInfo ) ) ); + return static_cast( + d.vkBeginCommandBuffer( static_cast( m_commandBuffer ), reinterpret_cast( pBeginInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4233,31 +4801,38 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBeginCommandBuffer && "Function requires " ); +# endif - VkResult result = d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( &beginInfo ) ); 
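// ---------------------------------------------------------------------------
// Illustrative sketch only -- not part of the generated header or of this diff.
// The hunks above (vkAllocateCommandBuffers / vkBeginCommandBuffer and friends) add two
// things: an assert that the dynamically loaded entry point is non-null when
// VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1, and error handling routed through
// VULKAN_HPP_NAMESPACE::detail::resultCheck / detail::createResultValueType instead of
// raw VkResult. A minimal caller-side setup under those assumptions follows; the
// function and variable names are hypothetical.
#include <vulkan/vulkan.hpp>

// exactly one translation unit must provide storage for the default dynamic dispatcher
VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE

void recordEmptyCommandBuffer( vk::Instance instance, vk::Device device, vk::CommandBuffer commandBuffer )
{
  // assumes the dispatcher already received vkGetInstanceProcAddr (e.g. via an earlier
  // VULKAN_HPP_DEFAULT_DISPATCHER.init call before instance creation); these calls then
  // load the instance- and device-level pointers that the new asserts test for
  VULKAN_HPP_DEFAULT_DISPATCHER.init( instance );
  VULKAN_HPP_DEFAULT_DISPATCHER.init( device );

  // enhanced mode: begin()/end() return void and throw vk::SystemError on failure,
  // which is the behavior the detail::resultCheck calls in the wrappers implement
  vk::CommandBufferBeginInfo beginInfo{ vk::CommandBufferUsageFlagBits::eOneTimeSubmit };
  commandBuffer.begin( beginInfo );
  commandBuffer.end();
}
// ---------------------------------------------------------------------------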
- resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( &beginInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkEndCommandBuffer( m_commandBuffer ) ); + return static_cast( d.vkEndCommandBuffer( static_cast( m_commandBuffer ) ) ); } #else template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::end( Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEndCommandBuffer && "Function requires " ); +# endif - VkResult result = d.vkEndCommandBuffer( m_commandBuffer ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkEndCommandBuffer( m_commandBuffer ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -4267,18 +4842,22 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkResetCommandBuffer( m_commandBuffer, static_cast( flags ) ) ); + return static_cast( d.vkResetCommandBuffer( static_cast( m_commandBuffer ), static_cast( flags ) ) ); } #else template VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkResetCommandBuffer && "Function requires " ); +# endif - VkResult result = d.vkResetCommandBuffer( m_commandBuffer, static_cast( flags ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkResetCommandBuffer( m_commandBuffer, static_cast( flags ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -4288,7 +4867,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindPipeline( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + d.vkCmdBindPipeline( + static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( pipeline ) ); } template @@ -4298,7 +4878,7 @@ 
namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pViewports ) ); + d.vkCmdSetViewport( static_cast( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast( pViewports ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4308,10 +4888,13 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetViewport && "Function requires " ); +# endif d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast( viewports.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, @@ -4320,7 +4903,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast( pScissors ) ); + d.vkCmdSetScissor( static_cast( m_commandBuffer ), firstScissor, scissorCount, reinterpret_cast( pScissors ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4330,16 +4913,19 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetScissor && "Function requires " ); +# endif d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast( scissors.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetLineWidth( m_commandBuffer, lineWidth ); + d.vkCmdSetLineWidth( static_cast( m_commandBuffer ), lineWidth ); } template @@ -4347,21 +4933,21 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + d.vkCmdSetDepthBias( static_cast( m_commandBuffer ), depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); } template VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants ); + d.vkCmdSetBlendConstants( static_cast( m_commandBuffer ), blendConstants ); } template VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds ); + d.vkCmdSetDepthBounds( static_cast( m_commandBuffer ), minDepthBounds, maxDepthBounds ); } template @@ -4369,7 +4955,7 @@ namespace VULKAN_HPP_NAMESPACE 
CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast( faceMask ), compareMask ); + d.vkCmdSetStencilCompareMask( static_cast( m_commandBuffer ), static_cast( faceMask ), compareMask ); } template @@ -4377,7 +4963,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast( faceMask ), writeMask ); + d.vkCmdSetStencilWriteMask( static_cast( m_commandBuffer ), static_cast( faceMask ), writeMask ); } template @@ -4385,7 +4971,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetStencilReference( m_commandBuffer, static_cast( faceMask ), reference ); + d.vkCmdSetStencilReference( static_cast( m_commandBuffer ), static_cast( faceMask ), reference ); } template @@ -4399,7 +4985,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindDescriptorSets( m_commandBuffer, + d.vkCmdBindDescriptorSets( static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( layout ), firstSet, @@ -4419,6 +5005,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorSets && "Function requires " ); +# endif d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast( pipelineBindPoint ), @@ -4429,7 +5018,7 @@ namespace VULKAN_HPP_NAMESPACE dynamicOffsets.size(), dynamicOffsets.data() ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, @@ -4438,7 +5027,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( indexType ) ); + d.vkCmdBindIndexBuffer( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( indexType ) ); } template @@ -4449,8 +5041,11 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindVertexBuffers( - m_commandBuffer, firstBinding, bindingCount, reinterpret_cast( pBuffers ), reinterpret_cast( pOffsets ) ); + d.vkCmdBindVertexBuffers( static_cast( m_commandBuffer ), + firstBinding, + bindingCount, + reinterpret_cast( pBuffers ), + reinterpret_cast( pOffsets ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4461,6 +5056,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 
== 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindVertexBuffers && "Function requires " ); +# endif # ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); # else @@ -4476,14 +5074,14 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( buffers.data() ), reinterpret_cast( offsets.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); + d.vkCmdDraw( static_cast( m_commandBuffer ), vertexCount, instanceCount, firstVertex, firstInstance ); } template @@ -4495,7 +5093,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + d.vkCmdDrawIndexed( static_cast( m_commandBuffer ), indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); } template @@ -4506,7 +5104,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); + d.vkCmdDrawIndirect( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); } template @@ -4517,7 +5116,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); + d.vkCmdDrawIndexedIndirect( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); } template @@ -4525,7 +5125,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); + d.vkCmdDispatch( static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); } template @@ -4534,7 +5134,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDispatchIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ) ); + d.vkCmdDispatchIndirect( static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ) ); } template @@ -4545,7 +5145,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyBuffer( m_commandBuffer, + d.vkCmdCopyBuffer( static_cast( m_commandBuffer ), static_cast( srcBuffer ), static_cast( dstBuffer ), regionCount, @@ -4560,6 +5160,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyBuffer && "Function requires " ); +# endif 
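// ---------------------------------------------------------------------------
// Illustrative sketch only -- not part of the generated header or of this diff.
// The recording wrappers above forward ArrayProxy arguments (viewports, scissors,
// vertex buffers, copy regions) to the C entry points after the new explicit handle
// casts, and the vkCmdBindVertexBuffers overload asserts that the buffer and offset
// proxies have matching sizes. Assuming a command buffer in the recording state, the
// enhanced-mode overloads can be driven as below; the pipeline, buffer, and extent
// values are hypothetical.
#include <vulkan/vulkan.hpp>

void recordDraw( vk::CommandBuffer commandBuffer, vk::Pipeline pipeline, vk::Buffer vertexBuffer )
{
  commandBuffer.bindPipeline( vk::PipelineBindPoint::eGraphics, pipeline );
  commandBuffer.setViewport( 0, vk::Viewport( 0.0f, 0.0f, 640.0f, 480.0f, 0.0f, 1.0f ) );
  commandBuffer.setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), vk::Extent2D( 640, 480 ) ) );
  // one buffer, one offset: the two ArrayProxys must be the same size
  commandBuffer.bindVertexBuffers( 0, { vertexBuffer }, { vk::DeviceSize( 0 ) } );
  commandBuffer.draw( 3, 1, 0, 0 );  // three vertices, one instance
}
// ---------------------------------------------------------------------------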
d.vkCmdCopyBuffer( m_commandBuffer, static_cast( srcBuffer ), @@ -4567,7 +5170,7 @@ namespace VULKAN_HPP_NAMESPACE regions.size(), reinterpret_cast( regions.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, @@ -4579,7 +5182,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyImage( m_commandBuffer, + d.vkCmdCopyImage( static_cast( m_commandBuffer ), static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), @@ -4598,6 +5201,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyImage && "Function requires " ); +# endif d.vkCmdCopyImage( m_commandBuffer, static_cast( srcImage ), @@ -4607,7 +5213,7 @@ namespace VULKAN_HPP_NAMESPACE regions.size(), reinterpret_cast( regions.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, @@ -4620,7 +5226,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBlitImage( m_commandBuffer, + d.vkCmdBlitImage( static_cast( m_commandBuffer ), static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), @@ -4641,6 +5247,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBlitImage && "Function requires " ); +# endif d.vkCmdBlitImage( m_commandBuffer, static_cast( srcImage ), @@ -4651,7 +5260,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( regions.data() ), static_cast( filter ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, @@ -4662,7 +5271,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyBufferToImage( m_commandBuffer, + d.vkCmdCopyBufferToImage( static_cast( m_commandBuffer ), static_cast( srcBuffer ), static_cast( dstImage ), static_cast( dstImageLayout ), @@ -4679,6 +5288,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyBufferToImage && "Function requires " ); +# endif d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast( srcBuffer ), @@ -4687,7 +5299,7 @@ namespace VULKAN_HPP_NAMESPACE regions.size(), reinterpret_cast( regions.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, @@ -4698,7 +5310,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - 
d.vkCmdCopyImageToBuffer( m_commandBuffer, + d.vkCmdCopyImageToBuffer( static_cast( m_commandBuffer ), static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstBuffer ), @@ -4715,6 +5327,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyImageToBuffer && "Function requires " ); +# endif d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast( srcImage ), @@ -4723,7 +5338,7 @@ namespace VULKAN_HPP_NAMESPACE regions.size(), reinterpret_cast( regions.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, @@ -4733,8 +5348,11 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdUpdateBuffer( - m_commandBuffer, static_cast( dstBuffer ), static_cast( dstOffset ), static_cast( dataSize ), pData ); + d.vkCmdUpdateBuffer( static_cast( m_commandBuffer ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( dataSize ), + pData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4745,6 +5363,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdUpdateBuffer && "Function requires " ); +# endif d.vkCmdUpdateBuffer( m_commandBuffer, static_cast( dstBuffer ), @@ -4752,7 +5373,7 @@ namespace VULKAN_HPP_NAMESPACE data.size() * sizeof( DataType ), reinterpret_cast( data.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, @@ -4762,7 +5383,11 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdFillBuffer( m_commandBuffer, static_cast( dstBuffer ), static_cast( dstOffset ), static_cast( size ), data ); + d.vkCmdFillBuffer( static_cast( m_commandBuffer ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( size ), + data ); } template @@ -4774,7 +5399,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdClearColorImage( m_commandBuffer, + d.vkCmdClearColorImage( static_cast( m_commandBuffer ), static_cast( image ), static_cast( imageLayout ), reinterpret_cast( pColor ), @@ -4791,6 +5416,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdClearColorImage && "Function requires " ); +# endif d.vkCmdClearColorImage( m_commandBuffer, static_cast( image ), @@ -4799,7 +5427,7 @@ namespace VULKAN_HPP_NAMESPACE ranges.size(), reinterpret_cast( ranges.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, @@ -4810,7 +5438,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdClearDepthStencilImage( m_commandBuffer, + d.vkCmdClearDepthStencilImage( static_cast( m_commandBuffer ), static_cast( image ), static_cast( imageLayout ), reinterpret_cast( pDepthStencil ), @@ -4828,6 +5456,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdClearDepthStencilImage && "Function requires " ); +# endif d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast( image ), @@ -4836,7 +5467,7 @@ namespace VULKAN_HPP_NAMESPACE ranges.size(), reinterpret_cast( ranges.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, @@ -4846,7 +5477,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdClearAttachments( m_commandBuffer, + d.vkCmdClearAttachments( static_cast( m_commandBuffer ), attachmentCount, reinterpret_cast( pAttachments ), rectCount, @@ -4860,6 +5491,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdClearAttachments && "Function requires " ); +# endif d.vkCmdClearAttachments( m_commandBuffer, attachments.size(), @@ -4867,7 +5501,7 @@ namespace VULKAN_HPP_NAMESPACE rects.size(), reinterpret_cast( rects.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, @@ -4879,7 +5513,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdResolveImage( m_commandBuffer, + d.vkCmdResolveImage( static_cast( m_commandBuffer ), static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), @@ -4898,6 +5532,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdResolveImage && "Function requires " ); +# endif d.vkCmdResolveImage( m_commandBuffer, static_cast( srcImage ), @@ -4907,7 +5544,7 @@ namespace VULKAN_HPP_NAMESPACE regions.size(), reinterpret_cast( regions.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, @@ -4915,7 +5552,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + d.vkCmdSetEvent( static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); } template @@ -4924,7 +5561,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdResetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask 
) ); + d.vkCmdResetEvent( static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); } template @@ -4941,7 +5578,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWaitEvents( m_commandBuffer, + d.vkCmdWaitEvents( static_cast( m_commandBuffer ), eventCount, reinterpret_cast( pEvents ), static_cast( srcStageMask ), @@ -4966,6 +5603,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdWaitEvents && "Function requires " ); +# endif d.vkCmdWaitEvents( m_commandBuffer, events.size(), @@ -4979,7 +5619,7 @@ namespace VULKAN_HPP_NAMESPACE imageMemoryBarriers.size(), reinterpret_cast( imageMemoryBarriers.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, @@ -4994,7 +5634,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPipelineBarrier( m_commandBuffer, + d.vkCmdPipelineBarrier( static_cast( m_commandBuffer ), static_cast( srcStageMask ), static_cast( dstStageMask ), static_cast( dependencyFlags ), @@ -5018,6 +5658,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPipelineBarrier && "Function requires " ); +# endif d.vkCmdPipelineBarrier( m_commandBuffer, static_cast( srcStageMask ), @@ -5030,7 +5673,7 @@ namespace VULKAN_HPP_NAMESPACE imageMemoryBarriers.size(), reinterpret_cast( imageMemoryBarriers.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, @@ -5039,14 +5682,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginQuery( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ) ); + d.vkCmdBeginQuery( + static_cast( m_commandBuffer ), static_cast( queryPool ), query, static_cast( flags ) ); } template VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndQuery( m_commandBuffer, static_cast( queryPool ), query ); + d.vkCmdEndQuery( static_cast( m_commandBuffer ), static_cast( queryPool ), query ); } template @@ -5056,7 +5700,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdResetQueryPool( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount ); + d.vkCmdResetQueryPool( static_cast( m_commandBuffer ), static_cast( queryPool ), firstQuery, queryCount ); } template @@ -5066,7 +5710,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWriteTimestamp( 
m_commandBuffer, static_cast( pipelineStage ), static_cast( queryPool ), query ); + d.vkCmdWriteTimestamp( + static_cast( m_commandBuffer ), static_cast( pipelineStage ), static_cast( queryPool ), query ); } template @@ -5080,7 +5725,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyQueryPoolResults( m_commandBuffer, + d.vkCmdCopyQueryPoolResults( static_cast( m_commandBuffer ), static_cast( queryPool ), firstQuery, queryCount, @@ -5099,7 +5744,12 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPushConstants( m_commandBuffer, static_cast( layout ), static_cast( stageFlags ), offset, size, pValues ); + d.vkCmdPushConstants( static_cast( m_commandBuffer ), + static_cast( layout ), + static_cast( stageFlags ), + offset, + size, + pValues ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5111,6 +5761,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushConstants && "Function requires " ); +# endif d.vkCmdPushConstants( m_commandBuffer, static_cast( layout ), @@ -5119,7 +5772,7 @@ namespace VULKAN_HPP_NAMESPACE values.size() * sizeof( ValuesType ), reinterpret_cast( values.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, @@ -5127,7 +5780,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast( pRenderPassBegin ), static_cast( contents ) ); + d.vkCmdBeginRenderPass( static_cast( m_commandBuffer ), + reinterpret_cast( pRenderPassBegin ), + static_cast( contents ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5137,23 +5792,26 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginRenderPass && "Function requires " ); +# endif d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast( &renderPassBegin ), static_cast( contents ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdNextSubpass( m_commandBuffer, static_cast( contents ) ); + d.vkCmdNextSubpass( static_cast( m_commandBuffer ), static_cast( contents ) ); } template VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndRenderPass( m_commandBuffer ); + d.vkCmdEndRenderPass( static_cast( m_commandBuffer ) ); } template @@ -5162,7 +5820,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdExecuteCommands( m_commandBuffer, 
commandBufferCount, reinterpret_cast( pCommandBuffers ) ); + d.vkCmdExecuteCommands( static_cast( m_commandBuffer ), commandBufferCount, reinterpret_cast( pCommandBuffers ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5171,10 +5829,13 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdExecuteCommands && "Function requires " ); +# endif d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size(), reinterpret_cast( commandBuffers.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_VERSION_1_1 === @@ -5190,14 +5851,17 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type enumerateInstanceVersion( Dispatch const & d ) { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumerateInstanceVersion && "Function requires " ); +# endif - uint32_t apiVersion; - VkResult result = d.vkEnumerateInstanceVersion( &apiVersion ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" ); + uint32_t apiVersion; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkEnumerateInstanceVersion( &apiVersion ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" ); - return createResultValueType( static_cast( result ), apiVersion ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( apiVersion ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, @@ -5205,7 +5869,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + return static_cast( + d.vkBindBufferMemory2( static_cast( m_device ), bindInfoCount, reinterpret_cast( pBindInfos ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5214,13 +5879,17 @@ namespace VULKAN_HPP_NAMESPACE Device::bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindBufferMemory2 && "Function requires or " ); +# endif - VkResult result = d.vkBindBufferMemory2( m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkBindBufferMemory2( m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, @@ -5228,7 +5897,8 @@ namespace 
VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + return static_cast( + d.vkBindImageMemory2( static_cast( m_device ), bindInfoCount, reinterpret_cast( pBindInfos ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5237,13 +5907,17 @@ namespace VULKAN_HPP_NAMESPACE Device::bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindImageMemory2 && "Function requires or " ); +# endif - VkResult result = d.vkBindImageMemory2( m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkBindImageMemory2( m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, @@ -5254,7 +5928,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetDeviceGroupPeerMemoryFeatures( - m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( pPeerMemoryFeatures ) ); + static_cast( m_device ), heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( pPeerMemoryFeatures ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5263,6 +5937,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceGroupPeerMemoryFeatures && + "Function requires or " ); +# endif VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; d.vkGetDeviceGroupPeerMemoryFeatures( @@ -5270,13 +5948,13 @@ namespace VULKAN_HPP_NAMESPACE return peerMemoryFeatures; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask ); + d.vkCmdSetDeviceMask( static_cast( m_commandBuffer ), deviceMask ); } template @@ -5289,7 +5967,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + d.vkCmdDispatchBase( static_cast( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); } template @@ -5299,73 +5977,87 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); - return static_cast( d.vkEnumeratePhysicalDeviceGroups( - m_instance, pPhysicalDeviceGroupCount, reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); + return static_cast( d.vkEnumeratePhysicalDeviceGroups( static_cast( m_instance ), + pPhysicalDeviceGroupCount, + reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroups && + "Function requires or " ); +# endif std::vector physicalDeviceGroupProperties; uint32_t physicalDeviceGroupCount; - VkResult result; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ); - if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount ) + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) { physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); - result = d.vkEnumeratePhysicalDeviceGroups( - m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( + m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) { physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); } - return createResultValueType( static_cast( result ), physicalDeviceGroupProperties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); } template ::value, int>::type> + typename std::enable_if< + std::is_same::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroups && + "Function requires or " ); +# endif std::vector physicalDeviceGroupProperties( physicalDeviceGroupPropertiesAllocator ); - uint32_t physicalDeviceGroupCount; - VkResult result; + uint32_t physicalDeviceGroupCount; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ); - if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount ) + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount 
) { physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); - result = d.vkEnumeratePhysicalDeviceGroups( - m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( + m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) { physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); } - return createResultValueType( static_cast( result ), physicalDeviceGroupProperties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, @@ -5373,8 +6065,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageMemoryRequirements2( - m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + d.vkGetImageMemoryRequirements2( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5383,6 +6076,10 @@ namespace VULKAN_HPP_NAMESPACE Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2 && + "Function requires or " ); +# endif VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; d.vkGetImageMemoryRequirements2( @@ -5392,19 +6089,23 @@ namespace VULKAN_HPP_NAMESPACE } template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2 && + "Function requires or " ); +# endif - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); return structureChain; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const 
VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, @@ -5412,8 +6113,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetBufferMemoryRequirements2( - m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + d.vkGetBufferMemoryRequirements2( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5422,6 +6124,10 @@ namespace VULKAN_HPP_NAMESPACE Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2 && + "Function requires or " ); +# endif VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; d.vkGetBufferMemoryRequirements2( @@ -5431,19 +6137,23 @@ namespace VULKAN_HPP_NAMESPACE } template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2 && + "Function requires or " ); +# endif - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); return structureChain; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, @@ -5452,18 +6162,26 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageSparseMemoryRequirements2( m_device, + d.vkGetImageSparseMemoryRequirements2( static_cast( m_device ), reinterpret_cast( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast( pSparseMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2 && + "Function requires or " ); +# endif std::vector sparseMemoryRequirements; uint32_t sparseMemoryRequirementCount; @@ -5485,14 +6203,19 @@ namespace VULKAN_HPP_NAMESPACE template ::value, int>::type> + typename std::enable_if< + std::is_same::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, 
SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2 && + "Function requires or " ); +# endif std::vector sparseMemoryRequirements( sparseImageMemoryRequirements2Allocator ); @@ -5512,13 +6235,13 @@ namespace VULKAN_HPP_NAMESPACE } return sparseMemoryRequirements; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( pFeatures ) ); + d.vkGetPhysicalDeviceFeatures2( static_cast( m_physicalDevice ), reinterpret_cast( pFeatures ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5527,6 +6250,10 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2 && + "Function requires or " ); +# endif VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( &features ) ); @@ -5535,24 +6262,29 @@ namespace VULKAN_HPP_NAMESPACE } template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2 && + "Function requires or " ); +# endif - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get(); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get(); d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( &features ) ); return structureChain; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast( pProperties ) ); + d.vkGetPhysicalDeviceProperties2( static_cast( m_physicalDevice ), reinterpret_cast( pProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5561,6 +6293,10 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2 && + "Function requires or " ); +# endif VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast( &properties ) ); @@ 
-5569,17 +6305,22 @@ namespace VULKAN_HPP_NAMESPACE } template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2 && + "Function requires or " ); +# endif - StructureChain structureChain; + VULKAN_HPP_NAMESPACE::StructureChain structureChain; VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get(); d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast( &properties ) ); return structureChain; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, @@ -5587,7 +6328,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); + d.vkGetPhysicalDeviceFormatProperties2( + static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( pFormatProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5596,6 +6338,10 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2 && + "Function requires or " ); +# endif VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); @@ -5604,18 +6350,22 @@ namespace VULKAN_HPP_NAMESPACE } template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2 && + "Function requires or " ); +# endif - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get(); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get(); d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); return structureChain; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result @@ -5624,7 +6374,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( 
d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( static_cast( m_physicalDevice ), reinterpret_cast( pImageFormatInfo ), reinterpret_cast( pImageFormatProperties ) ) ); } @@ -5635,32 +6385,42 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2 && + "Function requires or " ); +# endif VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; - VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, - reinterpret_cast( &imageFormatInfo ), - reinterpret_cast( &imageFormatProperties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); - return createResultValueType( static_cast( result ), imageFormatProperties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); } template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2 && + "Function requires or " ); +# endif StructureChain structureChain; VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get(); - VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, - reinterpret_cast( &imageFormatInfo ), - reinterpret_cast( &imageFormatProperties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); - return createResultValueType( static_cast( result ), structureChain ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, @@ -5669,15 +6429,22 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetPhysicalDeviceQueueFamilyProperties2( - m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); + static_cast( m_physicalDevice ), pQueueFamilyPropertyCount, 
reinterpret_cast( pQueueFamilyProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename QueueFamilyProperties2Allocator, + typename Dispatch, + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 && + "Function requires or " ); +# endif std::vector queueFamilyProperties; uint32_t queueFamilyPropertyCount; @@ -5694,14 +6461,18 @@ namespace VULKAN_HPP_NAMESPACE return queueFamilyProperties; } - template ::value, int>::type> + template < + typename QueueFamilyProperties2Allocator, + typename Dispatch, + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 && + "Function requires or " ); +# endif std::vector queueFamilyProperties( queueFamilyProperties2Allocator ); uint32_t queueFamilyPropertyCount; @@ -5718,11 +6489,18 @@ namespace VULKAN_HPP_NAMESPACE return queueFamilyProperties; } - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 && + "Function requires or " ); +# endif std::vector structureChains; std::vector queueFamilyProperties; @@ -5752,12 +6530,15 @@ namespace VULKAN_HPP_NAMESPACE template ::value, int>::type> + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2 && + "Function requires or " ); +# endif std::vector structureChains( structureChainAllocator ); std::vector queueFamilyProperties; @@ -5783,14 +6564,15 @@ namespace VULKAN_HPP_NAMESPACE } return structureChains; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast( pMemoryProperties ) ); + d.vkGetPhysicalDeviceMemoryProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( pMemoryProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5799,6 +6581,10 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( 
d.vkGetPhysicalDeviceMemoryProperties2 && + "Function requires or " ); +# endif VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast( &memoryProperties ) ); @@ -5807,18 +6593,23 @@ namespace VULKAN_HPP_NAMESPACE } template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2 && + "Function requires or " ); +# endif - StructureChain structureChain; + VULKAN_HPP_NAMESPACE::StructureChain structureChain; VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = structureChain.template get(); d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast( &memoryProperties ) ); return structureChain; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, @@ -5827,18 +6618,26 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, + d.vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast( m_physicalDevice ), reinterpret_cast( pFormatInfo ), pPropertyCount, reinterpret_cast( pProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename SparseImageFormatProperties2Allocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2 && + "Function requires or " ); +# endif std::vector properties; uint32_t propertyCount; @@ -5858,16 +6657,21 @@ namespace VULKAN_HPP_NAMESPACE return properties; } - template ::value, int>::type> + template < + typename SparseImageFormatProperties2Allocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2 && + "Function requires or " ); +# endif std::vector properties( sparseImageFormatProperties2Allocator ); uint32_t propertyCount; @@ -5886,7 +6690,7 @@ namespace VULKAN_HPP_NAMESPACE } return properties; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void 
Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
@@ -5894,7 +6698,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+    d.vkTrimCommandPool( static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
   }
 
   template <typename Dispatch>
@@ -5903,7 +6707,7 @@ namespace VULKAN_HPP_NAMESPACE
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) );
+    d.vkGetDeviceQueue2( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -5912,13 +6716,16 @@ namespace VULKAN_HPP_NAMESPACE
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkGetDeviceQueue2 && "Function <vkGetDeviceQueue2> requires <VK_VERSION_1_1>" );
+#  endif
 
     VULKAN_HPP_NAMESPACE::Queue queue;
     d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) );
 
     return queue;
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 
   template <typename Dispatch>
   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
@@ -5928,7 +6735,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device,
+    return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( static_cast<VkDevice>( m_device ),
                                                                    reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
                                                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                    reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
@@ -5942,16 +6749,20 @@ namespace VULKAN_HPP_NAMESPACE
                                             Dispatch const & d ) const
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversion &&
+                       "Function <vkCreateSamplerYcbcrConversion> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" );
+#  endif
 
     VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
-    VkResult result = d.vkCreateSamplerYcbcrConversion(
+    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSamplerYcbcrConversion(
       m_device,
       reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
       reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
-      reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
-    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" );
+      reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" );
 
-    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), ycbcrConversion );
+    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( ycbcrConversion ) );
   }
 
 #  ifndef VULKAN_HPP_NO_SMART_HANDLE
@@ -5962,21 +6773,25 @@ namespace VULKAN_HPP_NAMESPACE
                                                   Dispatch const & d ) const
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversion &&
+                       "Function <vkCreateSamplerYcbcrConversion> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" );
+#  endif
 
     VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
-    VkResult result = d.vkCreateSamplerYcbcrConversion(
+    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateSamplerYcbcrConversion(
       m_device,
       reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
       reinterpret_cast<const VkAllocationCallbacks *>(
static_cast( allocator ) ), - reinterpret_cast( &ycbcrConversion ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique" ); + reinterpret_cast( &ycbcrConversion ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique" ); - return createResultValueType( - static_cast( result ), - UniqueHandle( ycbcrConversion, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, + UniqueHandle( ycbcrConversion, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, @@ -5984,8 +6799,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroySamplerYcbcrConversion( - m_device, static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); + d.vkDestroySamplerYcbcrConversion( static_cast( m_device ), + static_cast( ycbcrConversion ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5995,13 +6811,17 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySamplerYcbcrConversion && + "Function requires or " ); +# endif d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, @@ -6009,8 +6829,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroySamplerYcbcrConversion( - m_device, static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); + d.vkDestroySamplerYcbcrConversion( static_cast( m_device ), + static_cast( ycbcrConversion ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6020,13 +6841,17 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySamplerYcbcrConversion && + "Function requires or " ); +# endif d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result @@ -6036,7 +6861,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDescriptorUpdateTemplate( m_device, + return static_cast( d.vkCreateDescriptorUpdateTemplate( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDescriptorUpdateTemplate ) ) ); @@ 
-6050,16 +6875,20 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplate && + "Function requires or " ); +# endif VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; - VkResult result = d.vkCreateDescriptorUpdateTemplate( + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &descriptorUpdateTemplate ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" ); + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" ); - return createResultValueType( static_cast( result ), descriptorUpdateTemplate ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorUpdateTemplate ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -6070,21 +6899,25 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplate && + "Function requires or " ); +# endif VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; - VkResult result = d.vkCreateDescriptorUpdateTemplate( + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &descriptorUpdateTemplate ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique" ); + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique" ); - return createResultValueType( static_cast( result ), - UniqueHandle( - descriptorUpdateTemplate, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + descriptorUpdateTemplate, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, @@ -6092,8 +6925,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyDescriptorUpdateTemplate( - m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDescriptorUpdateTemplate( static_cast( m_device ), + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6103,13 +6937,17 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDescriptorUpdateTemplate && + "Function requires or " ); +# 
endif d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, @@ -6117,8 +6955,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyDescriptorUpdateTemplate( - m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDescriptorUpdateTemplate( static_cast( m_device ), + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6128,13 +6967,17 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDescriptorUpdateTemplate && + "Function requires or " ); +# endif d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, @@ -6143,8 +6986,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkUpdateDescriptorSetWithTemplate( - m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); + d.vkUpdateDescriptorSetWithTemplate( static_cast( m_device ), + static_cast( descriptorSet ), + static_cast( descriptorUpdateTemplate ), + pData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6155,13 +7000,17 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUpdateDescriptorSetWithTemplate && + "Function requires or " ); +# endif d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), reinterpret_cast( &data ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, @@ -6169,7 +7018,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, + d.vkGetPhysicalDeviceExternalBufferProperties( static_cast( m_physicalDevice ), reinterpret_cast( pExternalBufferInfo ), reinterpret_cast( pExternalBufferProperties ) ); } @@ -6181,6 +7030,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalBufferProperties && + "Function requires or " ); +# endif VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; 
d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, @@ -6189,7 +7042,7 @@ namespace VULKAN_HPP_NAMESPACE return externalBufferProperties; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, @@ -6197,7 +7050,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, + d.vkGetPhysicalDeviceExternalFenceProperties( static_cast( m_physicalDevice ), reinterpret_cast( pExternalFenceInfo ), reinterpret_cast( pExternalFenceProperties ) ); } @@ -6209,6 +7062,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalFenceProperties && + "Function requires or " ); +# endif VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, @@ -6217,7 +7074,7 @@ namespace VULKAN_HPP_NAMESPACE return externalFenceProperties; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void @@ -6226,7 +7083,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, + d.vkGetPhysicalDeviceExternalSemaphoreProperties( static_cast( m_physicalDevice ), reinterpret_cast( pExternalSemaphoreInfo ), reinterpret_cast( pExternalSemaphoreProperties ) ); } @@ -6238,6 +7095,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalSemaphoreProperties && + "Function requires or " ); +# endif VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, @@ -6246,7 +7107,7 @@ namespace VULKAN_HPP_NAMESPACE return externalSemaphoreProperties; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, @@ -6254,8 +7115,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDescriptorSetLayoutSupport( - m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pSupport ) ); + d.vkGetDescriptorSetLayoutSupport( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pSupport ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6265,6 +7127,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupport && "Function requires or " ); +# endif VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; 
d.vkGetDescriptorSetLayoutSupport( @@ -6274,20 +7139,23 @@ namespace VULKAN_HPP_NAMESPACE } template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupport && "Function requires or " ); +# endif - StructureChain structureChain; + VULKAN_HPP_NAMESPACE::StructureChain structureChain; VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get(); d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); return structureChain; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_VERSION_1_2 === @@ -6301,7 +7169,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndirectCount( m_commandBuffer, + d.vkCmdDrawIndirectCount( static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), @@ -6320,7 +7188,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, + d.vkCmdDrawIndexedIndirectCount( static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), @@ -6336,7 +7204,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateRenderPass2( m_device, + return static_cast( d.vkCreateRenderPass2( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pRenderPass ) ) ); @@ -6350,16 +7218,19 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRenderPass2 && "Function requires or " ); +# endif VULKAN_HPP_NAMESPACE::RenderPass renderPass; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRenderPass2( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &renderPass ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" ); + reinterpret_cast( &renderPass ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" ); - return createResultValueType( static_cast( result ), renderPass ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -6370,21 +7241,23 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRenderPass2 && "Function requires or " ); +# endif VULKAN_HPP_NAMESPACE::RenderPass renderPass; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( 
d.vkCreateRenderPass2( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &renderPass ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique" ); + reinterpret_cast( &renderPass ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique" ); - return createResultValueType( - static_cast( result ), - UniqueHandle( renderPass, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( renderPass, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, @@ -6392,8 +7265,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginRenderPass2( - m_commandBuffer, reinterpret_cast( pRenderPassBegin ), reinterpret_cast( pSubpassBeginInfo ) ); + d.vkCmdBeginRenderPass2( static_cast( m_commandBuffer ), + reinterpret_cast( pRenderPassBegin ), + reinterpret_cast( pSubpassBeginInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6403,11 +7277,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginRenderPass2 && "Function requires or " ); +# endif d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast( &renderPassBegin ), reinterpret_cast( &subpassBeginInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, @@ -6415,8 +7292,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdNextSubpass2( - m_commandBuffer, reinterpret_cast( pSubpassBeginInfo ), reinterpret_cast( pSubpassEndInfo ) ); + d.vkCmdNextSubpass2( static_cast( m_commandBuffer ), + reinterpret_cast( pSubpassBeginInfo ), + reinterpret_cast( pSubpassEndInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6426,18 +7304,21 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdNextSubpass2 && "Function requires or " ); +# endif d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast( &subpassBeginInfo ), reinterpret_cast( &subpassEndInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast( pSubpassEndInfo ) ); + d.vkCmdEndRenderPass2( static_cast( m_commandBuffer ), reinterpret_cast( pSubpassEndInfo ) ); } #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -6446,17 +7327,20 @@ namespace VULKAN_HPP_NAMESPACE
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdEndRenderPass2 && "Function <vkCmdEndRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
+#  endif
 
     d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 
   template <typename Dispatch>
   VULKAN_HPP_INLINE void
     Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+    d.vkResetQueryPool( static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
   }
 
   template <typename Dispatch>
@@ -6465,7 +7349,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
+    return static_cast<Result>( d.vkGetSemaphoreCounterValue( static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( semaphore ), pValue ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -6474,14 +7358,18 @@ namespace VULKAN_HPP_NAMESPACE
                                                                                           Dispatch const & d ) const
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkGetSemaphoreCounterValue && "Function <vkGetSemaphoreCounterValue> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
+#  endif
 
-    uint64_t value;
-    VkResult result = d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value );
-    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" );
+    uint64_t                     value;
+    VULKAN_HPP_NAMESPACE::Result result =
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" );
 
-    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), value );
+    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( value ) );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 
   template <typename Dispatch>
   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
@@ -6489,7 +7377,7 @@ namespace VULKAN_HPP_NAMESPACE
                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
+    return static_cast<Result>( d.vkWaitSemaphores( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -6498,22 +7386,25 @@ namespace VULKAN_HPP_NAMESPACE
     Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkWaitSemaphores && "Function <vkWaitSemaphores> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
+#  endif
 
-    VkResult result = d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
-    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
-                 VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores",
-                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+    VULKAN_HPP_NAMESPACE::Result result =
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck(
+      result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
 
     return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 
   template <typename Dispatch>
   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,
                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
+    return static_cast<Result>( d.vkSignalSemaphore( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -6522,20 +7413,25 @@ namespace VULKAN_HPP_NAMESPACE
     Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkSignalSemaphore && "Function <vkSignalSemaphore> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
+#  endif
 
-    VkResult result = d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) );
-    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
+    VULKAN_HPP_NAMESPACE::Result result =
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
 
-    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 
   template <typename Dispatch>
   VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
+    return static_cast<DeviceAddress>(
+      d.vkGetBufferDeviceAddress( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -6544,19 +7440,23 @@ namespace VULKAN_HPP_NAMESPACE
                                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkGetBufferDeviceAddress &&
+                       "Function <vkGetBufferDeviceAddress> requires <VK_EXT_buffer_device_address> or <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
+#  endif
 
     VkDeviceAddress result = d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
 
     return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 
   template <typename Dispatch>
   VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
+    return d.vkGetBufferOpaqueCaptureAddress( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
@@ -6565,19 +7465,24 @@
namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferOpaqueCaptureAddress && + "Function requires or " ); +# endif uint64_t result = d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast( &info ) ); return result; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast( pInfo ) ); + return d.vkGetDeviceMemoryOpaqueCaptureAddress( static_cast( m_device ), + reinterpret_cast( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6586,12 +7491,16 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceMemoryOpaqueCaptureAddress && + "Function requires or " ); +# endif uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast( &info ) ); return result; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_VERSION_1_3 === @@ -6601,73 +7510,86 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, pToolCount, reinterpret_cast( pToolProperties ) ) ); + return static_cast( d.vkGetPhysicalDeviceToolProperties( + static_cast( m_physicalDevice ), pToolCount, reinterpret_cast( pToolProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename PhysicalDeviceToolPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getToolProperties( Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolProperties && + "Function requires or " ); +# endif std::vector toolProperties; uint32_t toolCount; - VkResult result; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ); - if ( ( result == VK_SUCCESS ) && toolCount ) + result = static_cast( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) { toolProperties.resize( toolCount ); - result = - d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ); + result = static_cast( + d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); if ( toolCount < toolProperties.size() ) { toolProperties.resize( toolCount ); } - return createResultValueType( static_cast( result ), toolProperties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); } - template ::value, int>::type> + template < + typename PhysicalDeviceToolPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getToolProperties( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolProperties && + "Function requires or " ); +# endif std::vector toolProperties( physicalDeviceToolPropertiesAllocator ); - uint32_t toolCount; - VkResult result; + uint32_t toolCount; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ); - if ( ( result == VK_SUCCESS ) && toolCount ) + result = static_cast( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) { toolProperties.resize( toolCount ); - result = - d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ); + result = static_cast( + d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); if ( toolCount < toolProperties.size() ) { toolProperties.resize( toolCount ); } - return createResultValueType( static_cast( result ), toolProperties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, @@ -6676,7 +7598,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreatePrivateDataSlot( m_device, + return static_cast( d.vkCreatePrivateDataSlot( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pPrivateDataSlot ) ) ); @@ -6690,16 +7612,19 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlot && "Function requires or " ); +# endif VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreatePrivateDataSlot( m_device, reinterpret_cast( 
&createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &privateDataSlot ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlot" ); + reinterpret_cast( &privateDataSlot ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlot" ); - return createResultValueType( static_cast( result ), privateDataSlot ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( privateDataSlot ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -6710,21 +7635,24 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlot && "Function requires or " ); +# endif VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; - VkResult result = + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreatePrivateDataSlot( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &privateDataSlot ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotUnique" ); + reinterpret_cast( &privateDataSlot ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotUnique" ); - return createResultValueType( - static_cast( result ), - UniqueHandle( privateDataSlot, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, + UniqueHandle( privateDataSlot, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, @@ -6732,7 +7660,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPrivateDataSlot( m_device, static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPrivateDataSlot( + static_cast( m_device ), static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6742,13 +7671,16 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPrivateDataSlot && "Function requires or " ); +# endif d.vkDestroyPrivateDataSlot( m_device, static_cast( privateDataSlot ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, @@ -6756,7 +7688,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPrivateDataSlot( m_device, static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPrivateDataSlot( + static_cast( m_device ), static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6766,70 +7699,80 @@ namespace 
VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPrivateDataSlot && "Function requires or " ); +# endif d.vkDestroyPrivateDataSlot( m_device, static_cast( privateDataSlot ), reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkSetPrivateData( m_device, static_cast( objectType ), objectHandle, static_cast( privateDataSlot ), data ) ); + return static_cast( d.vkSetPrivateData( + static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); } #else template - VULKAN_HPP_INLINE typename ResultValueType::type Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, + VULKAN_HPP_INLINE typename ResultValueType::type Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetPrivateData && "Function requires or " ); +# endif - VkResult result = - d.vkSetPrivateData( m_device, static_cast( objectType ), objectHandle, static_cast( privateDataSlot ), data ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkSetPrivateData( m_device, static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, + VULKAN_HPP_INLINE void Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPrivateData( m_device, static_cast( objectType ), objectHandle, static_cast( privateDataSlot ), pData ); + d.vkGetPrivateData( + static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), pData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, 
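
// Sketch of the VK_VERSION_1_3 private-data wrappers above: create a slot, attach a 64-bit value
// to an object, read it back, destroy the slot. Assumes exceptions enabled; the helper name
// tagImage and the choice of an image as the tagged object are illustrative.
#include <cstdint>
#include <vulkan/vulkan.hpp>

uint64_t tagImage( vk::Device device, vk::Image image, uint64_t tag )
{
  vk::PrivateDataSlot slot = device.createPrivateDataSlot( vk::PrivateDataSlotCreateInfo{} );

  // vkSetPrivateData takes the raw handle value of the tagged object.
  uint64_t objectHandle = uint64_t( static_cast<VkImage>( image ) );

  device.setPrivateData( vk::ObjectType::eImage, objectHandle, slot, tag );
  uint64_t readBack = device.getPrivateData( vk::ObjectType::eImage, objectHandle, slot );

  device.destroyPrivateDataSlot( slot );
  return readBack;
}
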
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPrivateData && "Function requires or " ); +# endif uint64_t data; - d.vkGetPrivateData( m_device, static_cast( objectType ), objectHandle, static_cast( privateDataSlot ), &data ); + d.vkGetPrivateData( m_device, static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), &data ); return data; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, @@ -6837,7 +7780,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetEvent2( m_commandBuffer, static_cast( event ), reinterpret_cast( pDependencyInfo ) ); + d.vkCmdSetEvent2( + static_cast( m_commandBuffer ), static_cast( event ), reinterpret_cast( pDependencyInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6847,10 +7791,13 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetEvent2 && "Function requires or " ); +# endif d.vkCmdSetEvent2( m_commandBuffer, static_cast( event ), reinterpret_cast( &dependencyInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event, @@ -6858,7 +7805,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdResetEvent2( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + d.vkCmdResetEvent2( static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); } template @@ -6868,8 +7815,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWaitEvents2( - m_commandBuffer, eventCount, reinterpret_cast( pEvents ), reinterpret_cast( pDependencyInfos ) ); + d.vkCmdWaitEvents2( static_cast( m_commandBuffer ), + eventCount, + reinterpret_cast( pEvents ), + reinterpret_cast( pDependencyInfos ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6879,6 +7828,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdWaitEvents2 && "Function requires or " ); +# endif # ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); # else @@ -6893,14 +7845,14 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( events.data() ), reinterpret_cast( dependencyInfos.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast( pDependencyInfo ) ); + d.vkCmdPipelineBarrier2( static_cast( 
m_commandBuffer ), reinterpret_cast( pDependencyInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6909,10 +7861,13 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPipelineBarrier2 && "Function requires or " ); +# endif d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast( &dependencyInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, @@ -6921,7 +7876,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWriteTimestamp2( m_commandBuffer, static_cast( stage ), static_cast( queryPool ), query ); + d.vkCmdWriteTimestamp2( + static_cast( m_commandBuffer ), static_cast( stage ), static_cast( queryPool ), query ); } template @@ -6931,7 +7887,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkQueueSubmit2( m_queue, submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); + return static_cast( + d.vkQueueSubmit2( static_cast( m_queue ), submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6940,20 +7897,24 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueueSubmit2 && "Function requires or " ); +# endif - VkResult result = d.vkQueueSubmit2( m_queue, submits.size(), reinterpret_cast( submits.data() ), static_cast( fence ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkQueueSubmit2( m_queue, submits.size(), reinterpret_cast( submits.data() ), static_cast( fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast( pCopyBufferInfo ) ); + d.vkCmdCopyBuffer2( static_cast( m_commandBuffer ), reinterpret_cast( pCopyBufferInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6962,16 +7923,19 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyBuffer2 && "Function requires or " ); +# endif d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast( ©BufferInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template 
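
// Sketch of the synchronization2 entry points wrapped above (vkCmdPipelineBarrier2, vkQueueSubmit2):
// record an image layout transition through a DependencyInfo, then submit with SubmitInfo2.
// Assumes exceptions enabled and a command buffer ready to begin; the stages, accesses, and the
// helper name transitionToSampled are illustrative.
#include <vulkan/vulkan.hpp>

void transitionToSampled( vk::CommandBuffer cmd, vk::Queue queue, vk::Image image, vk::Fence fence )
{
  cmd.begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit ) );

  vk::ImageMemoryBarrier2 barrier;
  barrier.setSrcStageMask( vk::PipelineStageFlagBits2::eColorAttachmentOutput )
    .setSrcAccessMask( vk::AccessFlagBits2::eColorAttachmentWrite )
    .setDstStageMask( vk::PipelineStageFlagBits2::eFragmentShader )
    .setDstAccessMask( vk::AccessFlagBits2::eShaderSampledRead )
    .setOldLayout( vk::ImageLayout::eColorAttachmentOptimal )
    .setNewLayout( vk::ImageLayout::eShaderReadOnlyOptimal )
    .setImage( image )
    .setSubresourceRange( vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );

  vk::DependencyInfo dependencyInfo;
  dependencyInfo.setImageMemoryBarriers( barrier );  // stage/access masks travel inside the barrier
  cmd.pipelineBarrier2( dependencyInfo );

  cmd.end();

  vk::CommandBufferSubmitInfo cmdInfo( cmd );
  vk::SubmitInfo2             submitInfo;
  submitInfo.setCommandBufferInfos( cmdInfo );
  queue.submit2( submitInfo, fence );
}
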
VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast( pCopyImageInfo ) ); + d.vkCmdCopyImage2( static_cast( m_commandBuffer ), reinterpret_cast( pCopyImageInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6979,17 +7943,20 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyImage2 && "Function requires or " ); +# endif d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast( ©ImageInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast( pCopyBufferToImageInfo ) ); + d.vkCmdCopyBufferToImage2( static_cast( m_commandBuffer ), reinterpret_cast( pCopyBufferToImageInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6998,17 +7965,20 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyBufferToImage2 && "Function requires or " ); +# endif d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast( ©BufferToImageInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast( pCopyImageToBufferInfo ) ); + d.vkCmdCopyImageToBuffer2( static_cast( m_commandBuffer ), reinterpret_cast( pCopyImageToBufferInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7017,16 +7987,19 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyImageToBuffer2 && "Function requires or " ); +# endif d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast( ©ImageToBufferInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast( pBlitImageInfo ) ); + d.vkCmdBlitImage2( static_cast( m_commandBuffer ), reinterpret_cast( pBlitImageInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7034,17 +8007,20 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const 
VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBlitImage2 && "Function requires or " ); +# endif d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast( &blitImageInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast( pResolveImageInfo ) ); + d.vkCmdResolveImage2( static_cast( m_commandBuffer ), reinterpret_cast( pResolveImageInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7053,17 +8029,20 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdResolveImage2 && "Function requires or " ); +# endif d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast( &resolveImageInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast( pRenderingInfo ) ); + d.vkCmdBeginRendering( static_cast( m_commandBuffer ), reinterpret_cast( pRenderingInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7072,30 +8051,33 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginRendering && "Function requires or " ); +# endif d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast( &renderingInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::endRendering( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndRendering( m_commandBuffer ); + d.vkCmdEndRendering( static_cast( m_commandBuffer ) ); } template VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetCullMode( m_commandBuffer, static_cast( cullMode ) ); + d.vkCmdSetCullMode( static_cast( m_commandBuffer ), static_cast( cullMode ) ); } template VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetFrontFace( m_commandBuffer, static_cast( frontFace ) ); + d.vkCmdSetFrontFace( static_cast( m_commandBuffer ), static_cast( frontFace ) ); } template @@ -7103,7 +8085,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - 
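
// Sketch of the VK_VERSION_1_3 dynamic-rendering wrappers above: begin rendering straight to an
// image view, draw, and end, with no VkRenderPass or VkFramebuffer objects. Assumes the command
// buffer is recording and the attachment is already in eColorAttachmentOptimal layout; the helper
// name renderOnce is illustrative.
#include <array>
#include <vulkan/vulkan.hpp>

void renderOnce( vk::CommandBuffer cmd, vk::ImageView colorView, vk::Extent2D extent )
{
  vk::RenderingAttachmentInfo colorAttachment;
  colorAttachment.setImageView( colorView )
    .setImageLayout( vk::ImageLayout::eColorAttachmentOptimal )
    .setLoadOp( vk::AttachmentLoadOp::eClear )
    .setStoreOp( vk::AttachmentStoreOp::eStore )
    .setClearValue( vk::ClearColorValue( std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } ) );

  vk::RenderingInfo renderingInfo;
  renderingInfo.setRenderArea( vk::Rect2D( { 0, 0 }, extent ) )
    .setLayerCount( 1 )
    .setColorAttachments( colorAttachment );

  cmd.beginRendering( renderingInfo );
  // ... bind pipeline / vertex buffers and issue draws here ...
  cmd.endRendering();
}
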
d.vkCmdSetPrimitiveTopology( m_commandBuffer, static_cast( primitiveTopology ) ); + d.vkCmdSetPrimitiveTopology( static_cast( m_commandBuffer ), static_cast( primitiveTopology ) ); } template @@ -7112,7 +8094,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetViewportWithCount( m_commandBuffer, viewportCount, reinterpret_cast( pViewports ) ); + d.vkCmdSetViewportWithCount( static_cast( m_commandBuffer ), viewportCount, reinterpret_cast( pViewports ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7121,17 +8103,21 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetViewportWithCount && + "Function requires or or " ); +# endif d.vkCmdSetViewportWithCount( m_commandBuffer, viewports.size(), reinterpret_cast( viewports.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetScissorWithCount( m_commandBuffer, scissorCount, reinterpret_cast( pScissors ) ); + d.vkCmdSetScissorWithCount( static_cast( m_commandBuffer ), scissorCount, reinterpret_cast( pScissors ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7140,10 +8126,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetScissorWithCount && + "Function requires or or " ); +# endif d.vkCmdSetScissorWithCount( m_commandBuffer, scissors.size(), reinterpret_cast( scissors.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, @@ -7155,7 +8145,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindVertexBuffers2( m_commandBuffer, + d.vkCmdBindVertexBuffers2( static_cast( m_commandBuffer ), firstBinding, bindingCount, reinterpret_cast( pBuffers ), @@ -7174,6 +8164,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindVertexBuffers2 && + "Function requires or or " ); +# endif # ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); @@ -7201,27 +8195,27 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( sizes.data() ), reinterpret_cast( strides.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthTestEnable( m_commandBuffer, static_cast( 
depthTestEnable ) ); + d.vkCmdSetDepthTestEnable( static_cast( m_commandBuffer ), static_cast( depthTestEnable ) ); } template VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthWriteEnable( m_commandBuffer, static_cast( depthWriteEnable ) ); + d.vkCmdSetDepthWriteEnable( static_cast( m_commandBuffer ), static_cast( depthWriteEnable ) ); } template VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthCompareOp( m_commandBuffer, static_cast( depthCompareOp ) ); + d.vkCmdSetDepthCompareOp( static_cast( m_commandBuffer ), static_cast( depthCompareOp ) ); } template @@ -7229,14 +8223,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthBoundsTestEnable( m_commandBuffer, static_cast( depthBoundsTestEnable ) ); + d.vkCmdSetDepthBoundsTestEnable( static_cast( m_commandBuffer ), static_cast( depthBoundsTestEnable ) ); } template VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetStencilTestEnable( m_commandBuffer, static_cast( stencilTestEnable ) ); + d.vkCmdSetStencilTestEnable( static_cast( m_commandBuffer ), static_cast( stencilTestEnable ) ); } template @@ -7248,7 +8242,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetStencilOp( m_commandBuffer, + d.vkCmdSetStencilOp( static_cast( m_commandBuffer ), static_cast( faceMask ), static_cast( failOp ), static_cast( passOp ), @@ -7261,14 +8255,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetRasterizerDiscardEnable( m_commandBuffer, static_cast( rasterizerDiscardEnable ) ); + d.vkCmdSetRasterizerDiscardEnable( static_cast( m_commandBuffer ), static_cast( rasterizerDiscardEnable ) ); } template VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthBiasEnable( m_commandBuffer, static_cast( depthBiasEnable ) ); + d.vkCmdSetDepthBiasEnable( static_cast( m_commandBuffer ), static_cast( depthBiasEnable ) ); } template @@ -7276,7 +8270,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetPrimitiveRestartEnable( m_commandBuffer, static_cast( primitiveRestartEnable ) ); + d.vkCmdSetPrimitiveRestartEnable( static_cast( m_commandBuffer ), static_cast( primitiveRestartEnable ) ); } template @@ -7285,8 +8279,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceBufferMemoryRequirements( - m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + 
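
// Sketch of the dynamic-state setters wrapped above (extended dynamic state, promoted into core
// 1.3): state that used to be baked into the pipeline is set at record time instead. Assumes the
// bound pipeline declared the corresponding VK_DYNAMIC_STATE_* values; the helper name
// setRasterState and the particular values are illustrative.
#include <vulkan/vulkan.hpp>

void setRasterState( vk::CommandBuffer cmd, vk::Extent2D extent )
{
  cmd.setCullMode( vk::CullModeFlagBits::eBack );
  cmd.setFrontFace( vk::FrontFace::eCounterClockwise );
  cmd.setPrimitiveTopology( vk::PrimitiveTopology::eTriangleList );
  cmd.setDepthTestEnable( VK_TRUE );
  cmd.setDepthWriteEnable( VK_TRUE );
  cmd.setDepthCompareOp( vk::CompareOp::eLessOrEqual );
  cmd.setDepthBiasEnable( VK_FALSE );
  cmd.setPrimitiveRestartEnable( VK_FALSE );

  vk::Viewport viewport( 0.0f, 0.0f, float( extent.width ), float( extent.height ), 0.0f, 1.0f );
  vk::Rect2D   scissor( { 0, 0 }, extent );
  cmd.setViewportWithCount( viewport );  // the count is taken from the proxy size (here: 1)
  cmd.setScissorWithCount( scissor );
}
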
d.vkGetDeviceBufferMemoryRequirements( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7295,6 +8290,10 @@ namespace VULKAN_HPP_NAMESPACE Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirements && + "Function requires or " ); +# endif VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; d.vkGetDeviceBufferMemoryRequirements( @@ -7304,19 +8303,23 @@ namespace VULKAN_HPP_NAMESPACE } template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirements && + "Function requires or " ); +# endif - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); d.vkGetDeviceBufferMemoryRequirements( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); return structureChain; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, @@ -7324,8 +8327,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceImageMemoryRequirements( - m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + d.vkGetDeviceImageMemoryRequirements( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7334,6 +8338,10 @@ namespace VULKAN_HPP_NAMESPACE Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirements && + "Function requires or " ); +# endif VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; d.vkGetDeviceImageMemoryRequirements( @@ -7343,19 +8351,23 @@ namespace VULKAN_HPP_NAMESPACE } template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirements && + "Function requires or " ); +# endif - StructureChain structureChain; - 
VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); d.vkGetDeviceImageMemoryRequirements( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); return structureChain; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, @@ -7364,18 +8376,26 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceImageSparseMemoryRequirements( m_device, + d.vkGetDeviceImageSparseMemoryRequirements( static_cast( m_device ), reinterpret_cast( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast( pSparseMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirements && + "Function requires or " ); +# endif std::vector sparseMemoryRequirements; uint32_t sparseMemoryRequirementCount; @@ -7397,14 +8417,19 @@ namespace VULKAN_HPP_NAMESPACE template ::value, int>::type> + typename std::enable_if< + std::is_same::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirements && + "Function requires or " ); +# endif std::vector sparseMemoryRequirements( sparseImageMemoryRequirements2Allocator ); @@ -7424,12609 +8449,20691 @@ namespace VULKAN_HPP_NAMESPACE } return sparseMemoryRequirements; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_KHR_surface === + //=== VK_VERSION_1_4 === template - VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setLineStipple( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLineStipple( static_cast( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo * pMemoryMapInfo, + void ** ppData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); + return static_cast( d.vkMapMemory2( static_cast( m_device ), 
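
// Sketch of the vkGetDeviceBufferMemoryRequirements wrappers above: query memory requirements from
// a VkBufferCreateInfo alone, before any buffer exists, and chain MemoryDedicatedRequirements via
// the StructureChain overload. Assumes a 1.3+ device; the helper name prefersDedicatedAllocation
// and the buffer usage are illustrative.
#include <vulkan/vulkan.hpp>

bool prefersDedicatedAllocation( vk::Device device, vk::DeviceSize size )
{
  vk::BufferCreateInfo bufferCreateInfo;
  bufferCreateInfo.setSize( size ).setUsage( vk::BufferUsageFlagBits::eStorageBuffer );

  vk::DeviceBufferMemoryRequirements info;
  info.setPCreateInfo( &bufferCreateInfo );

  auto chain =
    device.getBufferMemoryRequirements<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>( info );

  // chain.get<vk::MemoryRequirements2>().memoryRequirements carries size/alignment/memoryTypeBits.
  return chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation == VK_TRUE;
}
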
reinterpret_cast( pMemoryMapInfo ), ppData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkMapMemory2 && "Function requires or " ); +# endif - d.vkDestroySurfaceKHR( m_instance, - static_cast( surface ), - reinterpret_cast( static_cast( allocator ) ) ); + void * pData; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkMapMemory2( m_device, reinterpret_cast( &memoryMapInfo ), &pData ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pData ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo * pMemoryUnmapInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); + return static_cast( d.vkUnmapMemory2( static_cast( m_device ), reinterpret_cast( pMemoryUnmapInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE typename ResultValueType::type Device::unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUnmapMemory2 && "Function requires or " ); +# endif - d.vkDestroySurfaceKHR( m_instance, - static_cast( surface ), - reinterpret_cast( static_cast( allocator ) ) ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkUnmapMemory2( m_device, reinterpret_cast( &memoryUnmapInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, - VULKAN_HPP_NAMESPACE::SurfaceKHR surface, - VULKAN_HPP_NAMESPACE::Bool32 * pSupported, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return 
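
// Sketch of the new VK_VERSION_1_4 map/unmap wrappers above (vkMapMemory2, vkUnmapMemory2), which
// take extensible info structs instead of loose parameters. Assumes a 1.4 header and device,
// host-visible memory large enough for the copy, and exceptions enabled; the helper name
// uploadBytes is illustrative.
#include <cstring>
#include <vulkan/vulkan.hpp>

void uploadBytes( vk::Device device, vk::DeviceMemory memory, const void * src, size_t bytes )
{
  vk::MemoryMapInfo mapInfo;
  mapInfo.setMemory( memory ).setOffset( 0 ).setSize( VK_WHOLE_SIZE );

  void * mapped = device.mapMemory2( mapInfo );  // enhanced mode returns the mapped pointer directly
  std::memcpy( mapped, src, bytes );

  vk::MemoryUnmapInfo unmapInfo;
  unmapInfo.setMemory( memory );
  device.unmapMemory2( unmapInfo );
}
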
static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( - m_physicalDevice, queueFamilyIndex, static_cast( surface ), reinterpret_cast( pSupported ) ) ); + d.vkCmdBindIndexBuffer2( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( size ), + static_cast( indexType ) ); + } + + template + VULKAN_HPP_INLINE void Device::getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo * pRenderingAreaInfo, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetRenderingAreaGranularity( + static_cast( m_device ), reinterpret_cast( pRenderingAreaInfo ), reinterpret_cast( pGranularity ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D + Device::getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRenderingAreaGranularity && "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::Bool32 supported; - VkResult result = d.vkGetPhysicalDeviceSurfaceSupportKHR( - m_physicalDevice, queueFamilyIndex, static_cast( surface ), reinterpret_cast( &supported ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" ); + VULKAN_HPP_NAMESPACE::Extent2D granularity; + d.vkGetRenderingAreaGranularity( + m_device, reinterpret_cast( &renderingAreaInfo ), reinterpret_cast( &granularity ) ); - return createResultValueType( static_cast( result ), supported ); + return granularity; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, - VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo * pInfo, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( - m_physicalDevice, static_cast( surface ), reinterpret_cast( pSurfaceCapabilities ) ) ); + d.vkGetDeviceImageSubresourceLayout( + static_cast( m_device ), reinterpret_cast( pInfo ), reinterpret_cast( pLayout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 + Device::getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + 
VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayout && + "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities; - VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( - m_physicalDevice, static_cast( surface ), reinterpret_cast( &surfaceCapabilities ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; + d.vkGetDeviceImageSubresourceLayout( + m_device, reinterpret_cast( &info ), reinterpret_cast( &layout ) ); - return createResultValueType( static_cast( result ), surfaceCapabilities ); + return layout; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayout && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); + d.vkGetDeviceImageSubresourceLayout( + m_device, reinterpret_cast( &info ), reinterpret_cast( &layout ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, - uint32_t * pSurfaceFormatCount, - VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( - m_physicalDevice, static_cast( surface ), pSurfaceFormatCount, reinterpret_cast( pSurfaceFormats ) ) ); + d.vkGetImageSubresourceLayout2( static_cast( m_device ), + static_cast( image ), + reinterpret_cast( pSubresource ), + reinterpret_cast( pLayout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 Device::getImageSubresourceLayout2( + VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkGetImageSubresourceLayout2 && + "Function requires or or or " ); +# endif - std::vector surfaceFormats; - uint32_t surfaceFormatCount; - VkResult result; - do - { - result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ); - if ( ( result == VK_SUCCESS ) && surfaceFormatCount ) - { - surfaceFormats.resize( surfaceFormatCount ); - result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( - 
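
// Sketch of the VK_VERSION_1_4 query wrappers above: vkGetRenderingAreaGranularity now takes a
// RenderingAreaInfo, and vkGetImageSubresourceLayout2 reports a SubresourceLayout2 that can carry
// chained outputs. Assumes a 1.4 header and device; the helper names and the linear color image
// are illustrative.
#include <vulkan/vulkan.hpp>

vk::DeviceSize rowPitchOfMipZero( vk::Device device, vk::Image linearImage )
{
  vk::ImageSubresource2 subresource;
  subresource.setImageSubresource( vk::ImageSubresource( vk::ImageAspectFlagBits::eColor, 0, 0 ) );

  vk::SubresourceLayout2 layout = device.getImageSubresourceLayout2( linearImage, subresource );
  return layout.subresourceLayout.rowPitch;
}

vk::Extent2D renderAreaGranularity( vk::Device device, vk::Format colorFormat )
{
  vk::RenderingAreaInfo areaInfo;
  areaInfo.setColorAttachmentFormats( colorFormat );
  return device.getRenderingAreaGranularity( areaInfo );
}
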
m_physicalDevice, static_cast( surface ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); - VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); - if ( surfaceFormatCount < surfaceFormats.size() ) - { - surfaceFormats.resize( surfaceFormatCount ); - } - return createResultValueType( static_cast( result ), surfaceFormats ); + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; + d.vkGetImageSubresourceLayout2( m_device, + static_cast( image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return layout; } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, - SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator, - Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::getImageSubresourceLayout2( + VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkGetImageSubresourceLayout2 && + "Function requires or or or " ); +# endif - std::vector surfaceFormats( surfaceFormatKHRAllocator ); - uint32_t surfaceFormatCount; - VkResult result; - do - { - result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ); - if ( ( result == VK_SUCCESS ) && surfaceFormatCount ) - { - surfaceFormats.resize( surfaceFormatCount ); - result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( - m_physicalDevice, static_cast( surface ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); - VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); - if ( surfaceFormatCount < surfaceFormats.size() ) - { - surfaceFormats.resize( surfaceFormatCount ); - } - return createResultValueType( static_cast( result ), surfaceFormats ); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); + d.vkGetImageSubresourceLayout2( m_device, + static_cast( image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return structureChain; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, - uint32_t * pPresentModeCount, - VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( - 
m_physicalDevice, static_cast( surface ), pPresentModeCount, reinterpret_cast( pPresentModes ) ) ); + d.vkCmdPushDescriptorSet( static_cast( m_commandBuffer ), + static_cast( pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWriteCount, + reinterpret_cast( pDescriptorWrites ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + template + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSet( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSet && "Function requires or " ); +# endif - std::vector presentModes; - uint32_t presentModeCount; - VkResult result; - do - { - result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, nullptr ); - if ( ( result == VK_SUCCESS ) && presentModeCount ) - { - presentModes.resize( presentModeCount ); - result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( - m_physicalDevice, static_cast( surface ), &presentModeCount, reinterpret_cast( presentModes.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); - VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); - if ( presentModeCount < presentModes.size() ) - { - presentModes.resize( presentModeCount ); - } - return createResultValueType( static_cast( result ), presentModes ); + d.vkCmdPushDescriptorSet( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWrites.size(), + reinterpret_cast( descriptorWrites.data() ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, - PresentModeKHRAllocator & presentModeKHRAllocator, - Dispatch const & d ) const + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - std::vector presentModes( presentModeKHRAllocator ); - uint32_t presentModeCount; - VkResult result; - do - { - result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, nullptr ); - if ( ( result == VK_SUCCESS ) && presentModeCount ) - { - presentModes.resize( presentModeCount ); - result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( - m_physicalDevice, static_cast( surface ), &presentModeCount, reinterpret_cast( presentModes.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); - VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); - if ( presentModeCount < presentModes.size() ) 
- { - presentModes.resize( presentModeCount ); - } - return createResultValueType( static_cast( result ), presentModes ); + d.vkCmdPushDescriptorSetWithTemplate( static_cast( m_commandBuffer ), + static_cast( descriptorUpdateTemplate ), + static_cast( layout ), + set, + pData ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - //=== VK_KHR_swapchain === +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkCmdPushDescriptorSetWithTemplate && + "Function requires or or " ); +# endif + + d.vkCmdPushDescriptorSetWithTemplate( m_commandBuffer, + static_cast( descriptorUpdateTemplate ), + static_cast( layout ), + set, + reinterpret_cast( &data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocations( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo * pLocationInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateSwapchainKHR( m_device, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pSwapchain ) ) ); + d.vkCmdSetRenderingAttachmentLocations( static_cast( m_commandBuffer ), + reinterpret_cast( pLocationInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocations( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetRenderingAttachmentLocations && + "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; - VkResult result = - d.vkCreateSwapchainKHR( m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &swapchain ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" ); - - return createResultValueType( static_cast( result ), swapchain ); + d.vkCmdSetRenderingAttachmentLocations( m_commandBuffer, reinterpret_cast( &locationInfo ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_INLINE void + 
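
// Sketch of vkCmdPushDescriptorSet as wrapped above (VK_KHR_push_descriptor, promoted to core 1.4):
// descriptors are pushed into the command buffer instead of being allocated from a pool, and dstSet
// in the write is ignored. Assumes the descriptor set layout for set 0 was created with the
// push-descriptor create flag; the helper name pushUniformBuffer is illustrative.
#include <vulkan/vulkan.hpp>

void pushUniformBuffer( vk::CommandBuffer cmd, vk::PipelineLayout pipelineLayout, vk::Buffer ubo )
{
  vk::DescriptorBufferInfo bufferInfo( ubo, 0, VK_WHOLE_SIZE );

  vk::WriteDescriptorSet write;
  write.setDstBinding( 0 )
    .setDescriptorType( vk::DescriptorType::eUniformBuffer )
    .setBufferInfo( bufferInfo );  // also sets descriptorCount to 1

  cmd.pushDescriptorSet( vk::PipelineBindPoint::eGraphics, pipelineLayout, /*set*/ 0, write );
}
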
CommandBuffer::setRenderingInputAttachmentIndices( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRenderingInputAttachmentIndices( static_cast( m_commandBuffer ), + reinterpret_cast( pInputAttachmentIndexInfo ) ); + } - VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; - VkResult result = - d.vkCreateSwapchainKHR( m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &swapchain ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique" ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::setRenderingInputAttachmentIndices( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetRenderingInputAttachmentIndices && + "Function requires or " ); +# endif - return createResultValueType( - static_cast( result ), - UniqueHandle( swapchain, ObjectDestroy( *this, allocator, d ) ) ); + d.vkCmdSetRenderingInputAttachmentIndices( m_commandBuffer, reinterpret_cast( &inputAttachmentIndexInfo ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo * pBindDescriptorSetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( pAllocator ) ); + d.vkCmdBindDescriptorSets2( static_cast( m_commandBuffer ), + reinterpret_cast( pBindDescriptorSetsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorSets2 && "Function requires or " ); +# endif - d.vkDestroySwapchainKHR( m_device, - static_cast( swapchain ), - reinterpret_cast( static_cast( allocator ) ) ); + d.vkCmdBindDescriptorSets2( m_commandBuffer, reinterpret_cast( &bindDescriptorSetsInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::pushConstants2( const VULKAN_HPP_NAMESPACE::PushConstantsInfo * pPushConstantsInfo, + Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( pAllocator ) ); + d.vkCmdPushConstants2( static_cast( m_commandBuffer ), reinterpret_cast( pPushConstantsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::pushConstants2( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushConstants2 && "Function requires or " ); +# endif - d.vkDestroySwapchainKHR( m_device, - static_cast( swapchain ), - reinterpret_cast( static_cast( allocator ) ) ); + d.vkCmdPushConstants2( m_commandBuffer, reinterpret_cast( &pushConstantsInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - uint32_t * pSwapchainImageCount, - VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo * pPushDescriptorSetInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), pSwapchainImageCount, reinterpret_cast( pSwapchainImages ) ) ); + d.vkCmdPushDescriptorSet2( static_cast( m_commandBuffer ), reinterpret_cast( pPushDescriptorSetInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSet2 && "Function requires or " ); +# endif - std::vector swapchainImages; - uint32_t swapchainImageCount; - VkResult result; - do - { - result = d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, nullptr ); - if ( ( result == VK_SUCCESS ) && swapchainImageCount ) - { - swapchainImages.resize( swapchainImageCount ); - result = d.vkGetSwapchainImagesKHR( - m_device, static_cast( swapchain ), &swapchainImageCount, reinterpret_cast( swapchainImages.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); - VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); - if ( swapchainImageCount < swapchainImages.size() ) - { - swapchainImages.resize( swapchainImageCount ); - } - return createResultValueType( static_cast( result ), swapchainImages ); + d.vkCmdPushDescriptorSet2( m_commandBuffer, reinterpret_cast( &pushDescriptorSetInfo ) ); } +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d ) const + template + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSetWithTemplate2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSetWithTemplate2( static_cast( m_commandBuffer ), + reinterpret_cast( pPushDescriptorSetWithTemplateInfo ) ); + } - std::vector swapchainImages( imageAllocator ); - uint32_t swapchainImageCount; - VkResult result; - do - { - result = d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, nullptr ); - if ( ( result == VK_SUCCESS ) && swapchainImageCount ) - { - swapchainImages.resize( swapchainImageCount ); - result = d.vkGetSwapchainImagesKHR( - m_device, static_cast( swapchain ), &swapchainImageCount, reinterpret_cast( swapchainImages.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); - VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); - if ( swapchainImageCount < swapchainImages.size() ) - { - swapchainImages.resize( swapchainImageCount ); - } - return createResultValueType( static_cast( result ), swapchainImages ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSetWithTemplate2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetWithTemplate2 && + "Function requires or " ); +# endif + + d.vkCmdPushDescriptorSetWithTemplate2( m_commandBuffer, + reinterpret_cast( &pushDescriptorSetWithTemplateInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - uint64_t timeout, - VULKAN_HPP_NAMESPACE::Semaphore semaphore, - VULKAN_HPP_NAMESPACE::Fence fence, - uint32_t * pImageIndex, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo * pCopyMemoryToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkAcquireNextImageKHR( - m_device, static_cast( swapchain ), timeout, static_cast( semaphore ), static_cast( fence ), pImageIndex ) ); + return static_cast( + d.vkCopyMemoryToImage( static_cast( m_device ), reinterpret_cast( pCopyMemoryToImageInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - uint64_t timeout, - VULKAN_HPP_NAMESPACE::Semaphore semaphore, - VULKAN_HPP_NAMESPACE::Fence fence, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename 
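Illustrative usage sketch (not part of the generated header): the enhanced-mode wrappers above for the Vulkan 1.4 / VK_KHR_maintenance6 "*2" commands each take a single info struct by reference. Assuming `cmd` is a command buffer in the recording state and `layout`, `set`, and `pushData` are created elsewhere (all names here are hypothetical), recording might look roughly like this:

#include <array>
#include <vulkan/vulkan.hpp>

// Sketch only: bind one descriptor set and push a small constant block through
// the "*2" entry points wrapped above.
void recordBindings( vk::CommandBuffer cmd, vk::PipelineLayout layout, vk::DescriptorSet set, const std::array<float, 4> & pushData )
{
  vk::BindDescriptorSetsInfo bindInfo{};
  bindInfo.stageFlags         = vk::ShaderStageFlagBits::eAllGraphics;
  bindInfo.layout             = layout;
  bindInfo.firstSet           = 0;
  bindInfo.descriptorSetCount = 1;
  bindInfo.pDescriptorSets    = &set;
  cmd.bindDescriptorSets2( bindInfo );  // dispatches to vkCmdBindDescriptorSets2

  vk::PushConstantsInfo pushInfo{};
  pushInfo.layout     = layout;
  pushInfo.stageFlags = vk::ShaderStageFlagBits::eVertex;
  pushInfo.offset     = 0;
  pushInfo.size       = sizeof( float ) * 4;
  pushInfo.pValues    = pushData.data();
  cmd.pushConstants2( pushInfo );       // dispatches to vkCmdPushConstants2
}

The pointer overloads shown above behave the same way but leave the lifetime of the info structs entirely to the caller.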
ResultValueType::type + Device::copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyMemoryToImage && "Function requires or " ); +# endif - uint32_t imageIndex; - VkResult result = d.vkAcquireNextImageKHR( - m_device, static_cast( swapchain ), timeout, static_cast( semaphore ), static_cast( fence ), &imageIndex ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eTimeout, - VULKAN_HPP_NAMESPACE::Result::eNotReady, - VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCopyMemoryToImage( m_device, reinterpret_cast( ©MemoryToImageInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImage" ); - return ResultValue( static_cast( result ), imageIndex ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo * pCopyImageToMemoryInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkQueuePresentKHR( m_queue, reinterpret_cast( pPresentInfo ) ) ); + return static_cast( + d.vkCopyImageToMemory( static_cast( m_device ), reinterpret_cast( pCopyImageToMemoryInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyImageToMemory && "Function requires or " ); +# endif - VkResult result = d.vkQueuePresentKHR( m_queue, reinterpret_cast( &presentInfo ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCopyImageToMemory( m_device, reinterpret_cast( ©ImageToMemoryInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemory" ); - return static_cast( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( - VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo * pCopyImageToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast( pDeviceGroupPresentCapabilities ) ) ); + d.vkCopyImageToImage( static_cast( m_device ), reinterpret_cast( pCopyImageToImageInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyImageToImage && "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities; - VkResult result = - d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast( &deviceGroupPresentCapabilities ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkCopyImageToImage( m_device, reinterpret_cast( ©ImageToImageInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImage" ); - return createResultValueType( static_cast( result ), deviceGroupPresentCapabilities ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::transitionImageLayout( uint32_t transitionCount, + const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo * pTransitions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetDeviceGroupSurfacePresentModesKHR( - m_device, static_cast( surface ), reinterpret_cast( pModes ) ) ); + return static_cast( d.vkTransitionImageLayout( + static_cast( m_device ), transitionCount, reinterpret_cast( pTransitions ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::transitionImageLayout( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkTransitionImageLayout && "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; - VkResult result = d.vkGetDeviceGroupSurfacePresentModesKHR( - m_device, static_cast( surface ), 
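Illustrative usage sketch (not part of the generated header): the Device-level host image copy wrappers above (promoted from VK_EXT_host_image_copy) can initialise an image without recording a command buffer, assuming `device` and `image` were created with host-transfer usage and `pixels` matches `extent`; the names below are hypothetical:

#include <vulkan/vulkan.hpp>

// Sketch only: transition the image layout on the host, then copy pixel data
// from host memory into it, using the wrappers defined above.
void uploadOnHost( vk::Device device, vk::Image image, vk::Extent3D extent, const void * pixels )
{
  vk::HostImageLayoutTransitionInfo transition{};
  transition.image            = image;
  transition.oldLayout        = vk::ImageLayout::eUndefined;
  transition.newLayout        = vk::ImageLayout::eGeneral;
  transition.subresourceRange = vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 );
  device.transitionImageLayout( transition );  // the ArrayProxy overload accepts a single element

  vk::MemoryToImageCopy region{};
  region.pHostPointer     = pixels;
  region.imageSubresource = vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 );
  region.imageExtent      = extent;

  vk::CopyMemoryToImageInfo copyInfo{};
  copyInfo.dstImage       = image;
  copyInfo.dstImageLayout = vk::ImageLayout::eGeneral;
  copyInfo.regionCount    = 1;
  copyInfo.pRegions       = &region;
  device.copyMemoryToImage( copyInfo );        // throws vk::SystemError on failure in enhanced mode
}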
reinterpret_cast( &modes ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkTransitionImageLayout( m_device, transitions.size(), reinterpret_cast( transitions.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayout" ); - return createResultValueType( static_cast( result ), modes ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_surface === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, - uint32_t * pRectCount, - VULKAN_HPP_NAMESPACE::Rect2D * pRects, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast( surface ), pRectCount, reinterpret_cast( pRects ) ) ); + d.vkDestroySurfaceKHR( + static_cast( m_instance ), static_cast( surface ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + template + VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySurfaceKHR && "Function requires " ); +# endif - std::vector rects; - uint32_t rectCount; - VkResult result; - do - { - result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast( surface ), &rectCount, nullptr ); - if ( ( result == VK_SUCCESS ) && rectCount ) - { - rects.resize( rectCount ); - result = d.vkGetPhysicalDevicePresentRectanglesKHR( - m_physicalDevice, static_cast( surface ), &rectCount, reinterpret_cast( rects.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); - VULKAN_HPP_ASSERT( rectCount <= rects.size() ); - if ( rectCount < rects.size() ) - { - rects.resize( rectCount ); - } - return createResultValueType( static_cast( result ), rects ); + d.vkDestroySurfaceKHR( m_instance, + static_cast( surface ), + reinterpret_cast( static_cast( allocator ) ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d ) const + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + 
d.vkDestroySurfaceKHR( + static_cast( m_instance ), static_cast( surface ), reinterpret_cast( pAllocator ) ); + } - std::vector rects( rect2DAllocator ); - uint32_t rectCount; - VkResult result; - do - { - result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast( surface ), &rectCount, nullptr ); - if ( ( result == VK_SUCCESS ) && rectCount ) - { - rects.resize( rectCount ); - result = d.vkGetPhysicalDevicePresentRectanglesKHR( - m_physicalDevice, static_cast( surface ), &rectCount, reinterpret_cast( rects.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); - VULKAN_HPP_ASSERT( rectCount <= rects.size() ); - if ( rectCount < rects.size() ) - { - rects.resize( rectCount ); - } - return createResultValueType( static_cast( result ), rects ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySurfaceKHR && "Function requires " ); +# endif + + d.vkDestroySurfaceKHR( m_instance, + static_cast( surface ), + reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo, - uint32_t * pImageIndex, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::Bool32 * pSupported, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast( pAcquireInfo ), pImageIndex ) ); + return static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( + static_cast( m_physicalDevice ), queueFamilyIndex, static_cast( surface ), reinterpret_cast( pSupported ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceSupportKHR && "Function requires " ); +# endif - uint32_t imageIndex; - VkResult result = d.vkAcquireNextImage2KHR( m_device, reinterpret_cast( &acquireInfo ), &imageIndex ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eTimeout, - VULKAN_HPP_NAMESPACE::Result::eNotReady, - VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + VULKAN_HPP_NAMESPACE::Bool32 supported; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( + 
m_physicalDevice, queueFamilyIndex, static_cast( surface ), reinterpret_cast( &supported ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" ); - return ResultValue( static_cast( result ), imageIndex ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( supported ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_KHR_display === + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( static_cast( m_physicalDevice ), + static_cast( surface ), + reinterpret_cast( pSurfaceCapabilities ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t * pPropertyCount, - VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + m_physicalDevice, static_cast( surface ), reinterpret_cast( &surfaceCapabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceCapabilities ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( static_cast( m_physicalDevice ), + static_cast( surface ), + pSurfaceFormatCount, + reinterpret_cast( pSurfaceFormats ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormatsKHR && "Function requires " ); +# endif - 
std::vector properties; - uint32_t propertyCount; - VkResult result; + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) + result = static_cast( + d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) { - properties.resize( propertyCount ); - result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ); + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( + m_physicalDevice, static_cast( surface ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) { - properties.resize( propertyCount ); + surfaceFormats.resize( surfaceFormatCount ); } - return createResultValueType( static_cast( result ), properties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) ); } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, Dispatch const & d ) const + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormatsKHR && "Function requires " ); +# endif - std::vector properties( displayPropertiesKHRAllocator ); - uint32_t propertyCount; - VkResult result; + std::vector surfaceFormats( surfaceFormatKHRAllocator ); + uint32_t surfaceFormatCount; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) + result = static_cast( + d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast( surface ), &surfaceFormatCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) { - properties.resize( propertyCount ); - result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ); + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( + m_physicalDevice, static_cast( surface ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); } - } while ( result == VK_INCOMPLETE 
); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) { - properties.resize( propertyCount ); + surfaceFormats.resize( surfaceFormatCount ); } - return createResultValueType( static_cast( result ), properties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount, - VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pPresentModeCount, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( static_cast( m_physicalDevice ), + static_cast( surface ), + pPresentModeCount, + reinterpret_cast( pPresentModes ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModesKHR && "Function requires " ); +# endif - std::vector properties; - uint32_t propertyCount; - VkResult result; + std::vector presentModes; + uint32_t presentModeCount; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) + result = static_cast( + d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) { - properties.resize( propertyCount ); - result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( - m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ); + presentModes.resize( presentModeCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( + m_physicalDevice, static_cast( surface ), &presentModeCount, reinterpret_cast( presentModes.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); - 
VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) { - properties.resize( propertyCount ); + presentModes.resize( presentModeCount ); } - return createResultValueType( static_cast( result ), properties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - PhysicalDevice::getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + PresentModeKHRAllocator & presentModeKHRAllocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModesKHR && "Function requires " ); +# endif - std::vector properties( displayPlanePropertiesKHRAllocator ); - uint32_t propertyCount; - VkResult result; + std::vector presentModes( presentModeKHRAllocator ); + uint32_t presentModeCount; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) + result = static_cast( + d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast( surface ), &presentModeCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) { - properties.resize( propertyCount ); - result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( - m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ); + presentModes.resize( presentModeCount ); + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( + m_physicalDevice, static_cast( surface ), &presentModeCount, reinterpret_cast( presentModes.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) { - properties.resize( propertyCount ); + presentModes.resize( presentModeCount ); } - return createResultValueType( static_cast( result ), properties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_swapchain === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, - uint32_t * pDisplayCount, - 
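Illustrative usage sketch (not part of the generated header): in enhanced mode the surface queries above hide the VK_INCOMPLETE retry loop and return a std::vector directly. A typical format and present-mode selection, assuming `physicalDevice` and `surface` are valid (the helper name is hypothetical):

#include <algorithm>
#include <utility>
#include <vector>
#include <vulkan/vulkan.hpp>

// Sketch only: pick a surface format and present mode for later swapchain creation.
std::pair<vk::SurfaceFormatKHR, vk::PresentModeKHR> pickSurfaceConfig( vk::PhysicalDevice physicalDevice, vk::SurfaceKHR surface )
{
  std::vector<vk::SurfaceFormatKHR> formats      = physicalDevice.getSurfaceFormatsKHR( surface );
  std::vector<vk::PresentModeKHR>   presentModes = physicalDevice.getSurfacePresentModesKHR( surface );

  vk::SurfaceFormatKHR format = formats.front();  // fall back to the first reported format
  for ( const vk::SurfaceFormatKHR & f : formats )
  {
    if ( ( f.format == vk::Format::eB8G8R8A8Unorm ) && ( f.colorSpace == vk::ColorSpaceKHR::eSrgbNonlinear ) )
    {
      format = f;
      break;
    }
  }

  // FIFO is always available; prefer mailbox when the implementation reports it.
  vk::PresentModeKHR presentMode =
    ( std::find( presentModes.begin(), presentModes.end(), vk::PresentModeKHR::eMailbox ) != presentModes.end() )
      ? vk::PresentModeKHR::eMailbox
      : vk::PresentModeKHR::eFifo;

  return { format, presentMode };
}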
VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast( pDisplays ) ) ); + return static_cast( d.vkCreateSwapchainKHR( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSwapchain ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSwapchainKHR && "Function requires " ); +# endif - std::vector displays; - uint32_t displayCount; - VkResult result; - do - { - result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ); - if ( ( result == VK_SUCCESS ) && displayCount ) - { - displays.resize( displayCount ); - result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast( displays.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); - VULKAN_HPP_ASSERT( displayCount <= displays.size() ); - if ( displayCount < displays.size() ) - { - displays.resize( displayCount ); - } - return createResultValueType( static_cast( result ), displays ); + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateSwapchainKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchain ) ); } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSwapchainKHR && "Function requires " ); +# endif - std::vector displays( displayKHRAllocator ); - uint32_t displayCount; - VkResult result; - do - { - result = 
d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ); - if ( ( result == VK_SUCCESS ) && displayCount ) - { - displays.resize( displayCount ); - result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast( displays.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); - VULKAN_HPP_ASSERT( displayCount <= displays.size() ); - if ( displayCount < displays.size() ) - { - displays.resize( displayCount ); - } - return createResultValueType( static_cast( result ), displays ); + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateSwapchainKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( swapchain, detail::ObjectDestroy( *this, allocator, d ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, - uint32_t * pPropertyCount, - VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetDisplayModePropertiesKHR( - m_physicalDevice, static_cast( display ), pPropertyCount, reinterpret_cast( pProperties ) ) ); + d.vkDestroySwapchainKHR( + static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + template + VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySwapchainKHR && "Function requires " ); +# endif - std::vector properties; - uint32_t propertyCount; - VkResult result; + d.vkDestroySwapchainKHR( m_device, + static_cast( swapchain ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySwapchainKHR( + static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void 
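Illustrative usage sketch (not part of the generated header): createSwapchainKHRUnique above wraps vkCreateSwapchainKHR and ties the returned handle to a UniqueHandle, so destruction happens automatically. A minimal creation call, assuming `capabilities`, `format`, `presentMode`, and `extent` come from the surface queries earlier (names are hypothetical):

#include <vulkan/vulkan.hpp>

// Sketch only: create a swapchain whose destruction is handled by the unique handle.
vk::UniqueSwapchainKHR makeSwapchain( vk::Device                         device,
                                      vk::SurfaceKHR                     surface,
                                      const vk::SurfaceCapabilitiesKHR & capabilities,
                                      vk::SurfaceFormatKHR               format,
                                      vk::PresentModeKHR                 presentMode,
                                      vk::Extent2D                       extent )
{
  vk::SwapchainCreateInfoKHR createInfo{};
  createInfo.surface          = surface;
  createInfo.minImageCount    = capabilities.minImageCount + 1;  // a real app also clamps against maxImageCount
  createInfo.imageFormat      = format.format;
  createInfo.imageColorSpace  = format.colorSpace;
  createInfo.imageExtent      = extent;
  createInfo.imageArrayLayers = 1;
  createInfo.imageUsage       = vk::ImageUsageFlagBits::eColorAttachment;
  createInfo.imageSharingMode = vk::SharingMode::eExclusive;
  createInfo.preTransform     = capabilities.currentTransform;
  createInfo.compositeAlpha   = vk::CompositeAlphaFlagBitsKHR::eOpaque;
  createInfo.presentMode      = presentMode;
  createInfo.clipped          = VK_TRUE;

  // Throws vk::SystemError on failure in enhanced mode; the non-unique overloads
  // above return the handle without ownership semantics.
  return device.createSwapchainKHRUnique( createInfo );
}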
Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySwapchainKHR && "Function requires " ); +# endif + + d.vkDestroySwapchainKHR( m_device, + static_cast( swapchain ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint32_t * pSwapchainImageCount, + VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetSwapchainImagesKHR( + static_cast( m_device ), static_cast( swapchain ), pSwapchainImageCount, reinterpret_cast( pSwapchainImages ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSwapchainImagesKHR && "Function requires " ); +# endif + + std::vector swapchainImages; + uint32_t swapchainImageCount; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) + result = static_cast( + d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && swapchainImageCount ) { - properties.resize( propertyCount ); - result = d.vkGetDisplayModePropertiesKHR( - m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ); + swapchainImages.resize( swapchainImageCount ); + result = static_cast( d.vkGetSwapchainImagesKHR( + m_device, static_cast( swapchain ), &swapchainImageCount, reinterpret_cast( swapchainImages.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); + VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); + if ( swapchainImageCount < swapchainImages.size() ) { - properties.resize( propertyCount ); + swapchainImages.resize( swapchainImageCount ); } - return createResultValueType( static_cast( result ), properties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchainImages ) ); } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, - DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator, - Dispatch const & d ) const + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSwapchainImagesKHR && "Function requires " ); +# endif - std::vector properties( displayModePropertiesKHRAllocator ); - uint32_t propertyCount; - VkResult result; + std::vector swapchainImages( imageAllocator ); + uint32_t swapchainImageCount; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) + result = static_cast( + d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && swapchainImageCount ) { - properties.resize( propertyCount ); - result = d.vkGetDisplayModePropertiesKHR( - m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ); + swapchainImages.resize( swapchainImageCount ); + result = static_cast( d.vkGetSwapchainImagesKHR( + m_device, static_cast( swapchain ), &swapchainImageCount, reinterpret_cast( swapchainImages.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); + VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); + if ( swapchainImageCount < swapchainImages.size() ) { - properties.resize( propertyCount ); + swapchainImages.resize( swapchainImageCount ); } - return createResultValueType( static_cast( result ), properties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchainImages ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, - const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t timeout, + VULKAN_HPP_NAMESPACE::Semaphore semaphore, + VULKAN_HPP_NAMESPACE::Fence fence, + uint32_t * pImageIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDisplayModeKHR( m_physicalDevice, - static_cast( display ), - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pMode ) ) ); + return static_cast( d.vkAcquireNextImageKHR( static_cast( m_device ), + static_cast( swapchain ), + timeout, + static_cast( semaphore ), + static_cast( fence ), + pImageIndex ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - 
PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, - const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t timeout, + VULKAN_HPP_NAMESPACE::Semaphore semaphore, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquireNextImageKHR && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; - VkResult result = - d.vkCreateDisplayModeKHR( m_physicalDevice, - static_cast( display ), - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &mode ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" ); + uint32_t imageIndex; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAcquireNextImageKHR( + m_device, static_cast( swapchain ), timeout, static_cast( semaphore ), static_cast( fence ), &imageIndex ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eTimeout, + VULKAN_HPP_NAMESPACE::Result::eNotReady, + VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); - return createResultValueType( static_cast( result ), mode ); + return ResultValue( result, std::move( imageIndex ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, - const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkQueuePresentKHR( static_cast( m_queue ), reinterpret_cast( pPresentInfo ) ) ); + } - VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; - VkResult result = - d.vkCreateDisplayModeKHR( m_physicalDevice, - static_cast( display ), - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &mode ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique" ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueuePresentKHR && "Function requires " ); +# endif - return createResultValueType( - static_cast( result ), - UniqueHandle( mode, ObjectDestroy( *this, allocator, d ) ) ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkQueuePresentKHR( m_queue, reinterpret_cast( &presentInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", { 
VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + + return static_cast( result ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, - uint32_t planeIndex, - VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( + VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetDisplayPlaneCapabilitiesKHR( - m_physicalDevice, static_cast( mode ), planeIndex, reinterpret_cast( pCapabilities ) ) ); + return static_cast( d.vkGetDeviceGroupPresentCapabilitiesKHR( + static_cast( m_device ), reinterpret_cast( pDeviceGroupPresentCapabilities ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceGroupPresentCapabilitiesKHR && + "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities; - VkResult result = d.vkGetDisplayPlaneCapabilitiesKHR( - m_physicalDevice, static_cast( mode ), planeIndex, reinterpret_cast( &capabilities ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" ); + VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast( &deviceGroupPresentCapabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" ); - return createResultValueType( static_cast( result ), capabilities ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( deviceGroupPresentCapabilities ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pSurface ) ) ); + return static_cast( 
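Illustrative usage sketch (not part of the generated header): acquireNextImageKHR above returns a ResultValue rather than throwing for eTimeout, eNotReady, and eSuboptimalKHR, and Queue::presentKHR passes eSuboptimalKHR through as a return value. A per-frame step, assuming the submission and synchronisation objects exist elsewhere (all names are hypothetical):

#include <cstdint>
#include <vulkan/vulkan.hpp>

// Sketch only: acquire a swapchain image, then present it after rendering has been submitted.
// Returns false when the caller should recreate the swapchain.
bool presentOneFrame( vk::Device device, vk::Queue queue, vk::SwapchainKHR swapchain,
                      vk::Semaphore imageAvailable, vk::Semaphore renderFinished )
{
  vk::ResultValue<uint32_t> acquired   = device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailable );
  uint32_t                  imageIndex = acquired.value;  // valid for eSuccess and eSuboptimalKHR

  // ... submit rendering work that waits on imageAvailable and signals renderFinished ...

  vk::PresentInfoKHR presentInfo{};
  presentInfo.waitSemaphoreCount = 1;
  presentInfo.pWaitSemaphores    = &renderFinished;
  presentInfo.swapchainCount     = 1;
  presentInfo.pSwapchains        = &swapchain;
  presentInfo.pImageIndices      = &imageIndex;

  // eErrorOutOfDateKHR is thrown as vk::OutOfDateKHRError in enhanced mode.
  vk::Result presentResult = queue.presentKHR( presentInfo );

  // Recreate the swapchain when either call reports it is no longer optimal.
  return ( acquired.result == vk::Result::eSuccess ) && ( presentResult == vk::Result::eSuccess );
}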
d.vkGetDeviceGroupSurfacePresentModesKHR( + static_cast( m_device ), static_cast( surface ), reinterpret_cast( pModes ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceGroupSurfacePresentModesKHR && + "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = d.vkCreateDisplayPlaneSurfaceKHR( - m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" ); + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetDeviceGroupSurfacePresentModesKHR( + m_device, static_cast( surface ), reinterpret_cast( &modes ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); - return createResultValueType( static_cast( result ), surface ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( modes ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Instance::createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + uint32_t * pRectCount, + VULKAN_HPP_NAMESPACE::Rect2D * pRects, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = d.vkCreateDisplayPlaneSurfaceKHR( - m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique" ); - - return createResultValueType( - static_cast( result ), - UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); - } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_KHR_display_swapchain === - - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, - const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateSharedSwapchainsKHR( m_device, - swapchainCount, - reinterpret_cast( pCreateInfos ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pSwapchains ) ) ); + return 
static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( + static_cast( m_physicalDevice ), static_cast( surface ), pRectCount, reinterpret_cast( pRects ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - Dispatch const & d ) const + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDevicePresentRectanglesKHR && + "Function requires or " ); +# endif - std::vector swapchains( createInfos.size() ); - VkResult result = d.vkCreateSharedSwapchainsKHR( - m_device, - createInfos.size(), - reinterpret_cast( createInfos.data() ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( swapchains.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); - - return createResultValueType( static_cast( result ), swapchains ); + std::vector rects; + uint32_t rectCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast( surface ), &rectCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && rectCount ) + { + rects.resize( rectCount ); + result = static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( + m_physicalDevice, static_cast( surface ), &rectCount, reinterpret_cast( rects.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); + VULKAN_HPP_ASSERT( rectCount <= rects.size() ); + if ( rectCount < rects.size() ) + { + rects.resize( rectCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( rects ) ); } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - SwapchainKHRAllocator & swapchainKHRAllocator, - Dispatch const & d ) const + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDevicePresentRectanglesKHR && + "Function requires or " ); +# endif - std::vector swapchains( createInfos.size(), swapchainKHRAllocator ); - VkResult result = d.vkCreateSharedSwapchainsKHR( - m_device, - createInfos.size(), - reinterpret_cast( createInfos.data() ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( swapchains.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); + std::vector rects( rect2DAllocator ); + uint32_t rectCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + 
d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast( surface ), &rectCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && rectCount ) + { + rects.resize( rectCount ); + result = static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( + m_physicalDevice, static_cast( surface ), &rectCount, reinterpret_cast( rects.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); + VULKAN_HPP_ASSERT( rectCount <= rects.size() ); + if ( rectCount < rects.size() ) + { + rects.resize( rectCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( rects ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - return createResultValueType( static_cast( result ), swapchains ); + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo, + uint32_t * pImageIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkAcquireNextImage2KHR( static_cast( m_device ), reinterpret_cast( pAcquireInfo ), pImageIndex ) ); } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquireNextImage2KHR && "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; - VkResult result = d.vkCreateSharedSwapchainsKHR( - m_device, - 1, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &swapchain ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" ); + uint32_t imageIndex; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkAcquireNextImage2KHR( m_device, reinterpret_cast( &acquireInfo ), &imageIndex ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eTimeout, + VULKAN_HPP_NAMESPACE::Result::eNotReady, + VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); - return createResultValueType( static_cast( result ), swapchain ); + return ResultValue( result, std::move( imageIndex ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType, SwapchainKHRAllocator>>::type - Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - Dispatch const & d ) const + //=== VK_KHR_display === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { 
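  // ---------------------------------------------------------------------------------------------
  // Editorial aside, not part of the generated patch: unlike most enhanced-mode wrappers,
  // Device::acquireNextImage2KHR above hands back a vk::ResultValue<uint32_t> rather than only the
  // image index, because eTimeout, eNotReady and eSuboptimalKHR are legitimate non-error outcomes
  // the caller still needs to see. A minimal usage sketch, assuming the default `vk` namespace,
  // exceptions enabled, and pre-existing handles (the names below are hypothetical):

  #include <vulkan/vulkan.hpp>

  uint32_t acquireOnDeviceZero( vk::Device device, vk::SwapchainKHR swapchain, vk::Semaphore imageAvailable )
  {
    vk::AcquireNextImageInfoKHR acquireInfo{};
    acquireInfo.swapchain  = swapchain;
    acquireInfo.timeout    = UINT64_MAX;
    acquireInfo.semaphore  = imageAvailable;
    acquireInfo.deviceMask = 1;  // first physical device in the device group

    vk::ResultValue<uint32_t> acquired = device.acquireNextImage2KHR( acquireInfo );
    if ( acquired.result == vk::Result::eSuboptimalKHR )
    {
      // the swapchain still works, but should be recreated when convenient
    }
    return acquired.value;
  }
  // ---------------------------------------------------------------------------------------------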
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - std::vector swapchains( createInfos.size() ); - VkResult result = d.vkCreateSharedSwapchainsKHR( - m_device, - createInfos.size(), - reinterpret_cast( createInfos.data() ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( swapchains.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); - std::vector, SwapchainKHRAllocator> uniqueSwapchains; - uniqueSwapchains.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( auto const & swapchain : swapchains ) - { - uniqueSwapchains.push_back( UniqueHandle( swapchain, deleter ) ); - } - return createResultValueType( static_cast( result ), std::move( uniqueSwapchains ) ); + return static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( + static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); } - template >::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType, SwapchainKHRAllocator>>::type - Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - SwapchainKHRAllocator & swapchainKHRAllocator, - Dispatch const & d ) const +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template < + typename DisplayPropertiesKHRAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPropertiesKHR && "Function requires " ); +# endif - std::vector swapchains( createInfos.size() ); - VkResult result = d.vkCreateSharedSwapchainsKHR( - m_device, - createInfos.size(), - reinterpret_cast( createInfos.data() ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( swapchains.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); - std::vector, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator ); - uniqueSwapchains.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( auto const & swapchain : swapchains ) + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do { - uniqueSwapchains.push_back( UniqueHandle( swapchain, deleter ) ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); } - return createResultValueType( static_cast( result ), std::move( uniqueSwapchains ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
typename ResultValueType>::type - Device::createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + template < + typename DisplayPropertiesKHRAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPropertiesKHR && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; - VkResult result = d.vkCreateSharedSwapchainsKHR( - m_device, - 1, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &swapchain ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique" ); - - return createResultValueType( - static_cast( result ), - UniqueHandle( swapchain, ObjectDestroy( *this, allocator, d ) ) ); + std::vector properties( displayPropertiesKHRAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#if defined( VK_USE_PLATFORM_XLIB_KHR ) - //=== VK_KHR_xlib_surface === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateXlibSurfaceKHR( m_instance, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pSurface ) ) ); + return static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, - 
Optional allocator, - Dispatch const & d ) const +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template < + typename DisplayPlanePropertiesKHRAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = - d.vkCreateXlibSurfaceKHR( m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" ); - - return createResultValueType( static_cast( result ), surface ); + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Instance::createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + template < + typename DisplayPlanePropertiesKHRAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = - d.vkCreateXlibSurfaceKHR( m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique" ); - - return createResultValueType( - static_cast( result ), - UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + std::vector properties( displayPlanePropertiesKHRAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) 
&& propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE Bool32 - PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display * dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, + uint32_t * pDisplayCount, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) ); + return static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( + static_cast( m_physicalDevice ), planeIndex, pDisplayCount, reinterpret_cast( pDisplays ) ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 - PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneSupportedDisplaysKHR && "Function requires " ); +# endif - VkBool32 result = d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID ); - - return static_cast( result ); - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - -#if defined( VK_USE_PLATFORM_XCB_KHR ) - //=== VK_KHR_xcb_surface === - - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateXcbSurfaceKHR( m_instance, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pSurface ) ) ); + std::vector displays; + uint32_t displayCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount ) + { + displays.resize( displayCount ); + result = static_cast( + 
d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast( displays.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); + VULKAN_HPP_ASSERT( displayCount <= displays.size() ); + if ( displayCount < displays.size() ) + { + displays.resize( displayCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( displays ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneSupportedDisplaysKHR && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = - d.vkCreateXcbSurfaceKHR( m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" ); - - return createResultValueType( static_cast( result ), surface ); + std::vector displays( displayKHRAllocator ); + uint32_t displayCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount ) + { + displays.resize( displayCount ); + result = static_cast( + d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast( displays.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); + VULKAN_HPP_ASSERT( displayCount <= displays.size() ); + if ( displayCount < displays.size() ) + { + displays.resize( displayCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( displays ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Instance::createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = - d.vkCreateXcbSurfaceKHR( m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) 
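  // ---------------------------------------------------------------------------------------------
  // Editorial aside, not part of the generated patch: the VK_KHR_display wrappers in this hunk all
  // follow the same count-then-fill enumeration shape, retrying while the driver returns
  // eIncomplete. In enhanced mode that loop stays inside the wrapper and the caller simply receives
  // a vector. A sketch under the same assumptions as above (default `vk` namespace, exceptions
  // enabled); the function name below is hypothetical:

  #include <vulkan/vulkan.hpp>

  // Lists every display that can be scanned out on the given plane; the wrapper performs the two
  // vkGetDisplayPlaneSupportedDisplaysKHR calls and the final resize internally.
  std::vector<vk::DisplayKHR> displaysForPlane( vk::PhysicalDevice gpu, uint32_t planeIndex )
  {
    return gpu.getDisplayPlaneSupportedDisplaysKHR( planeIndex );
  }
  // ---------------------------------------------------------------------------------------------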
); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique" ); - - return createResultValueType( - static_cast( result ), - UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + return static_cast( d.vkGetDisplayModePropertiesKHR( static_cast( m_physicalDevice ), + static_cast( display ), + pPropertyCount, + reinterpret_cast( pProperties ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, - xcb_connection_t * connection, - xcb_visualid_t visual_id, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayModePropertiesKHR && "Function requires " ); +# endif + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetDisplayModePropertiesKHR( + m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, - xcb_connection_t & connection, - xcb_visualid_t visual_id, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayModePropertiesKHR && "Function requires " ); +# endif - VkBool32 result = d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id ); - - return static_cast( result ); + std::vector properties( displayModePropertiesKHRAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == 
VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetDisplayModePropertiesKHR( + m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XCB_KHR*/ - -#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) - //=== VK_KHR_wayland_surface === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateWaylandSurfaceKHR( m_instance, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pSurface ) ) ); + return static_cast( d.vkCreateDisplayModeKHR( static_cast( m_physicalDevice ), + static_cast( display ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pMode ) ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDisplayModeKHR && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = d.vkCreateWaylandSurfaceKHR( - m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" ); + VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateDisplayModeKHR( m_physicalDevice, + static_cast( display ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &mode ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::PhysicalDevice::createDisplayModeKHR" ); - return createResultValueType( static_cast( result ), surface ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( mode ) ); } -# ifndef VULKAN_HPP_NO_SMART_HANDLE +# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Instance::createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDisplayModeKHR && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = d.vkCreateWaylandSurfaceKHR( - m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique" ); + VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateDisplayModeKHR( m_physicalDevice, + static_cast( display ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &mode ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique" ); - return createResultValueType( - static_cast( result ), - UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( mode, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, - struct wl_display * display, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, + uint32_t planeIndex, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) ); + return static_cast( d.vkGetDisplayPlaneCapabilitiesKHR( static_cast( m_physicalDevice ), + static_cast( mode ), + planeIndex, + reinterpret_cast( pCapabilities ) ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 - PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d ) const { 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneCapabilitiesKHR && "Function requires " ); +# endif - VkBool32 result = d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display ); + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetDisplayPlaneCapabilitiesKHR( + m_physicalDevice, static_cast( mode ), planeIndex, reinterpret_cast( &capabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" ); - return static_cast( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( capabilities ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ - -#if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_KHR_android_surface === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateAndroidSurfaceKHR( m_instance, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pSurface ) ) ); + return static_cast( d.vkCreateDisplayPlaneSurfaceKHR( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDisplayPlaneSurfaceKHR && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = d.vkCreateAndroidSurfaceKHR( + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, - reinterpret_cast( &createInfo ), + reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" ); + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" ); - return createResultValueType( static_cast( result ), surface ); + return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } -# ifndef VULKAN_HPP_NO_SMART_HANDLE +# ifndef VULKAN_HPP_NO_SMART_HANDLE template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Instance::createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + Instance::createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDisplayPlaneSurfaceKHR && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = d.vkCreateAndroidSurfaceKHR( + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, - reinterpret_cast( &createInfo ), + reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique" ); + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique" ); - return createResultValueType( - static_cast( result ), - UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_win32_surface === + //=== VK_KHR_display_swapchain === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, + const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateWin32SurfaceKHR( m_instance, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pSurface ) ) ); + return static_cast( d.vkCreateSharedSwapchainsKHR( static_cast( m_device ), + swapchainCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSwapchains ) ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + 
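  // ---------------------------------------------------------------------------------------------
  // Editorial aside, not part of the generated patch: the *Unique overload above returns the new
  // surface wrapped in a vk::UniqueHandle, so it is destroyed automatically when the handle goes
  // out of scope. A sketch assuming VULKAN_HPP_NO_SMART_HANDLE is not defined, exceptions are
  // enabled, and the create info has already been filled in by the caller:

  #include <vulkan/vulkan.hpp>

  vk::UniqueSurfaceKHR makeDisplaySurface( vk::Instance instance, vk::DisplaySurfaceCreateInfoKHR const & createInfo )
  {
    // Throws vk::SystemError on failure; on success the returned handle owns the VkSurfaceKHR.
    return instance.createDisplayPlaneSurfaceKHRUnique( createInfo );
  }
  // ---------------------------------------------------------------------------------------------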
Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = - d.vkCreateWin32SurfaceKHR( m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" ); + std::vector swapchains( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateSharedSwapchainsKHR( + m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( swapchains.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); - return createResultValueType( static_cast( result ), surface ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchains ) ); } -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Instance::createWin32SurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + SwapchainKHRAllocator & swapchainKHRAllocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = - d.vkCreateWin32SurfaceKHR( m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique" ); + std::vector swapchains( createInfos.size(), swapchainKHRAllocator ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateSharedSwapchainsKHR( + m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( swapchains.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); - return createResultValueType( - static_cast( result ), - UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchains ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) ); - } -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function requires " ); +# endif - //=== VK_EXT_debug_report === + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateSharedSwapchainsKHR( + m_device, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" ); - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchain ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, SwapchainKHRAllocator>>::type + Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDebugReportCallbackEXT( m_instance, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pCallback ) ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function requires " ); +# endif + + std::vector swapchains( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateSharedSwapchainsKHR( + m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( swapchains.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); + std::vector, SwapchainKHRAllocator> uniqueSwapchains; + uniqueSwapchains.reserve( createInfos.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & swapchain : swapchains ) + { + uniqueSwapchains.push_back( UniqueHandle( swapchain, deleter ) ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueSwapchains ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, - Optional allocator, - Dispatch const & d ) const + template >::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, SwapchainKHRAllocator>>::type + Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + SwapchainKHRAllocator & swapchainKHRAllocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function requires " ); +# 
endif - VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; - VkResult result = d.vkCreateDebugReportCallbackEXT( - m_instance, - reinterpret_cast( &createInfo ), + std::vector swapchains( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateSharedSwapchainsKHR( + m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &callback ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" ); - - return createResultValueType( static_cast( result ), callback ); + reinterpret_cast( swapchains.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); + std::vector, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator ); + uniqueSwapchains.reserve( createInfos.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & swapchain : swapchains ) + { + uniqueSwapchains.push_back( UniqueHandle( swapchain, deleter ) ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueSwapchains ) ); } -# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Instance::createDebugReportCallbackEXTUnique( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSharedSwapchainsKHR && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; - VkResult result = d.vkCreateDebugReportCallbackEXT( - m_instance, - reinterpret_cast( &createInfo ), + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateSharedSwapchainsKHR( + m_device, + 1, + reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &callback ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique" ); + reinterpret_cast( &swapchain ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique" ); - return createResultValueType( - static_cast( result ), - UniqueHandle( callback, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( swapchain, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === template - VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const 
VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyDebugReportCallbackEXT( - m_instance, static_cast( callback ), reinterpret_cast( pAllocator ) ); + return static_cast( d.vkCreateXlibSurfaceKHR( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateXlibSurfaceKHR && "Function requires " ); +# endif - d.vkDestroyDebugReportCallbackEXT( - m_instance, - static_cast( callback ), - reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateXlibSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" ); - template - VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyDebugReportCallbackEXT( - m_instance, static_cast( callback ), reinterpret_cast( pAllocator ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateXlibSurfaceKHR && "Function requires " ); +# endif - d.vkDestroyDebugReportCallbackEXT( - m_instance, - static_cast( callback ), - reinterpret_cast( static_cast( allocator ) ) ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateXlibSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique" ); + + return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, - uint64_t object, - size_t location, - int32_t messageCode, - const char * pLayerPrefix, - const char * pMessage, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE Bool32 + PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display * dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDebugReportMessageEXT( m_instance, - static_cast( flags ), - static_cast( objectType ), - object, - location, - messageCode, - pLayerPrefix, - pMessage ); + return static_cast( + d.vkGetPhysicalDeviceXlibPresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex, dpy, visualID ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, - uint64_t object, - size_t location, - int32_t messageCode, - const std::string & layerPrefix, - const std::string & message, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 + PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceXlibPresentationSupportKHR && + "Function requires " ); +# endif - d.vkDebugReportMessageEXT( m_instance, - static_cast( flags ), - static_cast( objectType ), - object, - location, - messageCode, - layerPrefix.c_str(), - message.c_str() ); + VkBool32 result = d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID ); + + return static_cast( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - //=== VK_EXT_debug_marker === +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); + return static_cast( d.vkCreateXcbSurfaceKHR( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateXcbSurfaceKHR && "Function requires " ); +# endif - VkResult result = d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateXcbSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast( pNameInfo ) ) ); - } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateXcbSurfaceKHR && "Function requires " ); +# endif - VkResult result = d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateXcbSurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void 
CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t * connection, + xcb_visualid_t visual_id, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ); + return static_cast( + d.vkGetPhysicalDeviceXcbPresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex, connection, visual_id ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, + xcb_connection_t & connection, + xcb_visualid_t visual_id, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceXcbPresentationSupportKHR && + "Function requires " ); +# endif - d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); + VkBool32 result = d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id ); + + return static_cast( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDebugMarkerEndEXT( m_commandBuffer ); + return static_cast( d.vkCreateWaylandSurfaceKHR( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateWaylandSurfaceKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateWaylandSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + 
reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateWaylandSurfaceKHR && "Function requires " ); +# endif - d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateWaylandSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique" ); -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_queue === + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile, - VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, + struct wl_display * display, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( - m_physicalDevice, reinterpret_cast( pVideoProfile ), reinterpret_cast( pCapabilities ) ) ); + return static_cast( + d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex, display ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 + PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR && + "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities; - VkResult result = d.vkGetPhysicalDeviceVideoCapabilitiesKHR( - m_physicalDevice, reinterpret_cast( &videoProfile ), reinterpret_cast( &capabilities ) ); - resultCheck( static_cast( 
result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + VkBool32 result = d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display ); - return createResultValueType( static_cast( result ), capabilities ); + return static_cast( result ); } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get(); - VkResult result = d.vkGetPhysicalDeviceVideoCapabilitiesKHR( - m_physicalDevice, reinterpret_cast( &videoProfile ), reinterpret_cast( &capabilities ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); - - return createResultValueType( static_cast( result ), structureChain ); - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, - uint32_t * pVideoFormatPropertyCount, - VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, - reinterpret_cast( pVideoFormatInfo ), - pVideoFormatPropertyCount, - reinterpret_cast( pVideoFormatProperties ) ) ); + return static_cast( d.vkCreateAndroidSurfaceKHR( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateAndroidSurfaceKHR && "Function requires " ); +# endif - std::vector videoFormatProperties; - uint32_t videoFormatPropertyCount; - VkResult result; - do - { - result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( - m_physicalDevice, reinterpret_cast( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && videoFormatPropertyCount ) - { - videoFormatProperties.resize( videoFormatPropertyCount ); - result = 
d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, - reinterpret_cast( &videoFormatInfo ), - &videoFormatPropertyCount, - reinterpret_cast( videoFormatProperties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); - VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); - if ( videoFormatPropertyCount < videoFormatProperties.size() ) - { - videoFormatProperties.resize( videoFormatPropertyCount ); - } - return createResultValueType( static_cast( result ), videoFormatProperties ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateAndroidSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, - VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator, - Dispatch const & d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateAndroidSurfaceKHR && "Function requires " ); +# endif - std::vector videoFormatProperties( videoFormatPropertiesKHRAllocator ); - uint32_t videoFormatPropertyCount; - VkResult result; - do - { - result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( - m_physicalDevice, reinterpret_cast( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && videoFormatPropertyCount ) - { - videoFormatProperties.resize( videoFormatPropertyCount ); - result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, - reinterpret_cast( &videoFormatInfo ), - &videoFormatPropertyCount, - reinterpret_cast( videoFormatProperties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); - VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); - if ( videoFormatPropertyCount < videoFormatProperties.size() ) - { - videoFormatProperties.resize( videoFormatPropertyCount ); - } - return createResultValueType( static_cast( result ), videoFormatProperties ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateAndroidSurfaceKHR( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } 
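For context, the surface-creation wrappers touched in this hunk (Xlib, XCB, Wayland, Android, Win32) all follow the same enhanced-mode pattern: a throwing overload that returns the handle directly, and a `...Unique` overload that wraps it in a `UniqueHandle` with an `ObjectDestroy` deleter. Below is a minimal usage sketch of that pattern, not part of the generated header: it uses the Xlib variant as a representative, and `makeXlibSurface`, `dpy`, and `window` are illustrative placeholders assumed to come from the caller's own X11 setup.

#define VK_USE_PLATFORM_XLIB_KHR
#include <vulkan/vulkan.hpp>
#include <stdexcept>

vk::UniqueSurfaceKHR makeXlibSurface( vk::Instance       instance,
                                      vk::PhysicalDevice gpu,
                                      uint32_t           queueFamilyIndex,
                                      Display *          dpy,
                                      Window             window )
{
  // Enhanced-mode overload: throws vk::SystemError on failure; the Unique
  // variant destroys the VkSurfaceKHR via ObjectDestroy when it leaves scope.
  vk::XlibSurfaceCreateInfoKHR createInfo( {}, dpy, window );
  vk::UniqueSurfaceKHR         surface = instance.createXlibSurfaceKHRUnique( createInfo );

  // Thin wrapper over vkGetPhysicalDeviceXlibPresentationSupportKHR.
  VisualID visual = XVisualIDFromVisual( DefaultVisual( dpy, DefaultScreen( dpy ) ) );
  if ( !gpu.getXlibPresentationSupportKHR( queueFamilyIndex, *dpy, visual ) )
  {
    throw std::runtime_error( "queue family cannot present to this X11 display" );
  }
  return surface;
}

The same shape applies to the Xcb, Wayland, Android, and Win32 variants above, and to createDebugReportCallbackEXTUnique and createVideoSessionKHRUnique later in this hunk.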
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateVideoSessionKHR( m_device, - reinterpret_cast( pCreateInfo ), + return static_cast( d.vkCreateWin32SurfaceKHR( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), - reinterpret_cast( pVideoSession ) ) ); + reinterpret_cast( pSurface ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateWin32SurfaceKHR && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; - VkResult result = - d.vkCreateVideoSessionKHR( m_device, - reinterpret_cast( &createInfo ), + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateWin32SurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &videoSession ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" ); + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" ); - return createResultValueType( static_cast( result ), videoSession ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createWin32SurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateWin32SurfaceKHR && "Function requires " ); +# endif - 
VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; - VkResult result = - d.vkCreateVideoSessionKHR( m_device, - reinterpret_cast( &createInfo ), + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateWin32SurfaceKHR( m_instance, + reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &videoSession ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique" ); + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique" ); - return createResultValueType( - static_cast( result ), - UniqueHandle( videoSession, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyVideoSessionKHR( m_device, static_cast( videoSession ), reinterpret_cast( pAllocator ) ); + return static_cast( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex ) ); } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - d.vkDestroyVideoSessionKHR( - m_device, - static_cast( videoSession ), - reinterpret_cast( static_cast( allocator ) ) ); + return static_cast( d.vkCreateDebugReportCallbackEXT( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pCallback ) ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyVideoSessionKHR( m_device, static_cast( videoSession ), reinterpret_cast( pAllocator ) ); - } 
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDebugReportCallbackEXT && "Function requires " ); +# endif -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDebugReportCallbackEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &callback ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" ); - d.vkDestroyVideoSessionKHR( - m_device, - static_cast( videoSession ), - reinterpret_cast( static_cast( allocator ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( callback ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, - uint32_t * pMemoryRequirementsCount, - VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createDebugReportCallbackEXTUnique( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetVideoSessionMemoryRequirementsKHR( m_device, - static_cast( videoSession ), - pMemoryRequirementsCount, - reinterpret_cast( pMemoryRequirements ) ) ); - } +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDebugReportCallbackEXT && "Function requires " ); +# endif -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDebugReportCallbackEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &callback ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique" ); - std::vector memoryRequirements; - uint32_t memoryRequirementsCount; - VkResult result; - do - { - result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast( videoSession ), &memoryRequirementsCount, nullptr ); - if ( ( result == VK_SUCCESS ) && memoryRequirementsCount ) - { - memoryRequirements.resize( memoryRequirementsCount ); - result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, - static_cast( videoSession ), - &memoryRequirementsCount, - reinterpret_cast( memoryRequirements.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" ); - VULKAN_HPP_ASSERT( 
memoryRequirementsCount <= memoryRequirements.size() ); - if ( memoryRequirementsCount < memoryRequirements.size() ) - { - memoryRequirements.resize( memoryRequirementsCount ); - } - return createResultValueType( static_cast( result ), memoryRequirements ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, + UniqueHandle( callback, detail::ObjectDestroy( *this, allocator, d ) ) ); } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, - VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator, - Dispatch const & d ) const + template + VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - std::vector memoryRequirements( - videoSessionMemoryRequirementsKHRAllocator ); - uint32_t memoryRequirementsCount; - VkResult result; - do - { - result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast( videoSession ), &memoryRequirementsCount, nullptr ); - if ( ( result == VK_SUCCESS ) && memoryRequirementsCount ) - { - memoryRequirements.resize( memoryRequirementsCount ); - result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, - static_cast( videoSession ), - &memoryRequirementsCount, - reinterpret_cast( memoryRequirements.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" ); - VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() ); - if ( memoryRequirementsCount < memoryRequirements.size() ) - { - memoryRequirements.resize( memoryRequirementsCount ); - } - return createResultValueType( static_cast( result ), memoryRequirements ); + d.vkDestroyDebugReportCallbackEXT( + static_cast( m_instance ), static_cast( callback ), reinterpret_cast( pAllocator ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, - uint32_t bindSessionMemoryInfoCount, - const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkBindVideoSessionMemoryKHR( m_device, - static_cast( videoSession ), - bindSessionMemoryInfoCount, - reinterpret_cast( pBindSessionMemoryInfos ) ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDebugReportCallbackEXT && "Function requires " ); +# endif + + d.vkDestroyDebugReportCallbackEXT( + m_instance, + static_cast( callback ), + reinterpret_cast( static_cast( allocator ) ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE 
typename ResultValueType::type Device::bindVideoSessionMemoryKHR( - VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, - VULKAN_HPP_NAMESPACE::ArrayProxy const & bindSessionMemoryInfos, - Dispatch const & d ) const + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VkResult result = d.vkBindVideoSessionMemoryKHR( m_device, - static_cast( videoSession ), - bindSessionMemoryInfos.size(), - reinterpret_cast( bindSessionMemoryInfos.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" ); - - return createResultValueType( static_cast( result ) ); + d.vkDestroyDebugReportCallbackEXT( + static_cast( m_instance ), static_cast( callback ), reinterpret_cast( pAllocator ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateVideoSessionParametersKHR( m_device, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pVideoSessionParameters ) ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDebugReportCallbackEXT && "Function requires " ); +# endif + + d.vkDestroyDebugReportCallbackEXT( + m_instance, + static_cast( callback ), + reinterpret_cast( static_cast( allocator ) ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, + uint64_t object, + size_t location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; - VkResult result = d.vkCreateVideoSessionParametersKHR( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &videoSessionParameters ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" ); - - return createResultValueType( static_cast( result ), videoSessionParameters ); + d.vkDebugReportMessageEXT( static_cast( m_instance ), + static_cast( flags ), + static_cast( objectType_ ), + object, + location, + messageCode, + pLayerPrefix, + pMessage ); } -# ifndef 
VULKAN_HPP_NO_SMART_HANDLE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::createVideoSessionParametersKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, + uint64_t object, + size_t location, + int32_t messageCode, + const std::string & layerPrefix, + const std::string & message, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDebugReportMessageEXT && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; - VkResult result = d.vkCreateVideoSessionParametersKHR( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &videoSessionParameters ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique" ); - - return createResultValueType( static_cast( result ), - UniqueHandle( - videoSessionParameters, ObjectDestroy( *this, allocator, d ) ) ); + d.vkDebugReportMessageEXT( m_instance, + static_cast( flags ), + static_cast( objectType_ ), + object, + location, + messageCode, + layerPrefix.c_str(), + message.c_str() ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_debug_marker === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, - const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkUpdateVideoSessionParametersKHR( m_device, - static_cast( videoSessionParameters ), - reinterpret_cast( pUpdateInfo ) ) ); + return static_cast( + d.vkDebugMarkerSetObjectTagEXT( static_cast( m_device ), reinterpret_cast( pTagInfo ) ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, - const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo, - Dispatch const & d ) const + Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDebugMarkerSetObjectTagEXT && "Function requires " ); +# endif - VkResult result = d.vkUpdateVideoSessionParametersKHR( m_device, - static_cast( videoSessionParameters ), - reinterpret_cast( &updateInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING 
"::Device::updateVideoSessionParametersKHR" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyVideoSessionParametersKHR( - m_device, static_cast( videoSessionParameters ), reinterpret_cast( pAllocator ) ); + return static_cast( + d.vkDebugMarkerSetObjectNameEXT( static_cast( m_device ), reinterpret_cast( pNameInfo ) ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDebugMarkerSetObjectNameEXT && "Function requires " ); +# endif - d.vkDestroyVideoSessionParametersKHR( - m_device, - static_cast( videoSessionParameters ), - reinterpret_cast( static_cast( allocator ) ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyVideoSessionParametersKHR( - m_device, static_cast( videoSessionParameters ), reinterpret_cast( pAllocator ) ); + d.vkCmdDebugMarkerBeginEXT( static_cast( m_commandBuffer ), reinterpret_cast( pMarkerInfo ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, - Optional allocator, - Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDebugMarkerBeginEXT && "Function requires " ); +# endif - d.vkDestroyVideoSessionParametersKHR( - m_device, - static_cast( videoSessionParameters ), - reinterpret_cast( static_cast( allocator ) ) ); + d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast( pBeginInfo ) ); + d.vkCmdDebugMarkerEndEXT( static_cast( m_commandBuffer ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast( &beginInfo ) ); + d.vkCmdDebugMarkerInsertEXT( static_cast( m_commandBuffer ), reinterpret_cast( pMarkerInfo ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast( pEndCodingInfo ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDebugMarkerInsertEXT && "Function requires " ); +# endif + + d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_video_queue === -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile, + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast( &endCodingInfo ) ); + return static_cast( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( static_cast( m_physicalDevice ), + 
reinterpret_cast( pVideoProfile ), + reinterpret_cast( pCapabilities ) ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast( pCodingControlInfo ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoCapabilitiesKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( + m_physicalDevice, reinterpret_cast( &videoProfile ), reinterpret_cast( &capabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( capabilities ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoCapabilitiesKHR && "Function requires " ); +# endif - d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast( &codingControlInfo ) ); - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( + m_physicalDevice, reinterpret_cast( &videoProfile ), reinterpret_cast( &capabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_decode_queue === + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pFrameInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, + uint32_t * pVideoFormatPropertyCount, + VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast( pFrameInfo ) ); + 
return static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( pVideoFormatInfo ), + pVideoFormatPropertyCount, + reinterpret_cast( pVideoFormatProperties ) ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & frameInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR && + "Function requires " ); +# endif - d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast( &frameInfo ) ); + std::vector videoFormatProperties; + uint32_t videoFormatPropertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, reinterpret_cast( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + result = static_cast( + d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoFormatProperties ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - //=== VK_EXT_transform_feedback === - template - VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, - uint32_t bindingCount, - const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, - const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, - const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, - firstBinding, - bindingCount, - reinterpret_cast( pBuffers ), - reinterpret_cast( pOffsets ), - reinterpret_cast( pSizes ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR && + "Function requires " ); +# endif + + std::vector videoFormatProperties( videoFormatPropertiesKHRAllocator ); + uint32_t videoFormatPropertyCount; + 
VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, reinterpret_cast( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + result = static_cast( + d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoFormatProperties ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void - CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, - VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, - VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, - VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); - VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); -# else - if ( buffers.size() != offsets.size() ) +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR && + "Function requires " ); +# endif + + std::vector structureChains; + std::vector videoFormatProperties; + uint32_t videoFormatPropertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" ); + result = static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, reinterpret_cast( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) + { + structureChains.resize( videoFormatPropertyCount ); + videoFormatProperties.resize( videoFormatPropertyCount ); + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + videoFormatProperties[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( + d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) 
+ { + structureChains.resize( videoFormatPropertyCount ); } - if ( !sizes.empty() && buffers.size() != sizes.size() ) + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" ); + structureChains[i].template get() = videoFormatProperties[i]; } -# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - - d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, - firstBinding, - buffers.size(), - reinterpret_cast( buffers.data() ), - reinterpret_cast( offsets.data() ), - reinterpret_cast( sizes.data() ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, - uint32_t counterBufferCount, - const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, - const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + StructureChainAllocator & structureChainAllocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, - firstCounterBuffer, - counterBufferCount, - reinterpret_cast( pCounterBuffers ), - reinterpret_cast( pCounterBufferOffsets ) ); - } +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR && + "Function requires " ); +# endif -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void - CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, - VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBuffers, - VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBufferOffsets, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); -# else - if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) + std::vector structureChains( structureChainAllocator ); + std::vector videoFormatProperties; + uint32_t videoFormatPropertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); + result = static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, reinterpret_cast( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) + { + structureChains.resize( videoFormatPropertyCount ); + videoFormatProperties.resize( videoFormatPropertyCount ); + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + videoFormatProperties[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( + d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ) ); + } + } while ( 
result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + structureChains.resize( videoFormatPropertyCount ); } -# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - - d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, - firstCounterBuffer, - counterBuffers.size(), - reinterpret_cast( counterBuffers.data() ), - reinterpret_cast( counterBufferOffsets.data() ) ); + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + structureChains[i].template get() = videoFormatProperties[i]; + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, - uint32_t counterBufferCount, - const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, - const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, - firstCounterBuffer, - counterBufferCount, - reinterpret_cast( pCounterBuffers ), - reinterpret_cast( pCounterBufferOffsets ) ); + return static_cast( d.vkCreateVideoSessionKHR( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pVideoSession ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void - CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, - VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBuffers, - VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBufferOffsets, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); -# else - if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); - } -# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateVideoSessionKHR && "Function requires " ); +# endif - d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, - firstCounterBuffer, - counterBuffers.size(), - reinterpret_cast( counterBuffers.data() ), - reinterpret_cast( counterBufferOffsets.data() ) ); + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateVideoSessionKHR( 
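
// --- Illustrative usage sketch (editorial note, not part of the generated header or this diff) ---
// Shows how the enhanced-mode PhysicalDevice::getVideoFormatPropertiesKHR wrapper above is
// typically called from application code; it hides the VK_INCOMPLETE two-call enumeration loop
// visible in the diff. The decode-DPB usage flag and the empty profile list are placeholder
// assumptions: real code must chain a populated vk::VideoProfileListInfoKHR describing the
// target video profile(s). Shown as a stand-alone snippet.
#include <vulkan/vulkan.hpp>
#include <vector>

std::vector<vk::VideoFormatPropertiesKHR> queryDecodeDpbFormats( vk::PhysicalDevice physicalDevice )
{
  vk::VideoProfileListInfoKHR profileList{};  // profileCount / pProfiles elided in this sketch
  vk::PhysicalDeviceVideoFormatInfoKHR formatInfo{};
  formatInfo.pNext      = &profileList;
  formatInfo.imageUsage = vk::ImageUsageFlagBits::eVideoDecodeDpbKHR;

  // Throws vk::SystemError on failure unless VULKAN_HPP_NO_EXCEPTIONS is defined.
  return physicalDevice.getVideoFormatPropertiesKHR( formatInfo );
}
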
m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &videoSession ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoSession ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t query, - VULKAN_HPP_NAMESPACE::QueryControlFlags flags, - uint32_t index, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ), index ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateVideoSessionKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateVideoSessionKHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &videoSession ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( videoSession, detail::ObjectDestroy( *this, allocator, d ) ) ); } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void - CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast( queryPool ), query, index ); + d.vkDestroyVideoSessionKHR( + static_cast( m_device ), static_cast( videoSession ), reinterpret_cast( pAllocator ) ); } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, - uint32_t firstInstance, - VULKAN_HPP_NAMESPACE::Buffer counterBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, - uint32_t counterOffset, - uint32_t vertexStride, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, - instanceCount, - firstInstance, - static_cast( counterBuffer ), - static_cast( counterBufferOffset ), - counterOffset, - vertexStride ); - } +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionKHR && "Function requires " ); +# endif - //=== VK_NVX_binary_import === + 
d.vkDestroyVideoSessionKHR( + m_device, + static_cast( videoSession ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateCuModuleNVX( m_device, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pModule ) ) ); + d.vkDestroyVideoSessionKHR( + static_cast( m_device ), static_cast( videoSession ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionKHR && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::CuModuleNVX module; - VkResult result = - d.vkCreateCuModuleNVX( m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &module ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" ); - - return createResultValueType( static_cast( result ), module ); + d.vkDestroyVideoSessionKHR( + m_device, + static_cast( videoSession ), + reinterpret_cast( static_cast( allocator ) ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + uint32_t * pMemoryRequirementsCount, + VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetVideoSessionMemoryRequirementsKHR( static_cast( m_device ), + static_cast( videoSession ), + pMemoryRequirementsCount, + reinterpret_cast( pMemoryRequirements ) ) ); + } - VULKAN_HPP_NAMESPACE::CuModuleNVX module; - VkResult result = - d.vkCreateCuModuleNVX( m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &module ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique" ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + 
Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetVideoSessionMemoryRequirementsKHR && "Function requires " ); +# endif + + std::vector memoryRequirements; + uint32_t memoryRequirementsCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast( videoSession ), &memoryRequirementsCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && memoryRequirementsCount ) + { + memoryRequirements.resize( memoryRequirementsCount ); + result = static_cast( + d.vkGetVideoSessionMemoryRequirementsKHR( m_device, + static_cast( videoSession ), + &memoryRequirementsCount, + reinterpret_cast( memoryRequirements.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + + VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() ); + if ( memoryRequirementsCount < memoryRequirements.size() ) + { + memoryRequirements.resize( memoryRequirementsCount ); + } + return memoryRequirements; + } + + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetVideoSessionMemoryRequirementsKHR && "Function requires " ); +# endif + + std::vector memoryRequirements( + videoSessionMemoryRequirementsKHRAllocator ); + uint32_t memoryRequirementsCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast( videoSession ), &memoryRequirementsCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && memoryRequirementsCount ) + { + memoryRequirements.resize( memoryRequirementsCount ); + result = static_cast( + d.vkGetVideoSessionMemoryRequirementsKHR( m_device, + static_cast( videoSession ), + &memoryRequirementsCount, + reinterpret_cast( memoryRequirements.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - return createResultValueType( static_cast( result ), - UniqueHandle( module, ObjectDestroy( *this, allocator, d ) ) ); + VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() ); + if ( memoryRequirementsCount < memoryRequirements.size() ) + { + memoryRequirements.resize( memoryRequirementsCount ); + } + return memoryRequirements; } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + uint32_t bindSessionMemoryInfoCount, + const 
VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateCuFunctionNVX( m_device, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pFunction ) ) ); + return static_cast( d.vkBindVideoSessionMemoryKHR( static_cast( m_device ), + static_cast( videoSession ), + bindSessionMemoryInfoCount, + reinterpret_cast( pBindSessionMemoryInfos ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::bindVideoSessionMemoryKHR( + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bindSessionMemoryInfos, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindVideoSessionMemoryKHR && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::CuFunctionNVX function; - VkResult result = - d.vkCreateCuFunctionNVX( m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &function ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkBindVideoSessionMemoryKHR( m_device, + static_cast( videoSession ), + bindSessionMemoryInfos.size(), + reinterpret_cast( bindSessionMemoryInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" ); - return createResultValueType( static_cast( result ), function ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VULKAN_HPP_NAMESPACE::CuFunctionNVX function; - VkResult result = - d.vkCreateCuFunctionNVX( m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &function ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique" ); - - return createResultValueType( - static_cast( result ), - UniqueHandle( function, ObjectDestroy( *this, allocator, d ) ) ); + return static_cast( d.vkCreateVideoSessionParametersKHR( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pVideoSessionParameters ) ) ); } -# endif 
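
// --- Illustrative usage sketch (editorial note, not part of the generated header or this diff) ---
// Ties together the video-session wrappers above: create a session, query its memory
// requirements (the wrapper loops on eIncomplete internally), then bind memory. The create-info
// contents and the actual vk::DeviceMemory allocations are elided assumptions.
#include <vulkan/vulkan.hpp>
#include <vector>

void createAndBindVideoSession( vk::Device device, const vk::VideoSessionCreateInfoKHR & createInfo )
{
  // The Unique variant destroys the session automatically via vkDestroyVideoSessionKHR.
  vk::UniqueVideoSessionKHR session = device.createVideoSessionKHRUnique( createInfo );

  std::vector<vk::VideoSessionMemoryRequirementsKHR> requirements =
    device.getVideoSessionMemoryRequirementsKHR( *session );

  std::vector<vk::BindVideoSessionMemoryInfoKHR> bindInfos;
  bindInfos.reserve( requirements.size() );
  for ( const vk::VideoSessionMemoryRequirementsKHR & req : requirements )
  {
    vk::BindVideoSessionMemoryInfoKHR bindInfo{};
    bindInfo.memoryBindIndex = req.memoryBindIndex;
    bindInfo.memoryOffset    = 0;
    bindInfo.memorySize      = req.memoryRequirements.size;
    // bindInfo.memory would come from a vk::DeviceMemory allocation satisfying
    // req.memoryRequirements (allocation elided in this sketch).
    bindInfos.push_back( bindInfo );
  }
  device.bindVideoSessionMemoryKHR( *session, bindInfos );
}
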
/*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyCuModuleNVX( m_device, static_cast( module ), reinterpret_cast( pAllocator ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateVideoSessionParametersKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateVideoSessionParametersKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &videoSessionParameters ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoSessionParameters ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createVideoSessionParametersKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateVideoSessionParametersKHR && "Function requires " ); +# endif - d.vkDestroyCuModuleNVX( m_device, - static_cast( module ), - reinterpret_cast( static_cast( allocator ) ) ); + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateVideoSessionParametersKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &videoSessionParameters ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + videoSessionParameters, detail::ObjectDestroy( *this, allocator, d ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); - d.vkDestroyCuModuleNVX( m_device, static_cast( module ), reinterpret_cast( pAllocator ) ); + return static_cast( d.vkUpdateVideoSessionParametersKHR( static_cast( m_device ), + static_cast( videoSessionParameters ), + reinterpret_cast( pUpdateInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUpdateVideoSessionParametersKHR && "Function requires " ); +# endif - d.vkDestroyCuModuleNVX( m_device, - static_cast( module ), - reinterpret_cast( static_cast( allocator ) ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkUpdateVideoSessionParametersKHR( m_device, + static_cast( videoSessionParameters ), + reinterpret_cast( &updateInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyCuFunctionNVX( m_device, static_cast( function ), reinterpret_cast( pAllocator ) ); + d.vkDestroyVideoSessionParametersKHR( static_cast( m_device ), + static_cast( videoSessionParameters ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionParametersKHR && "Function requires " ); +# endif - d.vkDestroyCuFunctionNVX( m_device, - static_cast( function ), - reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyVideoSessionParametersKHR( + m_device, + static_cast( videoSessionParameters ), + reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + VULKAN_HPP_INLINE void Device::destroy( 
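
// --- Illustrative usage sketch (editorial note, not part of the generated header or this diff) ---
// Creating and updating video session parameters with the wrappers above. A codec-specific
// add-info structure (H.264/H.265/AV1) is required in the pNext chain in real use and is elided
// here; the update sequence count shown is an assumption for a first update.
#include <vulkan/vulkan.hpp>

vk::UniqueVideoSessionParametersKHR makeSessionParameters( vk::Device device, vk::VideoSessionKHR session )
{
  vk::VideoSessionParametersCreateInfoKHR createInfo{};
  createInfo.videoSession = session;
  // createInfo.pNext must point at the codec-specific parameters add-info structure.

  vk::UniqueVideoSessionParametersKHR parameters = device.createVideoSessionParametersKHRUnique( createInfo );

  // Each vkUpdateVideoSessionParametersKHR call increments the expected sequence count by one.
  vk::VideoSessionParametersUpdateInfoKHR updateInfo{};
  updateInfo.updateSequenceCount = 1;
  device.updateVideoSessionParametersKHR( *parameters, updateInfo );

  return parameters;
}
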
VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyCuFunctionNVX( m_device, static_cast( function ), reinterpret_cast( pAllocator ) ); + d.vkDestroyVideoSessionParametersKHR( static_cast( m_device ), + static_cast( videoSessionParameters ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, Optional allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyVideoSessionParametersKHR && "Function requires " ); +# endif - d.vkDestroyCuFunctionNVX( m_device, - static_cast( function ), - reinterpret_cast( static_cast( allocator ) ) ); + d.vkDestroyVideoSessionParametersKHR( + m_device, + static_cast( videoSessionParameters ), + reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast( pLaunchInfo ) ); + d.vkCmdBeginVideoCodingKHR( static_cast( m_commandBuffer ), reinterpret_cast( pBeginInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginVideoCodingKHR && "Function requires " ); +# endif - d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast( &launchInfo ) ); + d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast( &beginInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_NVX_image_view_handle === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { + VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast( pInfo ) ); + d.vkCmdEndVideoCodingKHR( static_cast( m_commandBuffer ), reinterpret_cast( pEndCodingInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE uint32_t 
Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdEndVideoCodingKHR && "Function requires " ); +# endif - uint32_t result = d.vkGetImageViewHandleNVX( m_device, reinterpret_cast( &info ) ); - - return result; + d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast( &endCodingInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, - VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetImageViewAddressNVX( m_device, static_cast( imageView ), reinterpret_cast( pProperties ) ) ); + d.vkCmdControlVideoCodingKHR( static_cast( m_commandBuffer ), + reinterpret_cast( pCodingControlInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdControlVideoCodingKHR && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties; - VkResult result = - d.vkGetImageViewAddressNVX( m_device, static_cast( imageView ), reinterpret_cast( &properties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" ); - - return createResultValueType( static_cast( result ), properties ); + d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast( &codingControlInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_AMD_draw_indirect_count === + //=== VK_KHR_video_decode_queue === template - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::Buffer countBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pDecodeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndirectCountAMD( m_commandBuffer, - static_cast( buffer ), - static_cast( offset ), - static_cast( countBuffer ), - static_cast( countBufferOffset ), - maxDrawCount, - stride ); + 
d.vkCmdDecodeVideoKHR( static_cast( m_commandBuffer ), reinterpret_cast( pDecodeInfo ) ); } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::Buffer countBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, - static_cast( buffer ), - static_cast( offset ), - static_cast( countBuffer ), - static_cast( countBufferOffset ), - maxDrawCount, - stride ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDecodeVideoKHR && "Function requires " ); +# endif + + d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast( &decodeInfo ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_AMD_shader_info === + //=== VK_EXT_transform_feedback === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, - VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, - VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, - size_t * pInfoSize, - void * pInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetShaderInfoAMD( m_device, - static_cast( pipeline ), - static_cast( shaderStage ), - static_cast( infoType ), - pInfoSize, - pInfo ) ); + d.vkCmdBindTransformFeedbackBuffersEXT( static_cast( m_commandBuffer ), + firstBinding, + bindingCount, + reinterpret_cast( pBuffers ), + reinterpret_cast( pOffsets ), + reinterpret_cast( pSizes ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, - VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, - VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, - Dispatch const & d ) const + template + VULKAN_HPP_INLINE void + CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - std::vector info; - size_t infoSize; - VkResult result; - do +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindTransformFeedbackBuffersEXT && "Function requires " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); +# else + if ( buffers.size() != offsets.size() ) { - result = d.vkGetShaderInfoAMD( m_device, - static_cast( pipeline ), - 
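
// --- Illustrative usage sketch (editorial note, not part of the generated header or this diff) ---
// Typical recording order for the video-coding command wrappers above: begin the coding scope,
// reset coding control on first use of a session, record the decode, end the scope. The
// begin/decode info contents (bound session, reference slots, bitstream buffer) are assumptions
// supplied by the caller.
#include <vulkan/vulkan.hpp>

void recordVideoDecode( vk::CommandBuffer cmd,
                        const vk::VideoBeginCodingInfoKHR & beginInfo,
                        const vk::VideoDecodeInfoKHR &      decodeInfo )
{
  cmd.beginVideoCodingKHR( beginInfo );

  // A freshly created session must be reset before its first decode operation.
  vk::VideoCodingControlInfoKHR control{};
  control.flags = vk::VideoCodingControlFlagBitsKHR::eReset;
  cmd.controlVideoCodingKHR( control );

  cmd.decodeVideoKHR( decodeInfo );

  cmd.endVideoCodingKHR( vk::VideoEndCodingInfoKHR{} );
}
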
static_cast( shaderStage ), - static_cast( infoType ), - &infoSize, - nullptr ); - if ( ( result == VK_SUCCESS ) && infoSize ) - { - info.resize( infoSize ); - result = d.vkGetShaderInfoAMD( m_device, - static_cast( pipeline ), - static_cast( shaderStage ), - static_cast( infoType ), - &infoSize, - reinterpret_cast( info.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); - VULKAN_HPP_ASSERT( infoSize <= info.size() ); - if ( infoSize < info.size() ) + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) { - info.resize( infoSize ); + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" ); } - return createResultValueType( static_cast( result ), info ); +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ), + reinterpret_cast( sizes.data() ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, - VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, - VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, - Uint8_tAllocator & uint8_tAllocator, - Dispatch const & d ) const + template + VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginTransformFeedbackEXT( static_cast( m_commandBuffer ), + firstCounterBuffer, + counterBufferCount, + reinterpret_cast( pCounterBuffers ), + reinterpret_cast( pCounterBufferOffsets ) ); + } - std::vector info( uint8_tAllocator ); - size_t infoSize; - VkResult result; - do - { - result = d.vkGetShaderInfoAMD( m_device, - static_cast( pipeline ), - static_cast( shaderStage ), - static_cast( infoType ), - &infoSize, - nullptr ); - if ( ( result == VK_SUCCESS ) && infoSize ) - { - info.resize( infoSize ); - result = d.vkGetShaderInfoAMD( m_device, - static_cast( pipeline ), - static_cast( shaderStage ), - static_cast( infoType ), - &infoSize, - reinterpret_cast( info.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); - VULKAN_HPP_ASSERT( infoSize <= info.size() ); - if ( infoSize < info.size() ) +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBuffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBufferOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginTransformFeedbackEXT && "Function requires " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( 
counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); +# else + if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) { - info.resize( infoSize ); + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); } - return createResultValueType( static_cast( result ), info ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - //=== VK_KHR_dynamic_rendering === + d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, + firstCounterBuffer, + counterBuffers.size(), + reinterpret_cast( counterBuffers.data() ), + reinterpret_cast( counterBufferOffsets.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast( pRenderingInfo ) ); + d.vkCmdEndTransformFeedbackEXT( static_cast( m_commandBuffer ), + firstCounterBuffer, + counterBufferCount, + reinterpret_cast( pCounterBuffers ), + reinterpret_cast( pCounterBufferOffsets ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBuffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBufferOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdEndTransformFeedbackEXT && "Function requires " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); +# else + if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast( &renderingInfo ) ); + d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, + firstCounterBuffer, + counterBuffers.size(), + reinterpret_cast( counterBuffers.data() ), + reinterpret_cast( counterBufferOffsets.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + uint32_t index, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); - d.vkCmdEndRenderingKHR( m_commandBuffer ); + d.vkCmdBeginQueryIndexedEXT( + static_cast( m_commandBuffer ), static_cast( queryPool ), query, static_cast( flags ), index ); } -#if defined( VK_USE_PLATFORM_GGP ) - //=== VK_GGP_stream_descriptor_surface === - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pSurface ) ) ); + d.vkCmdEndQueryIndexedEXT( static_cast( m_commandBuffer ), static_cast( queryPool ), query, index ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, + uint32_t firstInstance, + VULKAN_HPP_NAMESPACE::Buffer counterBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = d.vkCreateStreamDescriptorSurfaceGGP( - m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" ); - - return createResultValueType( static_cast( result ), surface ); + d.vkCmdDrawIndirectByteCountEXT( static_cast( m_commandBuffer ), + instanceCount, + firstInstance, + static_cast( counterBuffer ), + static_cast( counterBufferOffset ), + counterOffset, + vertexStride ); } -# ifndef VULKAN_HPP_NO_SMART_HANDLE + //=== VK_NVX_binary_import === + template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Instance::createStreamDescriptorSurfaceGGPUnique( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = d.vkCreateStreamDescriptorSurfaceGGP( - m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique" ); - - return 
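
// --- Illustrative usage sketch (editorial note, not part of the generated header or this diff) ---
// The ArrayProxy overloads of the transform-feedback wrappers above require buffers and offsets
// of equal size, and sizes either empty or matching; otherwise they throw vk::LogicError (or
// assert under VULKAN_HPP_NO_EXCEPTIONS). The single buffer, binding index and vertex count
// below are assumptions.
#include <vulkan/vulkan.hpp>
#include <array>

void recordTransformFeedback( vk::CommandBuffer cmd, vk::Buffer xfbBuffer, vk::DeviceSize xfbSize )
{
  std::array<vk::Buffer, 1>     buffers = { xfbBuffer };
  std::array<vk::DeviceSize, 1> offsets = { 0 };
  std::array<vk::DeviceSize, 1> sizes   = { xfbSize };  // may be left empty to capture into whole buffers

  cmd.bindTransformFeedbackBuffersEXT( 0, buffers, offsets, sizes );

  cmd.beginTransformFeedbackEXT( 0, nullptr, nullptr );  // no counter buffers
  cmd.draw( 3, 1, 0, 0 );
  cmd.endTransformFeedbackEXT( 0, nullptr, nullptr );
}
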
createResultValueType( - static_cast( result ), - UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + return static_cast( d.vkCreateCuModuleNVX( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pModule ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_GGP*/ - - //=== VK_NV_external_memory_capabilities === +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, - VULKAN_HPP_NAMESPACE::ImageType type, - VULKAN_HPP_NAMESPACE::ImageTiling tiling, - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, - VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, - VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, - static_cast( format ), - static_cast( type ), - static_cast( tiling ), - static_cast( usage ), - static_cast( flags ), - static_cast( externalHandleType ), - reinterpret_cast( pExternalImageFormatProperties ) ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCuModuleNVX && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::CuModuleNVX module; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateCuModuleNVX( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &module ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( module ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, - VULKAN_HPP_NAMESPACE::ImageType type, - VULKAN_HPP_NAMESPACE::ImageTiling tiling, - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, - VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCuModuleNVX && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties; - VkResult result = - d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, - static_cast( format ), - static_cast( type ), - static_cast( tiling ), - static_cast( usage ), - static_cast( flags ), - static_cast( externalHandleType ), - reinterpret_cast( 
&externalImageFormatProperties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" ); + VULKAN_HPP_NAMESPACE::CuModuleNVX module; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateCuModuleNVX( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &module ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique" ); - return createResultValueType( static_cast( result ), externalImageFormatProperties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( module, detail::ObjectDestroy( *this, allocator, d ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_external_memory_win32 === +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, - HANDLE * pHandle, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetMemoryWin32HandleNV( m_device, static_cast( memory ), static_cast( handleType ), pHandle ) ); + return static_cast( d.vkCreateCuFunctionNVX( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFunction ) ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryWin32HandleNV( - VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCuFunctionNVX && "Function requires " ); +# endif - HANDLE handle; - VkResult result = - d.vkGetMemoryWin32HandleNV( m_device, static_cast( memory ), static_cast( handleType ), &handle ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" ); + VULKAN_HPP_NAMESPACE::CuFunctionNVX function; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateCuFunctionNVX( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &function ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" ); - return createResultValueType( static_cast( result ), handle ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( function ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - 
//=== VK_KHR_get_physical_device_properties2 === +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCuFunctionNVX && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::CuFunctionNVX function; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateCuFunctionNVX( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &function ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( function, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( pFeatures ) ); + d.vkDestroyCuModuleNVX( + static_cast( m_device ), static_cast( module ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 - PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCuModuleNVX && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; - d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( &features ) ); - - return features; + d.vkDestroyCuModuleNVX( m_device, + static_cast( module ), + reinterpret_cast( static_cast( allocator ) ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCuModuleNVX( + static_cast( m_device ), static_cast( module ), reinterpret_cast( pAllocator ) ); + } - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get(); - d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( &features ) ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void 
Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCuModuleNVX && "Function requires " ); +# endif - return structureChain; + d.vkDestroyCuModuleNVX( m_device, + static_cast( module ), + reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast( pProperties ) ); + d.vkDestroyCuFunctionNVX( + static_cast( m_device ), static_cast( function ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 - PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCuFunctionNVX && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; - d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast( &properties ) ); - - return properties; + d.vkDestroyCuFunctionNVX( m_device, + static_cast( function ), + reinterpret_cast( static_cast( allocator ) ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyCuFunctionNVX( + static_cast( m_device ), static_cast( function ), reinterpret_cast( pAllocator ) ); + } - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get(); - d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast( &properties ) ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCuFunctionNVX && "Function requires " ); +# endif - return structureChain; + d.vkDestroyCuFunctionNVX( m_device, + static_cast( function ), + reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, - VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceFormatProperties2KHR( - m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); + d.vkCmdCuLaunchKernelNVX( static_cast( m_commandBuffer ), reinterpret_cast( pLaunchInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 - PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCuLaunchKernelNVX && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; - d.vkGetPhysicalDeviceFormatProperties2KHR( - m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast( &launchInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - return formatProperties; + //=== VK_NVX_image_view_handle === + + template + VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetImageViewHandleNVX( static_cast( m_device ), reinterpret_cast( pInfo ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageViewHandleNVX && "Function requires " ); +# endif - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get(); - d.vkGetPhysicalDeviceFormatProperties2KHR( - m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + uint32_t result = d.vkGetImageViewHandleNVX( m_device, reinterpret_cast( &info ) ); - return structureChain; + return result; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, - VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE uint64_t Device::getImageViewHandle64NVX( const 
VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, - reinterpret_cast( pImageFormatInfo ), - reinterpret_cast( pImageFormatProperties ) ) ); + return d.vkGetImageViewHandle64NVX( static_cast( m_device ), reinterpret_cast( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const + VULKAN_HPP_INLINE uint64_t Device::getImageViewHandle64NVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageViewHandle64NVX && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; - VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, - reinterpret_cast( &imageFormatInfo ), - reinterpret_cast( &imageFormatProperties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + uint64_t result = d.vkGetImageViewHandle64NVX( m_device, reinterpret_cast( &info ) ); - return createResultValueType( static_cast( result ), imageFormatProperties ); + return result; } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get(); - VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, - reinterpret_cast( &imageFormatInfo ), - reinterpret_cast( &imageFormatProperties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); - - return createResultValueType( static_cast( result ), structureChain ); + return static_cast( d.vkGetImageViewAddressNVX( + static_cast( m_device ), static_cast( imageView ), reinterpret_cast( pProperties ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount, - VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( - m_physicalDevice, 
pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); - } +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageViewAddressNVX && "Function requires " ); +# endif -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetImageViewAddressNVX( m_device, static_cast( imageView ), reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" ); - std::vector queueFamilyProperties; - uint32_t queueFamilyPropertyCount; - d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); - queueFamilyProperties.resize( queueFamilyPropertyCount ); - d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( - m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); - if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) - { - queueFamilyProperties.resize( queueFamilyPropertyCount ); - } - return queueFamilyProperties; + //=== VK_AMD_draw_indirect_count === + + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndirectCountAMD( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const + template + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDrawIndexedIndirectCountAMD( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } - std::vector queueFamilyProperties( queueFamilyProperties2Allocator ); - uint32_t queueFamilyPropertyCount; - d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); - queueFamilyProperties.resize( queueFamilyPropertyCount ); - d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( - m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + //=== 
VK_AMD_shader_info === - VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); - if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) - { - queueFamilyProperties.resize( queueFamilyPropertyCount ); - } - return queueFamilyProperties; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + size_t * pInfoSize, + void * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetShaderInfoAMD( static_cast( m_device ), + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + pInfoSize, + pInfo ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetShaderInfoAMD && "Function requires " ); +# endif - std::vector structureChains; - std::vector queueFamilyProperties; - uint32_t queueFamilyPropertyCount; - d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); - structureChains.resize( queueFamilyPropertyCount ); - queueFamilyProperties.resize( queueFamilyPropertyCount ); - for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) - { - queueFamilyProperties[i].pNext = structureChains[i].template get().pNext; - } - d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( - m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); - - VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); - if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + std::vector info; + size_t infoSize; + VULKAN_HPP_NAMESPACE::Result result; + do { - structureChains.resize( queueFamilyPropertyCount ); - } - for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && infoSize ) + { + info.resize( infoSize ); + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + reinterpret_cast( info.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); + VULKAN_HPP_ASSERT( infoSize <= info.size() ); + if ( infoSize < info.size() ) { - structureChains[i].template get() = queueFamilyProperties[i]; + info.resize( infoSize ); } - return structureChains; + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( info ) ); } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - 
PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetShaderInfoAMD && "Function requires " ); +# endif - std::vector structureChains( structureChainAllocator ); - std::vector queueFamilyProperties; - uint32_t queueFamilyPropertyCount; - d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); - structureChains.resize( queueFamilyPropertyCount ); - queueFamilyProperties.resize( queueFamilyPropertyCount ); - for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) - { - queueFamilyProperties[i].pNext = structureChains[i].template get().pNext; - } - d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( - m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); - - VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); - if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + std::vector info( uint8_tAllocator ); + size_t infoSize; + VULKAN_HPP_NAMESPACE::Result result; + do { - structureChains.resize( queueFamilyPropertyCount ); - } - for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && infoSize ) + { + info.resize( infoSize ); + result = static_cast( d.vkGetShaderInfoAMD( m_device, + static_cast( pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + reinterpret_cast( info.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); + VULKAN_HPP_ASSERT( infoSize <= info.size() ); + if ( infoSize < info.size() ) { - structureChains[i].template get() = queueFamilyProperties[i]; + info.resize( infoSize ); } - return structureChains; + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( info ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_dynamic_rendering === template - VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast( pMemoryProperties ) ); + d.vkCmdBeginRenderingKHR( static_cast( m_commandBuffer ), reinterpret_cast( pRenderingInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 - PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginRenderingKHR && "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; - d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast( &memoryProperties ) ); - - return memoryProperties; + d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast( &renderingInfo ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndRenderingKHR( static_cast( m_commandBuffer ) ); + } - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = - structureChain.template get(); - d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast( &memoryProperties ) ); +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === - return structureChain; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateStreamDescriptorSurfaceGGP( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, - uint32_t * pPropertyCount, - VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, - reinterpret_cast( pFormatInfo ), - pPropertyCount, - reinterpret_cast( pProperties ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateStreamDescriptorSurfaceGGP && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateStreamDescriptorSurfaceGGP( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, - Dispatch const & d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createStreamDescriptorSurfaceGGPUnique( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateStreamDescriptorSurfaceGGP && "Function requires " ); +# endif - std::vector properties; - uint32_t propertyCount; - d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( - m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, nullptr ); - properties.resize( propertyCount ); - d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, - reinterpret_cast( &formatInfo ), - &propertyCount, - reinterpret_cast( properties.data() ) ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateStreamDescriptorSurfaceGGP( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) - { - properties.resize( propertyCount ); - } - return properties; + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, + VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( static_cast( m_physicalDevice ), + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + static_cast( externalHandleType ), + reinterpret_cast( pExternalImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + 
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + static_cast( externalHandleType ), + reinterpret_cast( &externalImageFormatProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( externalImageFormatProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, + HANDLE * pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetMemoryWin32HandleNV( + static_cast( m_device ), static_cast( memory ), static_cast( handleType ), pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryWin32HandleNV( + VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryWin32HandleNV && "Function requires " ); +# endif + + HANDLE handle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetMemoryWin32HandleNV( m_device, static_cast( memory ), static_cast( handleType ), &handle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_get_physical_device_properties2 === + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceFeatures2KHR( static_cast( m_physicalDevice ), reinterpret_cast( pFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 + PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2KHR && + "Function 
requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; + d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( &features ) ); + + return features; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFeatures2KHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get(); + d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( &features ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceProperties2KHR( static_cast( m_physicalDevice ), reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 + PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2KHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast( &properties ) ); + + return properties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceProperties2KHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get(); + d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast( &properties ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceFormatProperties2KHR( + static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( pFormatProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 + PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2KHR && + "Function requires or " ); +# 
endif + + VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; + d.vkGetPhysicalDeviceFormatProperties2KHR( + m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + + return formatProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFormatProperties2KHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get(); + d.vkGetPhysicalDeviceFormatProperties2KHR( + m_physicalDevice, static_cast( format ), reinterpret_cast( &formatProperties ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( pImageFormatInfo ), + reinterpret_cast( pImageFormatProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2KHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceImageFormatProperties2KHR && + "Function requires or " ); +# endif + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::PhysicalDevice::getImageFormatProperties2KHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount, + VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + static_cast( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template < + typename QueueFamilyProperties2Allocator, + typename Dispatch, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && + "Function requires or " ); +# endif + + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; + } + + template < + typename QueueFamilyProperties2Allocator, + typename Dispatch, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && + "Function requires or " ); +# endif + + std::vector queueFamilyProperties( queueFamilyProperties2Allocator ); + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && + "Function requires or " ); +# endif + + std::vector structureChains; + std::vector queueFamilyProperties; + 
uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + structureChains.resize( queueFamilyPropertyCount ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = structureChains[i].template get().pNext; + } + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + structureChains.resize( queueFamilyPropertyCount ); + } + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + structureChains[i].template get() = queueFamilyProperties[i]; + } + return structureChains; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceQueueFamilyProperties2KHR && + "Function requires or " ); +# endif + + std::vector structureChains( structureChainAllocator ); + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); + structureChains.resize( queueFamilyPropertyCount ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = structureChains[i].template get().pNext; + } + d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( + m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + structureChains.resize( queueFamilyPropertyCount ); + } + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + structureChains[i].template get() = queueFamilyProperties[i]; + } + return structureChains; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceMemoryProperties2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( pMemoryProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2KHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; + d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + + return memoryProperties; + } + + template + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMemoryProperties2KHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = + structureChain.template get(); + d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast( &memoryProperties ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( pFormatInfo ), + pPropertyCount, + reinterpret_cast( pProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template < + typename SparseImageFormatProperties2Allocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR && + "Function requires or " ); +# endif + + std::vector properties; + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + template < + typename SparseImageFormatProperties2Allocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, + SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR && + "Function requires or " ); +# endif + + std::vector properties( sparseImageFormatProperties2Allocator ); + uint32_t propertyCount; + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, nullptr ); + properties.resize( propertyCount ); + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, + reinterpret_cast( &formatInfo ), + &propertyCount, 
+ reinterpret_cast( properties.data() ) ); + + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_device_group === + + template + VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceGroupPeerMemoryFeaturesKHR( + static_cast( m_device ), heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( pPeerMemoryFeatures ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR( + uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceGroupPeerMemoryFeaturesKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; + d.vkGetDeviceGroupPeerMemoryFeaturesKHR( + m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( &peerMemoryFeatures ) ); + + return peerMemoryFeatures; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDeviceMaskKHR( static_cast( m_commandBuffer ), deviceMask ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchBaseKHR( static_cast( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateViSurfaceNN( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateViSurfaceNN && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + 
VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateViSurfaceNN( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateViSurfaceNN && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateViSurfaceNN( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_maintenance1 === + + template + VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkTrimCommandPoolKHR( static_cast( m_device ), static_cast( commandPool ), static_cast( flags ) ); + } + + //=== VK_KHR_device_group_creation === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t * pPhysicalDeviceGroupCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( static_cast( m_instance ), + pPhysicalDeviceGroupCount, + reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroupsKHR && + "Function requires or " ); +# endif + + std::vector physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( + m_instance, &physicalDeviceGroupCount, reinterpret_cast( 
physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); + } + + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Instance::enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceGroupsKHR && + "Function requires or " ); +# endif + + std::vector physicalDeviceGroupProperties( + physicalDeviceGroupPropertiesAllocator ); + uint32_t physicalDeviceGroupCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( + m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_external_memory_capabilities === + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( pExternalBufferInfo ), + reinterpret_cast( pExternalBufferProperties ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties + PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalBufferPropertiesKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; + 
d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, + reinterpret_cast( &externalBufferInfo ), + reinterpret_cast( &externalBufferProperties ) ); + + return externalBufferProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetMemoryWin32HandleKHR( + static_cast( m_device ), reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryWin32HandleKHR && "Function requires " ); +# endif + + HANDLE handle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( static_cast( m_device ), + static_cast( handleType ), + handle, + reinterpret_cast( pMemoryWin32HandleProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryWin32HandlePropertiesKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetMemoryWin32HandlePropertiesKHR( m_device, + static_cast( handleType ), + handle, + reinterpret_cast( &memoryWin32HandleProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryWin32HandleProperties ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const 
VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo,
+                                                                         int *                                      pFd,
+                                                                         Dispatch const &                           d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetMemoryFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo,
+                                                                                                     Dispatch const &                                d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkGetMemoryFdKHR && "Function requires <VK_KHR_external_memory_fd>" );
+# endif
+
+    int fd;
+    VULKAN_HPP_NAMESPACE::Result result =
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd ) );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" );
+
+    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fd ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
+                                                                                  int                                                    fd,
+                                                                                  VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR *          pMemoryFdProperties,
+                                                                                  Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( static_cast<VkDevice>( m_device ),
+                                                              static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
+                                                              fd,
+                                                              reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type
+    Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkGetMemoryFdPropertiesKHR && "Function requires <VK_KHR_external_memory_fd>" );
+# endif
+
+    VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties;
+    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetMemoryFdPropertiesKHR(
+      m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) ) );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" );
+
+    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryFdProperties ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_external_semaphore_capabilities ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
+                                                       VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties *               pExternalSemaphoreProperties,
+                                                       Dispatch const &                                                  d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+                                                         reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
+                                                         reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
+    PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo &
externalSemaphoreInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; + d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, + reinterpret_cast( &externalSemaphoreInfo ), + reinterpret_cast( &externalSemaphoreProperties ) ); + + return externalSemaphoreProperties; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkImportSemaphoreWin32HandleKHR( + static_cast( m_device ), reinterpret_cast( pImportSemaphoreWin32HandleInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkImportSemaphoreWin32HandleKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &importSemaphoreWin32HandleInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( + const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetSemaphoreWin32HandleKHR( + static_cast( m_device ), reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSemaphoreWin32HandleKHR && "Function requires " ); +# endif + + HANDLE handle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif 
/*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkImportSemaphoreFdKHR( static_cast( m_device ), reinterpret_cast( pImportSemaphoreFdInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkImportSemaphoreFdKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast( &importSemaphoreFdInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetSemaphoreFdKHR( static_cast( m_device ), reinterpret_cast( pGetFdInfo ), pFd ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSemaphoreFdKHR && "Function requires " ); +# endif + + int fd; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fd ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_push_descriptor === + + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSetKHR( static_cast( m_commandBuffer ), + static_cast( pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWriteCount, + reinterpret_cast( pDescriptorWrites ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + VULKAN_HPP_NAMESPACE::ArrayProxy const & 
descriptorWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetKHR && "Function requires or " ); +# endif + + d.vkCmdPushDescriptorSetKHR( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWrites.size(), + reinterpret_cast( descriptorWrites.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSetWithTemplateKHR( static_cast( m_commandBuffer ), + static_cast( descriptorUpdateTemplate ), + static_cast( layout ), + set, + pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkCmdPushDescriptorSetWithTemplateKHR && + "Function requires or or " ); +# endif + + d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, + static_cast( descriptorUpdateTemplate ), + static_cast( layout ), + set, + reinterpret_cast( &data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_conditional_rendering === + + template + VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginConditionalRenderingEXT( static_cast( m_commandBuffer ), + reinterpret_cast( pConditionalRenderingBegin ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginConditionalRenderingEXT && "Function requires " ); +# endif + + d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast( &conditionalRenderingBegin ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndConditionalRenderingEXT( static_cast( m_commandBuffer ) ); + } + + //=== VK_KHR_descriptor_update_template === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + 
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateDescriptorUpdateTemplateKHR( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pDescriptorUpdateTemplate ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplateKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorUpdateTemplate ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createDescriptorUpdateTemplateKHRUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDescriptorUpdateTemplateKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDescriptorUpdateTemplateKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + descriptorUpdateTemplate, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDescriptorUpdateTemplateKHR( static_cast( m_device ), + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDescriptorUpdateTemplateKHR && + "Function requires or " ); +# endif + + d.vkDestroyDescriptorUpdateTemplateKHR( + m_device, + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUpdateDescriptorSetWithTemplateKHR( static_cast( m_device ), + static_cast( descriptorSet ), + static_cast( descriptorUpdateTemplate ), + pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + DataType const & data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUpdateDescriptorSetWithTemplateKHR && + "Function requires or " ); +# endif + + d.vkUpdateDescriptorSetWithTemplateKHR( m_device, + static_cast( descriptorSet ), + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( &data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_clip_space_w_scaling === + + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportWScalingNV( + static_cast( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast( pViewportWScalings ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewportWScalings, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetViewportWScalingNV && "Function requires " ); +# endif + + d.vkCmdSetViewportWScalingNV( + m_commandBuffer, firstViewport, viewportWScalings.size(), reinterpret_cast( viewportWScalings.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_direct_mode_display === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkReleaseDisplayEXT( static_cast( m_physicalDevice ), static_cast( display ) ) ); + } +#else + template + VULKAN_HPP_INLINE void PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkReleaseDisplayEXT 
&& "Function requires <VK_EXT_direct_mode_display>" );
+# endif
+
+    d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+  //=== VK_EXT_acquire_xlib_display ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display *                        dpy,
+                                                                                       VULKAN_HPP_NAMESPACE::DisplayKHR display,
+                                                                                       Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkAcquireXlibDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), dpy, static_cast<VkDisplayKHR>( display ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkAcquireXlibDisplayEXT && "Function requires <VK_EXT_acquire_xlib_display>" );
+# endif
+
+    VULKAN_HPP_NAMESPACE::Result result =
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" );
+
+    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
+  }
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display *                          dpy,
+                                                                                          RROutput                           rrOutput,
+                                                                                          VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
+                                                                                          Dispatch const &                   d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>(
+      d.vkGetRandROutputDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
+  }
+
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
+    PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkGetRandROutputDisplayEXT && "Function requires <VK_EXT_acquire_xlib_display>" );
+# endif
+
+    VULKAN_HPP_NAMESPACE::DisplayKHR display;
+    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+      d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" );
+
+    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( display ) );
+  }
+
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
+    PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkGetRandROutputDisplayEXT && "Function requires <VK_EXT_acquire_xlib_display>" );
+# endif
+
+    VULKAN_HPP_NAMESPACE::DisplayKHR display;
+    VULKAN_HPP_NAMESPACE::Result     result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+      d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING
"::PhysicalDevice::getRandROutputDisplayEXTUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( display, detail::ObjectRelease( *this, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( static_cast( m_physicalDevice ), + static_cast( surface ), + reinterpret_cast( pSurfaceCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( + m_physicalDevice, static_cast( surface ), reinterpret_cast( &surfaceCapabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceCapabilities ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_display_control === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkDisplayPowerControlEXT( + static_cast( m_device ), static_cast( display ), reinterpret_cast( pDisplayPowerInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDisplayPowerControlEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkDisplayPowerControlEXT( m_device, static_cast( display ), reinterpret_cast( &displayPowerInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + 
VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkRegisterDeviceEventEXT( static_cast( m_device ), + reinterpret_cast( pDeviceEventInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkRegisterDeviceEventEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Fence fence; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkRegisterDeviceEventEXT( + m_device, + reinterpret_cast( &deviceEventInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkRegisterDeviceEventEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Fence fence; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkRegisterDeviceEventEXT( + m_device, + reinterpret_cast( &deviceEventInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( fence, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkRegisterDisplayEventEXT( static_cast( m_device ), + static_cast( display ), + reinterpret_cast( pDisplayEventInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFence ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkRegisterDisplayEventEXT && "Function requires " ); +# 
endif + + VULKAN_HPP_NAMESPACE::Fence fence; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkRegisterDisplayEventEXT( + m_device, + static_cast( display ), + reinterpret_cast( &displayEventInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkRegisterDisplayEventEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Fence fence; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkRegisterDisplayEventEXT( + m_device, + static_cast( display ), + reinterpret_cast( &displayEventInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( fence, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, + uint64_t * pCounterValue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetSwapchainCounterEXT( + static_cast( m_device ), static_cast( swapchain ), static_cast( counter ), pCounterValue ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::getSwapchainCounterEXT( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSwapchainCounterEXT && "Function requires " ); +# endif + + uint64_t counterValue; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetSwapchainCounterEXT( m_device, static_cast( swapchain ), static_cast( counter ), &counterValue ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( counterValue ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_GOOGLE_display_timing === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( 
d.vkGetRefreshCycleDurationGOOGLE( static_cast( m_device ), + static_cast( swapchain ), + reinterpret_cast( pDisplayTimingProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRefreshCycleDurationGOOGLE && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetRefreshCycleDurationGOOGLE( + m_device, static_cast( swapchain ), reinterpret_cast( &displayTimingProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( displayTimingProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint32_t * pPresentationTimingCount, + VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPastPresentationTimingGOOGLE( static_cast( m_device ), + static_cast( swapchain ), + pPresentationTimingCount, + reinterpret_cast( pPresentationTimings ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template < + typename PastPresentationTimingGOOGLEAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPastPresentationTimingGOOGLE && "Function requires " ); +# endif + + std::vector presentationTimings; + uint32_t presentationTimingCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), &presentationTimingCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentationTimingCount ) + { + presentationTimings.resize( presentationTimingCount ); + result = static_cast( + d.vkGetPastPresentationTimingGOOGLE( m_device, + static_cast( swapchain ), + &presentationTimingCount, + reinterpret_cast( presentationTimings.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); + VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); + if ( presentationTimingCount < presentationTimings.size() ) + { + presentationTimings.resize( presentationTimingCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentationTimings ) ); + } + + template < + typename PastPresentationTimingGOOGLEAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPastPresentationTimingGOOGLE && "Function requires " ); +# endif + + std::vector presentationTimings( + pastPresentationTimingGOOGLEAllocator ); + uint32_t presentationTimingCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), &presentationTimingCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentationTimingCount ) + { + presentationTimings.resize( presentationTimingCount ); + result = static_cast( + d.vkGetPastPresentationTimingGOOGLE( m_device, + static_cast( swapchain ), + &presentationTimingCount, + reinterpret_cast( presentationTimings.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); + VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); + if ( presentationTimingCount < presentationTimings.size() ) + { + presentationTimings.resize( presentationTimingCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentationTimings ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_discard_rectangles === + + template + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDiscardRectangleEXT( + static_cast( m_commandBuffer ), firstDiscardRectangle, discardRectangleCount, reinterpret_cast( pDiscardRectangles ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + VULKAN_HPP_NAMESPACE::ArrayProxy const & discardRectangles, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetDiscardRectangleEXT && "Function requires " ); +# endif + + d.vkCmdSetDiscardRectangleEXT( + m_commandBuffer, firstDiscardRectangle, discardRectangles.size(), reinterpret_cast( discardRectangles.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDiscardRectangleEnableEXT( static_cast( m_commandBuffer ), static_cast( discardRectangleEnable ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleModeEXT( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + 
d.vkCmdSetDiscardRectangleModeEXT( static_cast( m_commandBuffer ), static_cast( discardRectangleMode ) ); + } + + //=== VK_EXT_hdr_metadata === + + template + VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, + const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, + const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkSetHdrMetadataEXT( static_cast( m_device ), + swapchainCount, + reinterpret_cast( pSwapchains ), + reinterpret_cast( pMetadata ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & swapchains, + VULKAN_HPP_NAMESPACE::ArrayProxy const & metadata, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetHdrMetadataEXT && "Function requires " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() ); +# else + if ( swapchains.size() != metadata.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkSetHdrMetadataEXT( m_device, + swapchains.size(), + reinterpret_cast( swapchains.data() ), + reinterpret_cast( metadata.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_create_renderpass2 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateRenderPass2KHR( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pRenderPass ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRenderPass2KHR && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateRenderPass2KHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRenderPass2KHR && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateRenderPass2KHR( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( renderPass, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBeginRenderPass2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( pRenderPassBegin ), + reinterpret_cast( pSubpassBeginInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginRenderPass2KHR && "Function requires or " ); +# endif + + d.vkCmdBeginRenderPass2KHR( + m_commandBuffer, reinterpret_cast( &renderPassBegin ), reinterpret_cast( &subpassBeginInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdNextSubpass2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( pSubpassBeginInfo ), + reinterpret_cast( pSubpassEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdNextSubpass2KHR && "Function requires or " ); +# endif + + d.vkCmdNextSubpass2KHR( + m_commandBuffer, reinterpret_cast( &subpassBeginInfo ), reinterpret_cast( &subpassEndInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndRenderPass2KHR( static_cast( m_commandBuffer ), reinterpret_cast( pSubpassEndInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const 
VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
+                                                           Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdEndRenderPass2KHR && "Function requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
+# endif
+
+    d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_shared_presentable_image ===
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+                                                                               Dispatch const &                   d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetSwapchainStatusKHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( swapchain ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
+                                                                                                     Dispatch const &                   d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkGetSwapchainStatusKHR && "Function requires <VK_KHR_shared_presentable_image>" );
+# endif
+
+    VULKAN_HPP_NAMESPACE::Result result =
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
+                                               VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR",
+                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
+
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  //=== VK_KHR_external_fence_capabilities ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
+                                                                        VULKAN_HPP_NAMESPACE::ExternalFenceProperties *               pExternalFenceProperties,
+                                                                        Dispatch const &                                              d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceExternalFencePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+                                                     reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
+                                                     reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
+    PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo,
+                                                   Dispatch const &                                              d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalFencePropertiesKHR &&
+                       "Function requires <VK_KHR_external_fence_capabilities> or <VK_VERSION_1_1>" );
+# endif
+
+    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
+    d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice,
+                                                     reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
+                                                     reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
+
+    return externalFenceProperties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_fence_win32 ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR(
+    const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion()
== VK_HEADER_VERSION ); + return static_cast( d.vkImportFenceWin32HandleKHR( static_cast( m_device ), + reinterpret_cast( pImportFenceWin32HandleInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkImportFenceWin32HandleKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast( &importFenceWin32HandleInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetFenceWin32HandleKHR( static_cast( m_device ), reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetFenceWin32HandleKHR && "Function requires " ); +# endif + + HANDLE handle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkImportFenceFdKHR( static_cast( m_device ), reinterpret_cast( pImportFenceFdInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkImportFenceFdKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkImportFenceFdKHR( m_device, reinterpret_cast( &importFenceFdInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetFenceFdKHR( static_cast( m_device ), reinterpret_cast( pGetFdInfo ), pFd ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetFenceFdKHR && "Function requires " ); +# endif + + int fd; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetFenceFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fd ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_performance_query === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, + uint32_t * pCounterCount, + VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters, + VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( static_cast( m_physicalDevice ), + queueFamilyIndex, + pCounterCount, + reinterpret_cast( pCounters ), + reinterpret_cast( pCounterDescriptions ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value && + std::is_same::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, + std::vector>>::type + PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR && + "Function requires " ); +# endif + + std::pair, + std::vector> + data_; + std::vector & counters = data_.first; + std::vector & counterDescriptions = data_.second; + uint32_t counterCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && counterCount ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + &counterCount, + reinterpret_cast( counters.data() ), + reinterpret_cast( counterDescriptions.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + VULKAN_HPP_ASSERT( counterCount <= counters.size() ); + if ( counterCount < counters.size() ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); + } + + template ::value && + std::is_same::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, + std::vector>>::type + PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, + PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, + PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR && + "Function requires " ); +# endif + + std::pair, + std::vector> + data_( + std::piecewise_construct, std::forward_as_tuple( performanceCounterKHRAllocator ), std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) ); + std::vector & counters = data_.first; + std::vector & counterDescriptions = data_.second; + uint32_t counterCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && counterCount ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + m_physicalDevice, + queueFamilyIndex, + &counterCount, + reinterpret_cast( counters.data() ), + reinterpret_cast( counterDescriptions.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + VULKAN_HPP_ASSERT( counterCount <= counters.size() ); + if ( counterCount < counters.size() ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void + PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, + uint32_t * pNumPasses, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( pPerformanceQueryCreateInfo ), + pNumPasses ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( + const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( 
d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR && + "Function requires " ); +# endif + + uint32_t numPasses; + d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + m_physicalDevice, reinterpret_cast( &performanceQueryCreateInfo ), &numPasses ); + + return numPasses; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkAcquireProfilingLockKHR( static_cast( m_device ), reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquireProfilingLockKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkReleaseProfilingLockKHR( static_cast( m_device ) ); + } + + //=== VK_KHR_get_surface_capabilities2 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( pSurfaceInfo ), + reinterpret_cast( pSurfaceCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &surfaceCapabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceCapabilities ) ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + 
PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR && + "Function requires " ); +# endif + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &surfaceCapabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pSurfaceFormatCount, + VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( pSurfaceInfo ), + pSurfaceFormatCount, + reinterpret_cast( pSurfaceFormats ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR && + "Function requires " ); +# endif + + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( + d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if 
( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR && + "Function requires " ); +# endif + + std::vector surfaceFormats( surfaceFormat2KHRAllocator ); + uint32_t surfaceFormatCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( + d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR && + "Function requires " ); +# endif + + std::vector structureChains; + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) + { + structureChains.resize( surfaceFormatCount ); + surfaceFormats.resize( surfaceFormatCount ); + for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) + { + surfaceFormats[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( + d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + structureChains.resize( surfaceFormatCount ); + } + for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) + { + structureChains[i].template get() = surfaceFormats[i]; + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + StructureChainAllocator & structureChainAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + 
VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfaceFormats2KHR && + "Function requires " ); +# endif + + std::vector structureChains( structureChainAllocator ); + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( + m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) + { + structureChains.resize( surfaceFormatCount ); + surfaceFormats.resize( surfaceFormatCount ); + for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) + { + surfaceFormats[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( + d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + structureChains.resize( surfaceFormatCount ); + } + for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) + { + structureChains[i].template get() = surfaceFormats[i]; + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_get_display_properties2 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( + static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template < + typename DisplayProperties2KHRAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayProperties2KHR && + "Function requires " ); +# endif + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + 
} + + template < + typename DisplayProperties2KHRAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayProperties2KHR && + "Function requires " ); +# endif + + std::vector properties( displayProperties2KHRAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template < + typename DisplayPlaneProperties2KHRAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR && + "Function requires " ); +# endif + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( 
properties ) ); + } + + template < + typename DisplayPlaneProperties2KHRAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR && + "Function requires " ); +# endif + + std::vector properties( displayPlaneProperties2KHRAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDisplayModeProperties2KHR( static_cast( m_physicalDevice ), + static_cast( display ), + pPropertyCount, + reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template < + typename DisplayModeProperties2KHRAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function requires " ); +# endif + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( 
propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } + + template < + typename DisplayModeProperties2KHRAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function requires " ); +# endif + + std::vector properties( displayModeProperties2KHRAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function requires " ); +# endif + + std::vector structureChains; + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + structureChains.resize( propertyCount ); + properties.resize( propertyCount ); + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + properties[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + structureChains.resize( propertyCount ); + } + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + structureChains[i].template get() = properties[i]; + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); + } + + template ::value, int>::type> 
+ VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + StructureChainAllocator & structureChainAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function requires " ); +# endif + + std::vector structureChains( structureChainAllocator ); + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + structureChains.resize( propertyCount ); + properties.resize( propertyCount ); + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + properties[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + structureChains.resize( propertyCount ); + } + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + structureChains[i].template get() = properties[i]; + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo, + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDisplayPlaneCapabilities2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( pDisplayPlaneInfo ), + reinterpret_cast( pCapabilities ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayPlaneCapabilities2KHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, + reinterpret_cast( &displayPlaneInfo ), + reinterpret_cast( &capabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( capabilities ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateIOSSurfaceMVK( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIOSSurfaceMVK && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateIOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createIOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIOSSurfaceMVK && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateIOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateMacOSSurfaceMVK( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateMacOSSurfaceMVK && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateMacOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createMacOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateMacOSSurfaceMVK && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateMacOSSurfaceMVK( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkSetDebugUtilsObjectNameEXT( static_cast( m_device ), reinterpret_cast( pNameInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetDebugUtilsObjectNameEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkSetDebugUtilsObjectTagEXT( static_cast( m_device ), reinterpret_cast( pTagInfo ) ) ); + } + +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkSetDebugUtilsObjectTagEXT && "Function requires <VK_EXT_debug_utils>" );
+#  endif
+
+    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+      d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) ) );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );
+
+    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+                                                         Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkQueueBeginDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+                                                         Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkQueueBeginDebugUtilsLabelEXT && "Function requires <VK_EXT_debug_utils>" );
+#  endif
+
+    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkQueueEndDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+                                                          Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkQueueInsertDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+                                                          Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkQueueInsertDebugUtilsLabelEXT && "Function requires <VK_EXT_debug_utils>" );
+#  endif
+
+    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+                                                                 Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+                                                                 Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
+  {
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBeginDebugUtilsLabelEXT && "Function requires " ); +# endif + + d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( &labelInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdEndDebugUtilsLabelEXT( static_cast( m_commandBuffer ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdInsertDebugUtilsLabelEXT( static_cast( m_commandBuffer ), reinterpret_cast( pLabelInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdInsertDebugUtilsLabelEXT && "Function requires " ); +# endif + + d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( &labelInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateDebugUtilsMessengerEXT( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pMessenger ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDebugUtilsMessengerEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDebugUtilsMessengerEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &messenger ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( messenger ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createDebugUtilsMessengerEXTUnique( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDebugUtilsMessengerEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDebugUtilsMessengerEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &messenger ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, + UniqueHandle( messenger, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDebugUtilsMessengerEXT( static_cast( m_instance ), + static_cast( messenger ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDebugUtilsMessengerEXT && "Function requires " ); +# endif + + d.vkDestroyDebugUtilsMessengerEXT( + m_instance, + static_cast( messenger ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyDebugUtilsMessengerEXT( static_cast( m_instance ), + static_cast( messenger ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDebugUtilsMessengerEXT && "Function requires " ); +# endif + + d.vkDestroyDebugUtilsMessengerEXT( + m_instance, + static_cast( messenger ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkSubmitDebugUtilsMessageEXT( static_cast( m_instance ), + static_cast( messageSeverity ), + static_cast( messageTypes ), + reinterpret_cast( pCallbackData ) ); 
+ } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSubmitDebugUtilsMessageEXT && "Function requires " ); +# endif + + d.vkSubmitDebugUtilsMessageEXT( m_instance, + static_cast( messageSeverity ), + static_cast( messageTypes ), + reinterpret_cast( &callbackData ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer * buffer, + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( + static_cast( m_device ), buffer, reinterpret_cast( pProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAndroidHardwareBufferPropertiesANDROID && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAndroidHardwareBufferPropertiesANDROID && + "Function requires " ); +# endif + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties = + structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryAndroidHardwareBufferANDROID( const 
VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo, + struct AHardwareBuffer ** pBuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetMemoryAndroidHardwareBufferANDROID( + static_cast( m_device ), reinterpret_cast( pInfo ), pBuffer ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryAndroidHardwareBufferANDROID && + "Function requires " ); +# endif + + struct AHardwareBuffer * buffer; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast( &info ), &buffer ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( buffer ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateExecutionGraphPipelinesAMDX( static_cast( m_device ), + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createExecutionGraphPipelinesAMDX( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( result, std::move( pipelines ) ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createExecutionGraphPipelinesAMDX( + 
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size(), pipelineAllocator ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( result, std::move( pipelines ) ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createExecutionGraphPipelineAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDX", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue( result, std::move( pipeline ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createExecutionGraphPipelinesAMDXUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + std::vector, PipelineAllocator> uniquePipelines; + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy 
deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + } + return ResultValue, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createExecutionGraphPipelinesAMDXUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + } + return ResultValue, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createExecutionGraphPipelineAMDXUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateExecutionGraphPipelinesAMDX && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDXUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( + result, UniqueHandle( pipeline, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX * pSizeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetExecutionGraphPipelineScratchSizeAMDX( 
static_cast( m_device ), + static_cast( executionGraph ), + reinterpret_cast( pSizeInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetExecutionGraphPipelineScratchSizeAMDX && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX sizeInfo; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetExecutionGraphPipelineScratchSizeAMDX( + m_device, static_cast( executionGraph ), reinterpret_cast( &sizeInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineScratchSizeAMDX" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( sizeInfo ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getExecutionGraphPipelineNodeIndexAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX * pNodeInfo, + uint32_t * pNodeIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetExecutionGraphPipelineNodeIndexAMDX( static_cast( m_device ), + static_cast( executionGraph ), + reinterpret_cast( pNodeInfo ), + pNodeIndex ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::getExecutionGraphPipelineNodeIndexAMDX( + VULKAN_HPP_NAMESPACE::Pipeline executionGraph, const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetExecutionGraphPipelineNodeIndexAMDX && "Function requires " ); +# endif + + uint32_t nodeIndex; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetExecutionGraphPipelineNodeIndexAMDX( + m_device, static_cast( executionGraph ), reinterpret_cast( &nodeInfo ), &nodeIndex ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineNodeIndexAMDX" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( nodeIndex ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdInitializeGraphScratchMemoryAMDX( static_cast( m_commandBuffer ), + static_cast( executionGraph ), + static_cast( scratch ), + static_cast( scratchSize ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchGraphAMDX( static_cast( m_commandBuffer ), + static_cast( scratch ), + static_cast( scratchSize ), + reinterpret_cast( pCountInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDispatchGraphAMDX && "Function requires " ); +# endif + + d.vkCmdDispatchGraphAMDX( m_commandBuffer, + static_cast( scratch ), + static_cast( scratchSize ), + reinterpret_cast( &countInfo ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchGraphIndirectAMDX( static_cast( m_commandBuffer ), + static_cast( scratch ), + static_cast( scratchSize ), + reinterpret_cast( pCountInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDispatchGraphIndirectAMDX && "Function requires " ); +# endif + + d.vkCmdDispatchGraphIndirectAMDX( m_commandBuffer, + static_cast( scratch ), + static_cast( scratchSize ), + reinterpret_cast( &countInfo ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + VULKAN_HPP_NAMESPACE::DeviceAddress countInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchGraphIndirectCountAMDX( static_cast( m_commandBuffer ), + static_cast( scratch ), + static_cast( scratchSize ), + static_cast( countInfo ) ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + + template + VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetSampleLocationsEXT( static_cast( m_commandBuffer ), reinterpret_cast( pSampleLocationsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + 
VULKAN_HPP_ASSERT( d.vkCmdSetSampleLocationsEXT && "Function requires <VK_EXT_sample_locations>" );
+# endif
+
+    d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits        samples,
+                                                                      VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties,
+                                                                      Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceMultisamplePropertiesEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+                                                   static_cast<VkSampleCountFlagBits>( samples ),
+                                                   reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT
+    PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceMultisamplePropertiesEXT &&
+                       "Function requires <VK_EXT_sample_locations>" );
+# endif
+
+    VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
+    d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
+      m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) );
+
+    return multisampleProperties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_KHR_get_memory_requirements2 ===
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
+                                                                 VULKAN_HPP_NAMESPACE::MemoryRequirements2 *                pMemoryRequirements,
+                                                                 Dispatch const &                                           d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetImageMemoryRequirements2KHR( static_cast<VkDevice>( m_device ),
+                                        reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ),
+                                        reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
+    Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2KHR &&
+                       "Function requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
+# endif
+
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetImageMemoryRequirements2KHR(
+      m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+    Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkGetImageMemoryRequirements2KHR &&
+                       "Function requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
+# endif
+
+    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 &      memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetImageMemoryRequirements2KHR(
+      m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetBufferMemoryRequirements2KHR( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2KHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetBufferMemoryRequirements2KHR( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferMemoryRequirements2KHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + d.vkGetBufferMemoryRequirements2KHR( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageSparseMemoryRequirements2KHR( static_cast( m_device ), + reinterpret_cast( pInfo ), + pSparseMemoryRequirementCount, + reinterpret_cast( pSparseMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2KHR && + "Function requires or " ); +# endif + + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2KHR( + m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + 
&sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageSparseMemoryRequirements2KHR && + "Function requires or " ); +# endif + + std::vector sparseMemoryRequirements( + sparseImageMemoryRequirements2Allocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2KHR( + m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2KHR( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_acceleration_structure === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateAccelerationStructureKHR( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pAccelerationStructure ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateAccelerationStructureKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( accelerationStructure ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createAccelerationStructureKHRUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateAccelerationStructureKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + accelerationStructure, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureKHR( static_cast( m_device ), + static_cast( accelerationStructure ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureKHR && "Function requires " ); +# endif + + d.vkDestroyAccelerationStructureKHR( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureKHR( static_cast( m_device ), + static_cast( accelerationStructure ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureKHR && "Function requires " ); +# endif + + d.vkDestroyAccelerationStructureKHR( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void + CommandBuffer::buildAccelerationStructuresKHR( uint32_t infoCount, + const 
VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBuildAccelerationStructuresKHR( static_cast( m_commandBuffer ), + infoCount, + reinterpret_cast( pInfos ), + reinterpret_cast( ppBuildRangeInfos ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBuildAccelerationStructuresKHR && "Function requires " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); +# else + if ( infos.size() != pBuildRangeInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer, + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( pBuildRangeInfos.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses, + const uint32_t * pIndirectStrides, + const uint32_t * const * ppMaxPrimitiveCounts, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBuildAccelerationStructuresIndirectKHR( static_cast( m_commandBuffer ), + infoCount, + reinterpret_cast( pInfos ), + reinterpret_cast( pIndirectDeviceAddresses ), + pIndirectStrides, + ppMaxPrimitiveCounts ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & indirectDeviceAddresses, + VULKAN_HPP_NAMESPACE::ArrayProxy const & indirectStrides, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pMaxPrimitiveCounts, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBuildAccelerationStructuresIndirectKHR && + "Function requires " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() ); + VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() ); + VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() ); +# else + if ( infos.size() != indirectDeviceAddresses.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" ); + } + if ( infos.size() != indirectStrides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != 
indirectStrides.size()" ); + } + if ( infos.size() != pMaxPrimitiveCounts.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer, + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( indirectDeviceAddresses.data() ), + indirectStrides.data(), + pMaxPrimitiveCounts.data() ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkBuildAccelerationStructuresKHR( static_cast( m_device ), + static_cast( deferredOperation ), + infoCount, + reinterpret_cast( pInfos ), + reinterpret_cast( ppBuildRangeInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pBuildRangeInfos, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBuildAccelerationStructuresKHR && "Function requires " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); +# else + if ( infos.size() != pBuildRangeInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkBuildAccelerationStructuresKHR( m_device, + static_cast( deferredOperation ), + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( pBuildRangeInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCopyAccelerationStructureKHR( static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const 
VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyAccelerationStructureKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCopyAccelerationStructureKHR( + m_device, static_cast( deferredOperation ), reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCopyAccelerationStructureToMemoryKHR( static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyAccelerationStructureToMemoryKHR && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCopyAccelerationStructureToMemoryKHR( + m_device, static_cast( deferredOperation ), reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCopyMemoryToAccelerationStructureKHR( static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyMemoryToAccelerationStructureKHR && + 
"Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCopyMemoryToAccelerationStructureKHR( + m_device, static_cast( deferredOperation ), reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + void * pData, + size_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( static_cast( m_device ), + accelerationStructureCount, + reinterpret_cast( pAccelerationStructures ), + static_cast( queryType ), + dataSize, + pData, + stride ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::writeAccelerationStructuresPropertiesKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWriteAccelerationStructuresPropertiesKHR && + "Function requires " ); +# endif + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkWriteAccelerationStructuresPropertiesKHR( m_device, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ), + stride ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::writeAccelerationStructuresPropertyKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWriteAccelerationStructuresPropertiesKHR && + "Function requires " ); +# endif + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkWriteAccelerationStructuresPropertiesKHR( m_device, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + sizeof( DataType ), + reinterpret_cast( &data ), + stride ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" ); + + return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyAccelerationStructureKHR( static_cast( m_commandBuffer ), + reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyAccelerationStructureKHR && "Function requires " ); +# endif + + d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( &info ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyAccelerationStructureToMemoryKHR( static_cast( m_commandBuffer ), + reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyAccelerationStructureToMemoryKHR && + "Function requires " ); +# endif + + d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast( &info ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyMemoryToAccelerationStructureKHR( static_cast( m_commandBuffer ), + reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyMemoryToAccelerationStructureKHR && + "Function requires " ); +# endif + + d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( &info ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetAccelerationStructureDeviceAddressKHR( + static_cast( 
m_device ), reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress + Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureDeviceAddressKHR && + "Function requires " ); +# endif + + VkDeviceAddress result = + d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast( &info ) ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void + CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteAccelerationStructuresPropertiesKHR( static_cast( m_commandBuffer ), + accelerationStructureCount, + reinterpret_cast( pAccelerationStructures ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdWriteAccelerationStructuresPropertiesKHR && + "Function requires " ); +# endif + + d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo, + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceAccelerationStructureCompatibilityKHR( static_cast( m_device ), + reinterpret_cast( pVersionInfo ), + reinterpret_cast( pCompatibility ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceAccelerationStructureCompatibilityKHR && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility; + d.vkGetDeviceAccelerationStructureCompatibilityKHR( 
m_device, + reinterpret_cast( &versionInfo ), + reinterpret_cast( &compatibility ) ); + + return compatibility; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo, + const uint32_t * pMaxPrimitiveCounts, + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetAccelerationStructureBuildSizesKHR( static_cast( m_device ), + static_cast( buildType ), + reinterpret_cast( pBuildInfo ), + pMaxPrimitiveCounts, + reinterpret_cast( pSizeInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR + Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, + VULKAN_HPP_NAMESPACE::ArrayProxy const & maxPrimitiveCounts, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureBuildSizesKHR && + "Function requires " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount ); +# else + if ( maxPrimitiveCounts.size() != buildInfo.geometryCount ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo; + d.vkGetAccelerationStructureBuildSizesKHR( m_device, + static_cast( buildType ), + reinterpret_cast( &buildInfo ), + maxPrimitiveCounts.data(), + reinterpret_cast( &sizeInfo ) ); + + return sizeInfo; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_ray_tracing_pipeline === + + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdTraceRaysKHR( static_cast( m_commandBuffer ), + reinterpret_cast( pRaygenShaderBindingTable ), + reinterpret_cast( pMissShaderBindingTable ), + reinterpret_cast( pHitShaderBindingTable ), + reinterpret_cast( pCallableShaderBindingTable ), + width, + height, + depth ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const 
VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdTraceRaysKHR && "Function requires " ); +# endif + + d.vkCmdTraceRaysKHR( m_commandBuffer, + reinterpret_cast( &raygenShaderBindingTable ), + reinterpret_cast( &missShaderBindingTable ), + reinterpret_cast( &hitShaderBindingTable ), + reinterpret_cast( &callableShaderBindingTable ), + width, + height, + depth ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateRayTracingPipelinesKHR( static_cast( m_device ), + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( result, std::move( pipelines ) ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function requires " ); +# endif + + std::vector pipelines( 
createInfos.size(), pipelineAllocator ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( result, std::move( pipelines ) ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue( result, std::move( pipeline ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesKHRUnique( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + std::vector, PipelineAllocator> 
uniquePipelines; + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + } + return ResultValue, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesKHRUnique( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + } + return ResultValue, PipelineAllocator>>( result, std::move( uniquePipelines ) ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRayTracingPipelinesKHR( + m_device, + static_cast( deferredOperation ), + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( + result, UniqueHandle( pipeline, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ 
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( + static_cast( m_device ), static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getRayTracingShaderGroupHandlesKHR( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( + m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast( data.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesKHR && + "Function requires or " ); +# endif + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesKHR( + m_device, static_cast( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast( &data ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + static_cast( m_device ), static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() 
== VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR &&
+                       "Function requires <VK_KHR_ray_tracing_pipeline>" );
+#  endif
+
+    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
+    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
+      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) ) );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
+
+    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) );
+  }
+
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::getRayTracingCaptureReplayShaderGroupHandleKHR(
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR &&
+                       "Function requires <VK_KHR_ray_tracing_pipeline>" );
+#  endif
+
+    DataType data;
+    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
+      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) ) );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" );
+
+    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
+                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
+                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
+                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
+                                                              VULKAN_HPP_NAMESPACE::DeviceAddress                         indirectDeviceAddress,
+                                                              Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdTraceRaysIndirectKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
+                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
+                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
+                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
+                                 static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
+                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
+                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
+                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
+                                                              VULKAN_HPP_NAMESPACE::DeviceAddress                         indirectDeviceAddress,
+                                                              Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdTraceRaysIndirectKHR && "Function requires <VK_KHR_ray_tracing_pipeline>" );
+#  endif
+
+    d.vkCmdTraceRaysIndirectKHR( m_commandBuffer,
+                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>(
&raygenShaderBindingTable ), + reinterpret_cast( &missShaderBindingTable ), + reinterpret_cast( &hitShaderBindingTable ), + reinterpret_cast( &callableShaderBindingTable ), + static_cast( indirectDeviceAddress ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE DeviceSize Device::getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t group, + VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetRayTracingShaderGroupStackSizeKHR( + static_cast( m_device ), static_cast( pipeline ), group, static_cast( groupShader ) ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRayTracingPipelineStackSizeKHR( static_cast( m_commandBuffer ), pipelineStackSize ); + } + + //=== VK_KHR_sampler_ycbcr_conversion === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateSamplerYcbcrConversionKHR( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pYcbcrConversion ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversionKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateSamplerYcbcrConversionKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( ycbcrConversion ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createSamplerYcbcrConversionKHRUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateSamplerYcbcrConversionKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateSamplerYcbcrConversionKHR( + m_device, + reinterpret_cast( &createInfo ), + 
reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, + UniqueHandle( ycbcrConversion, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroySamplerYcbcrConversionKHR( static_cast( m_device ), + static_cast( ycbcrConversion ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroySamplerYcbcrConversionKHR && + "Function requires or " ); +# endif + + d.vkDestroySamplerYcbcrConversionKHR( + m_device, + static_cast( ycbcrConversion ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_bind_memory2 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkBindBufferMemory2KHR( static_cast( m_device ), bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindBufferMemory2KHR && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkBindBufferMemory2KHR( m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkBindImageMemory2KHR( static_cast( m_device ), bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindImageMemory2KHR( 
VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindImageMemory2KHR && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkBindImageMemory2KHR( m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_image_drm_format_modifier === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( + VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetImageDrmFormatModifierPropertiesEXT( + static_cast( m_device ), static_cast( image ), reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageDrmFormatModifierPropertiesEXT && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetImageDrmFormatModifierPropertiesEXT( + m_device, static_cast( image ), reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_validation_cache === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateValidationCacheEXT( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pValidationCache ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateValidationCacheEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateValidationCacheEXT( + m_device, + reinterpret_cast( &createInfo ), + 
reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &validationCache ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( validationCache ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createValidationCacheEXTUnique( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateValidationCacheEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateValidationCacheEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &validationCache ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, + UniqueHandle( validationCache, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyValidationCacheEXT( + static_cast( m_device ), static_cast( validationCache ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyValidationCacheEXT && "Function requires " ); +# endif + + d.vkDestroyValidationCacheEXT( + m_device, + static_cast( validationCache ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyValidationCacheEXT( + static_cast( m_device ), static_cast( validationCache ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyValidationCacheEXT && "Function requires " ); +# endif + + d.vkDestroyValidationCacheEXT( + m_device, + static_cast( validationCache ), + reinterpret_cast( static_cast( allocator ) ) ); + } 
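// Editorial usage sketch -- not part of the generated header or of this patch. It illustrates the
// createValidationCacheEXTUnique wrapper defined above under the default configuration (smart
// handles and exceptions enabled, so a failed call throws and the unique handle is returned
// directly). The function name and the empty create info are illustrative assumptions only.
inline VULKAN_HPP_NAMESPACE::UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  exampleCreateValidationCacheEXTUnique( VULKAN_HPP_NAMESPACE::Device device )
{
  // An empty create info yields an empty cache; initialDataSize / pInitialData could instead be
  // filled with a blob previously retrieved from getValidationCacheDataEXT to warm the cache.
  VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT createInfo{};
  // The returned UniqueHandle calls vkDestroyValidationCacheEXT automatically when it goes out
  // of scope, so no explicit destroyValidationCacheEXT call is needed.
  return device.createValidationCacheEXTUnique( createInfo );
}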
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkMergeValidationCachesEXT( static_cast( m_device ), + static_cast( dstCache ), + srcCacheCount, + reinterpret_cast( pSrcCaches ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkMergeValidationCachesEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkMergeValidationCachesEXT( + m_device, static_cast( dstCache ), srcCaches.size(), reinterpret_cast( srcCaches.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + size_t * pDataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetValidationCacheDataEXT( static_cast( m_device ), static_cast( validationCache ), pDataSize, pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetValidationCacheDataEXT && "Function requires " ); +# endif + + std::vector data; + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( + d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Uint8_tAllocator & uint8_tAllocator, 
Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetValidationCacheDataEXT && "Function requires " ); +# endif + + std::vector data( uint8_tAllocator ); + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( + d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_shading_rate_image === + + template + VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindShadingRateImageNV( + static_cast( m_commandBuffer ), static_cast( imageView ), static_cast( imageLayout ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportShadingRatePaletteNV( + static_cast( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast( pShadingRatePalettes ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( + uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & shadingRatePalettes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetViewportShadingRatePaletteNV && "Function requires " ); +# endif + + d.vkCmdSetViewportShadingRatePaletteNV( + m_commandBuffer, firstViewport, shadingRatePalettes.size(), reinterpret_cast( shadingRatePalettes.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCoarseSampleOrderNV( static_cast( m_commandBuffer ), + static_cast( sampleOrderType ), + customSampleOrderCount, + reinterpret_cast( pCustomSampleOrders ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + 
VULKAN_HPP_NAMESPACE::ArrayProxy const & customSampleOrders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetCoarseSampleOrderNV && "Function requires " ); +# endif + + d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, + static_cast( sampleOrderType ), + customSampleOrders.size(), + reinterpret_cast( customSampleOrders.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_ray_tracing === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateAccelerationStructureNV( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pAccelerationStructure ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateAccelerationStructureNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( accelerationStructure ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createAccelerationStructureNVUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateAccelerationStructureNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateAccelerationStructureNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + accelerationStructure, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( 
VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureNV( static_cast( m_device ), + static_cast( accelerationStructure ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureNV && "Function requires " ); +# endif + + d.vkDestroyAccelerationStructureNV( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyAccelerationStructureNV( static_cast( m_device ), + static_cast( accelerationStructure ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyAccelerationStructureNV && "Function requires " ); +# endif + + d.vkDestroyAccelerationStructureNV( + m_device, + static_cast( accelerationStructure ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void + Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetAccelerationStructureMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR + Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureMemoryRequirementsNV && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements; + d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
std::vector - PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, - SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, - Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureMemoryRequirementsNV && + "Function requires " ); +# endif - std::vector properties( sparseImageFormatProperties2Allocator ); - uint32_t propertyCount; - d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( - m_physicalDevice, reinterpret_cast( &formatInfo ), &propertyCount, nullptr ); - properties.resize( propertyCount ); - d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, - reinterpret_cast( &formatInfo ), - &propertyCount, - reinterpret_cast( properties.data() ) ); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = structureChain.template get(); + d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) - { - properties.resize( propertyCount ); - } - return properties; + return structureChain; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_KHR_device_group === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, - uint32_t localDeviceIndex, - uint32_t remoteDeviceIndex, - VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( + uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceGroupPeerMemoryFeaturesKHR( - m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( pPeerMemoryFeatures ) ); + return static_cast( d.vkBindAccelerationStructureMemoryNV( + static_cast( m_device ), bindInfoCount, reinterpret_cast( pBindInfos ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR( - uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::bindAccelerationStructureMemoryNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindAccelerationStructureMemoryNV && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; - d.vkGetDeviceGroupPeerMemoryFeaturesKHR( - m_device, heapIndex, 
localDeviceIndex, remoteDeviceIndex, reinterpret_cast( &peerMemoryFeatures ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkBindAccelerationStructureMemoryNV( + m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" ); - return peerMemoryFeatures; + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask ); + d.vkCmdBuildAccelerationStructureNV( static_cast( m_commandBuffer ), + reinterpret_cast( pInfo ), + static_cast( instanceData ), + static_cast( instanceOffset ), + static_cast( update ), + static_cast( dst ), + static_cast( src ), + static_cast( scratch ), + static_cast( scratchOffset ) ); } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, - uint32_t baseGroupY, - uint32_t baseGroupZ, - uint32_t groupCountX, - uint32_t groupCountY, - uint32_t groupCountZ, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); - } +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBuildAccelerationStructureNV && "Function requires " ); +# endif -#if defined( VK_USE_PLATFORM_VI_NN ) - //=== VK_NN_vi_surface === + d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, + reinterpret_cast( &info ), + static_cast( instanceData ), + static_cast( instanceOffset ), + static_cast( update ), + static_cast( dst ), + static_cast( src ), + static_cast( scratch ), + static_cast( scratchOffset ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void 
CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateViSurfaceNN( m_instance, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pSurface ) ) ); + d.vkCmdCopyAccelerationStructureNV( static_cast( m_commandBuffer ), + static_cast( dst ), + static_cast( src ), + static_cast( mode ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, + VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = - d.vkCreateViSurfaceNN( m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" ); - - return createResultValueType( static_cast( result ), surface ); + d.vkCmdTraceRaysNV( static_cast( m_commandBuffer ), + static_cast( raygenShaderBindingTableBuffer ), + static_cast( raygenShaderBindingOffset ), + static_cast( missShaderBindingTableBuffer ), + static_cast( missShaderBindingOffset ), + static_cast( missShaderBindingStride ), + static_cast( hitShaderBindingTableBuffer ), + static_cast( hitShaderBindingOffset ), + static_cast( hitShaderBindingStride ), + static_cast( callableShaderBindingTableBuffer ), + static_cast( callableShaderBindingOffset ), + static_cast( callableShaderBindingStride ), + width, + height, + depth ); } -# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Instance::createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VULKAN_HPP_NAMESPACE::SurfaceKHR 
surface; - VkResult result = - d.vkCreateViSurfaceNN( m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique" ); - - return createResultValueType( - static_cast( result ), - UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + return static_cast( d.vkCreateRayTracingPipelinesNV( static_cast( m_device ), + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_VI_NN*/ - - //=== VK_KHR_maintenance1 === - template - VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, - VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkTrimCommandPoolKHR( m_device, static_cast( commandPool ), static_cast( flags ) ); - } +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function requires " ); +# endif - //=== VK_KHR_device_group_creation === + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t * pPhysicalDeviceGroupCount, - VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + return ResultValue>( result, std::move( pipelines ) ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( - m_instance, pPhysicalDeviceGroupCount, reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size(), pipelineAllocator ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + 
reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( result, std::move( pipelines ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function requires " ); +# endif - std::vector physicalDeviceGroupProperties; - uint32_t physicalDeviceGroupCount; - VkResult result; - do - { - result = d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ); - if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount ) - { - physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); - result = d.vkEnumeratePhysicalDeviceGroupsKHR( - m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); - VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); - if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) - { - physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); - } - return createResultValueType( static_cast( result ), physicalDeviceGroupProperties ); + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue( result, std::move( pipeline ) ); } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - Instance::enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function requires " ); +# endif - std::vector 
physicalDeviceGroupProperties( - physicalDeviceGroupPropertiesAllocator ); - uint32_t physicalDeviceGroupCount; - VkResult result; - do - { - result = d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ); - if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount ) - { - physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); - result = d.vkEnumeratePhysicalDeviceGroupsKHR( - m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); - VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); - if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + std::vector, PipelineAllocator> uniquePipelines; + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) { - physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); } - return createResultValueType( static_cast( result ), physicalDeviceGroupProperties ); + return ResultValue, PipelineAllocator>>( result, std::move( uniquePipelines ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_KHR_external_memory_capabilities === - template - VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, - VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, - reinterpret_cast( pExternalBufferInfo ), - reinterpret_cast( pExternalBufferProperties ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function requires " ); +# endif + + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING 
"::Device::createRayTracingPipelinesNVUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); + uniquePipelines.reserve( createInfos.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + } + return ResultValue, PipelineAllocator>>( result, std::move( uniquePipelines ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties - PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateRayTracingPipelinesNV && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; - d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, - reinterpret_cast( &externalBufferInfo ), - reinterpret_cast( &externalBufferProperties ) ); + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateRayTracingPipelinesNV( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - return externalBufferProperties; + return ResultValue>( + result, UniqueHandle( pipeline, detail::ObjectDestroy( *this, allocator, d ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_memory_win32 === +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, - HANDLE * pHandle, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + return static_cast( d.vkGetRayTracingShaderGroupHandlesNV( + static_cast( m_device ), static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & 
getWin32HandleInfo, Dispatch const & d ) const +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getRayTracingShaderGroupHandlesNV( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesNV && + "Function requires or " ); +# endif - HANDLE handle; - VkResult result = d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); - - return createResultValueType( static_cast( result ), handle ); - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesNV( + m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast( data.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" ); - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, - HANDLE handle, - VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, - static_cast( handleType ), - handle, - reinterpret_cast( pMemoryWin32HandleProperties ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRayTracingShaderGroupHandlesNV && + "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties; - VkResult result = d.vkGetMemoryWin32HandlePropertiesKHR( m_device, - static_cast( handleType ), - handle, - reinterpret_cast( &memoryWin32HandleProperties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" ); + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesNV( + m_device, static_cast( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast( &data ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" ); - return createResultValueType( static_cast( result 
), memoryWin32HandleProperties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_memory_fd === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo, - int * pFd, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + size_t dataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetMemoryFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); + return static_cast( d.vkGetAccelerationStructureHandleNV( + static_cast( m_device ), static_cast( accelerationStructure ), dataSize, pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo, - Dispatch const & d ) const + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureHandleNV && "Function requires " ); +# endif - int fd; - VkResult result = d.vkGetMemoryFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" ); - - return createResultValueType( static_cast( result ), fd ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetAccelerationStructureHandleNV( + m_device, static_cast( accelerationStructure ), data.size() * sizeof( DataType ), reinterpret_cast( data.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, - int fd, - VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetMemoryFdPropertiesKHR( - m_device, static_cast( handleType ), fd, reinterpret_cast( pMemoryFdProperties ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV 
accelerationStructure, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureHandleNV && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties; - VkResult result = d.vkGetMemoryFdPropertiesKHR( - m_device, static_cast( handleType ), fd, reinterpret_cast( &memoryFdProperties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" ); + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetAccelerationStructureHandleNV( + m_device, static_cast( accelerationStructure ), sizeof( DataType ), reinterpret_cast( &data ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); - return createResultValueType( static_cast( result ), memoryFdProperties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_KHR_external_semaphore_capabilities === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void - PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, - reinterpret_cast( pExternalSemaphoreInfo ), - reinterpret_cast( pExternalSemaphoreProperties ) ); + d.vkCmdWriteAccelerationStructuresPropertiesNV( static_cast( m_commandBuffer ), + accelerationStructureCount, + reinterpret_cast( pAccelerationStructures ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties - PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdWriteAccelerationStructuresPropertiesNV && + "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; - d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, - reinterpret_cast( &externalSemaphoreInfo ), - reinterpret_cast( &externalSemaphoreProperties ) ); - - return externalSemaphoreProperties; + 
d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_semaphore_win32 === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( - const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t shader, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast( pImportSemaphoreWin32HandleInfo ) ) ); + return static_cast( d.vkCompileDeferredNV( static_cast( m_device ), static_cast( pipeline ), shader ) ); } - -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#else template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, - Dispatch const & d ) const + Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCompileDeferredNV && "Function requires " ); +# endif - VkResult result = - d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &importSemaphoreWin32HandleInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkCompileDeferredNV( m_device, static_cast( pipeline ), shader ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_maintenance3 === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( - const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + d.vkGetDescriptorSetLayoutSupportKHR( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pSupport ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getSemaphoreWin32HandleKHR( 
const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupportKHR && + "Function requires or " ); +# endif - HANDLE handle; - VkResult result = d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); - - return createResultValueType( static_cast( result ), handle ); - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_semaphore_fd === + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; + d.vkGetDescriptorSetLayoutSupportKHR( + m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast( pImportSemaphoreFdInfo ) ) ); + return support; } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSupportKHR && + "Function requires or " ); +# endif - VkResult result = d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast( &importSemaphoreFdInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get(); + d.vkGetDescriptorSetLayoutSupportKHR( + m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); - return createResultValueType( static_cast( result ) ); + return structureChain; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_draw_indirect_count === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, - int * pFd, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); + d.vkCmdDrawIndirectCountKHR( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - int fd; - VkResult result = d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" ); - - return createResultValueType( static_cast( result ), fd ); + d.vkCmdDrawIndexedIndirectCountKHR( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - //=== VK_KHR_push_descriptor === + //=== VK_EXT_external_memory_host === template - VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, - VULKAN_HPP_NAMESPACE::PipelineLayout layout, - uint32_t set, - uint32_t descriptorWriteCount, - const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPushDescriptorSetKHR( m_commandBuffer, - static_cast( pipelineBindPoint ), - static_cast( layout ), - set, - descriptorWriteCount, - reinterpret_cast( pDescriptorWrites ) ); + return static_cast( d.vkGetMemoryHostPointerPropertiesEXT( static_cast( m_device ), + static_cast( handleType ), + pHostPointer, + reinterpret_cast( pMemoryHostPointerProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void - CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, - VULKAN_HPP_NAMESPACE::PipelineLayout layout, - uint32_t set, - VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryHostPointerPropertiesEXT && "Function 
requires " ); +# endif - d.vkCmdPushDescriptorSetKHR( m_commandBuffer, - static_cast( pipelineBindPoint ), - static_cast( layout ), - set, - descriptorWrites.size(), - reinterpret_cast( descriptorWrites.data() ) ); + VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetMemoryHostPointerPropertiesEXT( m_device, + static_cast( handleType ), + pHostPointer, + reinterpret_cast( &memoryHostPointerProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryHostPointerProperties ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_AMD_buffer_marker === template - VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, - VULKAN_HPP_NAMESPACE::PipelineLayout layout, - uint32_t set, - const void * pData, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPushDescriptorSetWithTemplateKHR( - m_commandBuffer, static_cast( descriptorUpdateTemplate ), static_cast( layout ), set, pData ); + d.vkCmdWriteBufferMarkerAMD( static_cast( m_commandBuffer ), + static_cast( pipelineStage ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + marker ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, - VULKAN_HPP_NAMESPACE::PipelineLayout layout, - uint32_t set, - DataType const & data, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, - static_cast( descriptorUpdateTemplate ), - static_cast( layout ), - set, - reinterpret_cast( &data ) ); + d.vkCmdWriteBufferMarker2AMD( static_cast( m_commandBuffer ), + static_cast( stage ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + marker ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - //=== VK_EXT_conditional_rendering === + //=== VK_EXT_calibrated_timestamps === template - VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount, + VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast( 
pConditionalRenderingBegin ) ); + return static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + static_cast( m_physicalDevice ), pTimeDomainCount, reinterpret_cast( pTimeDomains ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT && + "Function requires or " ); +# endif - d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast( &conditionalRenderingBegin ) ); + std::vector timeDomains; + uint32_t timeDomainCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast( + d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + if ( timeDomainCount < timeDomains.size() ) + { + timeDomains.resize( timeDomainCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndConditionalRenderingEXT( m_commandBuffer ); - } +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT && + "Function requires or " ); +# endif - //=== VK_KHR_descriptor_update_template === + std::vector timeDomains( timeDomainKHRAllocator ); + uint32_t timeDomainCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast( + d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + if ( timeDomainCount < timeDomains.size() ) + { + 
timeDomains.resize( timeDomainCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, + const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDescriptorUpdateTemplateKHR( m_device, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pDescriptorUpdateTemplate ) ) ); + return static_cast( d.vkGetCalibratedTimestampsEXT( static_cast( m_device ), + timestampCount, + reinterpret_cast( pTimestampInfos ), + pTimestamps, + pMaxDeviation ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, - Optional allocator, - Dispatch const & d ) const + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, uint64_t>>::type + Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsEXT && + "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; - VkResult result = d.vkCreateDescriptorUpdateTemplateKHR( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &descriptorUpdateTemplate ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" ); + std::pair, uint64_t> data_( + std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); + std::vector & timestamps = data_.first; + uint64_t & maxDeviation = data_.second; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetCalibratedTimestampsEXT( + m_device, timestampInfos.size(), reinterpret_cast( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); - return createResultValueType( static_cast( result ), descriptorUpdateTemplate ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); } -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::createDescriptorUpdateTemplateKHRUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, - Optional allocator, - Dispatch const & d ) const + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, 
uint64_t>>::type + Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, + Uint64_tAllocator & uint64_tAllocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsEXT && + "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; - VkResult result = d.vkCreateDescriptorUpdateTemplateKHR( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &descriptorUpdateTemplate ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique" ); + std::pair, uint64_t> data_( + std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) ); + std::vector & timestamps = data_.first; + uint64_t & maxDeviation = data_.second; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetCalibratedTimestampsEXT( + m_device, timestampInfos.size(), reinterpret_cast( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); - return createResultValueType( static_cast( result ), - UniqueHandle( - descriptorUpdateTemplate, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyDescriptorUpdateTemplateKHR( - m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsEXT && + "Function requires or " ); +# endif + + std::pair data_; + uint64_t & timestamp = data_.first; + uint64_t & maxDeviation = data_.second; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetCalibratedTimestampsEXT( m_device, 1, reinterpret_cast( ×tampInfo ), ×tamp, &maxDeviation ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_mesh_shader === -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - 
d.vkDestroyDescriptorUpdateTemplateKHR( - m_device, - static_cast( descriptorUpdateTemplate ), - reinterpret_cast( static_cast( allocator ) ) ); + d.vkCmdDrawMeshTasksNV( static_cast( m_commandBuffer ), taskCount, firstTask ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, - const void * pData, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkUpdateDescriptorSetWithTemplateKHR( - m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); + d.vkCmdDrawMeshTasksIndirectNV( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, - DataType const & data, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - d.vkUpdateDescriptorSetWithTemplateKHR( m_device, - static_cast( descriptorSet ), - static_cast( descriptorUpdateTemplate ), - reinterpret_cast( &data ) ); + d.vkCmdDrawMeshTasksIndirectCountNV( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - //=== VK_NV_clip_space_w_scaling === + //=== VK_NV_scissor_exclusive === template - VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, - uint32_t viewportCount, - const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VULKAN_HPP_NAMESPACE::Bool32 * pExclusiveScissorEnables, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pViewportWScalings ) ); + d.vkCmdSetExclusiveScissorEnableNV( static_cast( m_commandBuffer ), + firstExclusiveScissor, + exclusiveScissorCount, + reinterpret_cast( pExclusiveScissorEnables ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void - CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, - VULKAN_HPP_NAMESPACE::ArrayProxy const & viewportWScalings, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + CommandBuffer::setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor, + 
VULKAN_HPP_NAMESPACE::ArrayProxy const & exclusiveScissorEnables, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetExclusiveScissorEnableNV && "Function requires " ); +# endif - d.vkCmdSetViewportWScalingNV( - m_commandBuffer, firstViewport, viewportWScalings.size(), reinterpret_cast( viewportWScalings.data() ) ); + d.vkCmdSetExclusiveScissorEnableNV( + m_commandBuffer, firstExclusiveScissor, exclusiveScissorEnables.size(), reinterpret_cast( exclusiveScissorEnables.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_EXT_direct_mode_display === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast( display ) ) ); + d.vkCmdSetExclusiveScissorNV( + static_cast( m_commandBuffer ), firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast( pExclusiveScissors ) ); } -#else + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, + VULKAN_HPP_NAMESPACE::ArrayProxy const & exclusiveScissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetExclusiveScissorNV && "Function requires " ); +# endif - d.vkReleaseDisplayEXT( m_physicalDevice, static_cast( display ) ); + d.vkCmdSetExclusiveScissorNV( + m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size(), reinterpret_cast( exclusiveScissors.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) - //=== VK_EXT_acquire_xlib_display === + //=== VK_NV_device_diagnostic_checkpoints === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display * dpy, - VULKAN_HPP_NAMESPACE::DisplayKHR display, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast( display ) ) ); + d.vkCmdSetCheckpointNV( static_cast( m_commandBuffer ), pCheckpointMarker ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + 
VULKAN_HPP_ASSERT( d.vkCmdSetCheckpointNV && "Function requires " ); +# endif + + d.vkCmdSetCheckpointNV( m_commandBuffer, reinterpret_cast( &checkpointMarker ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t * pCheckpointDataCount, + VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetQueueCheckpointDataNV( static_cast( m_queue ), pCheckpointDataCount, reinterpret_cast( pCheckpointData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Queue::getCheckpointDataNV( Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointDataNV && "Function requires " ); +# endif - VkResult result = d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast( display ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); + std::vector checkpointData; + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); - return createResultValueType( static_cast( result ) ); + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + if ( checkpointDataCount < checkpointData.size() ) + { + checkpointData.resize( checkpointDataCount ); + } + return checkpointData; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display * dpy, - RROutput rrOutput, - VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast( pDisplay ) ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointDataNV && "Function requires " ); +# endif + + std::vector checkpointData( checkpointDataNVAllocator ); + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + if ( checkpointDataCount < checkpointData.size() ) + { + checkpointData.resize( checkpointDataCount ); + } + return checkpointData; } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput 
rrOutput, Dispatch const & d ) const + VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t * pCheckpointDataCount, + VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetQueueCheckpointData2NV( static_cast( m_queue ), pCheckpointDataCount, reinterpret_cast( pCheckpointData ) ); + } - VULKAN_HPP_NAMESPACE::DisplayKHR display; - VkResult result = d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast( &display ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Queue::getCheckpointData2NV( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointData2NV && "Function requires " ); +# endif + + std::vector checkpointData; + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); - return createResultValueType( static_cast( result ), display ); + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + if ( checkpointDataCount < checkpointData.size() ) + { + checkpointData.resize( checkpointDataCount ); + } + return checkpointData; } -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointData2NV && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::DisplayKHR display; - VkResult result = d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast( &display ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique" ); + std::vector checkpointData( checkpointData2NVAllocator ); + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); - return createResultValueType( static_cast( result ), - UniqueHandle( display, ObjectRelease( *this, d ) ) ); + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + if ( checkpointDataCount < checkpointData.size() ) + { + checkpointData.resize( checkpointDataCount ); + } + return checkpointData; } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_EXT_display_surface_counter === + //=== VK_KHR_timeline_semaphore === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - 
PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, - VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + uint64_t * pValue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( - m_physicalDevice, static_cast( surface ), reinterpret_cast( pSurfaceCapabilities ) ) ); + return static_cast( d.vkGetSemaphoreCounterValueKHR( static_cast( m_device ), static_cast( semaphore ), pValue ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSemaphoreCounterValueKHR && "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities; - VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( - m_physicalDevice, static_cast( surface ), reinterpret_cast( &surfaceCapabilities ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); + uint64_t value; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast( semaphore ), &value ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" ); - return createResultValueType( static_cast( result ), surfaceCapabilities ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( value ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_EXT_display_control === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, - const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, + uint64_t timeout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkDisplayPowerControlEXT( m_device, static_cast( display ), reinterpret_cast( pDisplayPowerInfo ) ) ); + d.vkWaitSemaphoresKHR( static_cast( m_device ), reinterpret_cast( pWaitInfo ), timeout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, - const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const { VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWaitSemaphoresKHR && "Function requires or " ); +# endif - VkResult result = - d.vkDisplayPowerControlEXT( m_device, static_cast( display ), reinterpret_cast( &displayPowerInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast( &waitInfo ), timeout ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); - return createResultValueType( static_cast( result ) ); + return static_cast( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::Fence * pFence, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkRegisterDeviceEventEXT( m_device, - reinterpret_cast( pDeviceEventInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pFence ) ) ); + return static_cast( d.vkSignalSemaphoreKHR( static_cast( m_device ), reinterpret_cast( pSignalInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSignalSemaphoreKHR && "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::Fence fence; - VkResult result = d.vkRegisterDeviceEventEXT( - m_device, - reinterpret_cast( &deviceEventInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &fence ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast( &signalInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" ); - return createResultValueType( static_cast( result ), fence ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_INTEL_performance_query === -# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::initializePerformanceApiINTEL( + const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkInitializePerformanceApiINTEL( static_cast( m_device ), + reinterpret_cast( pInitializeInfo ) ) ); + } - VULKAN_HPP_NAMESPACE::Fence fence; - VkResult result = d.vkRegisterDeviceEventEXT( - m_device, - reinterpret_cast( &deviceEventInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &fence ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique" ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkInitializePerformanceApiINTEL && "Function requires " ); +# endif - return createResultValueType( static_cast( result ), - UniqueHandle( fence, ObjectDestroy( *this, allocator, d ) ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast( &initializeInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, - const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::Fence * pFence, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkRegisterDisplayEventEXT( m_device, - static_cast( display ), - reinterpret_cast( pDisplayEventInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pFence ) ) ); + d.vkUninitializePerformanceApiINTEL( static_cast( m_device ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, - const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VULKAN_HPP_NAMESPACE::Fence fence; - VkResult result = d.vkRegisterDisplayEventEXT( - m_device, - static_cast( display ), - reinterpret_cast( &displayEventInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &fence ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" ); - - return 
createResultValueType( static_cast( result ), fence ); + return static_cast( d.vkCmdSetPerformanceMarkerINTEL( static_cast( m_commandBuffer ), + reinterpret_cast( pMarkerInfo ) ) ); } -# ifndef VULKAN_HPP_NO_SMART_HANDLE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, - const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetPerformanceMarkerINTEL && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::Fence fence; - VkResult result = d.vkRegisterDisplayEventEXT( - m_device, - static_cast( display ), - reinterpret_cast( &displayEventInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &fence ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast( &markerInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" ); - return createResultValueType( static_cast( result ), - UniqueHandle( fence, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, - uint64_t * pCounterValue, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetSwapchainCounterEXT( m_device, static_cast( swapchain ), static_cast( counter ), pCounterValue ) ); + return static_cast( d.vkCmdSetPerformanceStreamMarkerINTEL( static_cast( m_commandBuffer ), + reinterpret_cast( pMarkerInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::getSwapchainCounterEXT( - VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const & d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetPerformanceStreamMarkerINTEL && "Function requires " ); +# endif - uint64_t counterValue; - VkResult result 
= - d.vkGetSwapchainCounterEXT( m_device, static_cast( swapchain ), static_cast( counter ), &counterValue ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast( &markerInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" ); - return createResultValueType( static_cast( result ), counterValue ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_GOOGLE_display_timing === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( + const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetRefreshCycleDurationGOOGLE( - m_device, static_cast( swapchain ), reinterpret_cast( pDisplayTimingProperties ) ) ); + return static_cast( d.vkCmdSetPerformanceOverrideINTEL( static_cast( m_commandBuffer ), + reinterpret_cast( pOverrideInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetPerformanceOverrideINTEL && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties; - VkResult result = d.vkGetRefreshCycleDurationGOOGLE( - m_device, static_cast( swapchain ), reinterpret_cast( &displayTimingProperties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast( &overrideInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" ); - return createResultValueType( static_cast( result ), displayTimingProperties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - uint32_t * pPresentationTimingCount, - VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + Device::acquirePerformanceConfigurationINTEL( const 
VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPastPresentationTimingGOOGLE( m_device, - static_cast( swapchain ), - pPresentationTimingCount, - reinterpret_cast( pPresentationTimings ) ) ); + return static_cast( d.vkAcquirePerformanceConfigurationINTEL( static_cast( m_device ), + reinterpret_cast( pAcquireInfo ), + reinterpret_cast( pConfiguration ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquirePerformanceConfigurationINTEL && "Function requires " ); +# endif - std::vector presentationTimings; - uint32_t presentationTimingCount; - VkResult result; - do - { - result = d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), &presentationTimingCount, nullptr ); - if ( ( result == VK_SUCCESS ) && presentationTimingCount ) - { - presentationTimings.resize( presentationTimingCount ); - result = d.vkGetPastPresentationTimingGOOGLE( m_device, - static_cast( swapchain ), - &presentationTimingCount, - reinterpret_cast( presentationTimings.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); - VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); - if ( presentationTimingCount < presentationTimings.size() ) - { - presentationTimings.resize( presentationTimingCount ); - } - return createResultValueType( static_cast( result ), presentationTimings ); + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkAcquirePerformanceConfigurationINTEL( m_device, + reinterpret_cast( &acquireInfo ), + reinterpret_cast( &configuration ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( configuration ) ); } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator, - Dispatch const & d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::acquirePerformanceConfigurationINTELUnique( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquirePerformanceConfigurationINTEL && "Function requires " ); +# 
endif - std::vector presentationTimings( - pastPresentationTimingGOOGLEAllocator ); - uint32_t presentationTimingCount; - VkResult result; - do - { - result = d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast( swapchain ), &presentationTimingCount, nullptr ); - if ( ( result == VK_SUCCESS ) && presentationTimingCount ) - { - presentationTimings.resize( presentationTimingCount ); - result = d.vkGetPastPresentationTimingGOOGLE( m_device, - static_cast( swapchain ), - &presentationTimingCount, - reinterpret_cast( presentationTimings.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); - VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); - if ( presentationTimingCount < presentationTimings.size() ) - { - presentationTimings.resize( presentationTimingCount ); - } - return createResultValueType( static_cast( result ), presentationTimings ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkAcquirePerformanceConfigurationINTEL( m_device, + reinterpret_cast( &acquireInfo ), + reinterpret_cast( &configuration ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique" ); - //=== VK_EXT_discard_rectangles === + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, + UniqueHandle( configuration, detail::ObjectRelease( *this, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, - uint32_t discardRectangleCount, - const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast( pDiscardRectangles ) ); + return static_cast( + d.vkReleasePerformanceConfigurationINTEL( static_cast( m_device ), static_cast( configuration ) ) ); } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#else template - VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, - VULKAN_HPP_NAMESPACE::ArrayProxy const & discardRectangles, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkReleasePerformanceConfigurationINTEL && "Function requires " ); +# endif - d.vkCmdSetDiscardRectangleEXT( - m_commandBuffer, firstDiscardRectangle, discardRectangles.size(), reinterpret_cast( discardRectangles.data() ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ) ); + 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - //=== VK_EXT_hdr_metadata === - +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, - const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, - const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkSetHdrMetadataEXT( - m_device, swapchainCount, reinterpret_cast( pSwapchains ), reinterpret_cast( pMetadata ) ); + return static_cast( + d.vkReleasePerformanceConfigurationINTEL( static_cast( m_device ), static_cast( configuration ) ) ); } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#else template - VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & swapchains, - VULKAN_HPP_NAMESPACE::ArrayProxy const & metadata, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() ); -# else - if ( swapchains.size() != metadata.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); - } -# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkReleasePerformanceConfigurationINTEL && "Function requires " ); +# endif - d.vkSetHdrMetadataEXT( m_device, - swapchains.size(), - reinterpret_cast( swapchains.data() ), - reinterpret_cast( metadata.data() ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::release" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - //=== VK_KHR_create_renderpass2 === - +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateRenderPass2KHR( m_device, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pRenderPass ) ) ); + return static_cast( + d.vkQueueSetPerformanceConfigurationINTEL( static_cast( m_queue ), static_cast( configuration ) ) ); 
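  /*
    A minimal usage sketch of the VK_INTEL_performance_query wrappers added above
    (illustrative only, not part of the generated header). It assumes exceptions and
    enhanced mode are enabled, that `device` is a vk::Device and `queue` a vk::Queue
    created from it, and that VK_INTEL_performance_query is enabled on the device:

      device.initializePerformanceApiINTEL( vk::InitializePerformanceApiInfoINTEL{} );

      vk::PerformanceConfigurationAcquireInfoINTEL acquireInfo{
        vk::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated };
      vk::PerformanceConfigurationINTEL configuration = device.acquirePerformanceConfigurationINTEL( acquireInfo );
      // acquirePerformanceConfigurationINTELUnique() instead returns a UniqueHandle whose
      // detail::ObjectRelease deleter calls vkReleasePerformanceConfigurationINTEL automatically.

      queue.setPerformanceConfigurationINTEL( configuration );
      // ... submit command buffers recorded with setPerformanceMarkerINTEL / setPerformanceOverrideINTEL ...

      device.releasePerformanceConfigurationINTEL( configuration );
      device.uninitializePerformanceApiINTEL();
  */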
} - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#else template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueueSetPerformanceConfigurationINTEL && + "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::RenderPass renderPass; - VkResult result = - d.vkCreateRenderPass2KHR( m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &renderPass ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast( configuration ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" ); - return createResultValueType( static_cast( result ), renderPass ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, + VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VULKAN_HPP_NAMESPACE::RenderPass renderPass; - VkResult result = - d.vkCreateRenderPass2KHR( m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &renderPass ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique" ); - - return createResultValueType( - static_cast( result ), - UniqueHandle( renderPass, ObjectDestroy( *this, allocator, d ) ) ); + return static_cast( d.vkGetPerformanceParameterINTEL( + static_cast( m_device ), static_cast( parameter ), reinterpret_cast( pValue ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, - const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginRenderPass2KHR( - m_commandBuffer, reinterpret_cast( pRenderPassBegin ), reinterpret_cast( pSubpassBeginInfo ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + 
VULKAN_HPP_ASSERT( d.vkGetPerformanceParameterINTEL && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPerformanceParameterINTEL( + m_device, static_cast( parameter ), reinterpret_cast( &value ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( value ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_AMD_display_native_hdr === -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, - const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, + VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - d.vkCmdBeginRenderPass2KHR( - m_commandBuffer, reinterpret_cast( &renderPassBegin ), reinterpret_cast( &subpassBeginInfo ) ); + d.vkSetLocalDimmingAMD( static_cast( m_device ), static_cast( swapChain ), static_cast( localDimmingEnable ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === template - VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, - const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdNextSubpass2KHR( - m_commandBuffer, reinterpret_cast( pSubpassBeginInfo ), reinterpret_cast( pSubpassEndInfo ) ); + return static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, - const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateImagePipeSurfaceFUCHSIA && "Function requires " ); +# endif - d.vkCmdNextSubpass2KHR( - m_commandBuffer, reinterpret_cast( &subpassBeginInfo ), reinterpret_cast( &subpassEndInfo ) ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( + m_instance, + 
reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createImagePipeSurfaceFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast( pSubpassEndInfo ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateImagePipeSurfaceFUCHSIA && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast( &subpassEndInfo ) ); + return static_cast( d.vkCreateMetalSurfaceEXT( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_KHR_shared_presentable_image === -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetSwapchainStatusKHR( m_device, static_cast( swapchain ) ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 
) + VULKAN_HPP_ASSERT( d.vkCreateMetalSurfaceEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateMetalSurfaceEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } -#else + +# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createMetalSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateMetalSurfaceEXT && "Function requires " ); +# endif - VkResult result = d.vkGetSwapchainStatusKHR( m_device, static_cast( swapchain ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateMetalSurfaceEXT( m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique" ); - return static_cast( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_KHR_external_fence_capabilities === + //=== VK_KHR_fragment_shading_rate === template - VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, - VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getFragmentShadingRatesKHR( uint32_t * pFragmentShadingRateCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, - reinterpret_cast( pExternalFenceInfo ), - reinterpret_cast( pExternalFenceProperties ) ); + return static_cast( + d.vkGetPhysicalDeviceFragmentShadingRatesKHR( static_cast( m_physicalDevice ), + pFragmentShadingRateCount, + reinterpret_cast( pFragmentShadingRates ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties - PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & 
externalFenceInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFragmentShadingRatesKHR && + "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; - d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, - reinterpret_cast( &externalFenceInfo ), - reinterpret_cast( &externalFenceProperties ) ); - - return externalFenceProperties; + std::vector fragmentShadingRates; + uint32_t fragmentShadingRateCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = + static_cast( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && fragmentShadingRateCount ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + result = static_cast( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( + m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast( fragmentShadingRates.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); + VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); + if ( fragmentShadingRateCount < fragmentShadingRates.size() ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fragmentShadingRates ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_fence_win32 === + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getFragmentShadingRatesKHR( PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceFragmentShadingRatesKHR && + "Function requires " ); +# endif + + std::vector fragmentShadingRates( + physicalDeviceFragmentShadingRateKHRAllocator ); + uint32_t fragmentShadingRateCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = + static_cast( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && fragmentShadingRateCount ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + result = static_cast( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( + m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast( fragmentShadingRates.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); + VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); + if ( fragmentShadingRateCount < fragmentShadingRates.size() ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + } + return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fragmentShadingRates ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( - const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast( pImportFenceWin32HandleInfo ) ) ); + d.vkCmdSetFragmentShadingRateKHR( static_cast( m_commandBuffer ), + reinterpret_cast( pFragmentSize ), + reinterpret_cast( combinerOps ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetFragmentShadingRateKHR && "Function requires " ); +# endif - VkResult result = d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast( &importFenceWin32HandleInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" ); - - return createResultValueType( static_cast( result ) ); + d.vkCmdSetFragmentShadingRateKHR( + m_commandBuffer, reinterpret_cast( &fragmentSize ), reinterpret_cast( combinerOps ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_dynamic_rendering_local_read === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, - HANDLE * pHandle, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo * pLocationInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + d.vkCmdSetRenderingAttachmentLocationsKHR( static_cast( m_commandBuffer ), + reinterpret_cast( pLocationInfo ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { 
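  /*
    A minimal usage sketch of the VK_KHR_fragment_shading_rate wrappers added above
    (illustrative only, not part of the generated header). The enhanced overload hides the
    count/resize/re-query loop shown in the generated code and returns the vector directly.
    It assumes exceptions are enabled, `physicalDevice` is a vk::PhysicalDevice and
    `commandBuffer` a vk::CommandBuffer in the recording state, with the extension enabled:

      std::vector<vk::PhysicalDeviceFragmentShadingRateKHR> rates = physicalDevice.getFragmentShadingRatesKHR();

      vk::Extent2D fragmentSize{ 2, 2 };  // request a 2x2 pipeline shading rate
      vk::FragmentShadingRateCombinerOpKHR combinerOps[2] = { vk::FragmentShadingRateCombinerOpKHR::eKeep,
                                                              vk::FragmentShadingRateCombinerOpKHR::eKeep };
      commandBuffer.setFragmentShadingRateKHR( fragmentSize, combinerOps );
  */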
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetRenderingAttachmentLocationsKHR && + "Function requires or " ); +# endif - HANDLE handle; - VkResult result = d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" ); - - return createResultValueType( static_cast( result ), handle ); + d.vkCmdSetRenderingAttachmentLocationsKHR( m_commandBuffer, reinterpret_cast( &locationInfo ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_fence_fd === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkImportFenceFdKHR( m_device, reinterpret_cast( pImportFenceFdInfo ) ) ); + d.vkCmdSetRenderingInputAttachmentIndicesKHR( static_cast( m_commandBuffer ), + reinterpret_cast( pInputAttachmentIndexInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const + VULKAN_HPP_INLINE void + CommandBuffer::setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetRenderingInputAttachmentIndicesKHR && + "Function requires or " ); +# endif - VkResult result = d.vkImportFenceFdKHR( m_device, reinterpret_cast( &importFenceFdInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" ); - - return createResultValueType( static_cast( result ) ); + d.vkCmdSetRenderingInputAttachmentIndicesKHR( m_commandBuffer, reinterpret_cast( &inputAttachmentIndexInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_buffer_device_address === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo, - int * pFd, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetFenceFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); + return static_cast( + d.vkGetBufferDeviceAddressEXT( static_cast( m_device ), reinterpret_cast( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::getFenceFdKHR( const 
VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo, - Dispatch const & d ) const + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferDeviceAddressEXT && + "Function requires or or " ); +# endif - int fd; - VkResult result = d.vkGetFenceFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" ); + VkDeviceAddress result = d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast( &info ) ); - return createResultValueType( static_cast( result ), fd ); + return static_cast( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_KHR_performance_query === + //=== VK_EXT_tooling_info === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, - uint32_t * pCounterCount, - VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters, - VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t * pToolCount, + VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, - queueFamilyIndex, - pCounterCount, - reinterpret_cast( pCounters ), - reinterpret_cast( pCounterDescriptions ) ) ); + return static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( + static_cast( m_physicalDevice ), pToolCount, reinterpret_cast( pToolProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename PhysicalDeviceToolPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType, - std::vector>>::type - PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const + typename ResultValueType>::type + PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolPropertiesEXT && + "Function requires or " ); +# endif - std::pair, - std::vector> - data; - std::vector & counters = data.first; - std::vector & counterDescriptions = data.second; - uint32_t counterCount; - VkResult result; + std::vector toolProperties; + uint32_t toolCount; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ); - if ( ( result == VK_SUCCESS ) && counterCount ) + result = static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) { - counters.resize( counterCount ); - counterDescriptions.resize( counterCount ); - result = 
d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( - m_physicalDevice, - queueFamilyIndex, - &counterCount, - reinterpret_cast( counters.data() ), - reinterpret_cast( counterDescriptions.data() ) ); + toolProperties.resize( toolCount ); + result = static_cast( + d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); - VULKAN_HPP_ASSERT( counterCount <= counters.size() ); - if ( counterCount < counters.size() ) + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); + VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); + if ( toolCount < toolProperties.size() ) { - counters.resize( counterCount ); - counterDescriptions.resize( counterCount ); + toolProperties.resize( toolCount ); } - return createResultValueType( static_cast( result ), data ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); } - template ::value && - std::is_same::value, - int>::type> + template < + typename PhysicalDeviceToolPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType, - std::vector>>::type - PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, - PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, - PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, - Dispatch const & d ) const + typename ResultValueType>::type + PhysicalDevice::getToolPropertiesEXT( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceToolPropertiesEXT && + "Function requires or " ); +# endif - std::pair, - std::vector> - data( - std::piecewise_construct, std::forward_as_tuple( performanceCounterKHRAllocator ), std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) ); - std::vector & counters = data.first; - std::vector & counterDescriptions = data.second; - uint32_t counterCount; - VkResult result; + std::vector toolProperties( + physicalDeviceToolPropertiesAllocator ); + uint32_t toolCount; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ); - if ( ( result == VK_SUCCESS ) && counterCount ) + result = static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) { - counters.resize( counterCount ); - counterDescriptions.resize( counterCount ); - result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( - m_physicalDevice, - queueFamilyIndex, - &counterCount, - reinterpret_cast( counters.data() ), - reinterpret_cast( counterDescriptions.data() ) ); + toolProperties.resize( toolCount ); + result = static_cast( + d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ) ); } - } 
while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); - VULKAN_HPP_ASSERT( counterCount <= counters.size() ); - if ( counterCount < counters.size() ) + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); + VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); + if ( toolCount < toolProperties.size() ) { - counters.resize( counterCount ); - counterDescriptions.resize( counterCount ); + toolProperties.resize( toolCount ); } - return createResultValueType( static_cast( result ), data ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_INLINE void - PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, - uint32_t * pNumPasses, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( - m_physicalDevice, reinterpret_cast( pPerformanceQueryCreateInfo ), pNumPasses ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( - const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - uint32_t numPasses; - d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( - m_physicalDevice, reinterpret_cast( &performanceQueryCreateInfo ), &numPasses ); - - return numPasses; - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + //=== VK_KHR_present_wait === +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t presentId, + uint64_t timeout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast( pInfo ) ) ); + return static_cast( d.vkWaitForPresentKHR( static_cast( m_device ), static_cast( swapchain ), presentId, timeout ) ); } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#else template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t presentId, uint64_t timeout, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWaitForPresentKHR && "Function requires " ); +# endif - VkResult result = d.vkAcquireProfilingLockKHR( m_device, 
reinterpret_cast( &info ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkWaitForPresentKHR( m_device, static_cast( swapchain ), presentId, timeout ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresentKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); - return createResultValueType( static_cast( result ) ); + return static_cast( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkReleaseProfilingLockKHR( m_device ); - } - - //=== VK_KHR_get_surface_capabilities2 === + //=== VK_NV_cooperative_matrix === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, - VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV( + uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, - reinterpret_cast( pSurfaceInfo ), - reinterpret_cast( pSurfaceCapabilities ) ) ); + return static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV && + "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities; - VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, - reinterpret_cast( &surfaceInfo ), - reinterpret_cast( &surfaceCapabilities ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); - - return createResultValueType( static_cast( result ), surfaceCapabilities ); + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( 
result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV && + "Function requires " ); +# endif - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get(); - VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, - reinterpret_cast( &surfaceInfo ), - reinterpret_cast( &surfaceCapabilities ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); - - return createResultValueType( static_cast( result ), structureChain ); + std::vector properties( + cooperativeMatrixPropertiesNVAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_coverage_reduction_mode === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, - uint32_t * pSurfaceFormatCount, - VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( + uint32_t * pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, - reinterpret_cast( 
pSurfaceInfo ), - pSurfaceFormatCount, - reinterpret_cast( pSurfaceFormats ) ) ); + return static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + static_cast( m_physicalDevice ), pCombinationCount, reinterpret_cast( pCombinations ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV && + "Function requires " ); +# endif - std::vector surfaceFormats; - uint32_t surfaceFormatCount; - VkResult result; + std::vector combinations; + uint32_t combinationCount; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( - m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, nullptr ); - if ( ( result == VK_SUCCESS ) && surfaceFormatCount ) + result = static_cast( + d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && combinationCount ) { - surfaceFormats.resize( surfaceFormatCount ); - result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, - reinterpret_cast( &surfaceInfo ), - &surfaceFormatCount, - reinterpret_cast( surfaceFormats.data() ) ); + combinations.resize( combinationCount ); + result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + m_physicalDevice, &combinationCount, reinterpret_cast( combinations.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); - VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); - if ( surfaceFormatCount < surfaceFormats.size() ) + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); + VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); + if ( combinationCount < combinations.size() ) { - surfaceFormats.resize( surfaceFormatCount ); + combinations.resize( combinationCount ); } - return createResultValueType( static_cast( result ), surfaceFormats ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( combinations ) ); } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, - SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator, - Dispatch const & d ) const + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( + FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV && + "Function requires " ); +# endif - std::vector surfaceFormats( surfaceFormat2KHRAllocator ); - uint32_t surfaceFormatCount; - VkResult result; + std::vector combinations( + framebufferMixedSamplesCombinationNVAllocator ); + uint32_t combinationCount; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( - m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, nullptr ); - if ( ( result == VK_SUCCESS ) && surfaceFormatCount ) + result = static_cast( + d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && combinationCount ) { - surfaceFormats.resize( surfaceFormatCount ); - result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, - reinterpret_cast( &surfaceInfo ), - &surfaceFormatCount, - reinterpret_cast( surfaceFormats.data() ) ); + combinations.resize( combinationCount ); + result = static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + m_physicalDevice, &combinationCount, reinterpret_cast( combinations.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); - VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); - if ( surfaceFormatCount < surfaceFormats.size() ) + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); + VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); + if ( combinationCount < combinations.size() ) { - surfaceFormats.resize( surfaceFormatCount ); + combinations.resize( combinationCount ); } - return createResultValueType( static_cast( result ), surfaceFormats ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( combinations ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pPresentModeCount, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast( m_physicalDevice ), + reinterpret_cast( pSurfaceInfo ), + pPresentModeCount, + reinterpret_cast( pPresentModes ) ) ); + } - std::vector structureChains; - std::vector surfaceFormats; - uint32_t surfaceFormatCount; - VkResult result; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, 
Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModes2EXT && + "Function requires " ); +# endif + + std::vector presentModes; + uint32_t presentModeCount; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( - m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, nullptr ); - if ( ( result == VK_SUCCESS ) && surfaceFormatCount ) + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( + m_physicalDevice, reinterpret_cast( &surfaceInfo ), &presentModeCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) { - structureChains.resize( surfaceFormatCount ); - surfaceFormats.resize( surfaceFormatCount ); - for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) - { - surfaceFormats[i].pNext = structureChains[i].template get().pNext; - } - result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, - reinterpret_cast( &surfaceInfo ), - &surfaceFormatCount, - reinterpret_cast( surfaceFormats.data() ) ); + presentModes.resize( presentModeCount ); + result = static_cast( + d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &presentModeCount, + reinterpret_cast( presentModes.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); - VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); - if ( surfaceFormatCount < surfaceFormats.size() ) - { - structureChains.resize( surfaceFormatCount ); - } - for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) { - structureChains[i].template get() = surfaceFormats[i]; + presentModes.resize( presentModeCount ); } - return createResultValueType( static_cast( result ), structureChains ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, - StructureChainAllocator & structureChainAllocator, - Dispatch const & d ) const + typename std::enable_if::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + PresentModeKHRAllocator & presentModeKHRAllocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceSurfacePresentModes2EXT && + "Function requires " ); +# endif - std::vector structureChains( structureChainAllocator ); - std::vector surfaceFormats; - uint32_t surfaceFormatCount; - VkResult result; + std::vector presentModes( presentModeKHRAllocator ); + uint32_t presentModeCount; + VULKAN_HPP_NAMESPACE::Result result; do { - result = 
d.vkGetPhysicalDeviceSurfaceFormats2KHR( - m_physicalDevice, reinterpret_cast( &surfaceInfo ), &surfaceFormatCount, nullptr ); - if ( ( result == VK_SUCCESS ) && surfaceFormatCount ) + result = static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( + m_physicalDevice, reinterpret_cast( &surfaceInfo ), &presentModeCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) { - structureChains.resize( surfaceFormatCount ); - surfaceFormats.resize( surfaceFormatCount ); - for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) - { - surfaceFormats[i].pNext = structureChains[i].template get().pNext; - } - result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, - reinterpret_cast( &surfaceInfo ), - &surfaceFormatCount, - reinterpret_cast( surfaceFormats.data() ) ); + presentModes.resize( presentModeCount ); + result = static_cast( + d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, + reinterpret_cast( &surfaceInfo ), + &presentModeCount, + reinterpret_cast( presentModes.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); - VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); - if ( surfaceFormatCount < surfaceFormats.size() ) - { - structureChains.resize( surfaceFormatCount ); - } - for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) { - structureChains[i].template get() = surfaceFormats[i]; + presentModes.resize( presentModeCount ); } - return createResultValueType( static_cast( result ), structureChains ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_KHR_get_display_properties2 === +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkAcquireFullScreenExclusiveModeEXT( static_cast( m_device ), static_cast( swapchain ) ) ); + } +# else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquireFullScreenExclusiveModeEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
PhysicalDevice::getDisplayProperties2KHR( uint32_t * pPropertyCount, - VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkReleaseFullScreenExclusiveModeEXT( static_cast( m_device ), static_cast( swapchain ) ) ); + } +# else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkReleaseFullScreenExclusiveModeEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - std::vector properties; - uint32_t propertyCount; - VkResult result; - do - { - result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) - { - properties.resize( propertyCount ); - result = - d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) - { - properties.resize( propertyCount ); - } - return createResultValueType( static_cast( result ), properties ); + return static_cast( d.vkGetDeviceGroupSurfacePresentModes2EXT( static_cast( m_device ), + reinterpret_cast( pSurfaceInfo ), + reinterpret_cast( pModes ) ) ); } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, Dispatch const & d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const { 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceGroupSurfacePresentModes2EXT && + "Function requires " ); +# endif - std::vector properties( displayProperties2KHRAllocator ); - uint32_t propertyCount; - VkResult result; - do - { - result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) - { - properties.resize( propertyCount ); - result = - d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) - { - properties.resize( propertyCount ); - } - return createResultValueType( static_cast( result ), properties ); + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetDeviceGroupSurfacePresentModes2EXT( + m_device, reinterpret_cast( &surfaceInfo ), reinterpret_cast( &modes ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( modes ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount, - VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkCreateHeadlessSurfaceEXT( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateHeadlessSurfaceEXT && "Function requires " ); +# endif - std::vector properties; - uint32_t propertyCount; - VkResult result; - do - { - result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) - { - properties.resize( propertyCount ); - 
result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( - m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) - { - properties.resize( propertyCount ); - } - return createResultValueType( static_cast( result ), properties ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateHeadlessSurfaceEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - PhysicalDevice::getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Instance::createHeadlessSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateHeadlessSurfaceEXT && "Function requires " ); +# endif - std::vector properties( displayPlaneProperties2KHRAllocator ); - uint32_t propertyCount; - VkResult result; - do - { - result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) - { - properties.resize( propertyCount ); - result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( - m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) - { - properties.resize( propertyCount ); - } - return createResultValueType( static_cast( result ), properties ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateHeadlessSurfaceEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_buffer_device_address === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, - uint32_t * pPropertyCount, - VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, - Dispatch const & d ) 
const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetDisplayModeProperties2KHR( - m_physicalDevice, static_cast( display ), pPropertyCount, reinterpret_cast( pProperties ) ) ); + return static_cast( + d.vkGetBufferDeviceAddressKHR( static_cast( m_device ), reinterpret_cast( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferDeviceAddressKHR && + "Function requires or or " ); +# endif - std::vector properties; - uint32_t propertyCount; - VkResult result; - do - { - result = d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) - { - properties.resize( propertyCount ); - result = d.vkGetDisplayModeProperties2KHR( - m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) - { - properties.resize( propertyCount ); - } - return createResultValueType( static_cast( result ), properties ); - } - - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, - DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, - Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VkDeviceAddress result = d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast( &info ) ); - std::vector properties( displayModeProperties2KHRAllocator ); - uint32_t propertyCount; - VkResult result; - do - { - result = d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) - { - properties.resize( propertyCount ); - result = d.vkGetDisplayModeProperties2KHR( - m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) - { - properties.resize( propertyCount ); - } - return createResultValueType( static_cast( result ), properties ); + return static_cast( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - 
PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo, - VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, - reinterpret_cast( pDisplayPlaneInfo ), - reinterpret_cast( pCapabilities ) ) ); + return d.vkGetBufferOpaqueCaptureAddressKHR( static_cast( m_device ), reinterpret_cast( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d ) const + VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferOpaqueCaptureAddressKHR && + "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities; - VkResult result = d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, - reinterpret_cast( &displayPlaneInfo ), - reinterpret_cast( &capabilities ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); + uint64_t result = d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast( &info ) ); - return createResultValueType( static_cast( result ), capabilities ); + return result; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#if defined( VK_USE_PLATFORM_IOS_MVK ) - //=== VK_MVK_ios_surface === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateIOSSurfaceMVK( m_instance, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pSurface ) ) ); + return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( static_cast( m_device ), + reinterpret_cast( pInfo ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceMemoryOpaqueCaptureAddressKHR && + "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = - d.vkCreateIOSSurfaceMVK( m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" ); + uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast( &info ) ); - return createResultValueType( static_cast( result ), surface ); + return result; } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_line_rasterization === -# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Instance::createIOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_INLINE void + CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLineStippleEXT( static_cast( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); + } - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = - d.vkCreateIOSSurfaceMVK( m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique" ); + //=== VK_EXT_host_query_reset === - return createResultValueType( - static_cast( result ), - UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + template + VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkResetQueryPoolEXT( static_cast( m_device ), static_cast( queryPool ), firstQuery, queryCount ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_IOS_MVK*/ -#if defined( VK_USE_PLATFORM_MACOS_MVK ) - //=== VK_MVK_macos_surface === + //=== VK_EXT_extended_dynamic_state === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateMacOSSurfaceMVK( m_instance, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pSurface ) ) ); + d.vkCmdSetCullModeEXT( static_cast( m_commandBuffer ), static_cast( cullMode ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_INLINE void 
CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetFrontFaceEXT( static_cast( m_commandBuffer ), static_cast( frontFace ) ); + } - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = - d.vkCreateMacOSSurfaceMVK( m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" ); - - return createResultValueType( static_cast( result ), surface ); + template + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetPrimitiveTopologyEXT( static_cast( m_commandBuffer ), static_cast( primitiveTopology ) ); } -# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Instance::createMacOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetViewportWithCountEXT( static_cast( m_commandBuffer ), viewportCount, reinterpret_cast( pViewports ) ); + } - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = - d.vkCreateMacOSSurfaceMVK( m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique" ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetViewportWithCountEXT && + "Function requires or or " ); +# endif - return createResultValueType( - static_cast( result ), - UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size(), reinterpret_cast( viewports.data() ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ - - //=== VK_EXT_debug_utils === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( pNameInfo ) ) ); + d.vkCmdSetScissorWithCountEXT( static_cast( m_commandBuffer ), scissorCount, 
reinterpret_cast( pScissors ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetScissorWithCountEXT && + "Function requires or or " ); +# endif - VkResult result = d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" ); - - return createResultValueType( static_cast( result ) ); + d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size(), reinterpret_cast( scissors.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); + d.vkCmdBindVertexBuffers2EXT( static_cast( m_commandBuffer ), + firstBinding, + bindingCount, + reinterpret_cast( pBuffers ), + reinterpret_cast( pOffsets ), + reinterpret_cast( pSizes ), + reinterpret_cast( pStrides ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes, + VULKAN_HPP_NAMESPACE::ArrayProxy const & strides, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindVertexBuffers2EXT && + "Function requires or or " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); + VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != 
sizes.size()" ); + } + if ( !strides.empty() && buffers.size() != strides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - VkResult result = d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" ); - - return createResultValueType( static_cast( result ) ); + d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ), + reinterpret_cast( sizes.data() ), + reinterpret_cast( strides.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast( pLabelInfo ) ); + d.vkCmdSetDepthTestEnableEXT( static_cast( m_commandBuffer ), static_cast( depthTestEnable ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast( &labelInfo ) ); + d.vkCmdSetDepthWriteEnableEXT( static_cast( m_commandBuffer ), static_cast( depthWriteEnable ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkQueueEndDebugUtilsLabelEXT( m_queue ); + d.vkCmdSetDepthCompareOpEXT( static_cast( m_commandBuffer ), static_cast( depthCompareOp ) ); } template - VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast( pLabelInfo ) ); + d.vkCmdSetDepthBoundsTestEnableEXT( static_cast( m_commandBuffer ), static_cast( depthBoundsTestEnable ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilTestEnableEXT( static_cast( m_commandBuffer ), static_cast( stencilTestEnable ) ); + } - d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast( &labelInfo ) ); + template + VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetStencilOpEXT( static_cast( m_commandBuffer ), + static_cast( faceMask ), + static_cast( failOp ), + static_cast( passOp ), + static_cast( depthFailOp ), + static_cast( compareOp ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_deferred_host_operations === template - VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( pLabelInfo ) ); + return static_cast( d.vkCreateDeferredOperationKHR( static_cast( m_device ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pDeferredOperation ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createDeferredOperationKHR( Optional allocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDeferredOperationKHR && "Function requires " ); +# endif - d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( &labelInfo ) ); + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDeferredOperationKHR( + m_device, + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &deferredOperation ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( deferredOperation ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createDeferredOperationKHRUnique( Optional allocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDeferredOperationKHR && "Function requires " ); +# endif + + 
VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDeferredOperationKHR( + m_device, + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &deferredOperation ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, + UniqueHandle( deferredOperation, detail::ObjectDestroy( *this, allocator, d ) ) ); } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( pLabelInfo ) ); + d.vkDestroyDeferredOperationKHR( + static_cast( m_device ), static_cast( operation ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDeferredOperationKHR && "Function requires " ); +# endif - d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( &labelInfo ) ); + d.vkDestroyDeferredOperationKHR( + m_device, + static_cast( operation ), + reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDebugUtilsMessengerEXT( m_instance, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pMessenger ) ) ); + d.vkDestroyDeferredOperationKHR( + static_cast( m_device ), static_cast( operation ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR 
operation, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyDeferredOperationKHR && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; - VkResult result = d.vkCreateDebugUtilsMessengerEXT( - m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &messenger ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" ); - - return createResultValueType( static_cast( result ), messenger ); + d.vkDestroyDeferredOperationKHR( + m_device, + static_cast( operation ), + reinterpret_cast( static_cast( allocator ) ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Instance::createDebugUtilsMessengerEXTUnique( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; - VkResult result = d.vkCreateDebugUtilsMessengerEXT( - m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &messenger ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique" ); - - return createResultValueType( - static_cast( result ), - UniqueHandle( messenger, ObjectDestroy( *this, allocator, d ) ) ); + return d.vkGetDeferredOperationMaxConcurrencyKHR( static_cast( m_device ), static_cast( operation ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyDebugUtilsMessengerEXT( - m_instance, static_cast( messenger ), reinterpret_cast( pAllocator ) ); + return static_cast( d.vkGetDeferredOperationResultKHR( static_cast( m_device ), static_cast( operation ) ) ); } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#else template - VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeferredOperationResultKHR && "Function requires " ); +# 
endif - d.vkDestroyDebugUtilsMessengerEXT( - m_instance, - static_cast( messenger ), - reinterpret_cast( static_cast( allocator ) ) ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetDeferredOperationResultKHR( m_device, static_cast( operation ) ) ); + + return static_cast( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyDebugUtilsMessengerEXT( - m_instance, static_cast( messenger ), reinterpret_cast( pAllocator ) ); + return static_cast( d.vkDeferredOperationJoinKHR( static_cast( m_device ), static_cast( operation ) ) ); } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#else template - VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDeferredOperationJoinKHR && "Function requires " ); +# endif - d.vkDestroyDebugUtilsMessengerEXT( - m_instance, - static_cast( messenger ), - reinterpret_cast( static_cast( allocator ) ) ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkDeferredOperationJoinKHR( m_device, static_cast( operation ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } ); + + return static_cast( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + //=== VK_KHR_pipeline_executable_properties === + template - VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, - VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, - const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo, + uint32_t * pExecutableCount, + VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkSubmitDebugUtilsMessageEXT( m_instance, - static_cast( messageSeverity ), - static_cast( messageTypes ), - reinterpret_cast( pCallbackData ) ); + return static_cast( d.vkGetPipelineExecutablePropertiesKHR( static_cast( m_device ), + reinterpret_cast( pPipelineInfo ), + pExecutableCount, + reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( 
VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, - VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, - const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineExecutablePropertiesKHR && + "Function requires " ); +# endif - d.vkSubmitDebugUtilsMessageEXT( m_instance, - static_cast( messageSeverity ), - static_cast( messageTypes ), - reinterpret_cast( &callbackData ) ); + std::vector properties; + uint32_t executableCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast( &pipelineInfo ), &executableCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && executableCount ) + { + properties.resize( executableCount ); + result = static_cast( + d.vkGetPipelineExecutablePropertiesKHR( m_device, + reinterpret_cast( &pipelineInfo ), + &executableCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); + VULKAN_HPP_ASSERT( executableCount <= properties.size() ); + if ( executableCount < properties.size() ) + { + properties.resize( executableCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_ANDROID_external_memory_android_hardware_buffer === + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, + PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineExecutablePropertiesKHR && + "Function requires " ); +# endif + + std::vector properties( + pipelineExecutablePropertiesKHRAllocator ); + uint32_t executableCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast( &pipelineInfo ), &executableCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && executableCount ) + { + properties.resize( executableCount ); + result = static_cast( + d.vkGetPipelineExecutablePropertiesKHR( m_device, + reinterpret_cast( &pipelineInfo ), + &executableCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); + VULKAN_HPP_ASSERT( executableCount <= properties.size() ); + if ( executableCount < properties.size() ) + { + properties.resize( executableCount ); + } + return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer * buffer, - VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pStatisticCount, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkGetPipelineExecutableStatisticsKHR( static_cast( m_device ), + reinterpret_cast( pExecutableInfo ), + pStatisticCount, + reinterpret_cast( pStatistics ) ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableStatisticsKHR && + "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties; - VkResult result = - d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast( &properties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); - - return createResultValueType( static_cast( result ), properties ); + std::vector statistics; + uint32_t statisticCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( + m_device, reinterpret_cast( &executableInfo ), &statisticCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && statisticCount ) + { + statistics.resize( statisticCount ); + result = static_cast( + d.vkGetPipelineExecutableStatisticsKHR( m_device, + reinterpret_cast( &executableInfo ), + &statisticCount, + reinterpret_cast( statistics.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); + VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); + if ( statisticCount < statistics.size() ) + { + statistics.resize( statisticCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( statistics ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + 
Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, + PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableStatisticsKHR && + "Function requires " ); +# endif - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties = - structureChain.template get(); - VkResult result = - d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast( &properties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); - - return createResultValueType( static_cast( result ), structureChain ); + std::vector statistics( + pipelineExecutableStatisticKHRAllocator ); + uint32_t statisticCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( + m_device, reinterpret_cast( &executableInfo ), &statisticCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && statisticCount ) + { + statistics.resize( statisticCount ); + result = static_cast( + d.vkGetPipelineExecutableStatisticsKHR( m_device, + reinterpret_cast( &executableInfo ), + &statisticCount, + reinterpret_cast( statistics.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); + VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); + if ( statisticCount < statistics.size() ) + { + statistics.resize( statisticCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( statistics ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo, - struct AHardwareBuffer ** pBuffer, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pInternalRepresentationCount, + VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast( pInfo ), pBuffer ) ); + d.vkGetPipelineExecutableInternalRepresentationsKHR( static_cast( m_device ), + reinterpret_cast( pExecutableInfo ), + pInternalRepresentationCount, + reinterpret_cast( pInternalRepresentations ) ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d ) const +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + Device::getPipelineExecutableInternalRepresentationsKHR( const 
VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableInternalRepresentationsKHR && + "Function requires " ); +# endif + + std::vector + internalRepresentations; + uint32_t internalRepresentationCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, reinterpret_cast( &executableInfo ), &internalRepresentationCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && internalRepresentationCount ) + { + internalRepresentations.resize( internalRepresentationCount ); + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &internalRepresentationCount, + reinterpret_cast( internalRepresentations.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); + VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); + if ( internalRepresentationCount < internalRepresentations.size() ) + { + internalRepresentations.resize( internalRepresentationCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( internalRepresentations ) ); + } - struct AHardwareBuffer * buffer; - VkResult result = - d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast( &info ), &buffer ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" ); + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + Device::getPipelineExecutableInternalRepresentationsKHR( + const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, + PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineExecutableInternalRepresentationsKHR && + "Function requires " ); +# endif - return createResultValueType( static_cast( result ), buffer ); + std::vector + internalRepresentations( pipelineExecutableInternalRepresentationKHRAllocator ); + uint32_t internalRepresentationCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, reinterpret_cast( &executableInfo ), &internalRepresentationCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && internalRepresentationCount ) + { + internalRepresentations.resize( internalRepresentationCount ); + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( + m_device, + reinterpret_cast( &executableInfo ), + &internalRepresentationCount, + reinterpret_cast( internalRepresentations.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); + VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); 
+ if ( internalRepresentationCount < internalRepresentations.size() ) + { + internalRepresentations.resize( internalRepresentationCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( internalRepresentations ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_EXT_sample_locations === + //=== VK_EXT_host_image_copy === template - VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo * pCopyMemoryToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast( pSampleLocationsInfo ) ); + return static_cast( + d.vkCopyMemoryToImageEXT( static_cast( m_device ), reinterpret_cast( pCopyMemoryToImageInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyMemoryToImageEXT && "Function requires or " ); +# endif - d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast( &sampleLocationsInfo ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast( ©MemoryToImageInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, - VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo * pCopyImageToMemoryInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceMultisamplePropertiesEXT( - m_physicalDevice, static_cast( samples ), reinterpret_cast( pMultisampleProperties ) ); + return static_cast( + d.vkCopyImageToMemoryEXT( static_cast( m_device ), reinterpret_cast( pCopyImageToMemoryInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT - PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::copyImageToMemoryEXT( const 
VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyImageToMemoryEXT && "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties; - d.vkGetPhysicalDeviceMultisamplePropertiesEXT( - m_physicalDevice, static_cast( samples ), reinterpret_cast( &multisampleProperties ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast( ©ImageToMemoryInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" ); - return multisampleProperties; + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_KHR_get_memory_requirements2 === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, - VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo * pCopyImageToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageMemoryRequirements2KHR( - m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + return static_cast( + d.vkCopyImageToImageEXT( static_cast( m_device ), reinterpret_cast( pCopyImageToImageInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 - Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; - d.vkGetImageMemoryRequirements2KHR( - m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); - - return memoryRequirements; - } - - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyImageToImageEXT && "Function requires or " ); +# endif - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); - d.vkGetImageMemoryRequirements2KHR( - m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCopyImageToImageEXT( m_device, reinterpret_cast( ©ImageToImageInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" ); - return structureChain; + return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, - VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::transitionImageLayoutEXT( uint32_t transitionCount, + const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo * pTransitions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetBufferMemoryRequirements2KHR( - m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + return static_cast( d.vkTransitionImageLayoutEXT( + static_cast( m_device ), transitionCount, reinterpret_cast( pTransitions ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 - Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; - d.vkGetBufferMemoryRequirements2KHR( - m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); - - return memoryRequirements; - } - - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkTransitionImageLayoutEXT && "Function requires or " ); +# endif - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); - d.vkGetBufferMemoryRequirements2KHR( - m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkTransitionImageLayoutEXT( m_device, transitions.size(), reinterpret_cast( transitions.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" ); - return structureChain; + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, - uint32_t * pSparseMemoryRequirementCount, - VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageSparseMemoryRequirements2KHR( m_device, - reinterpret_cast( pInfo ), - pSparseMemoryRequirementCount, - reinterpret_cast( pSparseMemoryRequirements ) ); + d.vkGetImageSubresourceLayout2EXT( static_cast( m_device ), + static_cast( image ), + reinterpret_cast( pSubresource ), + reinterpret_cast( pLayout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 Device::getImageSubresourceLayout2EXT( + VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkGetImageSubresourceLayout2EXT && + "Function requires or or or " ); +# endif - std::vector sparseMemoryRequirements; - uint32_t sparseMemoryRequirementCount; - d.vkGetImageSparseMemoryRequirements2KHR( - m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); - sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); - d.vkGetImageSparseMemoryRequirements2KHR( m_device, - reinterpret_cast( &info ), - &sparseMemoryRequirementCount, - reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; + d.vkGetImageSubresourceLayout2EXT( m_device, + static_cast( image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); - VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); - if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) - { - sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); - } - return sparseMemoryRequirements; + return layout; } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, - SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, - Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::getImageSubresourceLayout2EXT( + VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkGetImageSubresourceLayout2EXT && + "Function requires or or or " ); +# endif - std::vector sparseMemoryRequirements( - sparseImageMemoryRequirements2Allocator ); - uint32_t sparseMemoryRequirementCount; - d.vkGetImageSparseMemoryRequirements2KHR( - m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); - sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); - d.vkGetImageSparseMemoryRequirements2KHR( m_device, - reinterpret_cast( &info ), - &sparseMemoryRequirementCount, - reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); + d.vkGetImageSubresourceLayout2EXT( m_device, + static_cast( 
image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); - VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); - if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) - { - sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); - } - return sparseMemoryRequirements; + return structureChain; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_KHR_acceleration_structure === + //=== VK_KHR_map_memory2 === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfo * pMemoryMapInfo, + void ** ppData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateAccelerationStructureKHR( m_device, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pAccelerationStructure ) ) ); + return static_cast( d.vkMapMemory2KHR( static_cast( m_device ), reinterpret_cast( pMemoryMapInfo ), ppData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkMapMemory2KHR && "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; - VkResult result = d.vkCreateAccelerationStructureKHR( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &accelerationStructure ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" ); + void * pData; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkMapMemory2KHR( m_device, reinterpret_cast( &memoryMapInfo ), &pData ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2KHR" ); - return createResultValueType( static_cast( result ), accelerationStructure ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pData ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::createAccelerationStructureKHRUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo * pMemoryUnmapInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION 
);
+    return static_cast<Result>( d.vkUnmapMemory2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryUnmapInfo *>( pMemoryUnmapInfo ) ) );
+  }

-    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
-    VkResult result = d.vkCreateAccelerationStructureKHR(
-      m_device,
-      reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
-      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
-      reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) );
-    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique" );
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo,
+                                                                                  Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkUnmapMemory2KHR && "Function <vkUnmapMemory2KHR> requires <VK_KHR_map_memory2> or <VK_VERSION_1_4>" );
+# endif

-    return createResultValueType(
-      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
-      UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>( accelerationStructure, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+    VULKAN_HPP_NAMESPACE::Result result =
+      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkUnmapMemory2KHR( m_device, reinterpret_cast<const VkMemoryUnmapInfo *>( &memoryUnmapInfo ) ) );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2KHR" );
+
+    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
   }
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_swapchain_maintenance1 ===

   template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
-                                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
-                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT * pReleaseInfo,
+                                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkDestroyAccelerationStructureKHR(
-      m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+    return static_cast<Result>(
+      d.vkReleaseSwapchainImagesEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( pReleaseInfo ) ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
-                                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
-                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type
+    Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo, Dispatch const & d ) const
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkReleaseSwapchainImagesEXT && "Function <vkReleaseSwapchainImagesEXT> requires <VK_EXT_swapchain_maintenance1>" );
+# endif

-    d.vkDestroyAccelerationStructureKHR(
-      m_device,
-      static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
-      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+      d.vkReleaseSwapchainImagesEXT( m_device, reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( &releaseInfo ) ) );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseSwapchainImagesEXT" );
+
+    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
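  /* -------------------------------------------------------------------------
     Editor's note: illustrative usage sketch only -- not part of the generated
     header and not part of this diff. It shows how the VK_KHR_map_memory2
     wrappers touched above (Device::mapMemory2KHR / Device::unmapMemory2KHR)
     might be driven from application code. The names `device`, `memory` and
     `size` are assumptions: an already created vk::Device, a host-visible and
     host-coherent vk::DeviceMemory allocation, and its size in bytes. The
     default exception-enabled vulkan.hpp configuration is assumed, so the
     enhanced-mode overloads throw on failure instead of returning a Result.

       #include <cstring>
       #include <vulkan/vulkan.hpp>

       void uploadBytes( vk::Device device, vk::DeviceMemory memory, const void * src, vk::DeviceSize size )
       {
         // Describe the range to map; sType is filled in by the C++ wrapper struct.
         vk::MemoryMapInfo mapInfo{};
         mapInfo.memory = memory;
         mapInfo.offset = 0;
         mapInfo.size   = size;  // VK_WHOLE_SIZE is also valid here

         // The enhanced-mode mapMemory2KHR overload returns the mapped pointer
         // directly; resultCheck inside the wrapper throws on failure.
         void * dst = device.mapMemory2KHR( mapInfo );
         std::memcpy( dst, src, static_cast<size_t>( size ) );

         // Unmap through the 2-variant as well; returns void in this mode.
         vk::MemoryUnmapInfo unmapInfo{};
         unmapInfo.memory = memory;
         device.unmapMemory2KHR( unmapInfo );
       }
     ------------------------------------------------------------------------- */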
//=== VK_NV_device_generated_commands === template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyAccelerationStructureKHR( - m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); + d.vkGetGeneratedCommandsMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsNV && + "Function requires " ); +# endif - d.vkDestroyAccelerationStructureKHR( - m_device, - static_cast( accelerationStructure ), - reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); - template - VULKAN_HPP_INLINE void - CommandBuffer::buildAccelerationStructuresKHR( uint32_t infoCount, - const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, - const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer, - infoCount, - reinterpret_cast( pInfos ), - reinterpret_cast( ppBuildRangeInfos ) ); + return memoryRequirements; } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( - VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, - VULKAN_HPP_NAMESPACE::ArrayProxy const & pBuildRangeInfos, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); -# else - if ( infos.size() != pBuildRangeInfos.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); - } -# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ 
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsNV && + "Function requires " ); +# endif - d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer, - infos.size(), - reinterpret_cast( infos.data() ), - reinterpret_cast( pBuildRangeInfos.data() ) ); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( uint32_t infoCount, - const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, - const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses, - const uint32_t * pIndirectStrides, - const uint32_t * const * ppMaxPrimitiveCounts, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer, - infoCount, - reinterpret_cast( pInfos ), - reinterpret_cast( pIndirectDeviceAddresses ), - pIndirectStrides, - ppMaxPrimitiveCounts ); + d.vkCmdPreprocessGeneratedCommandsNV( static_cast( m_commandBuffer ), + reinterpret_cast( pGeneratedCommandsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( - VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, - VULKAN_HPP_NAMESPACE::ArrayProxy const & indirectDeviceAddresses, - VULKAN_HPP_NAMESPACE::ArrayProxy const & indirectStrides, - VULKAN_HPP_NAMESPACE::ArrayProxy const & pMaxPrimitiveCounts, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() ); - VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() ); - VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() ); -# else - if ( infos.size() != indirectDeviceAddresses.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" ); - } - if ( infos.size() != indirectStrides.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" ); - } - if ( infos.size() != pMaxPrimitiveCounts.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" ); - } -# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPreprocessGeneratedCommandsNV && "Function requires " ); +# endif - d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer, - 
infos.size(), - reinterpret_cast( infos.data() ), - reinterpret_cast( indirectDeviceAddresses.data() ), - indirectStrides.data(), - pMaxPrimitiveCounts.data() ); + d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast( &generatedCommandsInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - uint32_t infoCount, - const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, - const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkBuildAccelerationStructuresKHR( m_device, - static_cast( deferredOperation ), - infoCount, - reinterpret_cast( pInfos ), - reinterpret_cast( ppBuildRangeInfos ) ) ); + d.vkCmdExecuteGeneratedCommandsNV( static_cast( m_commandBuffer ), + static_cast( isPreprocessed ), + reinterpret_cast( pGeneratedCommandsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::buildAccelerationStructuresKHR( - VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, - VULKAN_HPP_NAMESPACE::ArrayProxy const & pBuildRangeInfos, - Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); -# else - if ( infos.size() != pBuildRangeInfos.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); - } -# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdExecuteGeneratedCommandsNV && "Function requires " ); +# endif - VkResult result = - d.vkBuildAccelerationStructuresKHR( m_device, - static_cast( deferredOperation ), - infos.size(), - reinterpret_cast( infos.data() ), - reinterpret_cast( pBuildRangeInfos.data() ) ); - resultCheck( - static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); - - return static_cast( result ); + d.vkCmdExecuteGeneratedCommandsNV( + m_commandBuffer, static_cast( isPreprocessed ), reinterpret_cast( &generatedCommandsInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, - Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t groupIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCopyAccelerationStructureKHR( - m_device, static_cast( deferredOperation ), reinterpret_cast( pInfo ) ) ); + d.vkCmdBindPipelineShaderGroupNV( + static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( pipeline ), groupIndex ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result - Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VkResult result = d.vkCopyAccelerationStructureKHR( - m_device, static_cast( deferredOperation ), reinterpret_cast( &info ) ); - resultCheck( - static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); - - return static_cast( result ); + return static_cast( d.vkCreateIndirectCommandsLayoutNV( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pIndirectCommandsLayout ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCopyAccelerationStructureToMemoryKHR( - m_device, static_cast( deferredOperation ), reinterpret_cast( pInfo ) ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateIndirectCommandsLayoutNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectCommandsLayout ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( indirectCommandsLayout ) ); } -#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result - Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createIndirectCommandsLayoutNVUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutNV && "Function requires " ); +# endif - VkResult result = d.vkCopyAccelerationStructureToMemoryKHR( - m_device, static_cast( deferredOperation ), reinterpret_cast( &info ) ); - resultCheck( - static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateIndirectCommandsLayoutNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectCommandsLayout ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique" ); - return static_cast( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + indirectCommandsLayout, detail::ObjectDestroy( *this, allocator, d ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCopyMemoryToAccelerationStructureKHR( - m_device, static_cast( deferredOperation ), reinterpret_cast( pInfo ) ) ); + d.vkDestroyIndirectCommandsLayoutNV( static_cast( m_device ), + static_cast( indirectCommandsLayout ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result - Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, - Dispatch const & d ) const + VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutNV && "Function requires " ); +# endif - VkResult result = d.vkCopyMemoryToAccelerationStructureKHR( - m_device, static_cast( deferredOperation ), reinterpret_cast( &info ) ); - resultCheck( - static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); - - return static_cast( result ); + d.vkDestroyIndirectCommandsLayoutNV( + m_device, + static_cast( indirectCommandsLayout ), + reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, - const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, - VULKAN_HPP_NAMESPACE::QueryType queryType, - size_t dataSize, - void * pData, - size_t stride, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, - accelerationStructureCount, - reinterpret_cast( pAccelerationStructures ), - static_cast( queryType ), - dataSize, - pData, - stride ) ); + d.vkDestroyIndirectCommandsLayoutNV( static_cast( m_device ), + static_cast( indirectCommandsLayout ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::writeAccelerationStructuresPropertiesKHR( - VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, - VULKAN_HPP_NAMESPACE::QueryType queryType, - size_t dataSize, - size_t stride, - Dispatch const & d ) const + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutNV && "Function requires " ); +# endif - VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); - std::vector data( dataSize / sizeof( DataType ) ); - VkResult result = d.vkWriteAccelerationStructuresPropertiesKHR( m_device, - accelerationStructures.size(), - reinterpret_cast( accelerationStructures.data() ), - static_cast( queryType ), - data.size() * sizeof( DataType ), - reinterpret_cast( data.data() ), - stride ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" ); - - return createResultValueType( static_cast( result ), data ); + d.vkDestroyIndirectCommandsLayoutNV( + m_device, + static_cast( indirectCommandsLayout ), + reinterpret_cast( static_cast( allocator ) ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::writeAccelerationStructuresPropertyKHR( - VULKAN_HPP_NAMESPACE::ArrayProxy const & 
accelerationStructures, - VULKAN_HPP_NAMESPACE::QueryType queryType, - size_t stride, - Dispatch const & d ) const + //=== VK_EXT_depth_bias_control === + + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT * pDepthBiasInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthBias2EXT( static_cast( m_commandBuffer ), reinterpret_cast( pDepthBiasInfo ) ); + } - DataType data; - VkResult result = d.vkWriteAccelerationStructuresPropertiesKHR( m_device, - accelerationStructures.size(), - reinterpret_cast( accelerationStructures.data() ), - static_cast( queryType ), - sizeof( DataType ), - reinterpret_cast( &data ), - stride ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetDepthBias2EXT && "Function requires " ); +# endif - return createResultValueType( static_cast( result ), data ); + d.vkCmdSetDepthBias2EXT( m_commandBuffer, reinterpret_cast( &depthBiasInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_EXT_acquire_drm_display === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, + VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( pInfo ) ); + return static_cast( d.vkAcquireDrmDisplayEXT( static_cast( m_physicalDevice ), drmFd, static_cast( display ) ) ); } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#else template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquireDrmDisplayEXT && "Function requires " ); +# endif - d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( &info ) ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast( display ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * 
pInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, + uint32_t connectorId, + VULKAN_HPP_NAMESPACE::DisplayKHR * display, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast( pInfo ) ); + return static_cast( + d.vkGetDrmDisplayEXT( static_cast( m_physicalDevice ), drmFd, connectorId, reinterpret_cast( display ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDrmDisplayEXT && "Function requires " ); +# endif - d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast( &info ) ); + VULKAN_HPP_NAMESPACE::DisplayKHR display; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast( &display ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( display ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDrmDisplayEXTUnique( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( pInfo ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDrmDisplayEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DisplayKHR display; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast( &display ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( display, detail::ObjectRelease( *this, d ) ) ); } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_private_data === -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PrivateDataSlot * 
pPrivateDataSlot, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast( &info ) ); + return static_cast( d.vkCreatePrivateDataSlotEXT( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPrivateDataSlot ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast( pInfo ) ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlotEXT && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreatePrivateDataSlotEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &privateDataSlot ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( privateDataSlot ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress - Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createPrivateDataSlotEXTUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePrivateDataSlotEXT && "Function requires or " ); +# endif - VkDeviceAddress result = - d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast( &info ) ); + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreatePrivateDataSlotEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &privateDataSlot ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique" ); - return static_cast( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, + UniqueHandle( privateDataSlot, detail::ObjectDestroy( *this, allocator, d ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void - CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, - const 
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, - VULKAN_HPP_NAMESPACE::QueryType queryType, - VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t firstQuery, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, - accelerationStructureCount, - reinterpret_cast( pAccelerationStructures ), - static_cast( queryType ), - static_cast( queryPool ), - firstQuery ); + d.vkDestroyPrivateDataSlotEXT( + static_cast( m_device ), static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( - VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, - VULKAN_HPP_NAMESPACE::QueryType queryType, - VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t firstQuery, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, - accelerationStructures.size(), - reinterpret_cast( accelerationStructures.data() ), - static_cast( queryType ), - static_cast( queryPool ), - firstQuery ); + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPrivateDataSlotEXT && "Function requires or " ); +# endif + + d.vkDestroyPrivateDataSlotEXT( + m_device, + static_cast( privateDataSlot ), + reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo, - VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, - reinterpret_cast( pVersionInfo ), - reinterpret_cast( pCompatibility ) ); + return static_cast( d.vkSetPrivateDataEXT( + static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#else template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR - Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE typename ResultValueType::type Device::setPrivateDataEXT( 
VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetPrivateDataEXT && "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility; - d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, - reinterpret_cast( &versionInfo ), - reinterpret_cast( &compatibility ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkSetPrivateDataEXT( m_device, static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" ); - return compatibility; + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, - const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo, - const uint32_t * pMaxPrimitiveCounts, - VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetAccelerationStructureBuildSizesKHR( m_device, - static_cast( buildType ), - reinterpret_cast( pBuildInfo ), - pMaxPrimitiveCounts, - reinterpret_cast( pSizeInfo ) ); + d.vkGetPrivateDataEXT( + static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), pData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR - Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, - const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, - VULKAN_HPP_NAMESPACE::ArrayProxy const & maxPrimitiveCounts, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount ); -# else - if ( maxPrimitiveCounts.size() != buildInfo.geometryCount ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" ); - } -# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPrivateDataEXT && "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo; - d.vkGetAccelerationStructureBuildSizesKHR( m_device, - static_cast( buildType ), - reinterpret_cast( &buildInfo 
), - maxPrimitiveCounts.data(), - reinterpret_cast( &sizeInfo ) ); + uint64_t data; + d.vkGetPrivateDataEXT( m_device, static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), &data ); - return sizeInfo; + return data; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_KHR_sampler_ycbcr_conversion === + //=== VK_KHR_video_encode_queue === template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo, + VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateSamplerYcbcrConversionKHR( m_device, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pYcbcrConversion ) ) ); + return static_cast( + d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( pQualityLevelInfo ), + reinterpret_cast( pQualityLevelProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR && + "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; - VkResult result = d.vkCreateSamplerYcbcrConversionKHR( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &ycbcrConversion ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" ); + VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR qualityLevelProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice, + reinterpret_cast( &qualityLevelInfo ), + reinterpret_cast( &qualityLevelProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); - return createResultValueType( static_cast( result ), ycbcrConversion ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( qualityLevelProperties ) ); } -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::createSamplerYcbcrConversionKHRUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, - 
Optional allocator, - Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR && + "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; - VkResult result = d.vkCreateSamplerYcbcrConversionKHR( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &ycbcrConversion ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique" ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR & qualityLevelProperties = + structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice, + reinterpret_cast( &qualityLevelInfo ), + reinterpret_cast( &qualityLevelProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); - return createResultValueType( - static_cast( result ), - UniqueHandle( ycbcrConversion, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR * pVideoSessionParametersInfo, + VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR * pFeedbackInfo, + size_t * pDataSize, + void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroySamplerYcbcrConversionKHR( - m_device, static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); + return static_cast( + d.vkGetEncodedVideoSessionParametersKHR( static_cast( m_device ), + reinterpret_cast( pVideoSessionParametersInfo ), + reinterpret_cast( pFeedbackInfo ), + pDataSize, + pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>>::type + Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function 
requires " ); +# endif - d.vkDestroySamplerYcbcrConversionKHR( - m_device, - static_cast( ycbcrConversion ), - reinterpret_cast( static_cast( allocator ) ) ); + std::pair> data_; + VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first; + std::vector & data = data_.second; + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - //=== VK_KHR_bind_memory2 === + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>>::type + Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function requires " ); +# endif + + std::pair> data_( + std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) ); + VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first; + std::vector & data = data_.second; + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, - const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::bindBufferMemory2KHR( 
VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, - Dispatch const & d ) const + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, std::vector>>::type + Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function requires " ); +# endif - VkResult result = d.vkBindBufferMemory2KHR( m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" ); + std::pair, std::vector> data_; + VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = + data_.first.template get(); + std::vector & data = data_.second; + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType, std::vector>>::type + Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetEncodedVideoSessionParametersKHR && "Function requires " ); +# endif + + std::pair, std::vector> data_( + std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) ); + VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = + data_.first.template get(); + std::vector & data = data_.second; + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( + d.vkGetEncodedVideoSessionParametersKHR( m_device, + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::getEncodedVideoSessionParametersKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, - const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + d.vkCmdEncodeVideoKHR( static_cast( m_commandBuffer ), reinterpret_cast( pEncodeInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdEncodeVideoKHR && "Function requires " ); +# endif - VkResult result = d.vkBindImageMemory2KHR( m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" ); - - return createResultValueType( static_cast( result ) ); + d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast( &encodeInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_EXT_image_drm_format_modifier === +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( - VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CudaModuleNV * pModule, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetImageDrmFormatModifierPropertiesEXT( - m_device, static_cast( image ), reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkCreateCudaModuleNV( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pModule ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if 
( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCudaModuleNV && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties; - VkResult result = d.vkGetImageDrmFormatModifierPropertiesEXT( - m_device, static_cast( image ), reinterpret_cast( &properties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" ); + VULKAN_HPP_NAMESPACE::CudaModuleNV module; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateCudaModuleNV( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &module ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNV" ); - return createResultValueType( static_cast( result ), properties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( module ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_EXT_validation_cache === +# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createCudaModuleNVUnique( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateValidationCacheEXT( m_device, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pValidationCache ) ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCudaModuleNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::CudaModuleNV module; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateCudaModuleNV( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &module ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNVUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( module, detail::ObjectDestroy( *this, allocator, d ) ) ); } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + size_t * pCacheSize, + void * pCacheData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; - VkResult result = d.vkCreateValidationCacheEXT( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &validationCache ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING 
"::Device::createValidationCacheEXT" ); - - return createResultValueType( static_cast( result ), validationCache ); + return static_cast( d.vkGetCudaModuleCacheNV( static_cast( m_device ), static_cast( module ), pCacheSize, pCacheData ) ); } -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::createValidationCacheEXTUnique( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, - Optional allocator, - Dispatch const & d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetCudaModuleCacheNV && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; - VkResult result = d.vkCreateValidationCacheEXT( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &validationCache ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique" ); - - return createResultValueType( - static_cast( result ), - UniqueHandle( validationCache, ObjectDestroy( *this, allocator, d ) ) ); + std::vector cacheData; + size_t cacheSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetCudaModuleCacheNV( m_device, static_cast( module ), &cacheSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && cacheSize ) + { + cacheData.resize( cacheSize ); + result = static_cast( + d.vkGetCudaModuleCacheNV( m_device, static_cast( module ), &cacheSize, reinterpret_cast( cacheData.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" ); + VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() ); + if ( cacheSize < cacheData.size() ) + { + cacheData.resize( cacheSize ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( cacheData ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyValidationCacheEXT( - m_device, static_cast( validationCache ), reinterpret_cast( pAllocator ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetCudaModuleCacheNV && "Function requires " ); +# endif + + std::vector cacheData( uint8_tAllocator ); + size_t cacheSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetCudaModuleCacheNV( m_device, static_cast( module ), &cacheSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && cacheSize ) + { + cacheData.resize( cacheSize 
); + result = static_cast( + d.vkGetCudaModuleCacheNV( m_device, static_cast( module ), &cacheSize, reinterpret_cast( cacheData.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" ); + VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() ); + if ( cacheSize < cacheData.size() ) + { + cacheData.resize( cacheSize ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( cacheData ) ); } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CudaFunctionNV * pFunction, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - d.vkDestroyValidationCacheEXT( - m_device, - static_cast( validationCache ), - reinterpret_cast( static_cast( allocator ) ) ); + return static_cast( d.vkCreateCudaFunctionNV( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pFunction ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyValidationCacheEXT( - m_device, static_cast( validationCache ), reinterpret_cast( pAllocator ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCudaFunctionNV && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::CudaFunctionNV function; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateCudaFunctionNV( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &function ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( function ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createCudaFunctionNVUnique( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateCudaFunctionNV && "Function requires " ); +# endif - 
d.vkDestroyValidationCacheEXT( - m_device, - static_cast( validationCache ), - reinterpret_cast( static_cast( allocator ) ) ); + VULKAN_HPP_NAMESPACE::CudaFunctionNV function; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateCudaFunctionNV( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &function ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNVUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( function, detail::ObjectDestroy( *this, allocator, d ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, - uint32_t srcCacheCount, - const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkMergeValidationCachesEXT( - m_device, static_cast( dstCache ), srcCacheCount, reinterpret_cast( pSrcCaches ) ) ); + d.vkDestroyCudaModuleNV( + static_cast( m_device ), static_cast( module ), reinterpret_cast( pAllocator ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches, - Dispatch const & d ) const + VULKAN_HPP_INLINE void Device::destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCudaModuleNV && "Function requires " ); +# endif - VkResult result = d.vkMergeValidationCachesEXT( - m_device, static_cast( dstCache ), srcCaches.size(), reinterpret_cast( srcCaches.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" ); - - return createResultValueType( static_cast( result ) ); + d.vkDestroyCudaModuleNV( m_device, + static_cast( module ), + reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, - size_t * pDataSize, - void * pData, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), pDataSize, pData ) ); + d.vkDestroyCudaModuleNV( + static_cast( m_device ), static_cast( module ), 
reinterpret_cast( pAllocator ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCudaModuleNV && "Function requires " ); +# endif - std::vector data; - size_t dataSize; - VkResult result; - do - { - result = d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, nullptr ); - if ( ( result == VK_SUCCESS ) && dataSize ) - { - data.resize( dataSize ); - result = - d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, reinterpret_cast( data.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" ); - VULKAN_HPP_ASSERT( dataSize <= data.size() ); - if ( dataSize < data.size() ) - { - data.resize( dataSize ); - } - return createResultValueType( static_cast( result ), data ); + d.vkDestroyCudaModuleNV( m_device, + static_cast( module ), + reinterpret_cast( static_cast( allocator ) ) ); } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const + template + VULKAN_HPP_INLINE void Device::destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - std::vector data( uint8_tAllocator ); - size_t dataSize; - VkResult result; - do - { - result = d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, nullptr ); - if ( ( result == VK_SUCCESS ) && dataSize ) - { - data.resize( dataSize ); - result = - d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, reinterpret_cast( data.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" ); - VULKAN_HPP_ASSERT( dataSize <= data.size() ); - if ( dataSize < data.size() ) - { - data.resize( dataSize ); - } - return createResultValueType( static_cast( result ), data ); + d.vkDestroyCudaFunctionNV( + static_cast( m_device ), static_cast( function ), reinterpret_cast( pAllocator ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_NV_shading_rate_image === +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, - VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION 
); - d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast( imageView ), static_cast( imageLayout ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCudaFunctionNV && "Function requires " ); +# endif + + d.vkDestroyCudaFunctionNV( m_device, + static_cast( function ), + reinterpret_cast( static_cast( allocator ) ) ); } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, - uint32_t viewportCount, - const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetViewportShadingRatePaletteNV( - m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pShadingRatePalettes ) ); + d.vkDestroyCudaFunctionNV( + static_cast( m_device ), static_cast( function ), reinterpret_cast( pAllocator ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( - uint32_t firstViewport, - VULKAN_HPP_NAMESPACE::ArrayProxy const & shadingRatePalettes, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyCudaFunctionNV && "Function requires " ); +# endif - d.vkCmdSetViewportShadingRatePaletteNV( - m_commandBuffer, firstViewport, shadingRatePalettes.size(), reinterpret_cast( shadingRatePalettes.data() ) ); + d.vkDestroyCudaFunctionNV( m_device, + static_cast( function ), + reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, - uint32_t customSampleOrderCount, - const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV * pLaunchInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, - static_cast( sampleOrderType ), - customSampleOrderCount, - reinterpret_cast( pCustomSampleOrders ) ); + d.vkCmdCudaLaunchKernelNV( static_cast( m_commandBuffer ), reinterpret_cast( pLaunchInfo ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void - CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, - VULKAN_HPP_NAMESPACE::ArrayProxy const & customSampleOrders, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCudaLaunchKernelNV && "Function requires " ); +# endif - d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, - static_cast( sampleOrderType ), - customSampleOrders.size(), - reinterpret_cast( customSampleOrders.data() ) ); + d.vkCmdCudaLaunchKernelNV( m_commandBuffer, reinterpret_cast( &launchInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - //=== VK_NV_ray_tracing === +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::exportMetalObjectsEXT( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateAccelerationStructureNV( m_device, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pAccelerationStructure ) ) ); + d.vkExportMetalObjectsEXT( static_cast( m_device ), reinterpret_cast( pMetalObjectsInfo ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT + Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkExportMetalObjectsEXT && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; - VkResult result = d.vkCreateAccelerationStructureNV( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &accelerationStructure ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" ); + VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT metalObjectsInfo; + d.vkExportMetalObjectsEXT( m_device, reinterpret_cast( &metalObjectsInfo ) ); - return createResultValueType( static_cast( result ), accelerationStructure ); + return metalObjectsInfo; } -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::createAccelerationStructureNVUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, - Optional allocator, - Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkExportMetalObjectsEXT && "Function requires " ); +# 
endif - VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; - VkResult result = d.vkCreateAccelerationStructureNV( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &accelerationStructure ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique" ); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT & metalObjectsInfo = structureChain.template get(); + d.vkExportMetalObjectsEXT( m_device, reinterpret_cast( &metalObjectsInfo ) ); - return createResultValueType( - static_cast( result ), - UniqueHandle( accelerationStructure, ObjectDestroy( *this, allocator, d ) ) ); + return structureChain; } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === template - VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyAccelerationStructureNV( - m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); + d.vkCmdSetEvent2KHR( + static_cast( m_commandBuffer ), static_cast( event ), reinterpret_cast( pDependencyInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetEvent2KHR && "Function requires or " ); +# endif - d.vkDestroyAccelerationStructureNV( - m_device, - static_cast( accelerationStructure ), - reinterpret_cast( static_cast( allocator ) ) ); + d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast( event ), reinterpret_cast( &dependencyInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyAccelerationStructureNV( - m_device, static_cast( accelerationStructure ), reinterpret_cast( pAllocator ) ); + d.vkCmdResetEvent2KHR( static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); + } + + template + VULKAN_HPP_INLINE void 
CommandBuffer::waitEvents2KHR( uint32_t                                     eventCount,
+                                 const VULKAN_HPP_NAMESPACE::Event *          pEvents,
+                                 const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,
+                                 Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdWaitEvents2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                           eventCount,
+                           reinterpret_cast<const VkEvent *>( pEvents ),
+                           reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template <typename Dispatch>
-  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV              accelerationStructure,
-                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
-                                          Dispatch const &                                           d ) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const &          events,
+                                                        VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,
+                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdWaitEvents2KHR && "Function requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
+#  endif
+#  ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
+#  else
+    if ( events.size() != dependencyInfos.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" );
+    }
+#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/
 
-    d.vkDestroyAccelerationStructureNV(
-      m_device,
-      static_cast<VkAccelerationStructureNV>( accelerationStructure ),
-      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    d.vkCmdWaitEvents2KHR( m_commandBuffer,
+                           events.size(),
+                           reinterpret_cast<const VkEvent *>( events.data() ),
+                           reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 
   template <typename Dispatch>
-  VULKAN_HPP_INLINE void
-    Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo,
-                                                          VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR *                              pMemoryRequirements,
-                                                          Dispatch const &                                                            d ) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
+                                                             Dispatch const &                             d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
-                                                      reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( pInfo ),
-                                                      reinterpret_cast<VkMemoryRequirements2KHR *>( pMemoryRequirements ) );
+    d.vkCmdPipelineBarrier2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template <typename Dispatch>
-  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR
-    Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info,
-                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
+                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdPipelineBarrier2KHR && "Function requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
+#  endif
 
-    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements;
-    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
-                                                      reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
-                                                      reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
-
-    return memoryRequirements;
+    d.vkCmdPipelineBarrier2KHR( m_commandBuffer,
reinterpret_cast( &dependencyInfo ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = structureChain.template get(); - d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); - - return structureChain; + d.vkCmdWriteTimestamp2KHR( + static_cast( m_commandBuffer ), static_cast( stage ), static_cast( queryPool ), query ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( - uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2KHR( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + d.vkQueueSubmit2KHR( static_cast( m_queue ), submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Device::bindAccelerationStructureMemoryNV( - VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, Dispatch const & d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Queue::submit2KHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueueSubmit2KHR && "Function requires or " ); +# endif - VkResult result = d.vkBindAccelerationStructureMemoryNV( - m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkQueueSubmit2KHR( m_queue, submits.size(), reinterpret_cast( submits.data() ), static_cast( fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_descriptor_buffer === template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo, - VULKAN_HPP_NAMESPACE::Buffer instanceData, - 
VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, - VULKAN_HPP_NAMESPACE::Bool32 update, - VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, - VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, - VULKAN_HPP_NAMESPACE::Buffer scratch, - VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, + VULKAN_HPP_NAMESPACE::DeviceSize * pLayoutSizeInBytes, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, - reinterpret_cast( pInfo ), - static_cast( instanceData ), - static_cast( instanceOffset ), - static_cast( update ), - static_cast( dst ), - static_cast( src ), - static_cast( scratch ), - static_cast( scratchOffset ) ); + d.vkGetDescriptorSetLayoutSizeEXT( + static_cast( m_device ), static_cast( layout ), reinterpret_cast( pLayoutSizeInBytes ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info, - VULKAN_HPP_NAMESPACE::Buffer instanceData, - VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, - VULKAN_HPP_NAMESPACE::Bool32 update, - VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, - VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, - VULKAN_HPP_NAMESPACE::Buffer scratch, - VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize + Device::getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutSizeEXT && "Function requires " ); +# endif - d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, - reinterpret_cast( &info ), - static_cast( instanceData ), - static_cast( instanceOffset ), - static_cast( update ), - static_cast( dst ), - static_cast( src ), - static_cast( scratch ), - static_cast( scratchOffset ) ); + VULKAN_HPP_NAMESPACE::DeviceSize layoutSizeInBytes; + d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast( layout ), reinterpret_cast( &layoutSizeInBytes ) ); + + return layoutSizeInBytes; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, - VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, - VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutBindingOffsetEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, + uint32_t binding, + VULKAN_HPP_NAMESPACE::DeviceSize * pOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, - static_cast( dst ), - static_cast( src ), - static_cast( mode ) ); + d.vkGetDescriptorSetLayoutBindingOffsetEXT( + static_cast( m_device ), static_cast( layout ), binding, reinterpret_cast( pOffset ) ); } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( 
VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, - VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, - VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, - VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, - VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, - VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, - VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, - uint32_t width, - uint32_t height, - uint32_t depth, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getDescriptorSetLayoutBindingOffsetEXT( + VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, uint32_t binding, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdTraceRaysNV( m_commandBuffer, - static_cast( raygenShaderBindingTableBuffer ), - static_cast( raygenShaderBindingOffset ), - static_cast( missShaderBindingTableBuffer ), - static_cast( missShaderBindingOffset ), - static_cast( missShaderBindingStride ), - static_cast( hitShaderBindingTableBuffer ), - static_cast( hitShaderBindingOffset ), - static_cast( hitShaderBindingStride ), - static_cast( callableShaderBindingTableBuffer ), - static_cast( callableShaderBindingOffset ), - static_cast( callableShaderBindingStride ), - width, - height, - depth ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutBindingOffsetEXT && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::DeviceSize offset; + d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast( layout ), binding, reinterpret_cast( &offset ) ); + + return offset; } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - uint32_t createInfoCount, - const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT * pDescriptorInfo, + size_t dataSize, + void * pDescriptor, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateRayTracingPipelinesNV( m_device, - static_cast( pipelineCache ), - createInfoCount, - reinterpret_cast( pCreateInfos ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pPipelines ) ) ); + d.vkGetDescriptorEXT( static_cast( m_device ), reinterpret_cast( pDescriptorInfo ), dataSize, pDescriptor ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> - Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - Dispatch const & d ) const + template + VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, + size_t dataSize, + void * pDescriptor, + Dispatch const & d ) 
const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorEXT && "Function requires " ); +# endif - std::vector pipelines( createInfos.size() ); - VkResult result = d.vkCreateRayTracingPipelinesNV( - m_device, - static_cast( pipelineCache ), - createInfos.size(), - reinterpret_cast( createInfos.data() ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( pipelines.data() ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - - return ResultValue>( static_cast( result ), pipelines ); + d.vkGetDescriptorEXT( m_device, reinterpret_cast( &descriptorInfo ), dataSize, pDescriptor ); } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> - Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - PipelineAllocator & pipelineAllocator, - Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DescriptorType Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorEXT && "Function requires " ); +# endif - std::vector pipelines( createInfos.size(), pipelineAllocator ); - VkResult result = d.vkCreateRayTracingPipelinesNV( - m_device, - static_cast( pipelineCache ), - createInfos.size(), - reinterpret_cast( createInfos.data() ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( pipelines.data() ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + DescriptorType descriptor; + d.vkGetDescriptorEXT( + m_device, reinterpret_cast( &descriptorInfo ), sizeof( DescriptorType ), reinterpret_cast( &descriptor ) ); - return ResultValue>( static_cast( result ), pipelines ); + return descriptor; } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue - Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBuffersEXT( uint32_t bufferCount, + const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT * pBindingInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindDescriptorBuffersEXT( + static_cast( m_commandBuffer ), bufferCount, reinterpret_cast( pBindingInfos ) ); + } - VULKAN_HPP_NAMESPACE::Pipeline pipeline; - VkResult result = d.vkCreateRayTracingPipelinesNV( - m_device, - static_cast( pipelineCache ), - 1, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &pipeline ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, 
VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::bindDescriptorBuffersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindingInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorBuffersEXT && "Function requires " ); +# endif - return ResultValue( static_cast( result ), pipeline ); + d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bindingInfos.size(), reinterpret_cast( bindingInfos.data() ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> - Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - Dispatch const & d ) const + template + VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + uint32_t setCount, + const uint32_t * pBufferIndices, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDescriptorBufferOffsetsEXT( static_cast( m_commandBuffer ), + static_cast( pipelineBindPoint ), + static_cast( layout ), + firstSet, + setCount, + pBufferIndices, + reinterpret_cast( pOffsets ) ); + } - std::vector pipelines( createInfos.size() ); - VkResult result = d.vkCreateRayTracingPipelinesNV( - m_device, - static_cast( pipelineCache ), - createInfos.size(), - reinterpret_cast( createInfos.data() ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( pipelines.data() ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - std::vector, PipelineAllocator> uniquePipelines; - uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( auto const & pipeline : pipelines ) +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferIndices, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetDescriptorBufferOffsetsEXT && "Function requires " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( bufferIndices.size() == offsets.size() ); +# else + if ( bufferIndices.size() != offsets.size() ) { - uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setDescriptorBufferOffsetsEXT: bufferIndices.size() != offsets.size()" ); } - return ResultValue, PipelineAllocator>>( - static_cast( result ), std::move( uniquePipelines ) ); +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + 
+ d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer, + static_cast( pipelineBindPoint ), + static_cast( layout ), + firstSet, + bufferIndices.size(), + bufferIndices.data(), + reinterpret_cast( offsets.data() ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template >::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> - Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - PipelineAllocator & pipelineAllocator, - Dispatch const & d ) const + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - std::vector pipelines( createInfos.size() ); - VkResult result = d.vkCreateRayTracingPipelinesNV( - m_device, - static_cast( pipelineCache ), - createInfos.size(), - reinterpret_cast( createInfos.data() ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( pipelines.data() ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); - uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( auto const & pipeline : pipelines ) - { - uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); - } - return ResultValue, PipelineAllocator>>( - static_cast( result ), std::move( uniquePipelines ) ); + d.vkCmdBindDescriptorBufferEmbeddedSamplersEXT( + static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( layout ), set ); } template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> - Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getBufferOpaqueCaptureDescriptorDataEXT( + const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetBufferOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( pInfo ), pData ) ); + } - VULKAN_HPP_NAMESPACE::Pipeline pipeline; - VkResult result = d.vkCreateRayTracingPipelinesNV( - m_device, - static_cast( pipelineCache ), - 1, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &pipeline ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION 
); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferOpaqueCaptureDescriptorDataEXT && + "Function requires " ); +# endif - return ResultValue>( - static_cast( result ), - UniqueHandle( pipeline, ObjectDestroy( *this, allocator, d ) ) ); - } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( &info ), &data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferOpaqueCaptureDescriptorDataEXT" ); - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, - uint32_t firstGroup, - uint32_t groupCount, - size_t dataSize, - void * pData, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageOpaqueCaptureDescriptorDataEXT( + const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + return static_cast( d.vkGetImageOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( pInfo ), pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getRayTracingShaderGroupHandlesNV( - VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageOpaqueCaptureDescriptorDataEXT && + "Function requires " ); +# endif - VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); - std::vector data( dataSize / sizeof( DataType ) ); - VkResult result = d.vkGetRayTracingShaderGroupHandlesNV( - m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast( data.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" ); + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( &info ), &data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDescriptorDataEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - return createResultValueType( static_cast( result ), data ); + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewOpaqueCaptureDescriptorDataEXT( + const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d 
) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( pInfo ), pData ) ); } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const + Device::getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageViewOpaqueCaptureDescriptorDataEXT && + "Function requires " ); +# endif - DataType data; - VkResult result = d.vkGetRayTracingShaderGroupHandlesNV( - m_device, static_cast( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast( &data ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" ); + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( &info ), &data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewOpaqueCaptureDescriptorDataEXT" ); - return createResultValueType( static_cast( result ), data ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, - size_t dataSize, - void * pData, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSamplerOpaqueCaptureDescriptorDataEXT( + const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetAccelerationStructureHandleNV( m_device, static_cast( accelerationStructure ), dataSize, pData ) ); + return static_cast( d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( pInfo ), pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSamplerOpaqueCaptureDescriptorDataEXT && + "Function requires " ); +# endif - VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); - std::vector data( dataSize / sizeof( DataType ) ); - VkResult result = d.vkGetAccelerationStructureHandleNV( - m_device, static_cast( accelerationStructure ), data.size() * sizeof( DataType 
), reinterpret_cast( data.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( &info ), &data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSamplerOpaqueCaptureDescriptorDataEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - return createResultValueType( static_cast( result ), data ); + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( + const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( pInfo ), pData ) ); } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Dispatch const & d ) const + Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT && + "Function requires " ); +# endif - DataType data; - VkResult result = d.vkGetAccelerationStructureHandleNV( - m_device, static_cast( accelerationStructure ), sizeof( DataType ), reinterpret_cast( &data ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" ); + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + m_device, reinterpret_cast( &info ), &data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT" ); - return createResultValueType( static_cast( result ), data ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_fragment_shading_rate_enums === template - VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, - const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures, - VULKAN_HPP_NAMESPACE::QueryType queryType, - VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t firstQuery, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, - accelerationStructureCount, - 
reinterpret_cast( pAccelerationStructures ), - static_cast( queryType ), - static_cast( queryPool ), - firstQuery ); + d.vkCmdSetFragmentShadingRateEnumNV( static_cast( m_commandBuffer ), + static_cast( shadingRate ), + reinterpret_cast( combinerOps ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + //=== VK_EXT_mesh_shader === + template - VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( - VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, - VULKAN_HPP_NAMESPACE::QueryType queryType, - VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t firstQuery, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, - accelerationStructures.size(), - reinterpret_cast( accelerationStructures.data() ), - static_cast( queryType ), - static_cast( queryPool ), - firstQuery ); + d.vkCmdDrawMeshTasksEXT( static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, - uint32_t shader, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCompileDeferredNV( m_device, static_cast( pipeline ), shader ) ); + d.vkCmdDrawMeshTasksIndirectEXT( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); } -#else + template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VkResult result = d.vkCompileDeferredNV( m_device, static_cast( pipeline ), shader ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" ); - - return createResultValueType( static_cast( result ) ); + d.vkCmdDrawMeshTasksIndirectCountEXT( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - //=== VK_KHR_maintenance3 === + //=== VK_KHR_copy_commands2 === template - VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, - VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void 
CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,
+                                                      Dispatch const &                              d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkGetDescriptorSetLayoutSupportKHR(
-      m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
+    d.vkCmdCopyBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
   }
 
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template <typename Dispatch>
-  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
-    Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
-                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,
+                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdCopyBuffer2KHR && "Function requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
+#  endif
 
-    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
-    d.vkGetDescriptorSetLayoutSupportKHR(
-      m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
-
-    return support;
+    d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
   }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 
-  template <typename X, typename Y, typename... Z, typename Dispatch>
-  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
-    Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
-                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo,
+                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
+  }
 
-    StructureChain<X, Y, Z...> structureChain;
-    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
-    d.vkGetDescriptorSetLayoutSupportKHR(
-      m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo,
+                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdCopyImage2KHR && "Function requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
+#  endif
 
-    return structureChain;
+    d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-  //=== VK_KHR_draw_indirect_count ===
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 
   template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer     buffer,
-                                                              VULKAN_HPP_NAMESPACE::DeviceSize offset,
-                                                              VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
-                                                              VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
-                                                              uint32_t                         maxDrawCount,
-                                                              uint32_t                         stride,
-                                                              Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,
+                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdDrawIndirectCountKHR( m_commandBuffer,
-
static_cast( buffer ), - static_cast( offset ), - static_cast( countBuffer ), - static_cast( countBufferOffset ), - maxDrawCount, - stride ); + d.vkCmdCopyBufferToImage2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( pCopyBufferToImageInfo ) ); } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::Buffer countBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, - static_cast( buffer ), - static_cast( offset ), - static_cast( countBuffer ), - static_cast( countBufferOffset ), - maxDrawCount, - stride ); - } +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyBufferToImage2KHR && "Function requires or " ); +# endif - //=== VK_EXT_external_memory_host === + d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast( ©BufferToImageInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, - const void * pHostPointer, - VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetMemoryHostPointerPropertiesEXT( m_device, - static_cast( handleType ), - pHostPointer, - reinterpret_cast( pMemoryHostPointerProperties ) ) ); + d.vkCmdCopyImageToBuffer2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( pCopyImageToBufferInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, - const void * pHostPointer, - Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyImageToBuffer2KHR && "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties; - VkResult result = d.vkGetMemoryHostPointerPropertiesEXT( m_device, - static_cast( handleType ), - pHostPointer, - reinterpret_cast( &memoryHostPointerProperties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" ); - - return createResultValueType( static_cast( result ), memoryHostPointerProperties ); + d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast( ©ImageToBufferInfo ) ); } -#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_AMD_buffer_marker === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, - uint32_t marker, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, - static_cast( pipelineStage ), - static_cast( dstBuffer ), - static_cast( dstOffset ), - marker ); + d.vkCmdBlitImage2KHR( static_cast( m_commandBuffer ), reinterpret_cast( pBlitImageInfo ) ); } - //=== VK_EXT_calibrated_timestamps === - +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount, - VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast( pTimeDomains ) ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBlitImage2KHR && "Function requires or " ); +# endif + + d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast( &blitImageInfo ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const + template + VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - std::vector timeDomains; - uint32_t timeDomainCount; - VkResult result; - do - { - result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ); - if ( ( result == VK_SUCCESS ) && timeDomainCount ) - { - timeDomains.resize( timeDomainCount ); - result = - d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); - VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); - if ( timeDomainCount < timeDomains.size() ) - { - timeDomains.resize( timeDomainCount ); - } - return createResultValueType( static_cast( result ), timeDomains ); + d.vkCmdResolveImage2KHR( static_cast( m_commandBuffer ), reinterpret_cast( pResolveImageInfo ) ); } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator, Dispatch const & d ) const +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void 
CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdResolveImage2KHR && "Function requires or " ); +# endif - std::vector timeDomains( timeDomainEXTAllocator ); - uint32_t timeDomainCount; - VkResult result; - do - { - result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ); - if ( ( result == VK_SUCCESS ) && timeDomainCount ) - { - timeDomains.resize( timeDomainCount ); - result = - d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); - VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); - if ( timeDomainCount < timeDomains.size() ) - { - timeDomains.resize( timeDomainCount ); - } - return createResultValueType( static_cast( result ), timeDomains ); + d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast( &resolveImageInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_device_fault === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, - const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos, - uint64_t * pTimestamps, - uint64_t * pMaxDeviation, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts, + VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetCalibratedTimestampsEXT( - m_device, timestampCount, reinterpret_cast( pTimestampInfos ), pTimestamps, pMaxDeviation ) ); + return static_cast( d.vkGetDeviceFaultInfoEXT( + static_cast( m_device ), reinterpret_cast( pFaultCounts ), reinterpret_cast( pFaultInfo ) ) ); } +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, uint64_t>>::type - Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, - Dispatch const & d ) const +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkAcquireWinrtDisplayNV( static_cast( m_physicalDevice ), static_cast( display ) ) ); + } +# else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAcquireWinrtDisplayNV && "Function requires " ); +# endif - std::pair, uint64_t> data( - std::piecewise_construct, 
std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); - std::vector & timestamps = data.first; - uint64_t & maxDeviation = data.second; - VkResult result = d.vkGetCalibratedTimestampsEXT( - m_device, timestampInfos.size(), reinterpret_cast( timestampInfos.data() ), timestamps.data(), &maxDeviation ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast( display ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" ); - return createResultValueType( static_cast( result ), data ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, uint64_t>>::type - Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, - Uint64_tAllocator & uint64_tAllocator, - Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, + VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetWinrtDisplayNV( static_cast( m_physicalDevice ), deviceRelativeId, reinterpret_cast( pDisplay ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetWinrtDisplayNV && "Function requires " ); +# endif - std::pair, uint64_t> data( - std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) ); - std::vector & timestamps = data.first; - uint64_t & maxDeviation = data.second; - VkResult result = d.vkGetCalibratedTimestampsEXT( - m_device, timestampInfos.size(), reinterpret_cast( timestampInfos.data() ), timestamps.data(), &maxDeviation ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); + VULKAN_HPP_NAMESPACE::DisplayKHR display; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast( &display ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" ); - return createResultValueType( static_cast( result ), data ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( display ) ); } +# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( 
d.vkGetWinrtDisplayNV && "Function requires " ); +# endif - std::pair data; - uint64_t & timestamp = data.first; - uint64_t & maxDeviation = data.second; - VkResult result = - d.vkGetCalibratedTimestampsEXT( m_device, 1, reinterpret_cast( ×tampInfo ), ×tamp, &maxDeviation ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" ); + VULKAN_HPP_NAMESPACE::DisplayKHR display; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast( &display ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique" ); - return createResultValueType( static_cast( result ), data ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( display, detail::ObjectRelease( *this, d ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_NV_mesh_shader === +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === template - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask ); + return static_cast( d.vkCreateDirectFBSurfaceEXT( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - uint32_t drawCount, - uint32_t stride, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateDirectFBSurfaceEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateDirectFBSurfaceEXT( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } +# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::Buffer countBuffer, - 
-                                                                      VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
-                                                                      uint32_t maxDrawCount,
-                                                                      uint32_t stride,
-                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
+    Instance::createDirectFBSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,
+                                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                                              Dispatch const & d ) const
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer,
-                                         static_cast<VkBuffer>( buffer ),
-                                         static_cast<VkDeviceSize>( offset ),
-                                         static_cast<VkBuffer>( countBuffer ),
-                                         static_cast<VkDeviceSize>( countBufferOffset ),
-                                         maxDrawCount,
-                                         stride );
-  }
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCreateDirectFBSurfaceEXT && "Function requires <VK_EXT_directfb_surface>" );
+# endif
-  //=== VK_NV_scissor_exclusive ===
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateDirectFBSurfaceEXT(
+      m_instance,
+      reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
+      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+      reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique" );
+
+    return VULKAN_HPP_NAMESPACE::detail::createResultValueType(
+      result, UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, detail::ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
+  }
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
   template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
-                                                               uint32_t exclusiveScissorCount,
-                                                               const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors,
-                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex,
+                                                                              IDirectFB * dfb,
+                                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) );
+    return static_cast<Bool32>(
+      d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, dfb ) );
   }
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
-                                                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,
-                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
+    PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT &&
+                       "Function requires <VK_EXT_directfb_surface>" );
+# endif
-    d.vkCmdSetExclusiveScissorNV(
-      m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size(), reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) );
+    VkBool32 result = d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb );
+
+    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
-  //=== VK_NV_device_diagnostic_checkpoints ===
+  //=== VK_EXT_vertex_input_dynamic_state ===
   template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void *
pCheckpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( uint32_t vertexBindingDescriptionCount, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker ); + d.vkCmdSetVertexInputEXT( static_cast( m_commandBuffer ), + vertexBindingDescriptionCount, + reinterpret_cast( pVertexBindingDescriptions ), + vertexAttributeDescriptionCount, + reinterpret_cast( pVertexAttributeDescriptions ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( + VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexBindingDescriptions, + VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexAttributeDescriptions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetVertexInputEXT && "Function requires or " ); +# endif - d.vkCmdSetCheckpointNV( m_commandBuffer, reinterpret_cast( &checkpointMarker ) ); + d.vkCmdSetVertexInputEXT( m_commandBuffer, + vertexBindingDescriptions.size(), + reinterpret_cast( vertexBindingDescriptions.data() ), + vertexAttributeDescriptions.size(), + reinterpret_cast( vertexAttributeDescriptions.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template - VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t * pCheckpointDataCount, - VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast( pCheckpointData ) ); - } +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Queue::getCheckpointDataNV( Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - std::vector checkpointData; - uint32_t checkpointDataCount; - d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); - checkpointData.resize( checkpointDataCount ); - d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); - - VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); - if ( checkpointDataCount < checkpointData.size() ) - { - checkpointData.resize( checkpointDataCount ); - } - return checkpointData; + return static_cast( d.vkGetMemoryZirconHandleFUCHSIA( + static_cast( m_device ), reinterpret_cast( pGetZirconHandleInfo ), pZirconHandle ) ); } - template ::value, int>::type> - VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE std::vector - Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryZirconHandleFUCHSIA && "Function requires " ); +# endif - std::vector checkpointData( checkpointDataNVAllocator ); - uint32_t checkpointDataCount; - d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr ); - checkpointData.resize( checkpointDataCount ); - d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + zx_handle_t zirconHandle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast( &getZirconHandleInfo ), &zirconHandle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" ); - VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); - if ( checkpointDataCount < checkpointData.size() ) - { - checkpointData.resize( checkpointDataCount ); - } - return checkpointData; + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( zirconHandle ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_KHR_timeline_semaphore === +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, - uint64_t * pValue, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast( semaphore ), pValue ) ); + return static_cast( + d.vkGetMemoryZirconHandlePropertiesFUCHSIA( static_cast( m_device ), + static_cast( handleType ), + zirconHandle, + reinterpret_cast( pMemoryZirconHandleProperties ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryZirconHandlePropertiesFUCHSIA && + "Function requires " ); +# endif - uint64_t value; - VkResult result = d.vkGetSemaphoreCounterValueKHR( m_device, static_cast( semaphore ), &value ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING 
"::Device::getSemaphoreCounterValueKHR" ); + VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device, + static_cast( handleType ), + zirconHandle, + reinterpret_cast( &memoryZirconHandleProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" ); - return createResultValueType( static_cast( result ), value ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryZirconHandleProperties ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, - uint64_t timeout, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreZirconHandleFUCHSIA( + const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast( pWaitInfo ), timeout ) ); + return static_cast( d.vkImportSemaphoreZirconHandleFUCHSIA( + static_cast( m_device ), reinterpret_cast( pImportSemaphoreZirconHandleInfo ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result - Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkImportSemaphoreZirconHandleFUCHSIA && "Function requires " ); +# endif - VkResult result = d.vkWaitSemaphoresKHR( m_device, reinterpret_cast( &waitInfo ), timeout ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkImportSemaphoreZirconHandleFUCHSIA( + m_device, reinterpret_cast( &importSemaphoreZirconHandleInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" ); - return static_cast( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * 
pGetZirconHandleInfo, + zx_handle_t * pZirconHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast( pSignalInfo ) ) ); + return static_cast( d.vkGetSemaphoreZirconHandleFUCHSIA( + static_cast( m_device ), reinterpret_cast( pGetZirconHandleInfo ), pZirconHandle ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetSemaphoreZirconHandleFUCHSIA && "Function requires " ); +# endif - VkResult result = d.vkSignalSemaphoreKHR( m_device, reinterpret_cast( &signalInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" ); + zx_handle_t zirconHandle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast( &getZirconHandleInfo ), &zirconHandle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( zirconHandle ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - //=== VK_INTEL_performance_query === +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( - const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast( pInitializeInfo ) ) ); + return static_cast( d.vkCreateBufferCollectionFUCHSIA( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pCollection ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, + Optional allocator, + 
Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateBufferCollectionFUCHSIA && "Function requires " ); +# endif - VkResult result = d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast( &initializeInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" ); + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateBufferCollectionFUCHSIA( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &collection ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIA" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( collection ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createBufferCollectionFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkUninitializePerformanceApiINTEL( m_device ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateBufferCollectionFUCHSIA && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateBufferCollectionFUCHSIA( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &collection ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIAUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, + UniqueHandle( collection, detail::ObjectDestroy( *this, allocator, d ) ) ); } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ) ); + return static_cast( + d.vkSetBufferCollectionImageConstraintsFUCHSIA( static_cast( m_device ), + static_cast( collection ), + reinterpret_cast( pImageConstraintsInfo ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo, 
Dispatch const & d ) const + Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetBufferCollectionImageConstraintsFUCHSIA && + "Function requires " ); +# endif - VkResult result = d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast( &markerInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkSetBufferCollectionImageConstraintsFUCHSIA( + m_device, static_cast( collection ), reinterpret_cast( &imageConstraintsInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionImageConstraintsFUCHSIA" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( - const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ) ); + d.vkSetBufferCollectionBufferConstraintsFUCHSIA( static_cast( m_device ), + static_cast( collection ), + reinterpret_cast( pBufferConstraintsInfo ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d ) const + Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetBufferCollectionBufferConstraintsFUCHSIA && + "Function requires " ); +# endif - VkResult result = d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast( &markerInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkSetBufferCollectionBufferConstraintsFUCHSIA( + m_device, static_cast( collection ), reinterpret_cast( &bufferConstraintsInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionBufferConstraintsFUCHSIA" ); - return createResultValueType( static_cast( result ) ); + return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( - const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast( pOverrideInfo ) ) ); + d.vkDestroyBufferCollectionFUCHSIA( + static_cast( m_device ), static_cast( collection ), reinterpret_cast( pAllocator ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d ) const + VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyBufferCollectionFUCHSIA && "Function requires " ); +# endif - VkResult result = d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast( &overrideInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" ); - - return createResultValueType( static_cast( result ) ); + d.vkDestroyBufferCollectionFUCHSIA( + m_device, + static_cast( collection ), + reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, - VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkAcquirePerformanceConfigurationINTEL( m_device, - reinterpret_cast( pAcquireInfo ), - reinterpret_cast( pConfiguration ) ) ); + d.vkDestroyBufferCollectionFUCHSIA( + static_cast( m_device ), static_cast( collection ), reinterpret_cast( pAllocator ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyBufferCollectionFUCHSIA && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; - VkResult result = d.vkAcquirePerformanceConfigurationINTEL( m_device, - reinterpret_cast( &acquireInfo ), - reinterpret_cast( &configuration ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" ); + d.vkDestroyBufferCollectionFUCHSIA( + m_device, + static_cast( collection ), + reinterpret_cast( static_cast( allocator ) ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - return createResultValueType( static_cast( result ), configuration ); + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetBufferCollectionPropertiesFUCHSIA( static_cast( m_device ), + static_cast( collection ), + reinterpret_cast( pProperties ) ) ); } -# ifndef VULKAN_HPP_NO_SMART_HANDLE +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::acquirePerformanceConfigurationINTELUnique( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetBufferCollectionPropertiesFUCHSIA && + "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; - VkResult result = d.vkAcquirePerformanceConfigurationINTEL( m_device, - reinterpret_cast( &acquireInfo ), - reinterpret_cast( &configuration ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique" ); + VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetBufferCollectionPropertiesFUCHSIA( + m_device, static_cast( collection ), reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferCollectionPropertiesFUCHSIA" ); - return createResultValueType( - static_cast( result ), - UniqueHandle( configuration, ObjectRelease( *this, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result 
Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, + VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ) ); + return static_cast( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( + static_cast( m_device ), static_cast( renderpass ), reinterpret_cast( pMaxWorkgroupSize ) ) ); } -#else + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI && + "Function requires " ); +# endif - VkResult result = d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" ); + VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( + m_device, static_cast( renderpass ), reinterpret_cast( &maxWorkgroupSize ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( maxWorkgroupSize ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ) ); + d.vkCmdSubpassShadingHUAWEI( static_cast( m_commandBuffer ) ); } -#else + + //=== VK_HUAWEI_invocation_mask === + template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VkResult result = d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::release" ); - - return createResultValueType( static_cast( result ) ); + d.vkCmdBindInvocationMaskHUAWEI( + static_cast( m_commandBuffer ), 
static_cast( imageView ), static_cast( imageLayout ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + //=== VK_NV_external_memory_rdma === + template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, + VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast( configuration ) ) ); + return static_cast( d.vkGetMemoryRemoteAddressNV( static_cast( m_device ), + reinterpret_cast( pMemoryGetRemoteAddressInfo ), + reinterpret_cast( pAddress ) ) ); } -#else + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryRemoteAddressNV && "Function requires " ); +# endif - VkResult result = d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast( configuration ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" ); + VULKAN_HPP_NAMESPACE::RemoteAddressNV address; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetMemoryRemoteAddressNV( + m_device, reinterpret_cast( &memoryGetRemoteAddressInfo ), reinterpret_cast( &address ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( address ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_pipeline_properties === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, - VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT * pPipelineInfo, + VULKAN_HPP_NAMESPACE::BaseOutStructure * pPipelineProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPerformanceParameterINTEL( - m_device, static_cast( parameter ), reinterpret_cast( pValue ) ) ); + return static_cast( d.vkGetPipelinePropertiesEXT( static_cast( m_device ), + reinterpret_cast( pPipelineInfo ), + reinterpret_cast( pPipelineProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType::type - Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelinePropertiesEXT && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value; - VkResult result = d.vkGetPerformanceParameterINTEL( - m_device, static_cast( parameter ), reinterpret_cast( &value ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" ); + VULKAN_HPP_NAMESPACE::BaseOutStructure pipelineProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPipelinePropertiesEXT( + m_device, reinterpret_cast( &pipelineInfo ), reinterpret_cast( &pipelineProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" ); - return createResultValueType( static_cast( result ), value ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineProperties ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_AMD_display_native_hdr === + //=== VK_EXT_extended_dynamic_state2 === template - VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, - VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkSetLocalDimmingAMD( m_device, static_cast( swapChain ), static_cast( localDimmingEnable ) ); + d.vkCmdSetPatchControlPointsEXT( static_cast( m_commandBuffer ), patchControlPoints ); } -#if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_imagepipe_surface === - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pSurface ) ) ); + d.vkCmdSetRasterizerDiscardEnableEXT( static_cast( m_commandBuffer ), static_cast( rasterizerDiscardEnable ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = d.vkCreateImagePipeSurfaceFUCHSIA( - m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" ); - - return createResultValueType( static_cast( result ), surface ); + d.vkCmdSetDepthBiasEnableEXT( static_cast( m_commandBuffer ), static_cast( depthBiasEnable ) ); } -# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Instance::createImagePipeSurfaceFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLogicOpEXT( static_cast( m_commandBuffer ), static_cast( logicOp ) ); + } - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = d.vkCreateImagePipeSurfaceFUCHSIA( - m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique" ); - - return createResultValueType( - static_cast( result ), - UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + template + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetPrimitiveRestartEnableEXT( static_cast( m_commandBuffer ), static_cast( primitiveRestartEnable ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_FUCHSIA*/ -#if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_surface === +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateMetalSurfaceEXT( m_instance, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pSurface ) ) ); + return static_cast( d.vkCreateScreenSurfaceQNX( static_cast( m_instance ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSurface ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & 
createInfo, - Optional allocator, - Dispatch const & d ) const + Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateScreenSurfaceQNX && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = - d.vkCreateMetalSurfaceEXT( m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateScreenSurfaceQNX( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" ); - return createResultValueType( static_cast( result ), surface ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Instance::createMetalSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, - Optional allocator, - Dispatch const & d ) const + Instance::createScreenSurfaceQNXUnique( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateScreenSurfaceQNX && "Function requires " ); +# endif VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = - d.vkCreateMetalSurfaceEXT( m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateScreenSurfaceQNX( + m_instance, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique" ); - return createResultValueType( - static_cast( result ), - UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - - //=== VK_KHR_fragment_shading_rate === +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - PhysicalDevice::getFragmentShadingRatesKHR( uint32_t * pFragmentShadingRateCount, - VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, + struct _screen_window * window, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( - m_physicalDevice, pFragmentShadingRateCount, reinterpret_cast( pFragmentShadingRates ) ) ); + return static_cast( + d.vkGetPhysicalDeviceScreenPresentationSupportQNX( static_cast( m_physicalDevice ), queueFamilyIndex, window ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 + PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceScreenPresentationSupportQNX && + "Function requires " ); +# endif - std::vector fragmentShadingRates; - uint32_t fragmentShadingRateCount; - VkResult result; - do - { - result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ); - if ( ( result == VK_SUCCESS ) && fragmentShadingRateCount ) - { - fragmentShadingRates.resize( fragmentShadingRateCount ); - result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( - m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast( fragmentShadingRates.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); - VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); - if ( fragmentShadingRateCount < fragmentShadingRates.size() ) - { - fragmentShadingRates.resize( fragmentShadingRateCount ); - } - return createResultValueType( static_cast( result ), fragmentShadingRates ); - } - - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - PhysicalDevice::getFragmentShadingRatesKHR( PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator, - Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VkBool32 result = d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window ); - std::vector fragmentShadingRates( - physicalDeviceFragmentShadingRateKHRAllocator ); - uint32_t fragmentShadingRateCount; - VkResult result; - do - { - result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ); - if ( ( result == VK_SUCCESS ) && fragmentShadingRateCount ) - { - fragmentShadingRates.resize( fragmentShadingRateCount ); - result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( - m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast( fragmentShadingRates.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); - VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); - if ( fragmentShadingRateCount < fragmentShadingRates.size() ) - { - fragmentShadingRates.resize( fragmentShadingRateCount ); - } - return createResultValueType( static_cast( result ), fragmentShadingRates ); + return static_cast( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif 
/* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === template - VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize, - const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetFragmentShadingRateKHR( - m_commandBuffer, reinterpret_cast( pFragmentSize ), reinterpret_cast( combinerOps ) ); + d.vkCmdSetColorWriteEnableEXT( static_cast( m_commandBuffer ), attachmentCount, reinterpret_cast( pColorWriteEnables ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize, - const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & colorWriteEnables, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetColorWriteEnableEXT && "Function requires " ); +# endif - d.vkCmdSetFragmentShadingRateKHR( - m_commandBuffer, reinterpret_cast( &fragmentSize ), reinterpret_cast( combinerOps ) ); + d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, colorWriteEnables.size(), reinterpret_cast( colorWriteEnables.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_EXT_buffer_device_address === - - template - VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast( pInfo ) ) ); - } + //=== VK_KHR_ray_tracing_maintenance1 === -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VkDeviceAddress result = d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast( &info ) ); - - return static_cast( result ); + d.vkCmdTraceRaysIndirect2KHR( static_cast( m_commandBuffer ), static_cast( indirectDeviceAddress ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - //=== VK_EXT_tooling_info === + //=== VK_EXT_multi_draw === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t * pToolCount, - VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( uint32_t drawCount, + const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * 
pVertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, pToolCount, reinterpret_cast( pToolProperties ) ) ); + d.vkCmdDrawMultiEXT( static_cast( m_commandBuffer ), + drawCount, + reinterpret_cast( pVertexInfo ), + instanceCount, + firstInstance, + stride ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - std::vector toolProperties; - uint32_t toolCount; - VkResult result; - do - { - result = d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ); - if ( ( result == VK_SUCCESS ) && toolCount ) - { - toolProperties.resize( toolCount ); - result = - d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); - VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); - if ( toolCount < toolProperties.size() ) - { - toolProperties.resize( toolCount ); - } - return createResultValueType( static_cast( result ), toolProperties ); - } - - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - PhysicalDevice::getToolPropertiesEXT( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const + template + VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy const & vertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDrawMultiEXT && "Function requires " ); +# endif - std::vector toolProperties( - physicalDeviceToolPropertiesAllocator ); - uint32_t toolCount; - VkResult result; - do - { - result = d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ); - if ( ( result == VK_SUCCESS ) && toolCount ) - { - toolProperties.resize( toolCount ); - result = - d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); - VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); - if ( toolCount < toolProperties.size() ) - { - toolProperties.resize( toolCount ); - } - return createResultValueType( static_cast( result ), toolProperties ); + d.vkCmdDrawMultiEXT( m_commandBuffer, + vertexInfo.size(), + reinterpret_cast( vertexInfo.data() ), + instanceCount, + firstInstance, + vertexInfo.stride() ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_KHR_present_wait === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - uint64_t presentId, - uint64_t timeout, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT 
+ VULKAN_HPP_INLINE void CommandBuffer::drawMultiIndexedEXT( uint32_t drawCount, + const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + const int32_t * pVertexOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkWaitForPresentKHR( m_device, static_cast( swapchain ), presentId, timeout ) ); + d.vkCmdDrawMultiIndexedEXT( static_cast( m_commandBuffer ), + drawCount, + reinterpret_cast( pIndexInfo ), + instanceCount, + firstInstance, + stride, + pVertexOffset ); } -#else + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result - Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t presentId, uint64_t timeout, Dispatch const & d ) const + VULKAN_HPP_INLINE void + CommandBuffer::drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy const & indexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + Optional vertexOffset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDrawMultiIndexedEXT && "Function requires " ); +# endif - VkResult result = d.vkWaitForPresentKHR( m_device, static_cast( swapchain ), presentId, timeout ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresentKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); - - return static_cast( result ); + d.vkCmdDrawMultiIndexedEXT( m_commandBuffer, + indexInfo.size(), + reinterpret_cast( indexInfo.data() ), + instanceCount, + firstInstance, + indexInfo.stride(), + static_cast( vertexOffset ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_NV_cooperative_matrix === + //=== VK_EXT_opacity_micromap === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV( - uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromap, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( - m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkCreateMicromapEXT( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pMicromap ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateMicromapEXT && "Function requires " ); +# endif - std::vector properties; - uint32_t propertyCount; - VkResult result; - do - { - result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) - { - properties.resize( propertyCount ); - result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( - m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) - { - properties.resize( propertyCount ); - } - return createResultValueType( static_cast( result ), properties ); + VULKAN_HPP_NAMESPACE::MicromapEXT micromap; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateMicromapEXT( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( µmap ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( micromap ) ); } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - PhysicalDevice::getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, - Dispatch const & d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createMicromapEXTUnique( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateMicromapEXT && "Function requires " ); +# endif - std::vector properties( - cooperativeMatrixPropertiesNVAllocator ); - uint32_t propertyCount; - VkResult result; - do - { - result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) - { - properties.resize( propertyCount ); - result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( - m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) - { - properties.resize( propertyCount ); - } - return createResultValueType( static_cast( result ), properties ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NAMESPACE::MicromapEXT micromap; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateMicromapEXT( m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( µmap ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXTUnique" ); - //=== VK_NV_coverage_reduction_mode === + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( micromap, detail::ObjectDestroy( 
*this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( - uint32_t * pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( - m_physicalDevice, pCombinationCount, reinterpret_cast( pCombinations ) ) ); + d.vkDestroyMicromapEXT( + static_cast( m_device ), static_cast( micromap ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const + template + VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyMicromapEXT && "Function requires " ); +# endif - std::vector combinations; - uint32_t combinationCount; - VkResult result; - do - { - result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ); - if ( ( result == VK_SUCCESS ) && combinationCount ) - { - combinations.resize( combinationCount ); - result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( - m_physicalDevice, &combinationCount, reinterpret_cast( combinations.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); - VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); - if ( combinationCount < combinations.size() ) - { - combinations.resize( combinationCount ); - } - return createResultValueType( static_cast( result ), combinations ); + d.vkDestroyMicromapEXT( m_device, + static_cast( micromap ), + reinterpret_cast( static_cast( allocator ) ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( - FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, Dispatch const & d ) const + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - std::vector combinations( - framebufferMixedSamplesCombinationNVAllocator ); - uint32_t combinationCount; - VkResult result; - do - { - result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ); - if ( ( result == VK_SUCCESS ) && combinationCount ) - { - combinations.resize( 
combinationCount ); - result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( - m_physicalDevice, &combinationCount, reinterpret_cast( combinations.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); - VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); - if ( combinationCount < combinations.size() ) - { - combinations.resize( combinationCount ); - } - return createResultValueType( static_cast( result ), combinations ); + d.vkDestroyMicromapEXT( + static_cast( m_device ), static_cast( micromap ), reinterpret_cast( pAllocator ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_EXT_full_screen_exclusive === +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, - uint32_t * pPresentModeCount, - VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, - reinterpret_cast( pSurfaceInfo ), - pPresentModeCount, - reinterpret_cast( pPresentModes ) ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyMicromapEXT && "Function requires " ); +# endif + + d.vkDestroyMicromapEXT( m_device, + static_cast( micromap ), + reinterpret_cast( static_cast( allocator ) ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const + template + VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - std::vector presentModes; - uint32_t presentModeCount; - VkResult result; - do - { - result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT( - m_physicalDevice, reinterpret_cast( &surfaceInfo ), &presentModeCount, nullptr ); - if ( ( result == VK_SUCCESS ) && presentModeCount ) - { - presentModes.resize( presentModeCount ); - result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, - reinterpret_cast( &surfaceInfo ), - &presentModeCount, - reinterpret_cast( presentModes.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); - VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); - if ( presentModeCount < presentModes.size() ) - { - presentModes.resize( presentModeCount ); - } - return createResultValueType( static_cast( result ), presentModes ); + d.vkCmdBuildMicromapsEXT( static_cast( m_commandBuffer ), infoCount, reinterpret_cast( pInfos ) ); } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - 
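// Illustrative usage sketch, not part of the generated bindings: a minimal
// create/destroy round trip for the VK_EXT_opacity_micromap wrappers above.
// `backing` is assumed to be a buffer created with micromap-storage usage, and
// `size` would normally come from getMicromapBuildSizesEXT.
#include <vulkan/vulkan.hpp>

void createAndDestroyMicromap( vk::Device device, vk::Buffer backing, vk::DeviceSize size )
{
  vk::MicromapCreateInfoEXT createInfo{};
  createInfo.buffer = backing;
  createInfo.offset = 0;
  createInfo.size   = size;
  createInfo.type   = vk::MicromapTypeEXT::eOpacityMicromap;

  // The enhanced-mode wrapper throws on failure and returns the handle directly.
  vk::MicromapEXT micromap = device.createMicromapEXT( createInfo );

  // ... build and use the micromap ...

  device.destroyMicromapEXT( micromap );
}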
PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, - PresentModeKHRAllocator & presentModeKHRAllocator, - Dispatch const & d ) const +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBuildMicromapsEXT && "Function requires " ); +# endif - std::vector presentModes( presentModeKHRAllocator ); - uint32_t presentModeCount; - VkResult result; - do - { - result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT( - m_physicalDevice, reinterpret_cast( &surfaceInfo ), &presentModeCount, nullptr ); - if ( ( result == VK_SUCCESS ) && presentModeCount ) - { - presentModes.resize( presentModeCount ); - result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, - reinterpret_cast( &surfaceInfo ), - &presentModeCount, - reinterpret_cast( presentModes.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); - VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); - if ( presentModeCount < presentModes.size() ) - { - presentModes.resize( presentModeCount ); - } - return createResultValueType( static_cast( result ), presentModes ); + d.vkCmdBuildMicromapsEXT( m_commandBuffer, infos.size(), reinterpret_cast( infos.data() ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); + return static_cast( d.vkBuildMicromapsEXT( static_cast( m_device ), + static_cast( deferredOperation ), + infoCount, + reinterpret_cast( pInfos ) ) ); } -# else + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBuildMicromapsEXT && "Function requires " ); +# endif - VkResult result = d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkBuildMicromapsEXT( + 
m_device, static_cast( deferredOperation ), infos.size(), reinterpret_cast( infos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); - return createResultValueType( static_cast( result ) ); + return static_cast( result ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); + return static_cast( d.vkCopyMicromapEXT( + static_cast( m_device ), static_cast( deferredOperation ), reinterpret_cast( pInfo ) ) ); } -# else + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyMicromapEXT && "Function requires " ); +# endif - VkResult result = d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCopyMicromapEXT( m_device, static_cast( deferredOperation ), reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); - return createResultValueType( static_cast( result ) ); + return static_cast( result ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetDeviceGroupSurfacePresentModes2EXT( - 
m_device, reinterpret_cast( pSurfaceInfo ), reinterpret_cast( pModes ) ) ); + return static_cast( d.vkCopyMicromapToMemoryEXT( static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( pInfo ) ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapToMemoryEXT( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyMicromapToMemoryEXT && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; - VkResult result = d.vkGetDeviceGroupSurfacePresentModes2EXT( - m_device, reinterpret_cast( &surfaceInfo ), reinterpret_cast( &modes ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCopyMicromapToMemoryEXT( + m_device, static_cast( deferredOperation ), reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); - return createResultValueType( static_cast( result ), modes ); + return static_cast( result ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_EXT_headless_surface === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateHeadlessSurfaceEXT( m_instance, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pSurface ) ) ); + return static_cast( d.vkCopyMemoryToMicromapEXT( static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMemoryToMicromapEXT( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, Dispatch const & d ) const { VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyMemoryToMicromapEXT && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = d.vkCreateHeadlessSurfaceEXT( - m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCopyMemoryToMicromapEXT( + m_device, static_cast( deferredOperation ), reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); - return createResultValueType( static_cast( result ), surface ); + return static_cast( result ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Instance::createHeadlessSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeMicromapsPropertiesEXT( uint32_t micromapCount, + const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + void * pData, + size_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkWriteMicromapsPropertiesEXT( static_cast( m_device ), + micromapCount, + reinterpret_cast( pMicromaps ), + static_cast( queryType ), + dataSize, + pData, + stride ) ); + } - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = d.vkCreateHeadlessSurfaceEXT( - m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique" ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWriteMicromapsPropertiesEXT && "Function requires " ); +# endif - return createResultValueType( - static_cast( result ), - UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkWriteMicromapsPropertiesEXT( m_device, + micromaps.size(), + reinterpret_cast( micromaps.data() ), + static_cast( queryType ), + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ), + stride ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 
result, std::move( data ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - //=== VK_KHR_buffer_device_address === + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkWriteMicromapsPropertiesEXT && "Function requires " ); +# endif + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkWriteMicromapsPropertiesEXT( m_device, + micromaps.size(), + reinterpret_cast( micromaps.data() ), + static_cast( queryType ), + sizeof( DataType ), + reinterpret_cast( &data ), + stride ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast( pInfo ) ) ); + d.vkCmdCopyMicromapEXT( static_cast( m_commandBuffer ), reinterpret_cast( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyMicromapEXT && "Function requires " ); +# endif - VkDeviceAddress result = d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast( &info ) ); - - return static_cast( result ); + d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast( &info ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast( pInfo ) ); + d.vkCmdCopyMicromapToMemoryEXT( static_cast( m_commandBuffer ), reinterpret_cast( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void 
CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyMicromapToMemoryEXT && "Function requires " ); +# endif - uint64_t result = d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast( &info ) ); + d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast( &info ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - return result; + template + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdCopyMemoryToMicromapEXT( static_cast( m_commandBuffer ), reinterpret_cast( pInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyMemoryToMicromapEXT && "Function requires " ); +# endif + + d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast( &info ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::writeMicromapsPropertiesEXT( uint32_t micromapCount, + const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast( pInfo ) ); + d.vkCmdWriteMicromapsPropertiesEXT( static_cast( m_commandBuffer ), + micromapCount, + reinterpret_cast( pMicromaps ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdWriteMicromapsPropertiesEXT && "Function requires " ); +# endif - uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast( &info ) ); + d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer, + micromaps.size(), + reinterpret_cast( micromaps.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - return result; + 
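// Illustrative usage sketch, not part of the generated bindings: recording a
// compacted-size query for a built micromap through the command-buffer wrapper
// above. `queryPool` is assumed to have been created with the matching query
// type and at least one entry.
#include <vulkan/vulkan.hpp>

void queryMicromapCompactedSize( vk::CommandBuffer cb, vk::MicromapEXT micromap, vk::QueryPool queryPool )
{
  // ArrayProxy accepts a single element, so one micromap can be passed directly.
  cb.writeMicromapsPropertiesEXT( micromap,
                                  vk::QueryType::eMicromapCompactedSizeEXT,
                                  queryPool,
                                  0 );  // firstQuery
}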
template + VULKAN_HPP_INLINE void Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT * pVersionInfo, + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceMicromapCompatibilityEXT( static_cast( m_device ), + reinterpret_cast( pVersionInfo ), + reinterpret_cast( pCompatibility ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - //=== VK_EXT_line_rasterization === +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceMicromapCompatibilityEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility; + d.vkGetDeviceMicromapCompatibilityEXT( m_device, + reinterpret_cast( &versionInfo ), + reinterpret_cast( &compatibility ) ); + + return compatibility; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void - CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pBuildInfo, + VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT * pSizeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern ); + d.vkGetMicromapBuildSizesEXT( static_cast( m_device ), + static_cast( buildType ), + reinterpret_cast( pBuildInfo ), + reinterpret_cast( pSizeInfo ) ); } - //=== VK_EXT_host_query_reset === - +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT + Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkResetQueryPoolEXT( m_device, static_cast( queryPool ), firstQuery, queryCount ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMicromapBuildSizesEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT sizeInfo; + d.vkGetMicromapBuildSizesEXT( m_device, + static_cast( buildType ), + reinterpret_cast( &buildInfo ), + reinterpret_cast( &sizeInfo ) ); + + return sizeInfo; } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_EXT_extended_dynamic_state === + //=== VK_HUAWEI_cluster_culling_shader === template - VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + 
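// Illustrative usage sketch, not part of the generated bindings: the typical
// sizing step before building a micromap with the getMicromapBuildSizesEXT
// wrapper above. `buildInfo` is a placeholder; its usage counts and data
// pointers would be filled in elsewhere.
#include <vulkan/vulkan.hpp>

vk::MicromapBuildSizesInfoEXT queryMicromapSizes( vk::Device device )
{
  vk::MicromapBuildInfoEXT buildInfo{};
  buildInfo.type = vk::MicromapTypeEXT::eOpacityMicromap;
  // ... usage counts and triangle data would be set here ...

  // The enhanced-mode wrapper returns the MicromapBuildSizesInfoEXT by value;
  // micromapSize and buildScratchSize drive the buffer allocations.
  return device.getMicromapBuildSizesEXT( vk::AccelerationStructureBuildTypeKHR::eDevice, buildInfo );
}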
VULKAN_HPP_INLINE void + CommandBuffer::drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast( cullMode ) ); + d.vkCmdDrawClusterHUAWEI( static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); } template - VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast( frontFace ) ); + d.vkCmdDrawClusterIndirectHUAWEI( static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ) ); } + //=== VK_EXT_pageable_device_local_memory === + template - VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory, float priority, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast( primitiveTopology ) ); + d.vkSetDeviceMemoryPriorityEXT( static_cast( m_device ), static_cast( memory ), priority ); } + //=== VK_KHR_maintenance4 === + template - VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount, - const VULKAN_HPP_NAMESPACE::Viewport * pViewports, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewportCount, reinterpret_cast( pViewports ) ); + d.vkGetDeviceBufferMemoryRequirementsKHR( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirementsKHR && + "Function requires or " ); +# endif - d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size(), reinterpret_cast( viewports.data() ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetDeviceBufferMemoryRequirementsKHR( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); - template - VULKAN_HPP_INLINE void - 
CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast( pScissors ) ); + return memoryRequirements; } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceBufferMemoryRequirementsKHR && + "Function requires or " ); +# endif - d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size(), reinterpret_cast( scissors.data() ) ); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + d.vkGetDeviceBufferMemoryRequirementsKHR( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return structureChain; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, - uint32_t bindingCount, - const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, - const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, - const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, - const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, - firstBinding, - bindingCount, - reinterpret_cast( pBuffers ), - reinterpret_cast( pOffsets ), - reinterpret_cast( pSizes ), - reinterpret_cast( pStrides ) ); + d.vkGetDeviceImageMemoryRequirementsKHR( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, - VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, - VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, - VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes, - VULKAN_HPP_NAMESPACE::ArrayProxy const & strides, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); - VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); - VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); -# else - 
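// Illustrative usage sketch, not part of the generated bindings: with the
// VK_KHR_maintenance4 wrapper above, memory requirements can be queried from a
// buffer create info alone, before any buffer is created. Names are placeholders.
#include <vulkan/vulkan.hpp>

vk::MemoryRequirements2 queryBufferRequirements( vk::Device device, vk::DeviceSize size )
{
  vk::BufferCreateInfo bufferInfo{};
  bufferInfo.size  = size;
  bufferInfo.usage = vk::BufferUsageFlagBits::eStorageBuffer;

  vk::DeviceBufferMemoryRequirements info{};
  info.pCreateInfo = &bufferInfo;

  // The enhanced-mode wrapper returns MemoryRequirements2 by value; the
  // StructureChain overload above can pull extension structures as well.
  return device.getBufferMemoryRequirementsKHR( info );
}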
if ( buffers.size() != offsets.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" ); - } - if ( !sizes.empty() && buffers.size() != sizes.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" ); - } - if ( !strides.empty() && buffers.size() != strides.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" ); - } -# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirementsKHR && + "Function requires or " ); +# endif - d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, - firstBinding, - buffers.size(), - reinterpret_cast( buffers.data() ), - reinterpret_cast( offsets.data() ), - reinterpret_cast( sizes.data() ), - reinterpret_cast( strides.data() ) ); + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetDeviceImageMemoryRequirementsKHR( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast( depthTestEnable ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageMemoryRequirementsKHR && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + d.vkGetDeviceImageMemoryRequirementsKHR( + m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + + return structureChain; } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast( depthWriteEnable ) ); + d.vkGetDeviceImageSparseMemoryRequirementsKHR( static_cast( m_device ), + reinterpret_cast( pInfo ), + pSparseMemoryRequirementCount, + reinterpret_cast( pSparseMemoryRequirements ) ); } - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, 
Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast( depthCompareOp ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirementsKHR && + "Function requires or " ); +# endif + + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetDeviceImageSparseMemoryRequirementsKHR( + m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; } - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast( depthBoundsTestEnable ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSparseMemoryRequirementsKHR && + "Function requires or " ); +# endif + + std::vector sparseMemoryRequirements( + sparseImageMemoryRequirements2Allocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetDeviceImageSparseMemoryRequirementsKHR( + m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device, + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VALVE_descriptor_set_host_mapping === template - VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE * pBindingReference, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE * pHostMapping, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast( stencilTestEnable ) ); + d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( static_cast( m_device ), + reinterpret_cast( pBindingReference ), + reinterpret_cast( pHostMapping ) ); } +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, - VULKAN_HPP_NAMESPACE::StencilOp failOp, - VULKAN_HPP_NAMESPACE::StencilOp passOp, - VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, - VULKAN_HPP_NAMESPACE::CompareOp compareOp, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE + Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetStencilOpEXT( m_commandBuffer, - static_cast( faceMask ), - static_cast( failOp ), - static_cast( passOp ), - static_cast( depthFailOp ), - static_cast( compareOp ) ); - } +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetLayoutHostMappingInfoVALVE && + "Function requires " ); +# endif - //=== VK_KHR_deferred_host_operations === + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE hostMapping; + d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device, + reinterpret_cast( &bindingReference ), + reinterpret_cast( &hostMapping ) ); + + return hostMapping; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, void ** ppData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDeferredOperationKHR( - m_device, reinterpret_cast( pAllocator ), reinterpret_cast( pDeferredOperation ) ) ); + d.vkGetDescriptorSetHostMappingVALVE( static_cast( m_device ), static_cast( descriptorSet ), ppData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::createDeferredOperationKHR( Optional allocator, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDescriptorSetHostMappingVALVE && + "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation; - VkResult result = d.vkCreateDeferredOperationKHR( - m_device, - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &deferredOperation ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" ); + void * pData; + d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast( descriptorSet ), &pData ); - return createResultValueType( static_cast( result ), deferredOperation ); + return pData; } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_copy_memory_indirect === -# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - 
Device::createDeferredOperationKHRUnique( Optional allocator, Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation; - VkResult result = d.vkCreateDeferredOperationKHR( - m_device, - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &deferredOperation ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique" ); - - return createResultValueType( - static_cast( result ), - UniqueHandle( deferredOperation, ObjectDestroy( *this, allocator, d ) ) ); + d.vkCmdCopyMemoryIndirectNV( static_cast( m_commandBuffer ), static_cast( copyBufferAddress ), copyCount, stride ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers * pImageSubresources, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyDeferredOperationKHR( - m_device, static_cast( operation ), reinterpret_cast( pAllocator ) ); + d.vkCmdCopyMemoryToImageIndirectNV( static_cast( m_commandBuffer ), + static_cast( copyBufferAddress ), + copyCount, + stride, + static_cast( dstImage ), + static_cast( dstImageLayout ), + reinterpret_cast( pImageSubresources ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - d.vkDestroyDeferredOperationKHR( - m_device, - static_cast( operation ), - reinterpret_cast( static_cast( allocator ) ) ); + VULKAN_HPP_INLINE void + CommandBuffer::copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t stride, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageSubresources, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdCopyMemoryToImageIndirectNV && "Function requires " ); +# endif + + d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer, + static_cast( copyBufferAddress ), + imageSubresources.size(), + stride, + static_cast( dstImage ), + static_cast( dstImageLayout ), + reinterpret_cast( imageSubresources.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_memory_decompression === template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, - 
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryNV( uint32_t decompressRegionCount, + const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV * pDecompressMemoryRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyDeferredOperationKHR( - m_device, static_cast( operation ), reinterpret_cast( pAllocator ) ); + d.vkCmdDecompressMemoryNV( static_cast( m_commandBuffer ), + decompressRegionCount, + reinterpret_cast( pDecompressMemoryRegions ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & decompressMemoryRegions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdDecompressMemoryNV && "Function requires " ); +# endif - d.vkDestroyDeferredOperationKHR( - m_device, - static_cast( operation ), - reinterpret_cast( static_cast( allocator ) ) ); + d.vkCmdDecompressMemoryNV( + m_commandBuffer, decompressMemoryRegions.size(), reinterpret_cast( decompressMemoryRegions.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, + uint32_t stride, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast( operation ) ); + d.vkCmdDecompressMemoryIndirectCountNV( static_cast( m_commandBuffer ), + static_cast( indirectCommandsAddress ), + static_cast( indirectCommandsCountAddress ), + stride ); } -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + //=== VK_NV_device_generated_commands_compute === + template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetDeferredOperationResultKHR( m_device, static_cast( operation ) ) ); + d.vkGetPipelineIndirectMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pMemoryRequirements ) ); } -#else + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result - Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineIndirectMemoryRequirementsNV && + "Function requires " ); +# endif - VkResult result = d.vkGetDeferredOperationResultKHR( m_device, static_cast( operation ) ); + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetPipelineIndirectMemoryRequirementsNV( + m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &memoryRequirements ) ); - return static_cast( result ); + return memoryRequirements; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineIndirectMemoryRequirementsNV && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + d.vkGetPipelineIndirectMemoryRequirementsNV( + m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkDeferredOperationJoinKHR( m_device, static_cast( operation ) ) ); + d.vkCmdUpdatePipelineIndirectBufferNV( + static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( pipeline ) ); } -#else + template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, - Dispatch const & d ) const + VULKAN_HPP_INLINE DeviceAddress Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPipelineIndirectDeviceAddressNV( static_cast( m_device ), reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress + Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineIndirectDeviceAddressNV && + "Function requires " ); +# 
endif - VkResult result = d.vkDeferredOperationJoinKHR( m_device, static_cast( operation ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } ); + VkDeviceAddress result = d.vkGetPipelineIndirectDeviceAddressNV( m_device, reinterpret_cast( &info ) ); - return static_cast( result ); + return static_cast( result ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_KHR_pipeline_executable_properties === + //=== VK_EXT_extended_dynamic_state3 === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo, - uint32_t * pExecutableCount, - VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPipelineExecutablePropertiesKHR( m_device, - reinterpret_cast( pPipelineInfo ), - pExecutableCount, - reinterpret_cast( pProperties ) ) ); + d.vkCmdSetDepthClampEnableEXT( static_cast( m_commandBuffer ), static_cast( depthClampEnable ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const + template + VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - std::vector properties; - uint32_t executableCount; - VkResult result; - do - { - result = d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast( &pipelineInfo ), &executableCount, nullptr ); - if ( ( result == VK_SUCCESS ) && executableCount ) - { - properties.resize( executableCount ); - result = d.vkGetPipelineExecutablePropertiesKHR( m_device, - reinterpret_cast( &pipelineInfo ), - &executableCount, - reinterpret_cast( properties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); - VULKAN_HPP_ASSERT( executableCount <= properties.size() ); - if ( executableCount < properties.size() ) - { - properties.resize( executableCount ); - } - return createResultValueType( static_cast( result ), properties ); + d.vkCmdSetPolygonModeEXT( static_cast( m_commandBuffer ), static_cast( polygonMode ) ); } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, - PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator, - Dispatch const & d ) const + template + VULKAN_HPP_INLINE void CommandBuffer::setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - std::vector 
properties( - pipelineExecutablePropertiesKHRAllocator ); - uint32_t executableCount; - VkResult result; - do - { - result = d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast( &pipelineInfo ), &executableCount, nullptr ); - if ( ( result == VK_SUCCESS ) && executableCount ) - { - properties.resize( executableCount ); - result = d.vkGetPipelineExecutablePropertiesKHR( m_device, - reinterpret_cast( &pipelineInfo ), - &executableCount, - reinterpret_cast( properties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); - VULKAN_HPP_ASSERT( executableCount <= properties.size() ); - if ( executableCount < properties.size() ) - { - properties.resize( executableCount ); - } - return createResultValueType( static_cast( result ), properties ); + d.vkCmdSetRasterizationSamplesEXT( static_cast( m_commandBuffer ), static_cast( rasterizationSamples ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, - uint32_t * pStatisticCount, - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPipelineExecutableStatisticsKHR( m_device, - reinterpret_cast( pExecutableInfo ), - pStatisticCount, - reinterpret_cast( pStatistics ) ) ); + d.vkCmdSetSampleMaskEXT( + static_cast( m_commandBuffer ), static_cast( samples ), reinterpret_cast( pSampleMask ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const + template + VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sampleMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - std::vector statistics; - uint32_t statisticCount; - VkResult result; - do - { - result = - d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast( &executableInfo ), &statisticCount, nullptr ); - if ( ( result == VK_SUCCESS ) && statisticCount ) - { - statistics.resize( statisticCount ); - result = d.vkGetPipelineExecutableStatisticsKHR( m_device, - reinterpret_cast( &executableInfo ), - &statisticCount, - reinterpret_cast( statistics.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); - VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); - if ( statisticCount < statistics.size() ) +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetSampleMaskEXT && "Function requires or " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( sampleMask.size() == ( static_cast( samples ) + 31 ) / 32 ); +# else + if ( sampleMask.size() != ( static_cast( samples 
) + 31 ) / 32 ) { - statistics.resize( statisticCount ); + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setSampleMaskEXT: sampleMask.size() != ( static_cast( samples ) + 31 ) / 32" ); } - return createResultValueType( static_cast( result ), statistics ); +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast( samples ), reinterpret_cast( sampleMask.data() ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, - PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator, - Dispatch const & d ) const + template + VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - std::vector statistics( - pipelineExecutableStatisticKHRAllocator ); - uint32_t statisticCount; - VkResult result; - do - { - result = - d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast( &executableInfo ), &statisticCount, nullptr ); - if ( ( result == VK_SUCCESS ) && statisticCount ) - { - statistics.resize( statisticCount ); - result = d.vkGetPipelineExecutableStatisticsKHR( m_device, - reinterpret_cast( &executableInfo ), - &statisticCount, - reinterpret_cast( statistics.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); - VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); - if ( statisticCount < statistics.size() ) - { - statistics.resize( statisticCount ); - } - return createResultValueType( static_cast( result ), statistics ); + d.vkCmdSetAlphaToCoverageEnableEXT( static_cast( m_commandBuffer ), static_cast( alphaToCoverageEnable ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, - uint32_t * pInternalRepresentationCount, - VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, - reinterpret_cast( pExecutableInfo ), - pInternalRepresentationCount, - reinterpret_cast( pInternalRepresentations ) ) ); + d.vkCmdSetAlphaToOneEnableEXT( static_cast( m_commandBuffer ), static_cast( alphaToOneEnable ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< - std::vector>::type - Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const + template + VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 
+ d.vkCmdSetLogicOpEnableEXT( static_cast( m_commandBuffer ), static_cast( logicOpEnable ) ); + } - std::vector - internalRepresentations; - uint32_t internalRepresentationCount; - VkResult result; - do - { - result = d.vkGetPipelineExecutableInternalRepresentationsKHR( - m_device, reinterpret_cast( &executableInfo ), &internalRepresentationCount, nullptr ); - if ( ( result == VK_SUCCESS ) && internalRepresentationCount ) - { - internalRepresentations.resize( internalRepresentationCount ); - result = d.vkGetPipelineExecutableInternalRepresentationsKHR( - m_device, - reinterpret_cast( &executableInfo ), - &internalRepresentationCount, - reinterpret_cast( internalRepresentations.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); - VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); - if ( internalRepresentationCount < internalRepresentations.size() ) - { - internalRepresentations.resize( internalRepresentationCount ); - } - return createResultValueType( static_cast( result ), internalRepresentations ); + template + VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::Bool32 * pColorBlendEnables, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetColorBlendEnableEXT( + static_cast( m_commandBuffer ), firstAttachment, attachmentCount, reinterpret_cast( pColorBlendEnables ) ); } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< - std::vector>::type - Device::getPipelineExecutableInternalRepresentationsKHR( - const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, - PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator, - Dispatch const & d ) const +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendEnables, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetColorBlendEnableEXT && + "Function requires or " ); +# endif - std::vector - internalRepresentations( pipelineExecutableInternalRepresentationKHRAllocator ); - uint32_t internalRepresentationCount; - VkResult result; - do - { - result = d.vkGetPipelineExecutableInternalRepresentationsKHR( - m_device, reinterpret_cast( &executableInfo ), &internalRepresentationCount, nullptr ); - if ( ( result == VK_SUCCESS ) && internalRepresentationCount ) - { - internalRepresentations.resize( internalRepresentationCount ); - result = d.vkGetPipelineExecutableInternalRepresentationsKHR( - m_device, - reinterpret_cast( &executableInfo ), - &internalRepresentationCount, - reinterpret_cast( internalRepresentations.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); - VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); - if ( internalRepresentationCount < internalRepresentations.size() ) - { - internalRepresentations.resize( internalRepresentationCount ); - } - 
return createResultValueType( static_cast( result ), internalRepresentations ); + d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, colorBlendEnables.size(), reinterpret_cast( colorBlendEnables.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_NV_device_generated_commands === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo, - VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT * pColorBlendEquations, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, - reinterpret_cast( pInfo ), - reinterpret_cast( pMemoryRequirements ) ); + d.vkCmdSetColorBlendEquationEXT( static_cast( m_commandBuffer ), + firstAttachment, + attachmentCount, + reinterpret_cast( pColorBlendEquations ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 - Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendEquations, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetColorBlendEquationEXT && + "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; - d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); + d.vkCmdSetColorBlendEquationEXT( + m_commandBuffer, firstAttachment, colorBlendEquations.size(), reinterpret_cast( colorBlendEquations.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - return memoryRequirements; + template + VULKAN_HPP_INLINE void CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ColorComponentFlags * pColorWriteMasks, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetColorWriteMaskEXT( + static_cast( m_commandBuffer ), firstAttachment, attachmentCount, reinterpret_cast( pColorWriteMasks ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorWriteMasks, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetColorWriteMaskEXT && + "Function requires or " ); +# 
endif - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); - d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); + d.vkCmdSetColorWriteMaskEXT( + m_commandBuffer, firstAttachment, colorWriteMasks.size(), reinterpret_cast( colorWriteMasks.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - return structureChain; + template + VULKAN_HPP_INLINE void CommandBuffer::setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetTessellationDomainOriginEXT( static_cast( m_commandBuffer ), static_cast( domainOrigin ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast( pGeneratedCommandsInfo ) ); + d.vkCmdSetRasterizationStreamEXT( static_cast( m_commandBuffer ), rasterizationStream ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast( &generatedCommandsInfo ) ); + d.vkCmdSetConservativeRasterizationModeEXT( static_cast( m_commandBuffer ), + static_cast( conservativeRasterizationMode ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, - const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdExecuteGeneratedCommandsNV( - m_commandBuffer, static_cast( isPreprocessed ), reinterpret_cast( pGeneratedCommandsInfo ) ); + d.vkCmdSetExtraPrimitiveOverestimationSizeEXT( static_cast( m_commandBuffer ), extraPrimitiveOverestimationSize ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, - const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() 
== VK_HEADER_VERSION ); - - d.vkCmdExecuteGeneratedCommandsNV( - m_commandBuffer, static_cast( isPreprocessed ), reinterpret_cast( &generatedCommandsInfo ) ); + d.vkCmdSetDepthClipEnableEXT( static_cast( m_commandBuffer ), static_cast( depthClipEnable ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, - VULKAN_HPP_NAMESPACE::Pipeline pipeline, - uint32_t groupIndex, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ), groupIndex ); + d.vkCmdSetSampleLocationsEnableEXT( static_cast( m_commandBuffer ), static_cast( sampleLocationsEnable ) ); } template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT * pColorBlendAdvanced, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateIndirectCommandsLayoutNV( m_device, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pIndirectCommandsLayout ) ) ); + d.vkCmdSetColorBlendAdvancedEXT( static_cast( m_commandBuffer ), + firstAttachment, + attachmentCount, + reinterpret_cast( pColorBlendAdvanced ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_INLINE void + CommandBuffer::setColorBlendAdvancedEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendAdvanced, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetColorBlendAdvancedEXT && + "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; - VkResult result = d.vkCreateIndirectCommandsLayoutNV( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &indirectCommandsLayout ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" ); - - return createResultValueType( static_cast( result ), indirectCommandsLayout ); + d.vkCmdSetColorBlendAdvancedEXT( + m_commandBuffer, firstAttachment, colorBlendAdvanced.size(), reinterpret_cast( colorBlendAdvanced.data() ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - 
Device::createIndirectCommandsLayoutNVUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; - VkResult result = d.vkCreateIndirectCommandsLayoutNV( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &indirectCommandsLayout ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique" ); - - return createResultValueType( static_cast( result ), - UniqueHandle( - indirectCommandsLayout, ObjectDestroy( *this, allocator, d ) ) ); + d.vkCmdSetProvokingVertexModeEXT( static_cast( m_commandBuffer ), static_cast( provokingVertexMode ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyIndirectCommandsLayoutNV( - m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); + d.vkCmdSetLineRasterizationModeEXT( static_cast( m_commandBuffer ), static_cast( lineRasterizationMode ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - d.vkDestroyIndirectCommandsLayoutNV( - m_device, - static_cast( indirectCommandsLayout ), - reinterpret_cast( static_cast( allocator ) ) ); + d.vkCmdSetLineStippleEnableEXT( static_cast( m_commandBuffer ), static_cast( stippledLineEnable ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyIndirectCommandsLayoutNV( - m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); + d.vkCmdSetDepthClipNegativeOneToOneEXT( static_cast( m_commandBuffer ), static_cast( negativeOneToOne ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, 
- Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - d.vkDestroyIndirectCommandsLayoutNV( - m_device, - static_cast( indirectCommandsLayout ), - reinterpret_cast( static_cast( allocator ) ) ); + d.vkCmdSetViewportWScalingEnableNV( static_cast( m_commandBuffer ), static_cast( viewportWScalingEnable ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - //=== VK_EXT_acquire_drm_display === - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, - VULKAN_HPP_NAMESPACE::DisplayKHR display, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast( display ) ) ); + d.vkCmdSetViewportSwizzleNV( + static_cast( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast( pViewportSwizzles ) ); } -#else + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type - PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + VULKAN_HPP_INLINE void + CommandBuffer::setViewportSwizzleNV( uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewportSwizzles, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetViewportSwizzleNV && + "Function requires or " ); +# endif - VkResult result = d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast( display ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" ); - - return createResultValueType( static_cast( result ) ); + d.vkCmdSetViewportSwizzleNV( + m_commandBuffer, firstViewport, viewportSwizzles.size(), reinterpret_cast( viewportSwizzles.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, - uint32_t connectorId, - VULKAN_HPP_NAMESPACE::DisplayKHR * display, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast( display ) ) ); + d.vkCmdSetCoverageToColorEnableNV( static_cast( m_commandBuffer ), static_cast( coverageToColorEnable ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( 
uint32_t coverageToColorLocation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VULKAN_HPP_NAMESPACE::DisplayKHR display; - VkResult result = d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast( &display ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXT" ); - - return createResultValueType( static_cast( result ), display ); + d.vkCmdSetCoverageToColorLocationNV( static_cast( m_commandBuffer ), coverageToColorLocation ); } -# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::getDrmDisplayEXTUnique( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VULKAN_HPP_NAMESPACE::DisplayKHR display; - VkResult result = d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast( &display ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique" ); - - return createResultValueType( static_cast( result ), - UniqueHandle( display, ObjectRelease( *this, d ) ) ); + d.vkCmdSetCoverageModulationModeNV( static_cast( m_commandBuffer ), static_cast( coverageModulationMode ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - //=== VK_EXT_private_data === + template + VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetCoverageModulationTableEnableNV( static_cast( m_commandBuffer ), static_cast( coverageModulationTableEnable ) ); + } template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( uint32_t coverageModulationTableCount, + const float * pCoverageModulationTable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreatePrivateDataSlotEXT( m_device, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pPrivateDataSlot ) ) ); + d.vkCmdSetCoverageModulationTableNV( static_cast( m_commandBuffer ), coverageModulationTableCount, pCoverageModulationTable ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & coverageModulationTable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 
== 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetCoverageModulationTableNV && + "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; - VkResult result = d.vkCreatePrivateDataSlotEXT( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &privateDataSlot ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" ); - - return createResultValueType( static_cast( result ), privateDataSlot ); + d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTable.size(), coverageModulationTable.data() ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::createPrivateDataSlotEXTUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; - VkResult result = d.vkCreatePrivateDataSlotEXT( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &privateDataSlot ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique" ); - - return createResultValueType( - static_cast( result ), - UniqueHandle( privateDataSlot, ObjectDestroy( *this, allocator, d ) ) ); + d.vkCmdSetShadingRateImageEnableNV( static_cast( m_commandBuffer ), static_cast( shadingRateImageEnable ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPrivateDataSlotEXT( m_device, static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); + d.vkCmdSetRepresentativeFragmentTestEnableNV( static_cast( m_commandBuffer ), static_cast( representativeFragmentTestEnable ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - d.vkDestroyPrivateDataSlotEXT( - m_device, - static_cast( privateDataSlot ), - reinterpret_cast( static_cast( allocator ) ) ); + d.vkCmdSetCoverageReductionModeNV( static_cast( m_commandBuffer ), static_cast( coverageReductionMode ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + //=== VK_EXT_shader_module_identifier === + template - VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE Result Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, - uint64_t objectHandle, - VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, - uint64_t data, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkSetPrivateDataEXT( m_device, static_cast( objectType ), objectHandle, static_cast( privateDataSlot ), data ) ); + d.vkGetShaderModuleIdentifierEXT( + static_cast( m_device ), static_cast( shaderModule ), reinterpret_cast( pIdentifier ) ); } -#else + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, - uint64_t objectHandle, - VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, - uint64_t data, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT + Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetShaderModuleIdentifierEXT && "Function requires " ); +# endif - VkResult result = - d.vkSetPrivateDataEXT( m_device, static_cast( objectType ), objectHandle, static_cast( privateDataSlot ), data ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" ); + VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier; + d.vkGetShaderModuleIdentifierEXT( m_device, static_cast( shaderModule ), reinterpret_cast( &identifier ) ); - return createResultValueType( static_cast( result ) ); + return identifier; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, - uint64_t objectHandle, - VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, - uint64_t * pData, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPrivateDataEXT( m_device, static_cast( objectType ), objectHandle, static_cast( privateDataSlot ), pData ); + d.vkGetShaderModuleCreateInfoIdentifierEXT( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pIdentifier ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, - uint64_t objectHandle, - VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT + Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetShaderModuleCreateInfoIdentifierEXT && + "Function requires " ); +# endif - uint64_t data; - d.vkGetPrivateDataEXT( m_device, static_cast( objectType ), objectHandle, static_cast( privateDataSlot ), &data ); + VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier; + d.vkGetShaderModuleCreateInfoIdentifierEXT( + m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &identifier ) ); - return data; + return identifier; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_encode_queue === + //=== VK_NV_optical_flow === template - VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo, + uint32_t * pFormatCount, + VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV * pImageFormatProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast( pEncodeInfo ) ); + return static_cast( + d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( static_cast( m_physicalDevice ), + reinterpret_cast( pOpticalFlowImageFormatInfo ), + pFormatCount, + reinterpret_cast( pImageFormatProperties ) ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV && + "Function requires " ); +# endif - d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast( &encodeInfo ) ); + std::vector imageFormatProperties; + uint32_t formatCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( + m_physicalDevice, reinterpret_cast( &opticalFlowImageFormatInfo ), &formatCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && formatCount ) + { + imageFormatProperties.resize( formatCount ); + result = static_cast( + d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, + reinterpret_cast( &opticalFlowImageFormatInfo ), + &formatCount, + reinterpret_cast( imageFormatProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" ); + VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() ); + if ( formatCount < imageFormatProperties.size() ) + { + imageFormatProperties.resize( formatCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 
result, std::move( imageFormatProperties ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_objects === + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, + OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV && + "Function requires " ); +# endif + + std::vector imageFormatProperties( + opticalFlowImageFormatPropertiesNVAllocator ); + uint32_t formatCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( + m_physicalDevice, reinterpret_cast( &opticalFlowImageFormatInfo ), &formatCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && formatCount ) + { + imageFormatProperties.resize( formatCount ); + result = static_cast( + d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, + reinterpret_cast( &opticalFlowImageFormatInfo ), + &formatCount, + reinterpret_cast( imageFormatProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" ); + VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() ); + if ( formatCount < imageFormatProperties.size() ) + { + imageFormatProperties.resize( formatCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::exportMetalObjectsEXT( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV * pSession, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkExportMetalObjectsEXT( m_device, reinterpret_cast( pMetalObjectsInfo ) ); + return static_cast( d.vkCreateOpticalFlowSessionNV( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pSession ) ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT - Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateOpticalFlowSessionNV && "Function requires " ); +# 
endif - VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT metalObjectsInfo; - d.vkExportMetalObjectsEXT( m_device, reinterpret_cast( &metalObjectsInfo ) ); + VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateOpticalFlowSessionNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &session ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNV" ); - return metalObjectsInfo; + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( session ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createOpticalFlowSessionNVUnique( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateOpticalFlowSessionNV && "Function requires " ); +# endif - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT & metalObjectsInfo = structureChain.template get(); - d.vkExportMetalObjectsEXT( m_device, reinterpret_cast( &metalObjectsInfo ) ); + VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateOpticalFlowSessionNV( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &session ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNVUnique" ); - return structureChain; + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( session, detail::ObjectDestroy( *this, allocator, d ) ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - - //=== VK_KHR_synchronization2 === +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, - const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast( event ), reinterpret_cast( pDependencyInfo ) ); + d.vkDestroyOpticalFlowSessionNV( + static_cast( m_device ), static_cast( session ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, - const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( 
VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyOpticalFlowSessionNV && "Function requires " ); +# endif - d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast( event ), reinterpret_cast( &dependencyInfo ) ); + d.vkDestroyOpticalFlowSessionNV( + m_device, + static_cast( session ), + reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, - VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdResetEvent2KHR( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + d.vkDestroyOpticalFlowSessionNV( + static_cast( m_device ), static_cast( session ), reinterpret_cast( pAllocator ) ); } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( uint32_t eventCount, - const VULKAN_HPP_NAMESPACE::Event * pEvents, - const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWaitEvents2KHR( - m_commandBuffer, eventCount, reinterpret_cast( pEvents ), reinterpret_cast( pDependencyInfos ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyOpticalFlowSessionNV && "Function requires " ); +# endif + + d.vkDestroyOpticalFlowSessionNV( + m_device, + static_cast( session ), + reinterpret_cast( static_cast( allocator ) ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, - VULKAN_HPP_NAMESPACE::ArrayProxy const & dependencyInfos, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, + VULKAN_HPP_NAMESPACE::ImageView view, + VULKAN_HPP_NAMESPACE::ImageLayout layout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); -# else - if ( events.size() != dependencyInfos.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" ); - } -# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + return static_cast( d.vkBindOpticalFlowSessionImageNV( static_cast( m_device ), + static_cast( session ), + static_cast( bindingPoint ), + static_cast( view ), + static_cast( layout ) ) ); + } +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV 
session, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, + VULKAN_HPP_NAMESPACE::ImageView view, + VULKAN_HPP_NAMESPACE::ImageLayout layout, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkBindOpticalFlowSessionImageNV && "Function requires " ); +# endif - d.vkCmdWaitEvents2KHR( m_commandBuffer, - events.size(), - reinterpret_cast( events.data() ), - reinterpret_cast( dependencyInfos.data() ) ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkBindOpticalFlowSessionImageNV( m_device, + static_cast( session ), + static_cast( bindingPoint ), + static_cast( view ), + static_cast( layout ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindOpticalFlowSessionImageNV" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV * pExecuteInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast( pDependencyInfo ) ); + d.vkCmdOpticalFlowExecuteNV( static_cast( m_commandBuffer ), + static_cast( session ), + reinterpret_cast( pExecuteInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdOpticalFlowExecuteNV && "Function requires " ); +# endif - d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast( &dependencyInfo ) ); + d.vkCmdOpticalFlowExecuteNV( + m_commandBuffer, static_cast( session ), reinterpret_cast( &executeInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_maintenance5 === template - VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, - VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t query, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWriteTimestamp2KHR( m_commandBuffer, static_cast( stage ), static_cast( queryPool ), query ); + d.vkCmdBindIndexBuffer2KHR( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( size ), + static_cast( indexType ) ); } template - 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2KHR( uint32_t submitCount, - const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, - VULKAN_HPP_NAMESPACE::Fence fence, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo * pRenderingAreaInfo, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkQueueSubmit2KHR( m_queue, submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); + d.vkGetRenderingAreaGranularityKHR( + static_cast( m_device ), reinterpret_cast( pRenderingAreaInfo ), reinterpret_cast( pGranularity ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type Queue::submit2KHR( - VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D + Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRenderingAreaGranularityKHR && "Function requires or " ); +# endif - VkResult result = d.vkQueueSubmit2KHR( m_queue, submits.size(), reinterpret_cast( submits.data() ), static_cast( fence ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" ); - - return createResultValueType( static_cast( result ) ); - } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NAMESPACE::Extent2D granularity; + d.vkGetRenderingAreaGranularityKHR( + m_device, reinterpret_cast( &renderingAreaInfo ), reinterpret_cast( &granularity ) ); - template - VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, - uint32_t marker, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWriteBufferMarker2AMD( - m_commandBuffer, static_cast( stage ), static_cast( dstBuffer ), static_cast( dstOffset ), marker ); + return granularity; } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t * pCheckpointDataCount, - VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo * pInfo, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetQueueCheckpointData2NV( m_queue, pCheckpointDataCount, reinterpret_cast( pCheckpointData ) ); + d.vkGetDeviceImageSubresourceLayoutKHR( + static_cast( m_device ), reinterpret_cast( pInfo ), reinterpret_cast( pLayout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Queue::getCheckpointData2NV( Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 + 
Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayoutKHR && + "Function requires or " ); +# endif - std::vector checkpointData; - uint32_t checkpointDataCount; - d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); - checkpointData.resize( checkpointDataCount ); - d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; + d.vkGetDeviceImageSubresourceLayoutKHR( + m_device, reinterpret_cast( &info ), reinterpret_cast( &layout ) ); - VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); - if ( checkpointDataCount < checkpointData.size() ) - { - checkpointData.resize( checkpointDataCount ); - } - return checkpointData; + return layout; } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayoutKHR && + "Function requires or " ); +# endif - std::vector checkpointData( checkpointData2NVAllocator ); - uint32_t checkpointDataCount; - d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); - checkpointData.resize( checkpointDataCount ); - d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); + d.vkGetDeviceImageSubresourceLayoutKHR( + m_device, reinterpret_cast( &info ), reinterpret_cast( &layout ) ); - VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); - if ( checkpointDataCount < checkpointData.size() ) - { - checkpointData.resize( checkpointDataCount ); - } - return checkpointData; + return structureChain; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_NV_fragment_shading_rate_enums === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, - const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetFragmentShadingRateEnumNV( - m_commandBuffer, static_cast( shadingRate ), reinterpret_cast( combinerOps ) ); + d.vkGetImageSubresourceLayout2KHR( static_cast( m_device ), + static_cast( image ), + reinterpret_cast( pSubresource ), + reinterpret_cast( pLayout ) ); } - //=== VK_EXT_mesh_shader 
===
-
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template <typename Dispatch>
-  VULKAN_HPP_INLINE void
-    CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 Device::getImageSubresourceLayout2KHR(
+    VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdDrawMeshTasksEXT( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
-  }
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT(
+      d.vkGetImageSubresourceLayout2KHR &&
+      "Function requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5> or <VK_VERSION_1_4>" );
+#  endif

-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
-                                                                  VULKAN_HPP_NAMESPACE::DeviceSize offset,
-                                                                  uint32_t drawCount,
-                                                                  uint32_t stride,
-                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdDrawMeshTasksIndirectEXT( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+    VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout;
+    d.vkGetImageSubresourceLayout2KHR( m_device,
+                                       static_cast<VkImage>( image ),
+                                       reinterpret_cast<const VkImageSubresource2 *>( &subresource ),
+                                       reinterpret_cast<VkSubresourceLayout2 *>( &layout ) );
+
+    return layout;
   }

-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
-                                                                       VULKAN_HPP_NAMESPACE::DeviceSize offset,
-                                                                       VULKAN_HPP_NAMESPACE::Buffer countBuffer,
-                                                                       VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
-                                                                       uint32_t maxDrawCount,
-                                                                       uint32_t stride,
-                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2KHR(
+    VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdDrawMeshTasksIndirectCountEXT( m_commandBuffer,
-                                          static_cast<VkBuffer>( buffer ),
-                                          static_cast<VkDeviceSize>( offset ),
-                                          static_cast<VkBuffer>( countBuffer ),
-                                          static_cast<VkDeviceSize>( countBufferOffset ),
-                                          maxDrawCount,
-                                          stride );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT(
+      d.vkGetImageSubresourceLayout2KHR &&
+      "Function requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5> or <VK_VERSION_1_4>" );
+#  endif
+
+    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::SubresourceLayout2 &       layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2>();
+    d.vkGetImageSubresourceLayout2KHR( m_device,
+                                       static_cast<VkImage>( image ),
+                                       reinterpret_cast<const VkImageSubresource2 *>( &subresource ),
+                                       reinterpret_cast<VkSubresourceLayout2 *>( &layout ) );
+
+    return structureChain;
   }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

-  //=== VK_KHR_copy_commands2 ===
+  //=== VK_AMD_anti_lag ===

   template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,
-                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE void Device::antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
+    d.vkAntiLagUpdateAMD( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAntiLagDataAMD *>( pData ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
   template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,
-                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_INLINE void Device::antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkAntiLagUpdateAMD && "Function requires <VK_AMD_anti_lag>" );
+#  endif

-    d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
+    d.vkAntiLagUpdateAMD( m_device, reinterpret_cast<const VkAntiLagDataAMD *>( &data ) );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_EXT_shader_object ===

   template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo,
-                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShadersEXT( uint32_t createInfoCount,
+                                                                          const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT * pCreateInfos,
+                                                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                                                          VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders,
+                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
   {
     VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
+    return static_cast<Result>( d.vkCreateShadersEXT( static_cast<VkDevice>( m_device ),
+                                                      createInfoCount,
+                                                      reinterpret_cast<const VkShaderCreateInfoEXT *>( pCreateInfos ),
+                                                      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
+                                                      reinterpret_cast<VkShaderEXT *>( pShaders ) ) );
   }

 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo,
-                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+  template <typename ShaderEXTAllocator,
+            typename Dispatch,
+            typename std::enable_if<std::is_same<typename ShaderEXTAllocator::value_type, VULKAN_HPP_NAMESPACE::ShaderEXT>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>
+    Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
+                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+                              Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#  if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function requires <VK_EXT_shader_object>" );
+#  endif
+
+    std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator> shaders( createInfos.size() );
+    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+      d.vkCreateShadersEXT( m_device,
+                            createInfos.size(),
+                            reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
+                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+                            reinterpret_cast<VkShaderEXT *>( shaders.data() ) ) );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
+                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT",
+                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } );

-    d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
+    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>( result, std::move( shaders ) );
   }
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-  template <typename Dispatch>
-  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,
-                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
-  {
-    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
-    d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
+  template <typename ShaderEXTAllocator,
+            typename Dispatch,
+            typename std::enable_if<std::is_same<typename ShaderEXTAllocator::value_type, VULKAN_HPP_NAMESPACE::ShaderEXT>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>
+    Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
+                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
+
ShaderEXTAllocator & shaderEXTAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function requires " ); +# endif + + std::vector shaders( createInfos.size(), shaderEXTAllocator ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateShadersEXT( m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( shaders.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); + + return ResultValue>( result, std::move( shaders ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function requires " ); +# endif - d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast( ©BufferToImageInfo ) ); + VULKAN_HPP_NAMESPACE::ShaderEXT shader; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateShadersEXT( m_device, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &shader ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); + + return ResultValue( result, std::move( shader ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template < + typename Dispatch, + typename ShaderEXTAllocator, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, ShaderEXTAllocator>> + Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function requires " ); +# endif + + std::vector shaders( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateShadersEXT( m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( shaders.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); + std::vector, ShaderEXTAllocator> uniqueShaders; + uniqueShaders.reserve( createInfos.size() ); + detail::ObjectDestroy 
deleter( *this, allocator, d ); + for ( auto const & shader : shaders ) + { + uniqueShaders.push_back( UniqueHandle( shader, deleter ) ); + } + return ResultValue, ShaderEXTAllocator>>( result, std::move( uniqueShaders ) ); + } + + template < + typename Dispatch, + typename ShaderEXTAllocator, + typename std::enable_if>::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, ShaderEXTAllocator>> + Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + ShaderEXTAllocator & shaderEXTAllocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast( pCopyImageToBufferInfo ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function requires " ); +# endif + + std::vector shaders( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateShadersEXT( m_device, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( shaders.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); + std::vector, ShaderEXTAllocator> uniqueShaders( shaderEXTAllocator ); + uniqueShaders.reserve( createInfos.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & shader : shaders ) + { + uniqueShaders.push_back( UniqueHandle( shader, deleter ) ); + } + return ResultValue, ShaderEXTAllocator>>( result, std::move( uniqueShaders ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateShadersEXT && "Function requires " ); +# endif - d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast( ©ImageToBufferInfo ) ); + VULKAN_HPP_NAMESPACE::ShaderEXT shader; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCreateShadersEXT( m_device, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &shader ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXTUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); + + return ResultValue>( + result, UniqueHandle( shader, detail::ObjectDestroy( *this, allocator, d ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast( pBlitImageInfo ) ); + d.vkDestroyShaderEXT( + static_cast( m_device ), static_cast( shader ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyShaderEXT && "Function requires " ); +# endif - d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast( &blitImageInfo ) ); + d.vkDestroyShaderEXT( m_device, + static_cast( shader ), + reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast( pResolveImageInfo ) ); + d.vkDestroyShaderEXT( + static_cast( m_device ), static_cast( shader ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyShaderEXT && "Function requires " ); +# endif - d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast( &resolveImageInfo ) ); + d.vkDestroyShaderEXT( m_device, + static_cast( shader ), + reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_EXT_image_compression_control === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT * pSubresource, - VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT * pLayout, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, size_t * pDataSize, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageSubresourceLayout2EXT( m_device, - static_cast( image ), - reinterpret_cast( pSubresource ), - reinterpret_cast( pLayout ) ); + return static_cast( d.vkGetShaderBinaryDataEXT( static_cast( m_device ), static_cast( shader ), pDataSize, pData ) ); } #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT Device::getImageSubresourceLayout2EXT( - VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetShaderBinaryDataEXT && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT layout; - d.vkGetImageSubresourceLayout2EXT( m_device, - static_cast( image ), - reinterpret_cast( &subresource ), - reinterpret_cast( &layout ) ); - - return layout; + std::vector data; + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetShaderBinaryDataEXT( m_device, static_cast( shader ), &dataSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( + d.vkGetShaderBinaryDataEXT( m_device, static_cast( shader ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain Device::getImageSubresourceLayout2EXT( - VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetShaderBinaryDataEXT && "Function requires " ); +# endif - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT & layout = structureChain.template get(); - d.vkGetImageSubresourceLayout2EXT( m_device, - static_cast( image ), - reinterpret_cast( &subresource ), - reinterpret_cast( &layout ) ); - - return structureChain; + std::vector data( uint8_tAllocator ); + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetShaderBinaryDataEXT( m_device, static_cast( shader ), &dataSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( + d.vkGetShaderBinaryDataEXT( m_device, static_cast( shader ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_acquire_winrt_display === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast( display ) ) ); - } -# else template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( uint32_t stageCount, + const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits * pStages, + const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VkResult result = d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast( display ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" ); - - return createResultValueType( static_cast( result ) ); + d.vkCmdBindShadersEXT( static_cast( m_commandBuffer ), + stageCount, + reinterpret_cast( pStages ), + reinterpret_cast( pShaders ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, - VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & stages, + VULKAN_HPP_NAMESPACE::ArrayProxy const & shaders, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast( pDisplay ) ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindShadersEXT && "Function requires " ); +# endif +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( stages.size() == shaders.size() ); +# else + if ( stages.size() != shaders.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindShadersEXT: stages.size() != shaders.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + d.vkCmdBindShadersEXT( m_commandBuffer, + stages.size(), + reinterpret_cast( stages.data() ), + reinterpret_cast( shaders.data() ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode, + const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT * pDepthClampRange, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VULKAN_HPP_NAMESPACE::DisplayKHR display; - VkResult result = d.vkGetWinrtDisplayNV( m_physicalDevice, 
deviceRelativeId, reinterpret_cast( &display ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" ); - - return createResultValueType( static_cast( result ), display ); + d.vkCmdSetDepthClampRangeEXT( static_cast( m_commandBuffer ), + static_cast( depthClampMode ), + reinterpret_cast( pDepthClampRange ) ); } -# ifndef VULKAN_HPP_NO_SMART_HANDLE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode, + Optional depthClampRange, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetDepthClampRangeEXT && + "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::DisplayKHR display; - VkResult result = d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast( &display ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique" ); - - return createResultValueType( static_cast( result ), - UniqueHandle( display, ObjectRelease( *this, d ) ) ); + d.vkCmdSetDepthClampRangeEXT( + m_commandBuffer, + static_cast( depthClampMode ), + reinterpret_cast( static_cast( depthClampRange ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) - //=== VK_EXT_directfb_surface === + //=== VK_KHR_pipeline_binary === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR * pBinaries, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDirectFBSurfaceEXT( m_instance, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pSurface ) ) ); + return static_cast( d.vkCreatePipelineBinariesKHR( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pBinaries ) ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, - Optional allocator, - Dispatch const & d ) const +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = d.vkCreateDirectFBSurfaceEXT( - m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" ); + std::vector pipelineBinaries; + VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries; + VULKAN_HPP_NAMESPACE::Result result; + if ( createInfo.pKeysAndDataInfo ) + { + VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); + pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); + binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + else + { + VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + pipelineBinaries.resize( binaries.pipelineBinaryCount ); + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + } + + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } ); - return createResultValueType( static_cast( result ), surface ); + return ResultValue>( result, std::move( pipelineBinaries ) ); } -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Instance::createDirectFBSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, - Optional allocator, - Dispatch const & d ) const + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional allocator, + PipelineBinaryKHRAllocator & pipelineBinaryKHRAllocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = d.vkCreateDirectFBSurfaceEXT( - m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique" ); + std::vector pipelineBinaries( pipelineBinaryKHRAllocator ); + VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries; + VULKAN_HPP_NAMESPACE::Result result; + if ( createInfo.pKeysAndDataInfo ) + { + VULKAN_HPP_ASSERT( 
!createInfo.pipeline && !createInfo.pPipelineCreateInfo ); + pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); + binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + else + { + VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + pipelineBinaries.resize( binaries.pipelineBinaryCount ); + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + } - return createResultValueType( - static_cast( result ), - UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } ); + + return ResultValue>( result, std::move( pipelineBinaries ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, - IDirectFB * dfb, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineBinaryKHRAllocator>> + Device::createPipelineBinariesKHRUnique( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function requires " ); +# endif + + std::vector pipelineBinaries; + VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries; + VULKAN_HPP_NAMESPACE::Result result; + if ( createInfo.pKeysAndDataInfo ) + { + VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); + pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); + binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + else + { + VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + pipelineBinaries.resize( binaries.pipelineBinaryCount ); + 
binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + } + + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } ); + std::vector, PipelineBinaryKHRAllocator> uniquePipelineBinaries; + uniquePipelineBinaries.reserve( pipelineBinaries.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipelineBinary : pipelineBinaries ) + { + uniquePipelineBinaries.push_back( UniqueHandle( pipelineBinary, deleter ) ); + } + return ResultValue, PipelineBinaryKHRAllocator>>( + result, std::move( uniquePipelineBinaries ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 - PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template >::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineBinaryKHRAllocator>> + Device::createPipelineBinariesKHRUnique( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional allocator, + PipelineBinaryKHRAllocator & pipelineBinaryKHRAllocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function requires " ); +# endif - VkBool32 result = d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb ); + std::vector pipelineBinaries; + VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries; + VULKAN_HPP_NAMESPACE::Result result; + if ( createInfo.pKeysAndDataInfo ) + { + VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); + pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); + binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + else + { + VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + pipelineBinaries.resize( binaries.pipelineBinaryCount ); + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + } - return static_cast( result ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } ); + std::vector, PipelineBinaryKHRAllocator> uniquePipelineBinaries( + 
pipelineBinaryKHRAllocator ); + uniquePipelineBinaries.reserve( pipelineBinaries.size() ); + detail::ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipelineBinary : pipelineBinaries ) + { + uniquePipelineBinaries.push_back( UniqueHandle( pipelineBinary, deleter ) ); + } + return ResultValue, PipelineBinaryKHRAllocator>>( + result, std::move( uniquePipelineBinaries ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ - - //=== VK_KHR_ray_tracing_pipeline === +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, - uint32_t width, - uint32_t height, - uint32_t depth, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyPipelineBinaryKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdTraceRaysKHR( m_commandBuffer, - reinterpret_cast( pRaygenShaderBindingTable ), - reinterpret_cast( pMissShaderBindingTable ), - reinterpret_cast( pHitShaderBindingTable ), - reinterpret_cast( pCallableShaderBindingTable ), - width, - height, - depth ); + d.vkDestroyPipelineBinaryKHR( + static_cast( m_device ), static_cast( pipelineBinary ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, - uint32_t width, - uint32_t height, - uint32_t depth, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyPipelineBinaryKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipelineBinaryKHR && "Function requires " ); +# endif - d.vkCmdTraceRaysKHR( m_commandBuffer, - reinterpret_cast( &raygenShaderBindingTable ), - reinterpret_cast( &missShaderBindingTable ), - reinterpret_cast( &hitShaderBindingTable ), - reinterpret_cast( &callableShaderBindingTable ), - width, - height, - depth ); + d.vkDestroyPipelineBinaryKHR( + m_device, + static_cast( pipelineBinary ), + reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - uint32_t createInfoCount, - const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * 
pCreateInfos, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateRayTracingPipelinesKHR( m_device, - static_cast( deferredOperation ), - static_cast( pipelineCache ), - createInfoCount, - reinterpret_cast( pCreateInfos ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pPipelines ) ) ); + d.vkDestroyPipelineBinaryKHR( + static_cast( m_device ), static_cast( pipelineBinary ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> - Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - Dispatch const & d ) const + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipelineBinaryKHR && "Function requires " ); +# endif - std::vector pipelines( createInfos.size() ); - VkResult result = d.vkCreateRayTracingPipelinesKHR( + d.vkDestroyPipelineBinaryKHR( m_device, - static_cast( deferredOperation ), - static_cast( pipelineCache ), - createInfos.size(), - reinterpret_cast( createInfos.data() ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( pipelines.data() ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, - VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, - VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - - return ResultValue>( static_cast( result ), pipelines ); + static_cast( pipelineBinary ), + reinterpret_cast( static_cast( allocator ) ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> - Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - PipelineAllocator & pipelineAllocator, - Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineKeyKHR( const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR * pPipelineCreateInfo, + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineKey, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - std::vector pipelines( createInfos.size(), pipelineAllocator ); - VkResult result = d.vkCreateRayTracingPipelinesKHR( - m_device, - static_cast( deferredOperation ), - static_cast( pipelineCache ), - createInfos.size(), - reinterpret_cast( createInfos.data() ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( 
pipelines.data() ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, - VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, - VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - - return ResultValue>( static_cast( result ), pipelines ); + return static_cast( d.vkGetPipelineKeyKHR( static_cast( m_device ), + reinterpret_cast( pPipelineCreateInfo ), + reinterpret_cast( pPipelineKey ) ) ); } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue - Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getPipelineKeyKHR( Optional pipelineCreateInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineKeyKHR && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::Pipeline pipeline; - VkResult result = d.vkCreateRayTracingPipelinesKHR( + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR pipelineKey; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPipelineKeyKHR( m_device, - static_cast( deferredOperation ), - static_cast( pipelineCache ), - 1, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &pipeline ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, - VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, - VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + reinterpret_cast( static_cast( pipelineCreateInfo ) ), + reinterpret_cast( &pipelineKey ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineKeyKHR" ); - return ResultValue( static_cast( result ), pipeline ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineKey ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> - Device::createRayTracingPipelinesKHRUnique( - VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR * pInfo, + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineBinaryKey, + size_t * pPipelineBinaryDataSize, + void * pPipelineBinaryData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPipelineBinaryDataKHR( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pPipelineBinaryKey ), + pPipelineBinaryDataSize, + pPipelineBinaryData ) ); + } - std::vector pipelines( createInfos.size() ); - VkResult 
result = d.vkCreateRayTracingPipelinesKHR( - m_device, - static_cast( deferredOperation ), - static_cast( pipelineCache ), - createInfos.size(), - reinterpret_cast( createInfos.data() ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( pipelines.data() ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, - VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, - VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - std::vector, PipelineAllocator> uniquePipelines; - uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( auto const & pipeline : pipelines ) +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>>::type + Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineBinaryDataKHR && "Function requires " ); +# endif + + std::pair> data_; + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR & pipelineBinaryKey = data_.first; + std::vector & pipelineBinaryData = data_.second; + size_t pipelineBinaryDataSize; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetPipelineBinaryDataKHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &pipelineBinaryKey ), + &pipelineBinaryDataSize, + nullptr ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + pipelineBinaryData.resize( pipelineBinaryDataSize ); + result = static_cast( d.vkGetPipelineBinaryDataKHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &pipelineBinaryKey ), + &pipelineBinaryDataSize, + reinterpret_cast( pipelineBinaryData.data() ) ) ); } - return ResultValue, PipelineAllocator>>( - static_cast( result ), std::move( uniquePipelines ) ); - } - template >::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> - Device::createRayTracingPipelinesKHRUnique( - VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - PipelineAllocator & pipelineAllocator, - Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineBinaryDataKHR" ); - std::vector pipelines( createInfos.size() ); - VkResult result = d.vkCreateRayTracingPipelinesKHR( - m_device, - static_cast( deferredOperation ), - static_cast( pipelineCache ), - createInfos.size(), - reinterpret_cast( createInfos.data() ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( pipelines.data() ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, - VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, - VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); - 
uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); - for ( auto const & pipeline : pipelines ) + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>>::type + Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineBinaryDataKHR && "Function requires " ); +# endif + + std::pair> data_( + std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) ); + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR & pipelineBinaryKey = data_.first; + std::vector & pipelineBinaryData = data_.second; + size_t pipelineBinaryDataSize; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetPipelineBinaryDataKHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &pipelineBinaryKey ), + &pipelineBinaryDataSize, + nullptr ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + pipelineBinaryData.resize( pipelineBinaryDataSize ); + result = static_cast( d.vkGetPipelineBinaryDataKHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &pipelineBinaryKey ), + &pipelineBinaryDataSize, + reinterpret_cast( pipelineBinaryData.data() ) ) ); } - return ResultValue, PipelineAllocator>>( - static_cast( result ), std::move( uniquePipelines ) ); + + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineBinaryDataKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> - Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_INLINE Result Device::releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR * pInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkReleaseCapturedPipelineDataKHR( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pAllocator ) ) ); + } - VULKAN_HPP_NAMESPACE::Pipeline pipeline; - VkResult result = d.vkCreateRayTracingPipelinesKHR( - m_device, - static_cast( deferredOperation ), - static_cast( pipelineCache ), - 1, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &pipeline ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHRUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, - VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, - VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::releaseCapturedPipelineDataKHR( 
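// ---------------------------------------------------------------------------
// Editorial aside: illustrative usage sketch for the VK_KHR_pipeline_binary
// wrappers added above; not part of the generated header or of this patch.
// Assumes an existing vk::Device `device` and a vk::PipelineBinaryKHR
// `binary`; the PipelineBinaryDataInfoKHR member name `pipelineBinary` is
// assumed from the corresponding C struct.
#include <vulkan/vulkan.hpp>
#include <cstdint>
#include <utility>
#include <vector>

std::pair<vk::PipelineBinaryKeyKHR, std::vector<uint8_t>>
  extractPipelineBinary( vk::Device device, vk::PipelineBinaryKHR binary )
{
  vk::PipelineBinaryDataInfoKHR dataInfo{};
  dataInfo.pipelineBinary = binary;  // assumed member name

  // Enhanced-mode overload: queries the required size, resizes the vector,
  // and returns the binary key together with the raw data in one call.
  auto keyAndData = device.getPipelineBinaryDataKHR( dataInfo );

  // The binary handle can be destroyed once its data has been captured.
  device.destroyPipelineBinaryKHR( binary );
  return keyAndData;
}
// ---------------------------------------------------------------------------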
const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR & info, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkReleaseCapturedPipelineDataKHR && "Function requires " ); +# endif - return ResultValue>( - static_cast( result ), - UniqueHandle( pipeline, ObjectDestroy( *this, allocator, d ) ) ); + d.vkReleaseCapturedPipelineDataKHR( + m_device, + reinterpret_cast( &info ), + reinterpret_cast( static_cast( allocator ) ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_QCOM_tile_properties === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, - uint32_t firstGroup, - uint32_t groupCount, - size_t dataSize, - void * pData, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + uint32_t * pPropertiesCount, + VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + return static_cast( d.vkGetFramebufferTilePropertiesQCOM( static_cast( m_device ), + static_cast( framebuffer ), + pPropertiesCount, + reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getRayTracingShaderGroupHandlesKHR( - VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetFramebufferTilePropertiesQCOM && "Function requires " ); +# endif - VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); - std::vector data( dataSize / sizeof( DataType ) ); - VkResult result = d.vkGetRayTracingShaderGroupHandlesKHR( - m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast( data.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" ); + std::vector properties; + uint32_t propertiesCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast( framebuffer ), &propertiesCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertiesCount ) + { + properties.resize( propertiesCount ); + result = static_cast( d.vkGetFramebufferTilePropertiesQCOM( + m_device, static_cast( framebuffer ), &propertiesCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - return createResultValueType( static_cast( result ), data ); + VULKAN_HPP_ASSERT( propertiesCount 
<= properties.size() ); + if ( propertiesCount < properties.size() ) + { + properties.resize( propertiesCount ); + } + return properties; } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + TilePropertiesQCOMAllocator & tilePropertiesQCOMAllocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetFramebufferTilePropertiesQCOM && "Function requires " ); +# endif - DataType data; - VkResult result = d.vkGetRayTracingShaderGroupHandlesKHR( - m_device, static_cast( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast( &data ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" ); + std::vector properties( tilePropertiesQCOMAllocator ); + uint32_t propertiesCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast( framebuffer ), &propertiesCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertiesCount ) + { + properties.resize( propertiesCount ); + result = static_cast( d.vkGetFramebufferTilePropertiesQCOM( + m_device, static_cast( framebuffer ), &propertiesCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - return createResultValueType( static_cast( result ), data ); + VULKAN_HPP_ASSERT( propertiesCount <= properties.size() ); + if ( propertiesCount < properties.size() ) + { + properties.resize( propertiesCount ); + } + return properties; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, - uint32_t firstGroup, - uint32_t groupCount, - size_t dataSize, - void * pData, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE Result Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, + VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + return static_cast( d.vkGetDynamicRenderingTilePropertiesQCOM( static_cast( m_device ), + reinterpret_cast( pRenderingInfo ), + reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( - VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::TilePropertiesQCOM + Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & 
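// ---------------------------------------------------------------------------
// Editorial aside: illustrative usage sketch for the VK_QCOM_tile_properties
// wrappers above; not part of the generated header or of this patch. Assumes
// an existing vk::Device `device`, vk::Framebuffer `framebuffer`, and a
// filled-in vk::RenderingInfo `renderingInfo`.
#include <vulkan/vulkan.hpp>

void queryTileProperties( vk::Device device, vk::Framebuffer framebuffer, const vk::RenderingInfo & renderingInfo )
{
  // Enhanced-mode overload handles the count / allocate / retrieve loop and
  // returns the tile properties for the framebuffer's attachments.
  auto framebufferTileProperties = device.getFramebufferTilePropertiesQCOM( framebuffer );

  // For dynamic rendering a single TilePropertiesQCOM is returned directly.
  vk::TilePropertiesQCOM dynamicTileProperties = device.getDynamicRenderingTilePropertiesQCOM( renderingInfo );

  (void)framebufferTileProperties;
  (void)dynamicTileProperties;
}
// ---------------------------------------------------------------------------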
renderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDynamicRenderingTilePropertiesQCOM && "Function requires " ); +# endif - VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); - std::vector data( dataSize / sizeof( DataType ) ); - VkResult result = d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( - m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast( data.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); + VULKAN_HPP_NAMESPACE::TilePropertiesQCOM properties; + d.vkGetDynamicRenderingTilePropertiesQCOM( + m_device, reinterpret_cast( &renderingInfo ), reinterpret_cast( &properties ) ); - return createResultValueType( static_cast( result ), data ); + return properties; } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::getRayTracingCaptureReplayShaderGroupHandleKHR( - VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const + //=== VK_NV_cooperative_vector === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeVectorPropertiesNV( + uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeVectorPropertiesNV * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - DataType data; - VkResult result = d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( - m_device, static_cast( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast( &data ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" ); - - return createResultValueType( static_cast( result ), data ); + return static_cast( d.vkGetPhysicalDeviceCooperativeVectorPropertiesNV( + static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, - VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getCooperativeVectorPropertiesNV( Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, - reinterpret_cast( pRaygenShaderBindingTable ), - reinterpret_cast( pMissShaderBindingTable ), - reinterpret_cast( pHitShaderBindingTable ), - reinterpret_cast( pCallableShaderBindingTable ), - static_cast( indirectDeviceAddress ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeVectorPropertiesNV && + "Function requires " ); +# endif + + std::vector 
properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCooperativeVectorPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeVectorPropertiesNV( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeVectorPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, - VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getCooperativeVectorPropertiesNV( CooperativeVectorPropertiesNVAllocator & cooperativeVectorPropertiesNVAllocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeVectorPropertiesNV && + "Function requires " ); +# endif - d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, - reinterpret_cast( &raygenShaderBindingTable ), - reinterpret_cast( &missShaderBindingTable ), - reinterpret_cast( &hitShaderBindingTable ), - reinterpret_cast( &callableShaderBindingTable ), - static_cast( indirectDeviceAddress ) ); + std::vector properties( + cooperativeVectorPropertiesNVAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCooperativeVectorPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeVectorPropertiesNV( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeVectorPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE DeviceSize Device::getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, - uint32_t group, - 
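// ---------------------------------------------------------------------------
// Editorial aside: illustrative usage sketch for the VK_NV_cooperative_vector
// property query above; not part of the generated header or of this patch.
// Assumes an existing vk::PhysicalDevice `physicalDevice`.
#include <vulkan/vulkan.hpp>

void listCooperativeVectorProperties( vk::PhysicalDevice physicalDevice )
{
  // Enhanced-mode overload: loops on VK_INCOMPLETE internally and returns the
  // full list of supported cooperative vector configurations.
  auto properties = physicalDevice.getCooperativeVectorPropertiesNV();
  for ( auto const & p : properties )
  {
    (void)p;  // inspect supported component types / sizes as needed
  }
}
// ---------------------------------------------------------------------------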
VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::convertCooperativeVectorMatrixNV( + const VULKAN_HPP_NAMESPACE::ConvertCooperativeVectorMatrixInfoNV * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetRayTracingShaderGroupStackSizeKHR( m_device, static_cast( pipeline ), group, static_cast( groupShader ) ) ); + return static_cast( + d.vkConvertCooperativeVectorMatrixNV( static_cast( m_device ), reinterpret_cast( pInfo ) ) ); } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::convertCooperativeVectorMatrixNV( const VULKAN_HPP_NAMESPACE::ConvertCooperativeVectorMatrixInfoNV & info, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetRayTracingPipelineStackSizeKHR( m_commandBuffer, pipelineStackSize ); - } +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkConvertCooperativeVectorMatrixNV && "Function requires " ); +# endif - //=== VK_EXT_vertex_input_dynamic_state === + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkConvertCooperativeVectorMatrixNV( m_device, reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::convertCooperativeVectorMatrixNV", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( uint32_t vertexBindingDescriptionCount, - const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions, - uint32_t vertexAttributeDescriptionCount, - const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::convertCooperativeVectorMatrixNV( uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::ConvertCooperativeVectorMatrixInfoNV * pInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetVertexInputEXT( m_commandBuffer, - vertexBindingDescriptionCount, - reinterpret_cast( pVertexBindingDescriptions ), - vertexAttributeDescriptionCount, - reinterpret_cast( pVertexAttributeDescriptions ) ); + d.vkCmdConvertCooperativeVectorMatrixNV( + static_cast( m_commandBuffer ), infoCount, reinterpret_cast( pInfos ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( - VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexBindingDescriptions, - VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexAttributeDescriptions, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::convertCooperativeVectorMatrixNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdConvertCooperativeVectorMatrixNV && "Function requires " ); +# endif - 
d.vkCmdSetVertexInputEXT( m_commandBuffer, - vertexBindingDescriptions.size(), - reinterpret_cast( vertexBindingDescriptions.data() ), - vertexAttributeDescriptions.size(), - reinterpret_cast( vertexAttributeDescriptions.data() ) ); + d.vkCmdConvertCooperativeVectorMatrixNV( m_commandBuffer, infos.size(), reinterpret_cast( infos.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_memory === + //=== VK_NV_low_latency2 === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, - zx_handle_t * pZirconHandle, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV * pSleepModeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast( pGetZirconHandleInfo ), pZirconHandle ) ); + return static_cast( d.vkSetLatencySleepModeNV( + static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pSleepModeInfo ) ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const + VULKAN_HPP_INLINE typename ResultValueType::type Device::setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetLatencySleepModeNV && "Function requires " ); +# endif - zx_handle_t zirconHandle; - VkResult result = - d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast( &getZirconHandleInfo ), &zirconHandle ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkSetLatencySleepModeNV( m_device, static_cast( swapchain ), reinterpret_cast( &sleepModeInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setLatencySleepModeNV" ); - return createResultValueType( static_cast( result ), zirconHandle ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, - zx_handle_t zirconHandle, - VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE Result Device::latencySleepNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV * pSleepInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return 
static_cast( - d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device, - static_cast( handleType ), - zirconHandle, - reinterpret_cast( pMemoryZirconHandleProperties ) ) ); + return static_cast( d.vkLatencySleepNV( + static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pSleepInfo ) ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, - zx_handle_t zirconHandle, - Dispatch const & d ) const + VULKAN_HPP_INLINE void Device::latencySleepNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkLatencySleepNV && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties; - VkResult result = d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device, - static_cast( handleType ), - zirconHandle, - reinterpret_cast( &memoryZirconHandleProperties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" ); - - return createResultValueType( static_cast( result ), memoryZirconHandleProperties ); + d.vkLatencySleepNV( m_device, static_cast( swapchain ), reinterpret_cast( &sleepInfo ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - -#if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_semaphore === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreZirconHandleFUCHSIA( - const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV * pLatencyMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkImportSemaphoreZirconHandleFUCHSIA( - m_device, reinterpret_cast( pImportSemaphoreZirconHandleInfo ) ) ); + d.vkSetLatencyMarkerNV( + static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pLatencyMarkerInfo ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo, - Dispatch const & d ) const + VULKAN_HPP_INLINE void Device::setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkSetLatencyMarkerNV && "Function requires " ); +# endif - VkResult result = d.vkImportSemaphoreZirconHandleFUCHSIA( - m_device, reinterpret_cast( &importSemaphoreZirconHandleInfo ) ); - resultCheck( 
static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" ); - - return createResultValueType( static_cast( result ) ); + d.vkSetLatencyMarkerNV( m_device, static_cast( swapchain ), reinterpret_cast( &latencyMarkerInfo ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, - zx_handle_t * pZirconHandle, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV * pLatencyMarkerInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast( pGetZirconHandleInfo ), pZirconHandle ) ); + d.vkGetLatencyTimingsNV( + static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pLatencyMarkerInfo ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template < + typename LatencyTimingsFrameReportNVAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetLatencyTimingsNV && "Function requires " ); +# endif - zx_handle_t zirconHandle; - VkResult result = - d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast( &getZirconHandleInfo ), &zirconHandle ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" ); + std::vector timings; + VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo; + d.vkGetLatencyTimingsNV( m_device, static_cast( swapchain ), reinterpret_cast( &latencyMarkerInfo ) ); + timings.resize( latencyMarkerInfo.timingCount ); + latencyMarkerInfo.pTimings = timings.data(); + d.vkGetLatencyTimingsNV( m_device, static_cast( swapchain ), reinterpret_cast( &latencyMarkerInfo ) ); - return createResultValueType( static_cast( result ), zirconHandle ); + return timings; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_FUCHSIA*/ -#if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_buffer_collection === + template < + typename LatencyTimingsFrameReportNVAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + LatencyTimingsFrameReportNVAllocator & latencyTimingsFrameReportNVAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetLatencyTimingsNV && "Function requires " ); +# endif + + std::vector timings( latencyTimingsFrameReportNVAllocator ); + 
VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo; + d.vkGetLatencyTimingsNV( m_device, static_cast( swapchain ), reinterpret_cast( &latencyMarkerInfo ) ); + timings.resize( latencyMarkerInfo.timingCount ); + latencyMarkerInfo.pTimings = timings.data(); + d.vkGetLatencyTimingsNV( m_device, static_cast( swapchain ), reinterpret_cast( &latencyMarkerInfo ) ); + + return timings; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV * pQueueTypeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateBufferCollectionFUCHSIA( m_device, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pCollection ) ) ); + d.vkQueueNotifyOutOfBandNV( static_cast( m_queue ), reinterpret_cast( pQueueTypeInfo ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkQueueNotifyOutOfBandNV && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection; - VkResult result = d.vkCreateBufferCollectionFUCHSIA( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &collection ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIA" ); - - return createResultValueType( static_cast( result ), collection ); + d.vkQueueNotifyOutOfBandNV( m_queue, reinterpret_cast( &queueTypeInfo ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_cooperative_matrix === -# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::createBufferCollectionFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesKHR( + uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( + static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); + } - VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection; - VkResult result = d.vkCreateBufferCollectionFUCHSIA( - m_device, - reinterpret_cast( &createInfo ), - 
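// ---------------------------------------------------------------------------
// Editorial aside: illustrative usage sketch for the VK_NV_low_latency2
// wrappers above; not part of the generated header or of this patch. Assumes
// an existing vk::Device `device` and vk::SwapchainKHR `swapchain`; the
// LatencySleepModeInfoNV member names are assumed from the C struct.
#include <vulkan/vulkan.hpp>

void enableLowLatencyMode( vk::Device device, vk::SwapchainKHR swapchain )
{
  vk::LatencySleepModeInfoNV sleepModeInfo{};
  sleepModeInfo.lowLatencyMode    = VK_TRUE;   // assumed member names
  sleepModeInfo.lowLatencyBoost   = VK_FALSE;
  sleepModeInfo.minimumIntervalUs = 0;
  device.setLatencySleepModeNV( swapchain, sleepModeInfo );

  // After presenting some frames, the per-frame latency reports can be read
  // back; the enhanced-mode overload sizes the vector from a first query.
  auto timings = device.getLatencyTimingsNV( swapchain );
  (void)timings;
}
// ---------------------------------------------------------------------------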
reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &collection ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIAUnique" ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getCooperativeMatrixPropertiesKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR && + "Function requires " ); +# endif - return createResultValueType( - static_cast( result ), - UniqueHandle( collection, ObjectDestroy( *this, allocator, d ) ) ); + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, - const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>::type + PhysicalDevice::getCooperativeMatrixPropertiesKHR( CooperativeMatrixPropertiesKHRAllocator & cooperativeMatrixPropertiesKHRAllocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkSetBufferCollectionImageConstraintsFUCHSIA( - m_device, static_cast( collection ), reinterpret_cast( pImageConstraintsInfo ) ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR && + "Function requires " ); +# endif + + std::vector properties( + cooperativeMatrixPropertiesKHRAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); + 
VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, - const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo, - Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::setAttachmentFeedbackLoopEnableEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VkResult result = d.vkSetBufferCollectionImageConstraintsFUCHSIA( - m_device, static_cast( collection ), reinterpret_cast( &imageConstraintsInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionImageConstraintsFUCHSIA" ); - - return createResultValueType( static_cast( result ) ); + d.vkCmdSetAttachmentFeedbackLoopEnableEXT( static_cast( m_commandBuffer ), static_cast( aspectMask ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, - const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getScreenBufferPropertiesQNX( const struct _screen_buffer * buffer, + VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkSetBufferCollectionBufferConstraintsFUCHSIA( - m_device, static_cast( collection ), reinterpret_cast( pBufferConstraintsInfo ) ) ); + return static_cast( + d.vkGetScreenBufferPropertiesQNX( static_cast( m_device ), buffer, reinterpret_cast( pProperties ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, - const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetScreenBufferPropertiesQNX && "Function requires " ); +# endif - VkResult result = d.vkSetBufferCollectionBufferConstraintsFUCHSIA( - m_device, static_cast( collection ), reinterpret_cast( &bufferConstraintsInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionBufferConstraintsFUCHSIA" ); + VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX properties; + 
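// ---------------------------------------------------------------------------
// Editorial aside: illustrative usage sketch for the VK_KHR_cooperative_matrix
// property query above; not part of the generated header or of this patch.
// Assumes an existing vk::PhysicalDevice `physicalDevice`; member names follow
// the VkCooperativeMatrixPropertiesKHR C struct.
#include <vulkan/vulkan.hpp>

bool supportsFloat16CooperativeMatrix( vk::PhysicalDevice physicalDevice )
{
  // Enhanced-mode overload: handles the two-call enumeration pattern and
  // returns every supported M x N x K / component-type combination.
  auto properties = physicalDevice.getCooperativeMatrixPropertiesKHR();
  for ( auto const & p : properties )
  {
    if ( p.AType == vk::ComponentTypeKHR::eFloat16 )
    {
      return true;
    }
  }
  return false;
}
// ---------------------------------------------------------------------------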
VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); - return createResultValueType( static_cast( result ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetScreenBufferPropertiesQNX && "Function requires " ); +# endif + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX & properties = structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_line_rasterization === template - VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::setLineStippleKHR( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyBufferCollectionFUCHSIA( - m_device, static_cast( collection ), reinterpret_cast( pAllocator ) ); + d.vkCmdSetLineStippleKHR( static_cast( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + //=== VK_KHR_calibrated_timestamps === + template - VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsKHR( uint32_t * pTimeDomainCount, + VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( + static_cast( m_physicalDevice ), pTimeDomainCount, reinterpret_cast( pTimeDomains ) ) ); + } - d.vkDestroyBufferCollectionFUCHSIA( - m_device, - static_cast( collection ), - reinterpret_cast( static_cast( allocator ) ) ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getCalibrateableTimeDomainsKHR( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR && + "Function requires or " ); +# endif + + std::vector 
timeDomains; + uint32_t timeDomainCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast( + d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + if ( timeDomainCount < timeDomains.size() ) + { + timeDomains.resize( timeDomainCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getCalibrateableTimeDomainsKHR( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR && + "Function requires or " ); +# endif + + std::vector timeDomains( timeDomainKHRAllocator ); + uint32_t timeDomainCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast( + d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + if ( timeDomainCount < timeDomains.size() ) + { + timeDomains.resize( timeDomainCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsKHR( uint32_t timestampCount, + const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyBufferCollectionFUCHSIA( - m_device, static_cast( collection ), reinterpret_cast( pAllocator ) ); + return static_cast( d.vkGetCalibratedTimestampsKHR( static_cast( m_device ), + timestampCount, + reinterpret_cast( pTimestampInfos ), + pTimestamps, + pMaxDeviation ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, - 
Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, uint64_t>>::type + Device::getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsKHR && + "Function requires or " ); +# endif - d.vkDestroyBufferCollectionFUCHSIA( - m_device, - static_cast( collection ), - reinterpret_cast( static_cast( allocator ) ) ); + std::pair, uint64_t> data_( + std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); + std::vector & timestamps = data_.first; + uint64_t & maxDeviation = data_.second; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetCalibratedTimestampsKHR( + m_device, timestampInfos.size(), reinterpret_cast( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, - VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, uint64_t>>::type + Device::getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, + Uint64_tAllocator & uint64_tAllocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetBufferCollectionPropertiesFUCHSIA( - m_device, static_cast( collection ), reinterpret_cast( pProperties ) ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsKHR && + "Function requires or " ); +# endif + + std::pair, uint64_t> data_( + std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) ); + std::vector & timestamps = data_.first; + uint64_t & maxDeviation = data_.second; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetCalibratedTimestampsKHR( + m_device, timestampInfos.size(), reinterpret_cast( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::getCalibratedTimestampKHR( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + 
VULKAN_HPP_ASSERT( d.vkGetCalibratedTimestampsKHR && + "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties; - VkResult result = d.vkGetBufferCollectionPropertiesFUCHSIA( - m_device, static_cast( collection ), reinterpret_cast( &properties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferCollectionPropertiesFUCHSIA" ); + std::pair data_; + uint64_t & timestamp = data_.first; + uint64_t & maxDeviation = data_.second; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetCalibratedTimestampsKHR( m_device, 1, reinterpret_cast( ×tampInfo ), ×tamp, &maxDeviation ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampKHR" ); - return createResultValueType( static_cast( result ), properties ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_FUCHSIA*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_HUAWEI_subpass_shading === + //=== VK_KHR_maintenance6 === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, - VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo * pBindDescriptorSetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( - m_device, static_cast( renderpass ), reinterpret_cast( pMaxWorkgroupSize ) ) ); + d.vkCmdBindDescriptorSets2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( pBindDescriptorSetsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue - Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorSets2KHR && "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize; - VkResult result = d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( - m_device, static_cast( renderpass ), reinterpret_cast( &maxWorkgroupSize ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } ); - - return ResultValue( static_cast( result ), maxWorkgroupSize ); + d.vkCmdBindDescriptorSets2KHR( m_commandBuffer, reinterpret_cast( &bindDescriptorSetsInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfo * pPushConstantsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSubpassShadingHUAWEI( m_commandBuffer ); + d.vkCmdPushConstants2KHR( static_cast( m_commandBuffer ), reinterpret_cast( pPushConstantsInfo ) ); } - //=== VK_HUAWEI_invocation_mask === - +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, - VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindInvocationMaskHUAWEI( m_commandBuffer, static_cast( imageView ), static_cast( imageLayout ) ); - } +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushConstants2KHR && "Function requires or " ); +# endif - //=== VK_NV_external_memory_rdma === + d.vkCmdPushConstants2KHR( m_commandBuffer, reinterpret_cast( &pushConstantsInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, - VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo * pPushDescriptorSetInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetMemoryRemoteAddressNV( - m_device, reinterpret_cast( pMemoryGetRemoteAddressInfo ), reinterpret_cast( pAddress ) ) ); + d.vkCmdPushDescriptorSet2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( pPushDescriptorSetInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo, Dispatch const & d ) const + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSet2KHR && "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::RemoteAddressNV address; - VkResult result = d.vkGetMemoryRemoteAddressNV( - m_device, reinterpret_cast( &memoryGetRemoteAddressInfo ), reinterpret_cast( &address ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" ); - - return createResultValueType( static_cast( result ), address ); + d.vkCmdPushDescriptorSet2KHR( m_commandBuffer, reinterpret_cast( &pushDescriptorSetInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_EXT_pipeline_properties === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT * pPipelineInfo, - VULKAN_HPP_NAMESPACE::BaseOutStructure * pPipelineProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + 
CommandBuffer::pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPipelinePropertiesEXT( - m_device, reinterpret_cast( pPipelineInfo ), reinterpret_cast( pPipelineProperties ) ) ); + d.vkCmdPushDescriptorSetWithTemplate2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( pPushDescriptorSetWithTemplateInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo, Dispatch const & d ) const + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetWithTemplate2KHR && + "Function requires or " ); +# endif - VULKAN_HPP_NAMESPACE::BaseOutStructure pipelineProperties; - VkResult result = d.vkGetPipelinePropertiesEXT( - m_device, reinterpret_cast( &pipelineInfo ), reinterpret_cast( &pipelineProperties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" ); - - return createResultValueType( static_cast( result ), pipelineProperties ); + d.vkCmdPushDescriptorSetWithTemplate2KHR( m_commandBuffer, + reinterpret_cast( &pushDescriptorSetWithTemplateInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_EXT_extended_dynamic_state2 === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetPatchControlPointsEXT( m_commandBuffer, patchControlPoints ); + d.vkCmdSetDescriptorBufferOffsets2EXT( static_cast( m_commandBuffer ), + reinterpret_cast( pSetDescriptorBufferOffsetsInfo ) ); } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetRasterizerDiscardEnableEXT( m_commandBuffer, static_cast( rasterizerDiscardEnable ) ); - } +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetDescriptorBufferOffsets2EXT && "Function requires " ); +# endif - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - 
d.vkCmdSetDepthBiasEnableEXT( m_commandBuffer, static_cast( depthBiasEnable ) ); + d.vkCmdSetDescriptorBufferOffsets2EXT( m_commandBuffer, reinterpret_cast( &setDescriptorBufferOffsetsInfo ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplers2EXT( + const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT * pBindDescriptorBufferEmbeddedSamplersInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetLogicOpEXT( m_commandBuffer, static_cast( logicOp ) ); + d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( + static_cast( m_commandBuffer ), + reinterpret_cast( pBindDescriptorBufferEmbeddedSamplersInfo ) ); } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplers2EXT( + const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetPrimitiveRestartEnableEXT( m_commandBuffer, static_cast( primitiveRestartEnable ) ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT && + "Function requires " ); +# endif + + d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( + m_commandBuffer, reinterpret_cast( &bindDescriptorBufferEmbeddedSamplersInfo ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if defined( VK_USE_PLATFORM_SCREEN_QNX ) - //=== VK_QNX_screen_surface === + //=== VK_NV_cluster_acceleration_structure === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getClusterAccelerationStructureBuildSizesNV( const VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateScreenSurfaceQNX( m_instance, - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pSurface ) ) ); + d.vkGetClusterAccelerationStructureBuildSizesNV( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pSizeInfo ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR + Device::getClusterAccelerationStructureBuildSizesNV( const 
VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetClusterAccelerationStructureBuildSizesNV && + "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = d.vkCreateScreenSurfaceQNX( - m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" ); + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo; + d.vkGetClusterAccelerationStructureBuildSizesNV( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &sizeInfo ) ); - return createResultValueType( static_cast( result ), surface ); + return sizeInfo; } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Instance::createScreenSurfaceQNXUnique( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, - Optional allocator, - Dispatch const & d ) const + VULKAN_HPP_INLINE void + CommandBuffer::buildClusterAccelerationStructureIndirectNV( const VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureCommandsInfoNV * pCommandInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBuildClusterAccelerationStructureIndirectNV( static_cast( m_commandBuffer ), + reinterpret_cast( pCommandInfos ) ); + } - VULKAN_HPP_NAMESPACE::SurfaceKHR surface; - VkResult result = d.vkCreateScreenSurfaceQNX( - m_instance, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &surface ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique" ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::buildClusterAccelerationStructureIndirectNV( const VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureCommandsInfoNV & commandInfos, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBuildClusterAccelerationStructureIndirectNV && + "Function requires " ); +# endif - return createResultValueType( - static_cast( result ), - UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + d.vkCmdBuildClusterAccelerationStructureIndirectNV( m_commandBuffer, + reinterpret_cast( &commandInfos ) ); } -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_partitioned_acceleration_structure === template - VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, - struct _screen_window * window, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + Device::getPartitionedAccelerationStructuresBuildSizesNV( const VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV * pInfo, + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( 
d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, window ) ); + d.vkGetPartitionedAccelerationStructuresBuildSizesNV( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pSizeInfo ) ); } -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 - PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR + Device::getPartitionedAccelerationStructuresBuildSizesNV( const VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPartitionedAccelerationStructuresBuildSizesNV && + "Function requires " ); +# endif - VkBool32 result = d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window ); + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo; + d.vkGetPartitionedAccelerationStructuresBuildSizesNV( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &sizeInfo ) ); - return static_cast( result ); + return sizeInfo; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - - //=== VK_EXT_color_write_enable === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( uint32_t attachmentCount, - const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::buildPartitionedAccelerationStructuresNV( const VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureInfoNV * pBuildInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, attachmentCount, reinterpret_cast( pColorWriteEnables ) ); + d.vkCmdBuildPartitionedAccelerationStructuresNV( static_cast( m_commandBuffer ), + reinterpret_cast( pBuildInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & colorWriteEnables, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::buildPartitionedAccelerationStructuresNV( const VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureInfoNV & buildInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdBuildPartitionedAccelerationStructuresNV && + "Function requires " ); +# endif - d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, colorWriteEnables.size(), reinterpret_cast( colorWriteEnables.data() ) ); + d.vkCmdBuildPartitionedAccelerationStructuresNV( m_commandBuffer, reinterpret_cast( &buildInfo ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_KHR_ray_tracing_maintenance1 === + //=== VK_EXT_device_generated_commands === template - VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, - 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdTraceRaysIndirect2KHR( m_commandBuffer, static_cast( indirectDeviceAddress ) ); + d.vkGetGeneratedCommandsMemoryRequirementsEXT( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); } - //=== VK_EXT_multi_draw === - +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( uint32_t drawCount, - const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo, - uint32_t instanceCount, - uint32_t firstInstance, - uint32_t stride, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawMultiEXT( m_commandBuffer, drawCount, reinterpret_cast( pVertexInfo ), instanceCount, firstInstance, stride ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsEXT && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetGeneratedCommandsMemoryRequirementsEXT( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy const & vertexInfo, - uint32_t instanceCount, - uint32_t firstInstance, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsEXT && + "Function requires " ); +# endif - d.vkCmdDrawMultiEXT( m_commandBuffer, - vertexInfo.size(), - reinterpret_cast( vertexInfo.data() ), - instanceCount, - firstInstance, - vertexInfo.stride() ); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + d.vkGetGeneratedCommandsMemoryRequirementsEXT( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::drawMultiIndexedEXT( uint32_t drawCount, - const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo, - uint32_t instanceCount, - uint32_t firstInstance, - uint32_t stride, - const int32_t * pVertexOffset, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsEXT( const 
VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT * pGeneratedCommandsInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawMultiIndexedEXT( - m_commandBuffer, drawCount, reinterpret_cast( pIndexInfo ), instanceCount, firstInstance, stride, pVertexOffset ); + d.vkCmdPreprocessGeneratedCommandsEXT( static_cast( m_commandBuffer ), + reinterpret_cast( pGeneratedCommandsInfo ), + static_cast( stateCommandBuffer ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void - CommandBuffer::drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy const & indexInfo, - uint32_t instanceCount, - uint32_t firstInstance, - Optional vertexOffset, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPreprocessGeneratedCommandsEXT && "Function requires " ); +# endif - d.vkCmdDrawMultiIndexedEXT( m_commandBuffer, - indexInfo.size(), - reinterpret_cast( indexInfo.data() ), - instanceCount, - firstInstance, - indexInfo.stride(), - static_cast( vertexOffset ) ); + d.vkCmdPreprocessGeneratedCommandsEXT( + m_commandBuffer, reinterpret_cast( &generatedCommandsInfo ), static_cast( stateCommandBuffer ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_EXT_pageable_device_local_memory === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory, float priority, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT * pGeneratedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkSetDeviceMemoryPriorityEXT( m_device, static_cast( memory ), priority ); + d.vkCmdExecuteGeneratedCommandsEXT( static_cast( m_commandBuffer ), + static_cast( isPreprocessed ), + reinterpret_cast( pGeneratedCommandsInfo ) ); } - //=== VK_KHR_maintenance4 === +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdExecuteGeneratedCommandsEXT && "Function requires " ); +# endif + + d.vkCmdExecuteGeneratedCommandsEXT( + m_commandBuffer, static_cast( isPreprocessed ), reinterpret_cast( &generatedCommandsInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, - VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + 
Device::createIndirectCommandsLayoutEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT * pIndirectCommandsLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceBufferMemoryRequirementsKHR( - m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + return static_cast( d.vkCreateIndirectCommandsLayoutEXT( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pIndirectCommandsLayout ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 - Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createIndirectCommandsLayoutEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutEXT && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; - d.vkGetDeviceBufferMemoryRequirementsKHR( - m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); - - return memoryRequirements; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateIndirectCommandsLayoutEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectCommandsLayout ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( indirectCommandsLayout ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createIndirectCommandsLayoutEXTUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutEXT && "Function requires " ); +# endif - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); - d.vkGetDeviceBufferMemoryRequirementsKHR( - m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateIndirectCommandsLayoutEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectCommandsLayout ) ) ); + 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutEXTUnique" ); - return structureChain; + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + indirectCommandsLayout, detail::ObjectDestroy( *this, allocator, d ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, - VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceImageMemoryRequirementsKHR( - m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + d.vkDestroyIndirectCommandsLayoutEXT( static_cast( m_device ), + static_cast( indirectCommandsLayout ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 - Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutEXT && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; - d.vkGetDeviceImageMemoryRequirementsKHR( - m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); - - return memoryRequirements; + d.vkDestroyIndirectCommandsLayoutEXT( + m_device, + static_cast( indirectCommandsLayout ), + reinterpret_cast( static_cast( allocator ) ) ); } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyIndirectCommandsLayoutEXT( static_cast( m_device ), + static_cast( indirectCommandsLayout ), + reinterpret_cast( pAllocator ) ); + } - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); - d.vkGetDeviceImageMemoryRequirementsKHR( - m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, + Optional allocator, + Dispatch 
const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutEXT && "Function requires " ); +# endif - return structureChain; + d.vkDestroyIndirectCommandsLayoutEXT( + m_device, + static_cast( indirectCommandsLayout ), + reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, - uint32_t * pSparseMemoryRequirementCount, - VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createIndirectExecutionSetEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT * pIndirectExecutionSet, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device, - reinterpret_cast( pInfo ), - pSparseMemoryRequirementCount, - reinterpret_cast( pSparseMemoryRequirements ) ); + return static_cast( d.vkCreateIndirectExecutionSetEXT( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pIndirectExecutionSet ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createIndirectExecutionSetEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIndirectExecutionSetEXT && "Function requires " ); +# endif - std::vector sparseMemoryRequirements; - uint32_t sparseMemoryRequirementCount; - d.vkGetDeviceImageSparseMemoryRequirementsKHR( - m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); - sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); - d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device, - reinterpret_cast( &info ), - &sparseMemoryRequirementCount, - reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateIndirectExecutionSetEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectExecutionSet ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectExecutionSetEXT" ); - VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); - if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) - { - sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); - } - return sparseMemoryRequirements; + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( 
result, std::move( indirectExecutionSet ) ); } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, - SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, - Dispatch const & d ) const +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createIndirectExecutionSetEXTUnique( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIndirectExecutionSetEXT && "Function requires " ); +# endif - std::vector sparseMemoryRequirements( - sparseImageMemoryRequirements2Allocator ); - uint32_t sparseMemoryRequirementCount; - d.vkGetDeviceImageSparseMemoryRequirementsKHR( - m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); - sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); - d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device, - reinterpret_cast( &info ), - &sparseMemoryRequirementCount, - reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateIndirectExecutionSetEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectExecutionSet ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectExecutionSetEXTUnique" ); - VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); - if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) - { - sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); - } - return sparseMemoryRequirements; + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + indirectExecutionSet, detail::ObjectDestroy( *this, allocator, d ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_VALVE_descriptor_set_host_mapping === +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE * pBindingReference, - VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE * pHostMapping, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device, - reinterpret_cast( pBindingReference ), - reinterpret_cast( pHostMapping ) ); + d.vkDestroyIndirectExecutionSetEXT( static_cast( m_device ), + static_cast( indirectExecutionSet ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE - Device::getDescriptorSetLayoutHostMappingInfoVALVE( const 
VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroyIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyIndirectExecutionSetEXT && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE hostMapping; - d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device, - reinterpret_cast( &bindingReference ), - reinterpret_cast( &hostMapping ) ); - - return hostMapping; + d.vkDestroyIndirectExecutionSetEXT( + m_device, + static_cast( indirectExecutionSet ), + reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void - Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, void ** ppData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast( descriptorSet ), ppData ); + d.vkDestroyIndirectExecutionSetEXT( static_cast( m_device ), + static_cast( indirectExecutionSet ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyIndirectExecutionSetEXT && "Function requires " ); +# endif - void * pData; - d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast( descriptorSet ), &pData ); - - return pData; + d.vkDestroyIndirectExecutionSetEXT( + m_device, + static_cast( indirectExecutionSet ), + reinterpret_cast( static_cast( allocator ) ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_EXT_shader_module_identifier === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, - VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetPipelineEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT * pExecutionSetWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetShaderModuleIdentifierEXT( m_device, static_cast( shaderModule ), reinterpret_cast( pIdentifier ) ); + d.vkUpdateIndirectExecutionSetPipelineEXT( static_cast( m_device 
), + static_cast( indirectExecutionSet ), + executionSetWriteCount, + reinterpret_cast( pExecutionSetWrites ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT - Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetPipelineEXT( + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUpdateIndirectExecutionSetPipelineEXT && + "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier; - d.vkGetShaderModuleIdentifierEXT( m_device, static_cast( shaderModule ), reinterpret_cast( &identifier ) ); - - return identifier; + d.vkUpdateIndirectExecutionSetPipelineEXT( m_device, + static_cast( indirectExecutionSet ), + executionSetWrites.size(), + reinterpret_cast( executionSetWrites.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, - VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetShaderEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetShaderEXT * pExecutionSetWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetShaderModuleCreateInfoIdentifierEXT( - m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pIdentifier ) ); + d.vkUpdateIndirectExecutionSetShaderEXT( static_cast( m_device ), + static_cast( indirectExecutionSet ), + executionSetWriteCount, + reinterpret_cast( pExecutionSetWrites ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT - Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetShaderEXT( + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUpdateIndirectExecutionSetShaderEXT && + "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier; - d.vkGetShaderModuleCreateInfoIdentifierEXT( - m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &identifier ) ); - - return identifier; + d.vkUpdateIndirectExecutionSetShaderEXT( m_device, + static_cast( indirectExecutionSet ), + executionSetWrites.size(), + reinterpret_cast( executionSetWrites.data() ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_QCOM_tile_properties 
=== + //=== VK_NV_cooperative_matrix2 === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, - uint32_t * pPropertiesCount, - VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetFramebufferTilePropertiesQCOM( - m_device, static_cast( framebuffer ), pPropertiesCount, reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + static_cast( m_physicalDevice ), + pPropertyCount, + reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Dispatch const & d ) const + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV( Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV && + "Function requires " ); +# endif - std::vector properties; - uint32_t propertiesCount; - VkResult result; + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast( framebuffer ), &propertiesCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertiesCount ) + result = static_cast( + d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) { - properties.resize( propertiesCount ); - result = d.vkGetFramebufferTilePropertiesQCOM( - m_device, static_cast( framebuffer ), &propertiesCount, reinterpret_cast( properties.data() ) ); + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - - VULKAN_HPP_ASSERT( propertiesCount <= properties.size() ); - if ( propertiesCount < properties.size() ) + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) { - properties.resize( propertiesCount ); + properties.resize( propertyCount ); } - return properties; + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer 
framebuffer, - TilePropertiesQCOMAllocator & tilePropertiesQCOMAllocator, - Dispatch const & d ) const + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV( + CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator & cooperativeMatrixFlexibleDimensionsPropertiesNVAllocator, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV && + "Function requires " ); +# endif - std::vector properties( tilePropertiesQCOMAllocator ); - uint32_t propertiesCount; - VkResult result; + std::vector properties( + cooperativeMatrixFlexibleDimensionsPropertiesNVAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; do { - result = d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast( framebuffer ), &propertiesCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertiesCount ) + result = static_cast( + d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) { - properties.resize( propertiesCount ); - result = d.vkGetFramebufferTilePropertiesQCOM( - m_device, static_cast( framebuffer ), &propertiesCount, reinterpret_cast( properties.data() ) ); + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - - VULKAN_HPP_ASSERT( propertiesCount <= properties.size() ); - if ( propertiesCount < properties.size() ) + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) { - properties.resize( propertiesCount ); + properties.resize( propertyCount ); } - return properties; + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === template - VULKAN_HPP_INLINE Result Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, - VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryMetalHandleEXT( const VULKAN_HPP_NAMESPACE::MemoryGetMetalHandleInfoEXT * pGetMetalHandleInfo, + void ** pHandle, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetDynamicRenderingTilePropertiesQCOM( - m_device, reinterpret_cast( pRenderingInfo ), reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkGetMemoryMetalHandleEXT( + static_cast( m_device ), reinterpret_cast( pGetMetalHandleInfo ), pHandle ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::TilePropertiesQCOM - Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryMetalHandleEXT( const VULKAN_HPP_NAMESPACE::MemoryGetMetalHandleInfoEXT & getMetalHandleInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryMetalHandleEXT && "Function requires " ); +# endif - VULKAN_HPP_NAMESPACE::TilePropertiesQCOM properties; - d.vkGetDynamicRenderingTilePropertiesQCOM( - m_device, reinterpret_cast( &renderingInfo ), reinterpret_cast( &properties ) ); + void * handle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetMemoryMetalHandleEXT( m_device, reinterpret_cast( &getMetalHandleInfo ), &handle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryMetalHandleEXT" ); - return properties; + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); } -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getMemoryMetalHandlePropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHandle, + VULKAN_HPP_NAMESPACE::MemoryMetalHandlePropertiesEXT * pMemoryMetalHandleProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetMemoryMetalHandlePropertiesEXT( static_cast( m_device ), + static_cast( handleType ), + pHandle, + reinterpret_cast( pMemoryMetalHandleProperties ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getMemoryMetalHandlePropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HandleType const & handle, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetMemoryMetalHandlePropertiesEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::MemoryMetalHandlePropertiesEXT memoryMetalHandleProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkGetMemoryMetalHandlePropertiesEXT( m_device, + static_cast( handleType ), + reinterpret_cast( &handle ), + reinterpret_cast( &memoryMetalHandleProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryMetalHandlePropertiesEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryMetalHandleProperties ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ } // namespace VULKAN_HPP_NAMESPACE #endif diff --git a/src/libraries/vulkanheaders/vulkan_ggp.h b/src/libraries/vulkanheaders/vulkan_ggp.h index 19dfd2261..4b6affc05 100644 --- a/src/libraries/vulkanheaders/vulkan_ggp.h +++ b/src/libraries/vulkanheaders/vulkan_ggp.h @@ -2,7 +2,7 @@ #define VULKAN_GGP_H_ 1 /* -** Copyright 2015-2022 The Khronos Group Inc. +** Copyright 2015-2025 The Khronos Group Inc. 
** ** SPDX-License-Identifier: Apache-2.0 */ @@ -19,6 +19,7 @@ extern "C" { +// VK_GGP_stream_descriptor_surface is a preprocessor guard. Do not pass it to API calls. #define VK_GGP_stream_descriptor_surface 1 #define VK_GGP_STREAM_DESCRIPTOR_SURFACE_SPEC_VERSION 1 #define VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME "VK_GGP_stream_descriptor_surface" @@ -41,6 +42,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateStreamDescriptorSurfaceGGP( #endif +// VK_GGP_frame_token is a preprocessor guard. Do not pass it to API calls. #define VK_GGP_frame_token 1 #define VK_GGP_FRAME_TOKEN_SPEC_VERSION 1 #define VK_GGP_FRAME_TOKEN_EXTENSION_NAME "VK_GGP_frame_token" diff --git a/src/libraries/vulkanheaders/vulkan_handles.hpp b/src/libraries/vulkanheaders/vulkan_handles.hpp index 5254183ff..799582030 100644 --- a/src/libraries/vulkanheaders/vulkan_handles.hpp +++ b/src/libraries/vulkanheaders/vulkan_handles.hpp @@ -1,4 +1,4 @@ -// Copyright 2015-2022 The Khronos Group Inc. +// Copyright 2015-2025 The Khronos Group Inc. // // SPDX-License-Identifier: Apache-2.0 OR MIT // @@ -8,8 +8,12 @@ #ifndef VULKAN_HANDLES_HPP #define VULKAN_HANDLES_HPP +// include-what-you-use: make sure, vulkan.hpp is used by code-completers +// IWYU pragma: private; include "vulkan.hpp" + namespace VULKAN_HPP_NAMESPACE { + //=================================== //=== STRUCT forward declarations === //=================================== @@ -432,6 +436,7 @@ namespace VULKAN_HPP_NAMESPACE using PhysicalDeviceSubgroupSizeControlPropertiesEXT = PhysicalDeviceSubgroupSizeControlProperties; struct PipelineShaderStageRequiredSubgroupSizeCreateInfo; using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo; + using ShaderRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo; struct PhysicalDeviceInlineUniformBlockFeatures; using PhysicalDeviceInlineUniformBlockFeaturesEXT = PhysicalDeviceInlineUniformBlockFeatures; struct PhysicalDeviceInlineUniformBlockProperties; @@ -469,6 +474,120 @@ namespace VULKAN_HPP_NAMESPACE struct DeviceImageMemoryRequirements; using DeviceImageMemoryRequirementsKHR = DeviceImageMemoryRequirements; + //=== VK_VERSION_1_4 === + struct PhysicalDeviceVulkan14Features; + struct PhysicalDeviceVulkan14Properties; + struct DeviceQueueGlobalPriorityCreateInfo; + using DeviceQueueGlobalPriorityCreateInfoEXT = DeviceQueueGlobalPriorityCreateInfo; + using DeviceQueueGlobalPriorityCreateInfoKHR = DeviceQueueGlobalPriorityCreateInfo; + struct PhysicalDeviceGlobalPriorityQueryFeatures; + using PhysicalDeviceGlobalPriorityQueryFeaturesEXT = PhysicalDeviceGlobalPriorityQueryFeatures; + using PhysicalDeviceGlobalPriorityQueryFeaturesKHR = PhysicalDeviceGlobalPriorityQueryFeatures; + struct QueueFamilyGlobalPriorityProperties; + using QueueFamilyGlobalPriorityPropertiesEXT = QueueFamilyGlobalPriorityProperties; + using QueueFamilyGlobalPriorityPropertiesKHR = QueueFamilyGlobalPriorityProperties; + struct PhysicalDeviceShaderSubgroupRotateFeatures; + using PhysicalDeviceShaderSubgroupRotateFeaturesKHR = PhysicalDeviceShaderSubgroupRotateFeatures; + struct PhysicalDeviceShaderFloatControls2Features; + using PhysicalDeviceShaderFloatControls2FeaturesKHR = PhysicalDeviceShaderFloatControls2Features; + struct PhysicalDeviceShaderExpectAssumeFeatures; + using PhysicalDeviceShaderExpectAssumeFeaturesKHR = PhysicalDeviceShaderExpectAssumeFeatures; + struct PhysicalDeviceLineRasterizationFeatures; + using PhysicalDeviceLineRasterizationFeaturesEXT = 
PhysicalDeviceLineRasterizationFeatures; + using PhysicalDeviceLineRasterizationFeaturesKHR = PhysicalDeviceLineRasterizationFeatures; + struct PhysicalDeviceLineRasterizationProperties; + using PhysicalDeviceLineRasterizationPropertiesEXT = PhysicalDeviceLineRasterizationProperties; + using PhysicalDeviceLineRasterizationPropertiesKHR = PhysicalDeviceLineRasterizationProperties; + struct PipelineRasterizationLineStateCreateInfo; + using PipelineRasterizationLineStateCreateInfoEXT = PipelineRasterizationLineStateCreateInfo; + using PipelineRasterizationLineStateCreateInfoKHR = PipelineRasterizationLineStateCreateInfo; + struct PhysicalDeviceVertexAttributeDivisorProperties; + using PhysicalDeviceVertexAttributeDivisorPropertiesKHR = PhysicalDeviceVertexAttributeDivisorProperties; + struct VertexInputBindingDivisorDescription; + using VertexInputBindingDivisorDescriptionEXT = VertexInputBindingDivisorDescription; + using VertexInputBindingDivisorDescriptionKHR = VertexInputBindingDivisorDescription; + struct PipelineVertexInputDivisorStateCreateInfo; + using PipelineVertexInputDivisorStateCreateInfoEXT = PipelineVertexInputDivisorStateCreateInfo; + using PipelineVertexInputDivisorStateCreateInfoKHR = PipelineVertexInputDivisorStateCreateInfo; + struct PhysicalDeviceVertexAttributeDivisorFeatures; + using PhysicalDeviceVertexAttributeDivisorFeaturesEXT = PhysicalDeviceVertexAttributeDivisorFeatures; + using PhysicalDeviceVertexAttributeDivisorFeaturesKHR = PhysicalDeviceVertexAttributeDivisorFeatures; + struct PhysicalDeviceIndexTypeUint8Features; + using PhysicalDeviceIndexTypeUint8FeaturesEXT = PhysicalDeviceIndexTypeUint8Features; + using PhysicalDeviceIndexTypeUint8FeaturesKHR = PhysicalDeviceIndexTypeUint8Features; + struct MemoryMapInfo; + using MemoryMapInfoKHR = MemoryMapInfo; + struct MemoryUnmapInfo; + using MemoryUnmapInfoKHR = MemoryUnmapInfo; + struct PhysicalDeviceMaintenance5Features; + using PhysicalDeviceMaintenance5FeaturesKHR = PhysicalDeviceMaintenance5Features; + struct PhysicalDeviceMaintenance5Properties; + using PhysicalDeviceMaintenance5PropertiesKHR = PhysicalDeviceMaintenance5Properties; + struct RenderingAreaInfo; + using RenderingAreaInfoKHR = RenderingAreaInfo; + struct DeviceImageSubresourceInfo; + using DeviceImageSubresourceInfoKHR = DeviceImageSubresourceInfo; + struct ImageSubresource2; + using ImageSubresource2EXT = ImageSubresource2; + using ImageSubresource2KHR = ImageSubresource2; + struct SubresourceLayout2; + using SubresourceLayout2EXT = SubresourceLayout2; + using SubresourceLayout2KHR = SubresourceLayout2; + struct PipelineCreateFlags2CreateInfo; + using PipelineCreateFlags2CreateInfoKHR = PipelineCreateFlags2CreateInfo; + struct BufferUsageFlags2CreateInfo; + using BufferUsageFlags2CreateInfoKHR = BufferUsageFlags2CreateInfo; + struct PhysicalDevicePushDescriptorProperties; + using PhysicalDevicePushDescriptorPropertiesKHR = PhysicalDevicePushDescriptorProperties; + struct PhysicalDeviceDynamicRenderingLocalReadFeatures; + using PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR = PhysicalDeviceDynamicRenderingLocalReadFeatures; + struct RenderingAttachmentLocationInfo; + using RenderingAttachmentLocationInfoKHR = RenderingAttachmentLocationInfo; + struct RenderingInputAttachmentIndexInfo; + using RenderingInputAttachmentIndexInfoKHR = RenderingInputAttachmentIndexInfo; + struct PhysicalDeviceMaintenance6Features; + using PhysicalDeviceMaintenance6FeaturesKHR = PhysicalDeviceMaintenance6Features; + struct PhysicalDeviceMaintenance6Properties; + 
using PhysicalDeviceMaintenance6PropertiesKHR = PhysicalDeviceMaintenance6Properties; + struct BindMemoryStatus; + using BindMemoryStatusKHR = BindMemoryStatus; + struct BindDescriptorSetsInfo; + using BindDescriptorSetsInfoKHR = BindDescriptorSetsInfo; + struct PushConstantsInfo; + using PushConstantsInfoKHR = PushConstantsInfo; + struct PushDescriptorSetInfo; + using PushDescriptorSetInfoKHR = PushDescriptorSetInfo; + struct PushDescriptorSetWithTemplateInfo; + using PushDescriptorSetWithTemplateInfoKHR = PushDescriptorSetWithTemplateInfo; + struct PhysicalDevicePipelineProtectedAccessFeatures; + using PhysicalDevicePipelineProtectedAccessFeaturesEXT = PhysicalDevicePipelineProtectedAccessFeatures; + struct PhysicalDevicePipelineRobustnessFeatures; + using PhysicalDevicePipelineRobustnessFeaturesEXT = PhysicalDevicePipelineRobustnessFeatures; + struct PhysicalDevicePipelineRobustnessProperties; + using PhysicalDevicePipelineRobustnessPropertiesEXT = PhysicalDevicePipelineRobustnessProperties; + struct PipelineRobustnessCreateInfo; + using PipelineRobustnessCreateInfoEXT = PipelineRobustnessCreateInfo; + struct PhysicalDeviceHostImageCopyFeatures; + using PhysicalDeviceHostImageCopyFeaturesEXT = PhysicalDeviceHostImageCopyFeatures; + struct PhysicalDeviceHostImageCopyProperties; + using PhysicalDeviceHostImageCopyPropertiesEXT = PhysicalDeviceHostImageCopyProperties; + struct MemoryToImageCopy; + using MemoryToImageCopyEXT = MemoryToImageCopy; + struct ImageToMemoryCopy; + using ImageToMemoryCopyEXT = ImageToMemoryCopy; + struct CopyMemoryToImageInfo; + using CopyMemoryToImageInfoEXT = CopyMemoryToImageInfo; + struct CopyImageToMemoryInfo; + using CopyImageToMemoryInfoEXT = CopyImageToMemoryInfo; + struct CopyImageToImageInfo; + using CopyImageToImageInfoEXT = CopyImageToImageInfo; + struct HostImageLayoutTransitionInfo; + using HostImageLayoutTransitionInfoEXT = HostImageLayoutTransitionInfo; + struct SubresourceHostMemcpySize; + using SubresourceHostMemcpySizeEXT = SubresourceHostMemcpySize; + struct HostImageCopyDevicePerformanceQuery; + using HostImageCopyDevicePerformanceQueryEXT = HostImageCopyDevicePerformanceQuery; + //=== VK_KHR_surface === struct SurfaceCapabilitiesKHR; struct SurfaceFormatKHR; @@ -531,7 +650,6 @@ namespace VULKAN_HPP_NAMESPACE struct DebugMarkerObjectTagInfoEXT; struct DebugMarkerMarkerInfoEXT; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_queue === struct QueueFamilyQueryResultStatusPropertiesKHR; struct QueueFamilyVideoPropertiesKHR; @@ -550,14 +668,11 @@ namespace VULKAN_HPP_NAMESPACE struct VideoBeginCodingInfoKHR; struct VideoEndCodingInfoKHR; struct VideoCodingControlInfoKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_decode_queue === struct VideoDecodeCapabilitiesKHR; struct VideoDecodeUsageInfoKHR; struct VideoDecodeInfoKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ //=== VK_NV_dedicated_allocation === struct DedicatedAllocationImageCreateInfoNV; @@ -571,6 +686,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NVX_binary_import === struct CuModuleCreateInfoNVX; + struct CuModuleTexturingModeCreateInfoNVX; struct CuFunctionCreateInfoNVX; struct CuLaunchInfoNVX; @@ -578,50 +694,49 @@ namespace VULKAN_HPP_NAMESPACE struct ImageViewHandleInfoNVX; struct ImageViewAddressPropertiesNVX; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_EXT_video_encode_h264 === - struct VideoEncodeH264CapabilitiesEXT; - struct VideoEncodeH264SessionParametersCreateInfoEXT; - struct 
VideoEncodeH264SessionParametersAddInfoEXT; - struct VideoEncodeH264VclFrameInfoEXT; - struct VideoEncodeH264ReferenceListsInfoEXT; - struct VideoEncodeH264EmitPictureParametersInfoEXT; - struct VideoEncodeH264DpbSlotInfoEXT; - struct VideoEncodeH264NaluSliceInfoEXT; - struct VideoEncodeH264ProfileInfoEXT; - struct VideoEncodeH264RateControlInfoEXT; - struct VideoEncodeH264RateControlLayerInfoEXT; - struct VideoEncodeH264QpEXT; - struct VideoEncodeH264FrameSizeEXT; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_EXT_video_encode_h265 === - struct VideoEncodeH265CapabilitiesEXT; - struct VideoEncodeH265SessionParametersCreateInfoEXT; - struct VideoEncodeH265SessionParametersAddInfoEXT; - struct VideoEncodeH265VclFrameInfoEXT; - struct VideoEncodeH265EmitPictureParametersInfoEXT; - struct VideoEncodeH265DpbSlotInfoEXT; - struct VideoEncodeH265NaluSliceSegmentInfoEXT; - struct VideoEncodeH265ProfileInfoEXT; - struct VideoEncodeH265ReferenceListsInfoEXT; - struct VideoEncodeH265RateControlInfoEXT; - struct VideoEncodeH265RateControlLayerInfoEXT; - struct VideoEncodeH265QpEXT; - struct VideoEncodeH265FrameSizeEXT; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_EXT_video_decode_h264 === - struct VideoDecodeH264ProfileInfoEXT; - struct VideoDecodeH264CapabilitiesEXT; - struct VideoDecodeH264SessionParametersCreateInfoEXT; - struct VideoDecodeH264SessionParametersAddInfoEXT; - struct VideoDecodeH264PictureInfoEXT; - struct VideoDecodeH264MvcInfoEXT; - struct VideoDecodeH264DpbSlotInfoEXT; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_KHR_video_encode_h264 === + struct VideoEncodeH264CapabilitiesKHR; + struct VideoEncodeH264QualityLevelPropertiesKHR; + struct VideoEncodeH264SessionCreateInfoKHR; + struct VideoEncodeH264SessionParametersCreateInfoKHR; + struct VideoEncodeH264SessionParametersAddInfoKHR; + struct VideoEncodeH264SessionParametersGetInfoKHR; + struct VideoEncodeH264SessionParametersFeedbackInfoKHR; + struct VideoEncodeH264PictureInfoKHR; + struct VideoEncodeH264DpbSlotInfoKHR; + struct VideoEncodeH264NaluSliceInfoKHR; + struct VideoEncodeH264ProfileInfoKHR; + struct VideoEncodeH264RateControlInfoKHR; + struct VideoEncodeH264RateControlLayerInfoKHR; + struct VideoEncodeH264QpKHR; + struct VideoEncodeH264FrameSizeKHR; + struct VideoEncodeH264GopRemainingFrameInfoKHR; + + //=== VK_KHR_video_encode_h265 === + struct VideoEncodeH265CapabilitiesKHR; + struct VideoEncodeH265SessionCreateInfoKHR; + struct VideoEncodeH265QualityLevelPropertiesKHR; + struct VideoEncodeH265SessionParametersCreateInfoKHR; + struct VideoEncodeH265SessionParametersAddInfoKHR; + struct VideoEncodeH265SessionParametersGetInfoKHR; + struct VideoEncodeH265SessionParametersFeedbackInfoKHR; + struct VideoEncodeH265PictureInfoKHR; + struct VideoEncodeH265DpbSlotInfoKHR; + struct VideoEncodeH265NaluSliceSegmentInfoKHR; + struct VideoEncodeH265ProfileInfoKHR; + struct VideoEncodeH265RateControlInfoKHR; + struct VideoEncodeH265RateControlLayerInfoKHR; + struct VideoEncodeH265QpKHR; + struct VideoEncodeH265FrameSizeKHR; + struct VideoEncodeH265GopRemainingFrameInfoKHR; + + //=== VK_KHR_video_decode_h264 === + struct VideoDecodeH264ProfileInfoKHR; + struct VideoDecodeH264CapabilitiesKHR; + struct VideoDecodeH264SessionParametersCreateInfoKHR; + struct VideoDecodeH264SessionParametersAddInfoKHR; + struct VideoDecodeH264PictureInfoKHR; + struct VideoDecodeH264DpbSlotInfoKHR; //=== VK_AMD_texture_gather_bias_lod === struct 
TextureLODGatherFormatPropertiesAMD; @@ -630,13 +745,6 @@ namespace VULKAN_HPP_NAMESPACE struct ShaderResourceUsageAMD; struct ShaderStatisticsInfoAMD; - //=== VK_KHR_dynamic_rendering === - struct RenderingFragmentShadingRateAttachmentInfoKHR; - struct RenderingFragmentDensityMapAttachmentInfoEXT; - struct AttachmentSampleCountInfoAMD; - using AttachmentSampleCountInfoNV = AttachmentSampleCountInfoAMD; - struct MultiviewPerViewAttributesInfoNVX; - #if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_stream_descriptor_surface === struct StreamDescriptorSurfaceCreateInfoGGP; @@ -675,11 +783,6 @@ namespace VULKAN_HPP_NAMESPACE struct ImageViewASTCDecodeModeEXT; struct PhysicalDeviceASTCDecodeFeaturesEXT; - //=== VK_EXT_pipeline_robustness === - struct PhysicalDevicePipelineRobustnessFeaturesEXT; - struct PhysicalDevicePipelineRobustnessPropertiesEXT; - struct PipelineRobustnessCreateInfoEXT; - #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_memory_win32 === struct ImportMemoryWin32HandleInfoKHR; @@ -710,9 +813,6 @@ namespace VULKAN_HPP_NAMESPACE struct ImportSemaphoreFdInfoKHR; struct SemaphoreGetFdInfoKHR; - //=== VK_KHR_push_descriptor === - struct PhysicalDevicePushDescriptorPropertiesKHR; - //=== VK_EXT_conditional_rendering === struct ConditionalRenderingBeginInfoEXT; struct PhysicalDeviceConditionalRenderingFeaturesEXT; @@ -744,6 +844,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NVX_multiview_per_view_attributes === struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + struct MultiviewPerViewAttributesInfoNVX; //=== VK_NV_viewport_swizzle === struct ViewportSwizzleNV; @@ -765,6 +866,9 @@ namespace VULKAN_HPP_NAMESPACE struct HdrMetadataEXT; struct XYColorEXT; + //=== VK_IMG_relaxed_line_rasterization === + struct PhysicalDeviceRelaxedLineRasterizationFeaturesIMG; + //=== VK_KHR_shared_presentable_image === struct SharedPresentSurfaceCapabilitiesKHR; @@ -829,6 +933,22 @@ namespace VULKAN_HPP_NAMESPACE struct AndroidHardwareBufferFormatProperties2ANDROID; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + struct PhysicalDeviceShaderEnqueueFeaturesAMDX; + struct PhysicalDeviceShaderEnqueuePropertiesAMDX; + struct ExecutionGraphPipelineScratchSizeAMDX; + struct ExecutionGraphPipelineCreateInfoAMDX; + struct DispatchGraphInfoAMDX; + struct DispatchGraphCountInfoAMDX; + struct PipelineShaderStageNodeCreateInfoAMDX; + union DeviceOrHostAddressConstAMDX; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_AMD_mixed_attachment_samples === + struct AttachmentSampleCountInfoAMD; + using AttachmentSampleCountInfoNV = AttachmentSampleCountInfoAMD; + //=== VK_EXT_sample_locations === struct SampleLocationEXT; struct SampleLocationsInfoEXT; @@ -874,6 +994,18 @@ namespace VULKAN_HPP_NAMESPACE struct CopyAccelerationStructureInfoKHR; struct AccelerationStructureBuildSizesInfoKHR; + //=== VK_KHR_ray_tracing_pipeline === + struct RayTracingShaderGroupCreateInfoKHR; + struct RayTracingPipelineCreateInfoKHR; + struct PhysicalDeviceRayTracingPipelineFeaturesKHR; + struct PhysicalDeviceRayTracingPipelinePropertiesKHR; + struct StridedDeviceAddressRegionKHR; + struct TraceRaysIndirectCommandKHR; + struct RayTracingPipelineInterfaceCreateInfoKHR; + + //=== VK_KHR_ray_query === + struct PhysicalDeviceRayQueryFeaturesKHR; + //=== VK_NV_framebuffer_mixed_samples === struct PipelineCoverageModulationStateCreateInfoNV; @@ -943,47 +1075,28 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_AMD_pipeline_compiler_control === struct 
PipelineCompilerControlCreateInfoAMD; - //=== VK_EXT_calibrated_timestamps === - struct CalibratedTimestampInfoEXT; - //=== VK_AMD_shader_core_properties === struct PhysicalDeviceShaderCorePropertiesAMD; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_EXT_video_decode_h265 === - struct VideoDecodeH265ProfileInfoEXT; - struct VideoDecodeH265CapabilitiesEXT; - struct VideoDecodeH265SessionParametersCreateInfoEXT; - struct VideoDecodeH265SessionParametersAddInfoEXT; - struct VideoDecodeH265PictureInfoEXT; - struct VideoDecodeH265DpbSlotInfoEXT; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - //=== VK_KHR_global_priority === - struct DeviceQueueGlobalPriorityCreateInfoKHR; - using DeviceQueueGlobalPriorityCreateInfoEXT = DeviceQueueGlobalPriorityCreateInfoKHR; - struct PhysicalDeviceGlobalPriorityQueryFeaturesKHR; - using PhysicalDeviceGlobalPriorityQueryFeaturesEXT = PhysicalDeviceGlobalPriorityQueryFeaturesKHR; - struct QueueFamilyGlobalPriorityPropertiesKHR; - using QueueFamilyGlobalPriorityPropertiesEXT = QueueFamilyGlobalPriorityPropertiesKHR; + //=== VK_KHR_video_decode_h265 === + struct VideoDecodeH265ProfileInfoKHR; + struct VideoDecodeH265CapabilitiesKHR; + struct VideoDecodeH265SessionParametersCreateInfoKHR; + struct VideoDecodeH265SessionParametersAddInfoKHR; + struct VideoDecodeH265PictureInfoKHR; + struct VideoDecodeH265DpbSlotInfoKHR; //=== VK_AMD_memory_overallocation_behavior === struct DeviceMemoryOverallocationCreateInfoAMD; //=== VK_EXT_vertex_attribute_divisor === struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT; - struct VertexInputBindingDivisorDescriptionEXT; - struct PipelineVertexInputDivisorStateCreateInfoEXT; - struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT; #if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_frame_token === struct PresentFrameTokenGGP; #endif /*VK_USE_PLATFORM_GGP*/ - //=== VK_NV_compute_shader_derivatives === - struct PhysicalDeviceComputeShaderDerivativesFeaturesNV; - //=== VK_NV_mesh_shader === struct PhysicalDeviceMeshShaderFeaturesNV; struct PhysicalDeviceMeshShaderPropertiesNV; @@ -999,6 +1112,8 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_device_diagnostic_checkpoints === struct QueueFamilyCheckpointPropertiesNV; struct CheckpointDataNV; + struct QueueFamilyCheckpointProperties2NV; + struct CheckpointData2NV; //=== VK_INTEL_shader_integer_functions2 === struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; @@ -1035,6 +1150,7 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceFragmentDensityMapFeaturesEXT; struct PhysicalDeviceFragmentDensityMapPropertiesEXT; struct RenderPassFragmentDensityMapCreateInfoEXT; + struct RenderingFragmentDensityMapAttachmentInfoEXT; //=== VK_KHR_fragment_shading_rate === struct FragmentShadingRateAttachmentInfoKHR; @@ -1042,6 +1158,7 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceFragmentShadingRateFeaturesKHR; struct PhysicalDeviceFragmentShadingRatePropertiesKHR; struct PhysicalDeviceFragmentShadingRateKHR; + struct RenderingFragmentShadingRateAttachmentInfoKHR; //=== VK_AMD_shader_core_properties2 === struct PhysicalDeviceShaderCoreProperties2AMD; @@ -1052,6 +1169,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_shader_image_atomic_int64 === struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + //=== VK_KHR_shader_quad_control === + struct PhysicalDeviceShaderQuadControlFeaturesKHR; + //=== VK_EXT_memory_budget === struct PhysicalDeviceMemoryBudgetPropertiesEXT; @@ -1107,17 +1227,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_headless_surface === struct 
HeadlessSurfaceCreateInfoEXT; - //=== VK_EXT_line_rasterization === - struct PhysicalDeviceLineRasterizationFeaturesEXT; - struct PhysicalDeviceLineRasterizationPropertiesEXT; - struct PipelineRasterizationLineStateCreateInfoEXT; - //=== VK_EXT_shader_atomic_float === struct PhysicalDeviceShaderAtomicFloatFeaturesEXT; - //=== VK_EXT_index_type_uint8 === - struct PhysicalDeviceIndexTypeUint8FeaturesEXT; - //=== VK_EXT_extended_dynamic_state === struct PhysicalDeviceExtendedDynamicStateFeaturesEXT; @@ -1131,9 +1243,27 @@ namespace VULKAN_HPP_NAMESPACE struct PipelineExecutableStatisticKHR; struct PipelineExecutableInternalRepresentationKHR; + //=== VK_EXT_map_memory_placed === + struct PhysicalDeviceMapMemoryPlacedFeaturesEXT; + struct PhysicalDeviceMapMemoryPlacedPropertiesEXT; + struct MemoryMapPlacedInfoEXT; + //=== VK_EXT_shader_atomic_float2 === struct PhysicalDeviceShaderAtomicFloat2FeaturesEXT; + //=== VK_EXT_surface_maintenance1 === + struct SurfacePresentModeEXT; + struct SurfacePresentScalingCapabilitiesEXT; + struct SurfacePresentModeCompatibilityEXT; + + //=== VK_EXT_swapchain_maintenance1 === + struct PhysicalDeviceSwapchainMaintenance1FeaturesEXT; + struct SwapchainPresentFenceInfoEXT; + struct SwapchainPresentModesCreateInfoEXT; + struct SwapchainPresentModeInfoEXT; + struct SwapchainPresentScalingCreateInfoEXT; + struct ReleaseSwapchainImagesInfoEXT; + //=== VK_NV_device_generated_commands === struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV; struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV; @@ -1160,6 +1290,11 @@ namespace VULKAN_HPP_NAMESPACE struct RenderPassTransformBeginInfoQCOM; struct CommandBufferInheritanceRenderPassTransformInfoQCOM; + //=== VK_EXT_depth_bias_control === + struct PhysicalDeviceDepthBiasControlFeaturesEXT; + struct DepthBiasInfoEXT; + struct DepthBiasRepresentationInfoEXT; + //=== VK_EXT_device_memory_report === struct PhysicalDeviceDeviceMemoryReportFeaturesEXT; struct DeviceDeviceMemoryReportCreateInfoEXT; @@ -1177,23 +1312,44 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_pipeline_library === struct PipelineLibraryCreateInfoKHR; + //=== VK_NV_present_barrier === + struct PhysicalDevicePresentBarrierFeaturesNV; + struct SurfaceCapabilitiesPresentBarrierNV; + struct SwapchainPresentBarrierCreateInfoNV; + //=== VK_KHR_present_id === struct PresentIdKHR; struct PhysicalDevicePresentIdFeaturesKHR; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_encode_queue === struct VideoEncodeInfoKHR; struct VideoEncodeCapabilitiesKHR; + struct QueryPoolVideoEncodeFeedbackCreateInfoKHR; struct VideoEncodeUsageInfoKHR; struct VideoEncodeRateControlInfoKHR; struct VideoEncodeRateControlLayerInfoKHR; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + struct PhysicalDeviceVideoEncodeQualityLevelInfoKHR; + struct VideoEncodeQualityLevelPropertiesKHR; + struct VideoEncodeQualityLevelInfoKHR; + struct VideoEncodeSessionParametersGetInfoKHR; + struct VideoEncodeSessionParametersFeedbackInfoKHR; //=== VK_NV_device_diagnostics_config === struct PhysicalDeviceDiagnosticsConfigFeaturesNV; struct DeviceDiagnosticsConfigCreateInfoNV; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + struct CudaModuleCreateInfoNV; + struct CudaFunctionCreateInfoNV; + struct CudaLaunchInfoNV; + struct PhysicalDeviceCudaKernelLaunchFeaturesNV; + struct PhysicalDeviceCudaKernelLaunchPropertiesNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_low_latency === + struct QueryLowLatencySupportNV; + #if defined( VK_USE_PLATFORM_METAL_EXT ) //=== 
VK_EXT_metal_objects === struct ExportMetalObjectCreateInfoEXT; @@ -1210,9 +1366,21 @@ namespace VULKAN_HPP_NAMESPACE struct ImportMetalSharedEventInfoEXT; #endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_KHR_synchronization2 === - struct QueueFamilyCheckpointProperties2NV; - struct CheckpointData2NV; + //=== VK_EXT_descriptor_buffer === + struct PhysicalDeviceDescriptorBufferPropertiesEXT; + struct PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; + struct PhysicalDeviceDescriptorBufferFeaturesEXT; + struct DescriptorAddressInfoEXT; + struct DescriptorBufferBindingInfoEXT; + struct DescriptorBufferBindingPushDescriptorBufferHandleEXT; + union DescriptorDataEXT; + struct DescriptorGetInfoEXT; + struct BufferCaptureDescriptorDataInfoEXT; + struct ImageCaptureDescriptorDataInfoEXT; + struct ImageViewCaptureDescriptorDataInfoEXT; + struct SamplerCaptureDescriptorDataInfoEXT; + struct OpaqueCaptureDescriptorDataCreateInfoEXT; + struct AccelerationStructureCaptureDescriptorDataInfoEXT; //=== VK_EXT_graphics_pipeline_library === struct PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; @@ -1266,8 +1434,6 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_image_compression_control === struct PhysicalDeviceImageCompressionControlFeaturesEXT; struct ImageCompressionControlEXT; - struct SubresourceLayout2EXT; - struct ImageSubresource2EXT; struct ImageCompressionPropertiesEXT; //=== VK_EXT_attachment_feedback_loop_layout === @@ -1276,6 +1442,14 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_4444_formats === struct PhysicalDevice4444FormatsFeaturesEXT; + //=== VK_EXT_device_fault === + struct PhysicalDeviceFaultFeaturesEXT; + struct DeviceFaultCountsEXT; + struct DeviceFaultInfoEXT; + struct DeviceFaultAddressInfoEXT; + struct DeviceFaultVendorInfoEXT; + struct DeviceFaultVendorBinaryHeaderVersionOneEXT; + //=== VK_EXT_rgba10x6_formats === struct PhysicalDeviceRGBA10X6FormatsFeaturesEXT; @@ -1284,18 +1458,6 @@ namespace VULKAN_HPP_NAMESPACE struct DirectFBSurfaceCreateInfoEXT; #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ - //=== VK_KHR_ray_tracing_pipeline === - struct RayTracingShaderGroupCreateInfoKHR; - struct RayTracingPipelineCreateInfoKHR; - struct PhysicalDeviceRayTracingPipelineFeaturesKHR; - struct PhysicalDeviceRayTracingPipelinePropertiesKHR; - struct StridedDeviceAddressRegionKHR; - struct TraceRaysIndirectCommandKHR; - struct RayTracingPipelineInterfaceCreateInfoKHR; - - //=== VK_KHR_ray_query === - struct PhysicalDeviceRayQueryFeaturesKHR; - //=== VK_EXT_vertex_input_dynamic_state === struct PhysicalDeviceVertexInputDynamicStateFeaturesEXT; struct VertexInputBindingDescription2EXT; @@ -1304,6 +1466,10 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_physical_device_drm === struct PhysicalDeviceDrmPropertiesEXT; + //=== VK_EXT_device_address_binding_report === + struct PhysicalDeviceAddressBindingReportFeaturesEXT; + struct DeviceAddressBindingCallbackDataEXT; + //=== VK_EXT_depth_clip_control === struct PhysicalDeviceDepthClipControlFeaturesEXT; struct PipelineViewportDepthClipControlCreateInfoEXT; @@ -1311,6 +1477,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_primitive_topology_list_restart === struct PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + //=== VK_EXT_present_mode_fifo_latest_ready === + struct PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT; + #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_memory === struct ImportMemoryZirconHandleInfoFUCHSIA; @@ -1354,6 +1523,10 @@ namespace VULKAN_HPP_NAMESPACE struct PipelinePropertiesIdentifierEXT; struct 
PhysicalDevicePipelinePropertiesFeaturesEXT; + //=== VK_EXT_frame_boundary === + struct PhysicalDeviceFrameBoundaryFeaturesEXT; + struct FrameBoundaryEXT; + //=== VK_EXT_multisampled_render_to_single_sampled === struct PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; struct SubpassResolvePerformanceQueryEXT; @@ -1391,6 +1564,36 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_image_2d_view_of_3d === struct PhysicalDeviceImage2DViewOf3DFeaturesEXT; + //=== VK_EXT_shader_tile_image === + struct PhysicalDeviceShaderTileImageFeaturesEXT; + struct PhysicalDeviceShaderTileImagePropertiesEXT; + + //=== VK_EXT_opacity_micromap === + struct MicromapBuildInfoEXT; + struct MicromapUsageEXT; + struct MicromapCreateInfoEXT; + struct PhysicalDeviceOpacityMicromapFeaturesEXT; + struct PhysicalDeviceOpacityMicromapPropertiesEXT; + struct MicromapVersionInfoEXT; + struct CopyMicromapToMemoryInfoEXT; + struct CopyMemoryToMicromapInfoEXT; + struct CopyMicromapInfoEXT; + struct MicromapBuildSizesInfoEXT; + struct AccelerationStructureTrianglesOpacityMicromapEXT; + struct MicromapTriangleEXT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_displacement_micromap === + struct PhysicalDeviceDisplacementMicromapFeaturesNV; + struct PhysicalDeviceDisplacementMicromapPropertiesNV; + struct AccelerationStructureTrianglesDisplacementMicromapNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_HUAWEI_cluster_culling_shader === + struct PhysicalDeviceClusterCullingShaderFeaturesHUAWEI; + struct PhysicalDeviceClusterCullingShaderPropertiesHUAWEI; + struct PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI; + //=== VK_EXT_border_color_swizzle === struct PhysicalDeviceBorderColorSwizzleFeaturesEXT; struct SamplerBorderColorComponentMappingCreateInfoEXT; @@ -1398,25 +1601,66 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_pageable_device_local_memory === struct PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + //=== VK_ARM_shader_core_properties === + struct PhysicalDeviceShaderCorePropertiesARM; + + //=== VK_ARM_scheduling_controls === + struct DeviceQueueShaderCoreControlCreateInfoARM; + struct PhysicalDeviceSchedulingControlsFeaturesARM; + struct PhysicalDeviceSchedulingControlsPropertiesARM; + + //=== VK_EXT_image_sliced_view_of_3d === + struct PhysicalDeviceImageSlicedViewOf3DFeaturesEXT; + struct ImageViewSlicedCreateInfoEXT; + //=== VK_VALVE_descriptor_set_host_mapping === struct PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; struct DescriptorSetBindingReferenceVALVE; struct DescriptorSetLayoutHostMappingInfoVALVE; - //=== VK_EXT_depth_clamp_zero_one === - struct PhysicalDeviceDepthClampZeroOneFeaturesEXT; - //=== VK_EXT_non_seamless_cube_map === struct PhysicalDeviceNonSeamlessCubeMapFeaturesEXT; + //=== VK_ARM_render_pass_striped === + struct PhysicalDeviceRenderPassStripedFeaturesARM; + struct PhysicalDeviceRenderPassStripedPropertiesARM; + struct RenderPassStripeBeginInfoARM; + struct RenderPassStripeInfoARM; + struct RenderPassStripeSubmitInfoARM; + //=== VK_QCOM_fragment_density_map_offset === struct PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; struct PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; struct SubpassFragmentDensityMapOffsetEndInfoQCOM; + //=== VK_NV_copy_memory_indirect === + struct CopyMemoryIndirectCommandNV; + struct CopyMemoryToImageIndirectCommandNV; + struct PhysicalDeviceCopyMemoryIndirectFeaturesNV; + struct PhysicalDeviceCopyMemoryIndirectPropertiesNV; + + //=== VK_NV_memory_decompression === + struct DecompressMemoryRegionNV; + struct 
PhysicalDeviceMemoryDecompressionFeaturesNV; + struct PhysicalDeviceMemoryDecompressionPropertiesNV; + + //=== VK_NV_device_generated_commands_compute === + struct PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + struct ComputePipelineIndirectBufferInfoNV; + struct PipelineIndirectDeviceAddressInfoNV; + struct BindPipelineIndirectCommandNV; + + //=== VK_NV_ray_tracing_linear_swept_spheres === + struct PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV; + struct AccelerationStructureGeometryLinearSweptSpheresDataNV; + struct AccelerationStructureGeometrySpheresDataNV; + //=== VK_NV_linear_color_attachment === struct PhysicalDeviceLinearColorAttachmentFeaturesNV; + //=== VK_KHR_shader_maximal_reconvergence === + struct PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR; + //=== VK_EXT_image_compression_control_swapchain === struct PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; @@ -1425,6 +1669,19 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceImageProcessingFeaturesQCOM; struct PhysicalDeviceImageProcessingPropertiesQCOM; + //=== VK_EXT_nested_command_buffer === + struct PhysicalDeviceNestedCommandBufferFeaturesEXT; + struct PhysicalDeviceNestedCommandBufferPropertiesEXT; + + //=== VK_EXT_external_memory_acquire_unmodified === + struct ExternalMemoryAcquireUnmodifiedEXT; + + //=== VK_EXT_extended_dynamic_state3 === + struct PhysicalDeviceExtendedDynamicState3FeaturesEXT; + struct PhysicalDeviceExtendedDynamicState3PropertiesEXT; + struct ColorBlendEquationEXT; + struct ColorBlendAdvancedEXT; + //=== VK_EXT_subpass_merge_feedback === struct PhysicalDeviceSubpassMergeFeedbackFeaturesEXT; struct RenderPassCreationControlEXT; @@ -1433,6 +1690,10 @@ namespace VULKAN_HPP_NAMESPACE struct RenderPassSubpassFeedbackInfoEXT; struct RenderPassSubpassFeedbackCreateInfoEXT; + //=== VK_LUNARG_direct_driver_loading === + struct DirectDriverLoadingInfoLUNARG; + struct DirectDriverLoadingListLUNARG; + //=== VK_EXT_shader_module_identifier === struct PhysicalDeviceShaderModuleIdentifierFeaturesEXT; struct PhysicalDeviceShaderModuleIdentifierPropertiesEXT; @@ -1443,9 +1704,52 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; using PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + //=== VK_NV_optical_flow === + struct PhysicalDeviceOpticalFlowFeaturesNV; + struct PhysicalDeviceOpticalFlowPropertiesNV; + struct OpticalFlowImageFormatInfoNV; + struct OpticalFlowImageFormatPropertiesNV; + struct OpticalFlowSessionCreateInfoNV; + struct OpticalFlowSessionCreatePrivateDataInfoNV; + struct OpticalFlowExecuteInfoNV; + //=== VK_EXT_legacy_dithering === struct PhysicalDeviceLegacyDitheringFeaturesEXT; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_format_resolve === + struct PhysicalDeviceExternalFormatResolveFeaturesANDROID; + struct PhysicalDeviceExternalFormatResolvePropertiesANDROID; + struct AndroidHardwareBufferFormatResolvePropertiesANDROID; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + //=== VK_AMD_anti_lag === + struct PhysicalDeviceAntiLagFeaturesAMD; + struct AntiLagDataAMD; + struct AntiLagPresentationInfoAMD; + + //=== VK_KHR_ray_tracing_position_fetch === + struct PhysicalDeviceRayTracingPositionFetchFeaturesKHR; + + //=== VK_EXT_shader_object === + struct PhysicalDeviceShaderObjectFeaturesEXT; + struct PhysicalDeviceShaderObjectPropertiesEXT; + struct ShaderCreateInfoEXT; + + //=== VK_KHR_pipeline_binary === + struct 
PhysicalDevicePipelineBinaryFeaturesKHR; + struct PhysicalDevicePipelineBinaryPropertiesKHR; + struct DevicePipelineBinaryInternalCacheControlKHR; + struct PipelineBinaryKeyKHR; + struct PipelineBinaryDataKHR; + struct PipelineBinaryKeysAndDataKHR; + struct PipelineBinaryCreateInfoKHR; + struct PipelineBinaryInfoKHR; + struct ReleaseCapturedPipelineDataInfoKHR; + struct PipelineBinaryDataInfoKHR; + struct PipelineCreateInfoKHR; + struct PipelineBinaryHandlesInfoKHR; + //=== VK_QCOM_tile_properties === struct PhysicalDeviceTilePropertiesFeaturesQCOM; struct TilePropertiesQCOM; @@ -1454,6 +1758,23 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceAmigoProfilingFeaturesSEC; struct AmigoProfilingSubmitInfoSEC; + //=== VK_QCOM_multiview_per_view_viewports === + struct PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; + + //=== VK_NV_ray_tracing_invocation_reorder === + struct PhysicalDeviceRayTracingInvocationReorderPropertiesNV; + struct PhysicalDeviceRayTracingInvocationReorderFeaturesNV; + + //=== VK_NV_cooperative_vector === + struct PhysicalDeviceCooperativeVectorPropertiesNV; + struct PhysicalDeviceCooperativeVectorFeaturesNV; + struct CooperativeVectorPropertiesNV; + struct ConvertCooperativeVectorMatrixInfoNV; + + //=== VK_NV_extended_sparse_address_space === + struct PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV; + struct PhysicalDeviceExtendedSparseAddressSpacePropertiesNV; + //=== VK_EXT_mutable_descriptor_type === struct PhysicalDeviceMutableDescriptorTypeFeaturesEXT; using PhysicalDeviceMutableDescriptorTypeFeaturesVALVE = PhysicalDeviceMutableDescriptorTypeFeaturesEXT; @@ -1462,9903 +1783,13639 @@ namespace VULKAN_HPP_NAMESPACE struct MutableDescriptorTypeCreateInfoEXT; using MutableDescriptorTypeCreateInfoVALVE = MutableDescriptorTypeCreateInfoEXT; - //=============== - //=== HANDLEs === - //=============== + //=== VK_EXT_legacy_vertex_attributes === + struct PhysicalDeviceLegacyVertexAttributesFeaturesEXT; + struct PhysicalDeviceLegacyVertexAttributesPropertiesEXT; + + //=== VK_EXT_layer_settings === + struct LayerSettingsCreateInfoEXT; + struct LayerSettingEXT; + + //=== VK_ARM_shader_core_builtins === + struct PhysicalDeviceShaderCoreBuiltinsFeaturesARM; + struct PhysicalDeviceShaderCoreBuiltinsPropertiesARM; + + //=== VK_EXT_pipeline_library_group_handles === + struct PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; + + //=== VK_EXT_dynamic_rendering_unused_attachments === + struct PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; + + //=== VK_NV_low_latency2 === + struct LatencySleepModeInfoNV; + struct LatencySleepInfoNV; + struct SetLatencyMarkerInfoNV; + struct GetLatencyMarkerInfoNV; + struct LatencyTimingsFrameReportNV; + struct LatencySubmissionPresentIdNV; + struct SwapchainLatencyCreateInfoNV; + struct OutOfBandQueueTypeInfoNV; + struct LatencySurfaceCapabilitiesNV; + + //=== VK_KHR_cooperative_matrix === + struct CooperativeMatrixPropertiesKHR; + struct PhysicalDeviceCooperativeMatrixFeaturesKHR; + struct PhysicalDeviceCooperativeMatrixPropertiesKHR; + + //=== VK_QCOM_multiview_per_view_render_areas === + struct PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; + struct MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; + + //=== VK_KHR_compute_shader_derivatives === + struct PhysicalDeviceComputeShaderDerivativesFeaturesKHR; + using PhysicalDeviceComputeShaderDerivativesFeaturesNV = PhysicalDeviceComputeShaderDerivativesFeaturesKHR; + struct PhysicalDeviceComputeShaderDerivativesPropertiesKHR; + + //=== VK_KHR_video_decode_av1 === 
+ struct VideoDecodeAV1ProfileInfoKHR; + struct VideoDecodeAV1CapabilitiesKHR; + struct VideoDecodeAV1SessionParametersCreateInfoKHR; + struct VideoDecodeAV1PictureInfoKHR; + struct VideoDecodeAV1DpbSlotInfoKHR; + + //=== VK_KHR_video_encode_av1 === + struct PhysicalDeviceVideoEncodeAV1FeaturesKHR; + struct VideoEncodeAV1CapabilitiesKHR; + struct VideoEncodeAV1QualityLevelPropertiesKHR; + struct VideoEncodeAV1SessionCreateInfoKHR; + struct VideoEncodeAV1SessionParametersCreateInfoKHR; + struct VideoEncodeAV1PictureInfoKHR; + struct VideoEncodeAV1DpbSlotInfoKHR; + struct VideoEncodeAV1ProfileInfoKHR; + struct VideoEncodeAV1QIndexKHR; + struct VideoEncodeAV1FrameSizeKHR; + struct VideoEncodeAV1GopRemainingFrameInfoKHR; + struct VideoEncodeAV1RateControlInfoKHR; + struct VideoEncodeAV1RateControlLayerInfoKHR; + + //=== VK_KHR_video_maintenance1 === + struct PhysicalDeviceVideoMaintenance1FeaturesKHR; + struct VideoInlineQueryInfoKHR; + + //=== VK_NV_per_stage_descriptor_set === + struct PhysicalDevicePerStageDescriptorSetFeaturesNV; + + //=== VK_QCOM_image_processing2 === + struct PhysicalDeviceImageProcessing2FeaturesQCOM; + struct PhysicalDeviceImageProcessing2PropertiesQCOM; + struct SamplerBlockMatchWindowCreateInfoQCOM; + + //=== VK_QCOM_filter_cubic_weights === + struct PhysicalDeviceCubicWeightsFeaturesQCOM; + struct SamplerCubicWeightsCreateInfoQCOM; + struct BlitImageCubicWeightsInfoQCOM; + + //=== VK_QCOM_ycbcr_degamma === + struct PhysicalDeviceYcbcrDegammaFeaturesQCOM; + struct SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM; + + //=== VK_QCOM_filter_cubic_clamp === + struct PhysicalDeviceCubicClampFeaturesQCOM; + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + struct PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; - class SurfaceKHR - { - public: - using CType = VkSurfaceKHR; - using NativeType = VkSurfaceKHR; +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + struct ScreenBufferPropertiesQNX; + struct ScreenBufferFormatPropertiesQNX; + struct ImportScreenBufferInfoQNX; + struct ExternalFormatQNX; + struct PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR; + //=== VK_MSFT_layered_driver === + struct PhysicalDeviceLayeredDriverPropertiesMSFT; + + //=== VK_KHR_calibrated_timestamps === + struct CalibratedTimestampInfoKHR; + using CalibratedTimestampInfoEXT = CalibratedTimestampInfoKHR; + + //=== VK_KHR_maintenance6 === + struct SetDescriptorBufferOffsetsInfoEXT; + struct BindDescriptorBufferEmbeddedSamplersInfoEXT; + + //=== VK_NV_descriptor_pool_overallocation === + struct PhysicalDeviceDescriptorPoolOverallocationFeaturesNV; + + //=== VK_NV_display_stereo === + struct DisplaySurfaceStereoCreateInfoNV; + struct DisplayModeStereoPropertiesNV; + + //=== VK_KHR_video_encode_quantization_map === + struct VideoEncodeQuantizationMapCapabilitiesKHR; + struct VideoFormatQuantizationMapPropertiesKHR; + struct VideoEncodeQuantizationMapInfoKHR; + struct VideoEncodeQuantizationMapSessionParametersCreateInfoKHR; + struct PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR; + struct VideoEncodeH264QuantizationMapCapabilitiesKHR; + struct VideoEncodeH265QuantizationMapCapabilitiesKHR; + 
struct VideoFormatH265QuantizationMapPropertiesKHR; + struct VideoEncodeAV1QuantizationMapCapabilitiesKHR; + struct VideoFormatAV1QuantizationMapPropertiesKHR; + + //=== VK_NV_raw_access_chains === + struct PhysicalDeviceRawAccessChainsFeaturesNV; + + //=== VK_KHR_shader_relaxed_extended_instruction === + struct PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR; + + //=== VK_NV_command_buffer_inheritance === + struct PhysicalDeviceCommandBufferInheritanceFeaturesNV; + + //=== VK_KHR_maintenance7 === + struct PhysicalDeviceMaintenance7FeaturesKHR; + struct PhysicalDeviceMaintenance7PropertiesKHR; + struct PhysicalDeviceLayeredApiPropertiesListKHR; + struct PhysicalDeviceLayeredApiPropertiesKHR; + struct PhysicalDeviceLayeredApiVulkanPropertiesKHR; + + //=== VK_NV_shader_atomic_float16_vector === + struct PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV; + + //=== VK_EXT_shader_replicated_composites === + struct PhysicalDeviceShaderReplicatedCompositesFeaturesEXT; + + //=== VK_NV_ray_tracing_validation === + struct PhysicalDeviceRayTracingValidationFeaturesNV; + + //=== VK_NV_cluster_acceleration_structure === + struct PhysicalDeviceClusterAccelerationStructureFeaturesNV; + struct PhysicalDeviceClusterAccelerationStructurePropertiesNV; + struct ClusterAccelerationStructureClustersBottomLevelInputNV; + struct ClusterAccelerationStructureTriangleClusterInputNV; + struct ClusterAccelerationStructureMoveObjectsInputNV; + union ClusterAccelerationStructureOpInputNV; + struct ClusterAccelerationStructureInputInfoNV; + struct ClusterAccelerationStructureCommandsInfoNV; + struct StridedDeviceAddressNV; + struct ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV; + struct ClusterAccelerationStructureMoveObjectsInfoNV; + struct ClusterAccelerationStructureBuildClustersBottomLevelInfoNV; + struct ClusterAccelerationStructureBuildTriangleClusterInfoNV; + struct ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV; + struct ClusterAccelerationStructureInstantiateClusterInfoNV; + struct RayTracingPipelineClusterAccelerationStructureCreateInfoNV; + + //=== VK_NV_partitioned_acceleration_structure === + struct PhysicalDevicePartitionedAccelerationStructureFeaturesNV; + struct PhysicalDevicePartitionedAccelerationStructurePropertiesNV; + struct PartitionedAccelerationStructureFlagsNV; + struct BuildPartitionedAccelerationStructureIndirectCommandNV; + struct PartitionedAccelerationStructureWriteInstanceDataNV; + struct PartitionedAccelerationStructureUpdateInstanceDataNV; + struct PartitionedAccelerationStructureWritePartitionTranslationDataNV; + struct WriteDescriptorSetPartitionedAccelerationStructureNV; + struct PartitionedAccelerationStructureInstancesInputNV; + struct BuildPartitionedAccelerationStructureInfoNV; + + //=== VK_EXT_device_generated_commands === + struct PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; + struct PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; + struct GeneratedCommandsMemoryRequirementsInfoEXT; + struct IndirectExecutionSetCreateInfoEXT; + union IndirectExecutionSetInfoEXT; + struct IndirectExecutionSetPipelineInfoEXT; + struct IndirectExecutionSetShaderInfoEXT; + struct GeneratedCommandsInfoEXT; + struct WriteIndirectExecutionSetPipelineEXT; + struct IndirectCommandsLayoutCreateInfoEXT; + struct IndirectCommandsLayoutTokenEXT; + struct DrawIndirectCountIndirectCommandEXT; + struct IndirectCommandsVertexBufferTokenEXT; + struct BindVertexBufferIndirectCommandEXT; + struct IndirectCommandsIndexBufferTokenEXT; + struct 
BindIndexBufferIndirectCommandEXT; + struct IndirectCommandsPushConstantTokenEXT; + struct IndirectCommandsExecutionSetTokenEXT; + union IndirectCommandsTokenDataEXT; + struct IndirectExecutionSetShaderLayoutInfoEXT; + struct GeneratedCommandsPipelineInfoEXT; + struct GeneratedCommandsShaderInfoEXT; + struct WriteIndirectExecutionSetShaderEXT; + + //=== VK_KHR_maintenance8 === + struct PhysicalDeviceMaintenance8FeaturesKHR; + struct MemoryBarrierAccessFlags3KHR; + + //=== VK_MESA_image_alignment_control === + struct PhysicalDeviceImageAlignmentControlFeaturesMESA; + struct PhysicalDeviceImageAlignmentControlPropertiesMESA; + struct ImageAlignmentControlCreateInfoMESA; + + //=== VK_EXT_depth_clamp_control === + struct PhysicalDeviceDepthClampControlFeaturesEXT; + struct PipelineViewportDepthClampControlCreateInfoEXT; + struct DepthClampRangeEXT; + + //=== VK_KHR_video_maintenance2 === + struct PhysicalDeviceVideoMaintenance2FeaturesKHR; + struct VideoDecodeH264InlineSessionParametersInfoKHR; + struct VideoDecodeH265InlineSessionParametersInfoKHR; + struct VideoDecodeAV1InlineSessionParametersInfoKHR; + + //=== VK_HUAWEI_hdr_vivid === + struct PhysicalDeviceHdrVividFeaturesHUAWEI; + struct HdrVividDynamicMetadataHUAWEI; + + //=== VK_NV_cooperative_matrix2 === + struct CooperativeMatrixFlexibleDimensionsPropertiesNV; + struct PhysicalDeviceCooperativeMatrix2FeaturesNV; + struct PhysicalDeviceCooperativeMatrix2PropertiesNV; + + //=== VK_ARM_pipeline_opacity_micromap === + struct PhysicalDevicePipelineOpacityMicromapFeaturesARM; - public: - VULKAN_HPP_CONSTEXPR SurfaceKHR() = default; - VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT : m_surfaceKHR( surfaceKHR ) {} +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + struct ImportMemoryMetalHandleInfoEXT; + struct MemoryMetalHandlePropertiesEXT; + struct MemoryGetMetalHandleInfoEXT; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - SurfaceKHR & operator=( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT - { - m_surfaceKHR = surfaceKHR; - return *this; - } -#endif + //=== VK_KHR_depth_clamp_zero_one === + struct PhysicalDeviceDepthClampZeroOneFeaturesKHR; + using PhysicalDeviceDepthClampZeroOneFeaturesEXT = PhysicalDeviceDepthClampZeroOneFeaturesKHR; - SurfaceKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_surfaceKHR = {}; - return *this; - } + //=== VK_EXT_vertex_attribute_robustness === + struct PhysicalDeviceVertexAttributeRobustnessFeaturesEXT; -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SurfaceKHR const & ) const = default; -#else - bool operator==( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_surfaceKHR == rhs.m_surfaceKHR; - } + //=================================== + //=== HANDLE forward declarations === + //=================================== - bool operator!=( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_surfaceKHR != rhs.m_surfaceKHR; - } + //=== VK_VERSION_1_0 === + class Instance; + class PhysicalDevice; + class Device; + class Queue; + class DeviceMemory; + class Fence; + class Semaphore; + class Event; + class QueryPool; + class Buffer; + class BufferView; + class Image; + class ImageView; + class ShaderModule; + class PipelineCache; + class Pipeline; + class PipelineLayout; + class Sampler; + class DescriptorPool; + class DescriptorSet; + class DescriptorSetLayout; + class 
Framebuffer; + class RenderPass; + class CommandPool; + class CommandBuffer; - bool operator<( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_surfaceKHR < rhs.m_surfaceKHR; - } -#endif + //=== VK_VERSION_1_1 === + class SamplerYcbcrConversion; + class DescriptorUpdateTemplate; - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const VULKAN_HPP_NOEXCEPT - { - return m_surfaceKHR; - } + //=== VK_VERSION_1_3 === + class PrivateDataSlot; - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_surfaceKHR != VK_NULL_HANDLE; - } + //=== VK_KHR_surface === + class SurfaceKHR; - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_surfaceKHR == VK_NULL_HANDLE; - } + //=== VK_KHR_swapchain === + class SwapchainKHR; - private: - VkSurfaceKHR m_surfaceKHR = {}; - }; + //=== VK_KHR_display === + class DisplayKHR; + class DisplayModeKHR; - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR; - }; + //=== VK_EXT_debug_report === + class DebugReportCallbackEXT; - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR; - }; + //=== VK_KHR_video_queue === + class VideoSessionKHR; + class VideoSessionParametersKHR; - template <> - struct isVulkanHandleType - { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; - }; + //=== VK_NVX_binary_import === + class CuModuleNVX; + class CuFunctionNVX; - class DebugReportCallbackEXT - { - public: - using CType = VkDebugReportCallbackEXT; - using NativeType = VkDebugReportCallbackEXT; + //=== VK_EXT_debug_utils === + class DebugUtilsMessengerEXT; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT; + //=== VK_KHR_acceleration_structure === + class AccelerationStructureKHR; - public: - VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() = default; - VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT - : m_debugReportCallbackEXT( debugReportCallbackEXT ) - { - } + //=== VK_EXT_validation_cache === + class ValidationCacheEXT; -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - DebugReportCallbackEXT & operator=( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT - { - m_debugReportCallbackEXT = debugReportCallbackEXT; - return *this; - } -#endif + //=== VK_NV_ray_tracing === + class AccelerationStructureNV; - DebugReportCallbackEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_debugReportCallbackEXT = {}; - return *this; - } + //=== VK_INTEL_performance_query === + class PerformanceConfigurationINTEL; -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DebugReportCallbackEXT const & ) const = default; -#else - bool operator==( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT; - } + //=== VK_KHR_deferred_host_operations === + class DeferredOperationKHR; - bool operator!=( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT; - } + //=== VK_NV_device_generated_commands === + class IndirectCommandsLayoutNV; - bool operator<( DebugReportCallbackEXT 
const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT; - } -#endif +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + class CudaModuleNV; + class CudaFunctionNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const VULKAN_HPP_NOEXCEPT - { - return m_debugReportCallbackEXT; - } +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + class BufferCollectionFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_debugReportCallbackEXT != VK_NULL_HANDLE; - } + //=== VK_EXT_opacity_micromap === + class MicromapEXT; - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_debugReportCallbackEXT == VK_NULL_HANDLE; - } + //=== VK_NV_optical_flow === + class OpticalFlowSessionNV; - private: - VkDebugReportCallbackEXT m_debugReportCallbackEXT = {}; - }; + //=== VK_EXT_shader_object === + class ShaderEXT; - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; - }; + //=== VK_KHR_pipeline_binary === + class PipelineBinaryKHR; - template <> - struct CppType + //=== VK_EXT_device_generated_commands === + class IndirectCommandsLayoutEXT; + class IndirectExecutionSetEXT; + + typedef void( VKAPI_PTR * PFN_VoidFunction )(); + +#ifndef VULKAN_HPP_NO_SMART_HANDLE + //====================== + //=== UNIQUE HANDLEs === + //====================== + + //=== VK_VERSION_1_0 === + template + class UniqueHandleTraits { - using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; + public: + using deleter = detail::ObjectDestroy; }; - template <> - struct isVulkanHandleType + using UniqueInstance = UniqueHandle; + + template + class UniqueHandleTraits { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + public: + using deleter = detail::ObjectDestroy; }; - class DebugUtilsMessengerEXT + using UniqueDevice = UniqueHandle; + + template + class UniqueHandleTraits { public: - using CType = VkDebugUtilsMessengerEXT; - using NativeType = VkDebugUtilsMessengerEXT; + using deleter = detail::ObjectFree; + }; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + using UniqueDeviceMemory = UniqueHandle; + template + class UniqueHandleTraits + { public: - VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() = default; - VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT - : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT ) - { - } - -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - DebugUtilsMessengerEXT & operator=( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT - { - m_debugUtilsMessengerEXT = debugUtilsMessengerEXT; - return *this; - } -#endif + using deleter = detail::ObjectDestroy; + }; - DebugUtilsMessengerEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_debugUtilsMessengerEXT = {}; - return *this; - } + using UniqueFence = UniqueHandle; -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DebugUtilsMessengerEXT const & ) const = default; -#else - bool operator==( DebugUtilsMessengerEXT const 
& rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_debugUtilsMessengerEXT == rhs.m_debugUtilsMessengerEXT; - } + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; - bool operator!=( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_debugUtilsMessengerEXT != rhs.m_debugUtilsMessengerEXT; - } + using UniqueSemaphore = UniqueHandle; - bool operator<( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_debugUtilsMessengerEXT < rhs.m_debugUtilsMessengerEXT; - } -#endif + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT - { - return m_debugUtilsMessengerEXT; - } + using UniqueEvent = UniqueHandle; - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_debugUtilsMessengerEXT != VK_NULL_HANDLE; - } + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_debugUtilsMessengerEXT == VK_NULL_HANDLE; - } + using UniqueQueryPool = UniqueHandle; - private: - VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT = {}; + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; }; - template <> - struct CppType + using UniqueBuffer = UniqueHandle; + + template + class UniqueHandleTraits { - using Type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT; + public: + using deleter = detail::ObjectDestroy; }; - template <> - struct isVulkanHandleType + using UniqueBufferView = UniqueHandle; + + template + class UniqueHandleTraits { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + public: + using deleter = detail::ObjectDestroy; }; - class DisplayKHR + using UniqueImage = UniqueHandle; + + template + class UniqueHandleTraits { public: - using CType = VkDisplayKHR; - using NativeType = VkDisplayKHR; + using deleter = detail::ObjectDestroy; + }; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR; + using UniqueImageView = UniqueHandle; + template + class UniqueHandleTraits + { public: - VULKAN_HPP_CONSTEXPR DisplayKHR() = default; - VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT : m_displayKHR( displayKHR ) {} - -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - DisplayKHR & operator=( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT - { - m_displayKHR = displayKHR; - return *this; - } -#endif + using deleter = detail::ObjectDestroy; + }; - DisplayKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_displayKHR = {}; - return *this; - } + using UniqueShaderModule = UniqueHandle; -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DisplayKHR const & ) const = default; -#else - bool operator==( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayKHR == rhs.m_displayKHR; - } + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; - bool operator!=( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayKHR != rhs.m_displayKHR; - } + using UniquePipelineCache = UniqueHandle; 
- bool operator<( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayKHR < rhs.m_displayKHR; - } -#endif + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const VULKAN_HPP_NOEXCEPT - { - return m_displayKHR; - } + using UniquePipeline = UniqueHandle; - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_displayKHR != VK_NULL_HANDLE; - } + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_displayKHR == VK_NULL_HANDLE; - } + using UniquePipelineLayout = UniqueHandle; - private: - VkDisplayKHR m_displayKHR = {}; + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; }; - template <> - struct CppType + using UniqueSampler = UniqueHandle; + + template + class UniqueHandleTraits { - using Type = VULKAN_HPP_NAMESPACE::DisplayKHR; + public: + using deleter = detail::ObjectDestroy; }; - template <> - struct CppType + using UniqueDescriptorPool = UniqueHandle; + + template + class UniqueHandleTraits { - using Type = VULKAN_HPP_NAMESPACE::DisplayKHR; + public: + using deleter = detail::PoolFree; }; - template <> - struct isVulkanHandleType + using UniqueDescriptorSet = UniqueHandle; + + template + class UniqueHandleTraits { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + public: + using deleter = detail::ObjectDestroy; }; - class SwapchainKHR + using UniqueDescriptorSetLayout = UniqueHandle; + + template + class UniqueHandleTraits { public: - using CType = VkSwapchainKHR; - using NativeType = VkSwapchainKHR; + using deleter = detail::ObjectDestroy; + }; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR; + using UniqueFramebuffer = UniqueHandle; + template + class UniqueHandleTraits + { public: - VULKAN_HPP_CONSTEXPR SwapchainKHR() = default; - VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT : m_swapchainKHR( swapchainKHR ) {} - -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - SwapchainKHR & operator=( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT - { - m_swapchainKHR = swapchainKHR; - return *this; - } -#endif + using deleter = detail::ObjectDestroy; + }; - SwapchainKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_swapchainKHR = {}; - return *this; - } + using UniqueRenderPass = UniqueHandle; -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SwapchainKHR const & ) const = default; -#else - bool operator==( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_swapchainKHR == rhs.m_swapchainKHR; - } + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; - bool operator!=( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_swapchainKHR != rhs.m_swapchainKHR; - } + using UniqueCommandPool = UniqueHandle; - bool operator<( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_swapchainKHR < rhs.m_swapchainKHR; - } -#endif - - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const VULKAN_HPP_NOEXCEPT - { - return 
m_swapchainKHR; - } - - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_swapchainKHR != VK_NULL_HANDLE; - } - - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_swapchainKHR == VK_NULL_HANDLE; - } - - private: - VkSwapchainKHR m_swapchainKHR = {}; - }; - - template <> - struct CppType + template + class UniqueHandleTraits { - using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR; + public: + using deleter = detail::PoolFree; }; - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR; - }; + using UniqueCommandBuffer = UniqueHandle; - template <> - struct isVulkanHandleType + //=== VK_VERSION_1_1 === + template + class UniqueHandleTraits { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + public: + using deleter = detail::ObjectDestroy; }; - class Semaphore + using UniqueSamplerYcbcrConversion = UniqueHandle; + using UniqueSamplerYcbcrConversionKHR = UniqueHandle; + + template + class UniqueHandleTraits { public: - using CType = VkSemaphore; - using NativeType = VkSemaphore; + using deleter = detail::ObjectDestroy; + }; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore; + using UniqueDescriptorUpdateTemplate = UniqueHandle; + using UniqueDescriptorUpdateTemplateKHR = UniqueHandle; + //=== VK_VERSION_1_3 === + template + class UniqueHandleTraits + { public: - VULKAN_HPP_CONSTEXPR Semaphore() = default; - VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT : m_semaphore( semaphore ) {} - -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - Semaphore & operator=( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT - { - m_semaphore = semaphore; - return *this; - } -#endif + using deleter = detail::ObjectDestroy; + }; - Semaphore & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_semaphore = {}; - return *this; - } + using UniquePrivateDataSlot = UniqueHandle; + using UniquePrivateDataSlotEXT = UniqueHandle; -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( Semaphore const & ) const = default; -#else - bool operator==( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_semaphore == rhs.m_semaphore; - } + //=== VK_KHR_surface === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; - bool operator!=( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_semaphore != rhs.m_semaphore; - } + using UniqueSurfaceKHR = UniqueHandle; - bool operator<( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_semaphore < rhs.m_semaphore; - } -#endif + //=== VK_KHR_swapchain === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const VULKAN_HPP_NOEXCEPT - { - return m_semaphore; - } + using UniqueSwapchainKHR = UniqueHandle; - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_semaphore != VK_NULL_HANDLE; - } + //=== VK_KHR_display === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_semaphore == VK_NULL_HANDLE; - } + using UniqueDisplayKHR = UniqueHandle; - private: - VkSemaphore 
m_semaphore = {}; + //=== VK_EXT_debug_report === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; }; - template <> - struct CppType + using UniqueDebugReportCallbackEXT = UniqueHandle; + + //=== VK_KHR_video_queue === + template + class UniqueHandleTraits { - using Type = VULKAN_HPP_NAMESPACE::Semaphore; + public: + using deleter = detail::ObjectDestroy; }; - template <> - struct CppType + using UniqueVideoSessionKHR = UniqueHandle; + + template + class UniqueHandleTraits { - using Type = VULKAN_HPP_NAMESPACE::Semaphore; + public: + using deleter = detail::ObjectDestroy; }; - template <> - struct isVulkanHandleType + using UniqueVideoSessionParametersKHR = UniqueHandle; + + //=== VK_NVX_binary_import === + template + class UniqueHandleTraits { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + public: + using deleter = detail::ObjectDestroy; }; - class Fence + using UniqueCuModuleNVX = UniqueHandle; + + template + class UniqueHandleTraits { public: - using CType = VkFence; - using NativeType = VkFence; + using deleter = detail::ObjectDestroy; + }; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFence; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence; + using UniqueCuFunctionNVX = UniqueHandle; + //=== VK_EXT_debug_utils === + template + class UniqueHandleTraits + { public: - VULKAN_HPP_CONSTEXPR Fence() = default; - VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence ) VULKAN_HPP_NOEXCEPT : m_fence( fence ) {} - -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - Fence & operator=( VkFence fence ) VULKAN_HPP_NOEXCEPT - { - m_fence = fence; - return *this; - } -#endif + using deleter = detail::ObjectDestroy; + }; - Fence & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_fence = {}; - return *this; - } + using UniqueDebugUtilsMessengerEXT = UniqueHandle; -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( Fence const & ) const = default; -#else - bool operator==( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_fence == rhs.m_fence; - } + //=== VK_KHR_acceleration_structure === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; - bool operator!=( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_fence != rhs.m_fence; - } + using UniqueAccelerationStructureKHR = UniqueHandle; - bool operator<( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_fence < rhs.m_fence; - } -#endif + //=== VK_EXT_validation_cache === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const VULKAN_HPP_NOEXCEPT - { - return m_fence; - } + using UniqueValidationCacheEXT = UniqueHandle; - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_fence != VK_NULL_HANDLE; - } + //=== VK_NV_ray_tracing === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_fence == VK_NULL_HANDLE; - } + using UniqueAccelerationStructureNV = UniqueHandle; - private: - VkFence m_fence = {}; + //=== VK_INTEL_performance_query === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; }; - 
template <> - struct CppType + using UniquePerformanceConfigurationINTEL = UniqueHandle; + + //=== VK_KHR_deferred_host_operations === + template + class UniqueHandleTraits { - using Type = VULKAN_HPP_NAMESPACE::Fence; + public: + using deleter = detail::ObjectDestroy; }; - template <> - struct CppType + using UniqueDeferredOperationKHR = UniqueHandle; + + //=== VK_NV_device_generated_commands === + template + class UniqueHandleTraits { - using Type = VULKAN_HPP_NAMESPACE::Fence; + public: + using deleter = detail::ObjectDestroy; }; - template <> - struct isVulkanHandleType + using UniqueIndirectCommandsLayoutNV = UniqueHandle; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + template + class UniqueHandleTraits { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + public: + using deleter = detail::ObjectDestroy; }; - class PerformanceConfigurationINTEL + using UniqueCudaModuleNV = UniqueHandle; + + template + class UniqueHandleTraits { public: - using CType = VkPerformanceConfigurationINTEL; - using NativeType = VkPerformanceConfigurationINTEL; + using deleter = detail::ObjectDestroy; + }; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + using UniqueCudaFunctionNV = UniqueHandle; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + template + class UniqueHandleTraits + { public: - VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL() = default; - VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT - : m_performanceConfigurationINTEL( performanceConfigurationINTEL ) - { - } + using deleter = detail::ObjectDestroy; + }; -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - PerformanceConfigurationINTEL & operator=( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT - { - m_performanceConfigurationINTEL = performanceConfigurationINTEL; - return *this; - } -#endif + using UniqueBufferCollectionFUCHSIA = UniqueHandle; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ - PerformanceConfigurationINTEL & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_performanceConfigurationINTEL = {}; - return *this; - } + //=== VK_EXT_opacity_micromap === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PerformanceConfigurationINTEL const & ) const = default; -#else - bool operator==( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_performanceConfigurationINTEL == rhs.m_performanceConfigurationINTEL; - } + using UniqueMicromapEXT = UniqueHandle; - bool operator!=( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_performanceConfigurationINTEL != rhs.m_performanceConfigurationINTEL; - } + //=== VK_NV_optical_flow === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; - bool operator<( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return 
m_performanceConfigurationINTEL < rhs.m_performanceConfigurationINTEL; - } -#endif + using UniqueOpticalFlowSessionNV = UniqueHandle; - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT - { - return m_performanceConfigurationINTEL; - } + //=== VK_EXT_shader_object === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_performanceConfigurationINTEL != VK_NULL_HANDLE; - } + using UniqueShaderEXT = UniqueHandle; - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_performanceConfigurationINTEL == VK_NULL_HANDLE; - } + //=== VK_KHR_pipeline_binary === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; - private: - VkPerformanceConfigurationINTEL m_performanceConfigurationINTEL = {}; + using UniquePipelineBinaryKHR = UniqueHandle; + + //=== VK_EXT_device_generated_commands === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; }; - template <> - struct CppType + using UniqueIndirectCommandsLayoutEXT = UniqueHandle; + + template + class UniqueHandleTraits { - using Type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL; + public: + using deleter = detail::ObjectDestroy; }; - template <> - struct isVulkanHandleType + using UniqueIndirectExecutionSetEXT = UniqueHandle; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + //=============== + //=== HANDLEs === + //=============== + + template + struct isVulkanHandleType { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = false; }; - class QueryPool + class SurfaceKHR { public: - using CType = VkQueryPool; - using NativeType = VkQueryPool; + using CType = VkSurfaceKHR; + using NativeType = VkSurfaceKHR; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR; public: - VULKAN_HPP_CONSTEXPR QueryPool() = default; - VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT : m_queryPool( queryPool ) {} + SurfaceKHR() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + SurfaceKHR( SurfaceKHR const & rhs ) = default; + SurfaceKHR & operator=( SurfaceKHR const & rhs ) = default; -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - QueryPool & operator=( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + SurfaceKHR( SurfaceKHR && rhs ) = default; + SurfaceKHR & operator=( SurfaceKHR && rhs ) = default; +#else + SurfaceKHR( SurfaceKHR && rhs ) VULKAN_HPP_NOEXCEPT : m_surfaceKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_surfaceKHR, {} ) ) {} + + SurfaceKHR & operator=( SurfaceKHR && rhs ) VULKAN_HPP_NOEXCEPT { - m_queryPool = queryPool; + m_surfaceKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_surfaceKHR, {} ); return *this; } #endif - QueryPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_queryPool = {}; - return *this; - } + VULKAN_HPP_CONSTEXPR 
SurfaceKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( QueryPool const & ) const = default; -#else - bool operator==( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_queryPool == rhs.m_queryPool; - } + VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT : m_surfaceKHR( surfaceKHR ) {} - bool operator!=( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + SurfaceKHR & operator=( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT { - return m_queryPool != rhs.m_queryPool; + m_surfaceKHR = surfaceKHR; + return *this; } +#endif - bool operator<( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT + SurfaceKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT { - return m_queryPool < rhs.m_queryPool; + m_surfaceKHR = {}; + return *this; } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const VULKAN_HPP_NOEXCEPT { - return m_queryPool; + return m_surfaceKHR; } explicit operator bool() const VULKAN_HPP_NOEXCEPT { - return m_queryPool != VK_NULL_HANDLE; + return m_surfaceKHR != VK_NULL_HANDLE; } bool operator!() const VULKAN_HPP_NOEXCEPT { - return m_queryPool == VK_NULL_HANDLE; + return m_surfaceKHR == VK_NULL_HANDLE; } private: - VkQueryPool m_queryPool = {}; + VkSurfaceKHR m_surfaceKHR = {}; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::QueryPool; + using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::QueryPool; + using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> - struct isVulkanHandleType + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR; + }; +#endif + + template <> + struct isVulkanHandleType { static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - class Buffer + class DebugReportCallbackEXT { public: - using CType = VkBuffer; - using NativeType = VkBuffer; + using CType = VkDebugReportCallbackEXT; + using NativeType = VkDebugReportCallbackEXT; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT; public: - VULKAN_HPP_CONSTEXPR Buffer() = default; - VULKAN_HPP_CONSTEXPR Buffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT : m_buffer( buffer ) {} + DebugReportCallbackEXT() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + DebugReportCallbackEXT( DebugReportCallbackEXT const & rhs ) = default; + DebugReportCallbackEXT & operator=( DebugReportCallbackEXT const & rhs ) = default; -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - Buffer & operator=( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DebugReportCallbackEXT( DebugReportCallbackEXT && rhs ) = default; + DebugReportCallbackEXT & operator=( DebugReportCallbackEXT && rhs ) = default; +#else + 
DebugReportCallbackEXT( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_debugReportCallbackEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_debugReportCallbackEXT, {} ) ) { - m_buffer = buffer; - return *this; } -#endif - Buffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + DebugReportCallbackEXT & operator=( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT { - m_buffer = {}; + m_debugReportCallbackEXT = VULKAN_HPP_NAMESPACE::exchange( rhs.m_debugReportCallbackEXT, {} ); return *this; } +#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( Buffer const & ) const = default; -#else - bool operator==( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT + : m_debugReportCallbackEXT( debugReportCallbackEXT ) { - return m_buffer == rhs.m_buffer; } - bool operator!=( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + DebugReportCallbackEXT & operator=( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT { - return m_buffer != rhs.m_buffer; + m_debugReportCallbackEXT = debugReportCallbackEXT; + return *this; } +#endif - bool operator<( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT + DebugReportCallbackEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT { - return m_buffer < rhs.m_buffer; + m_debugReportCallbackEXT = {}; + return *this; } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const VULKAN_HPP_NOEXCEPT { - return m_buffer; + return m_debugReportCallbackEXT; } explicit operator bool() const VULKAN_HPP_NOEXCEPT { - return m_buffer != VK_NULL_HANDLE; + return m_debugReportCallbackEXT != VK_NULL_HANDLE; } bool operator!() const VULKAN_HPP_NOEXCEPT { - return m_buffer == VK_NULL_HANDLE; + return m_debugReportCallbackEXT == VK_NULL_HANDLE; } private: - VkBuffer m_buffer = {}; + VkDebugReportCallbackEXT m_debugReportCallbackEXT = {}; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::Buffer; + using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::Buffer; + using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> - struct isVulkanHandleType + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; + }; +#endif + + template <> + struct isVulkanHandleType { static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - class PipelineLayout + class DebugUtilsMessengerEXT { public: - using CType = VkPipelineLayout; - using NativeType = VkPipelineLayout; + using CType = VkDebugUtilsMessengerEXT; + using NativeType = VkDebugUtilsMessengerEXT; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; public: - VULKAN_HPP_CONSTEXPR 
PipelineLayout() = default; - VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT : m_pipelineLayout( pipelineLayout ) {} + DebugUtilsMessengerEXT() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + DebugUtilsMessengerEXT( DebugUtilsMessengerEXT const & rhs ) = default; + DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT const & rhs ) = default; -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - PipelineLayout & operator=( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DebugUtilsMessengerEXT( DebugUtilsMessengerEXT && rhs ) = default; + DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT && rhs ) = default; +#else + DebugUtilsMessengerEXT( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_debugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_debugUtilsMessengerEXT, {} ) ) { - m_pipelineLayout = pipelineLayout; - return *this; } -#endif - PipelineLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT { - m_pipelineLayout = {}; + m_debugUtilsMessengerEXT = VULKAN_HPP_NAMESPACE::exchange( rhs.m_debugUtilsMessengerEXT, {} ); return *this; } +#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineLayout const & ) const = default; -#else - bool operator==( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT + : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT ) { - return m_pipelineLayout == rhs.m_pipelineLayout; } - bool operator!=( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + DebugUtilsMessengerEXT & operator=( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT { - return m_pipelineLayout != rhs.m_pipelineLayout; + m_debugUtilsMessengerEXT = debugUtilsMessengerEXT; + return *this; } +#endif - bool operator<( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT { - return m_pipelineLayout < rhs.m_pipelineLayout; + m_debugUtilsMessengerEXT = {}; + return *this; } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT { - return m_pipelineLayout; + return m_debugUtilsMessengerEXT; } explicit operator bool() const VULKAN_HPP_NOEXCEPT { - return m_pipelineLayout != VK_NULL_HANDLE; + return m_debugUtilsMessengerEXT != VK_NULL_HANDLE; } bool operator!() const VULKAN_HPP_NOEXCEPT { - return m_pipelineLayout == VK_NULL_HANDLE; + return m_debugUtilsMessengerEXT == VK_NULL_HANDLE; } private: - VkPipelineLayout m_pipelineLayout = {}; + VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT = {}; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::PipelineLayout; + using Type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::PipelineLayout; + using Type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT; }; 
+#endif template <> - struct isVulkanHandleType + struct isVulkanHandleType { static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - class DescriptorSet + class DisplayKHR { public: - using CType = VkDescriptorSet; - using NativeType = VkDescriptorSet; + using CType = VkDisplayKHR; + using NativeType = VkDisplayKHR; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR; public: - VULKAN_HPP_CONSTEXPR DescriptorSet() = default; - VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT : m_descriptorSet( descriptorSet ) {} + DisplayKHR() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + DisplayKHR( DisplayKHR const & rhs ) = default; + DisplayKHR & operator=( DisplayKHR const & rhs ) = default; -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - DescriptorSet & operator=( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DisplayKHR( DisplayKHR && rhs ) = default; + DisplayKHR & operator=( DisplayKHR && rhs ) = default; +#else + DisplayKHR( DisplayKHR && rhs ) VULKAN_HPP_NOEXCEPT : m_displayKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_displayKHR, {} ) ) {} + + DisplayKHR & operator=( DisplayKHR && rhs ) VULKAN_HPP_NOEXCEPT { - m_descriptorSet = descriptorSet; + m_displayKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_displayKHR, {} ); return *this; } #endif - DescriptorSet & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_descriptorSet = {}; - return *this; - } + VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DescriptorSet const & ) const = default; -#else - bool operator==( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSet == rhs.m_descriptorSet; - } + VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT : m_displayKHR( displayKHR ) {} - bool operator!=( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + DisplayKHR & operator=( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT { - return m_descriptorSet != rhs.m_descriptorSet; + m_displayKHR = displayKHR; + return *this; } +#endif - bool operator<( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + DisplayKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT { - return m_descriptorSet < rhs.m_descriptorSet; + m_displayKHR = {}; + return *this; } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const VULKAN_HPP_NOEXCEPT { - return m_descriptorSet; + return m_displayKHR; } explicit operator bool() const VULKAN_HPP_NOEXCEPT { - return m_descriptorSet != VK_NULL_HANDLE; + return m_displayKHR != VK_NULL_HANDLE; } bool operator!() const VULKAN_HPP_NOEXCEPT { - return m_descriptorSet == VK_NULL_HANDLE; + return m_displayKHR == VK_NULL_HANDLE; } private: - VkDescriptorSet m_descriptorSet = {}; + 
VkDisplayKHR m_displayKHR = {}; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::DescriptorSet; + using Type = VULKAN_HPP_NAMESPACE::DisplayKHR; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::DescriptorSet; + using Type = VULKAN_HPP_NAMESPACE::DisplayKHR; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> - struct isVulkanHandleType + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DisplayKHR; + }; +#endif + + template <> + struct isVulkanHandleType { static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - class ImageView + class SwapchainKHR { public: - using CType = VkImageView; - using NativeType = VkImageView; + using CType = VkSwapchainKHR; + using NativeType = VkSwapchainKHR; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImageView; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR; public: - VULKAN_HPP_CONSTEXPR ImageView() = default; - VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT : m_imageView( imageView ) {} + SwapchainKHR() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + SwapchainKHR( SwapchainKHR const & rhs ) = default; + SwapchainKHR & operator=( SwapchainKHR const & rhs ) = default; -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - ImageView & operator=( VkImageView imageView ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + SwapchainKHR( SwapchainKHR && rhs ) = default; + SwapchainKHR & operator=( SwapchainKHR && rhs ) = default; +#else + SwapchainKHR( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT : m_swapchainKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_swapchainKHR, {} ) ) {} + + SwapchainKHR & operator=( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT { - m_imageView = imageView; + m_swapchainKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_swapchainKHR, {} ); return *this; } #endif - ImageView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_imageView = {}; - return *this; - } + VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageView const & ) const = default; -#else - bool operator==( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_imageView == rhs.m_imageView; - } + VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT : m_swapchainKHR( swapchainKHR ) {} - bool operator!=( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + SwapchainKHR & operator=( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT { - return m_imageView != rhs.m_imageView; + m_swapchainKHR = swapchainKHR; + return *this; } +#endif - bool operator<( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT + SwapchainKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT { - return m_imageView < rhs.m_imageView; + m_swapchainKHR = {}; + return *this; } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT operator 
VkSwapchainKHR() const VULKAN_HPP_NOEXCEPT { - return m_imageView; + return m_swapchainKHR; } explicit operator bool() const VULKAN_HPP_NOEXCEPT { - return m_imageView != VK_NULL_HANDLE; + return m_swapchainKHR != VK_NULL_HANDLE; } bool operator!() const VULKAN_HPP_NOEXCEPT { - return m_imageView == VK_NULL_HANDLE; + return m_swapchainKHR == VK_NULL_HANDLE; } private: - VkImageView m_imageView = {}; + VkSwapchainKHR m_swapchainKHR = {}; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::ImageView; + using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::ImageView; + using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> - struct isVulkanHandleType + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR; + }; +#endif + + template <> + struct isVulkanHandleType { static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - class Pipeline + class Semaphore { public: - using CType = VkPipeline; - using NativeType = VkPipeline; + using CType = VkSemaphore; + using NativeType = VkSemaphore; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipeline; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore; public: - VULKAN_HPP_CONSTEXPR Pipeline() = default; - VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT : m_pipeline( pipeline ) {} + Semaphore() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + Semaphore( Semaphore const & rhs ) = default; + Semaphore & operator=( Semaphore const & rhs ) = default; -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - Pipeline & operator=( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Semaphore( Semaphore && rhs ) = default; + Semaphore & operator=( Semaphore && rhs ) = default; +#else + Semaphore( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT : m_semaphore( VULKAN_HPP_NAMESPACE::exchange( rhs.m_semaphore, {} ) ) {} + + Semaphore & operator=( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT { - m_pipeline = pipeline; + m_semaphore = VULKAN_HPP_NAMESPACE::exchange( rhs.m_semaphore, {} ); return *this; } #endif - Pipeline & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_pipeline = {}; - return *this; - } + VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( Pipeline const & ) const = default; -#else - bool operator==( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipeline == rhs.m_pipeline; - } + VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT : m_semaphore( semaphore ) {} - bool operator!=( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + Semaphore & operator=( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT { - return m_pipeline != rhs.m_pipeline; + m_semaphore = semaphore; + return *this; } +#endif - bool operator<( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT + 
Semaphore & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT { - return m_pipeline < rhs.m_pipeline; + m_semaphore = {}; + return *this; } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const VULKAN_HPP_NOEXCEPT { - return m_pipeline; + return m_semaphore; } explicit operator bool() const VULKAN_HPP_NOEXCEPT { - return m_pipeline != VK_NULL_HANDLE; + return m_semaphore != VK_NULL_HANDLE; } bool operator!() const VULKAN_HPP_NOEXCEPT { - return m_pipeline == VK_NULL_HANDLE; + return m_semaphore == VK_NULL_HANDLE; } private: - VkPipeline m_pipeline = {}; + VkSemaphore m_semaphore = {}; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::Pipeline; + using Type = VULKAN_HPP_NAMESPACE::Semaphore; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::Pipeline; + using Type = VULKAN_HPP_NAMESPACE::Semaphore; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> - struct isVulkanHandleType + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Semaphore; + }; +#endif + + template <> + struct isVulkanHandleType { static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - class Image + class Fence { public: - using CType = VkImage; - using NativeType = VkImage; + using CType = VkFence; + using NativeType = VkFence; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImage; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFence; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence; public: - VULKAN_HPP_CONSTEXPR Image() = default; - VULKAN_HPP_CONSTEXPR Image( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image ) VULKAN_HPP_NOEXCEPT : m_image( image ) {} + Fence() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + Fence( Fence const & rhs ) = default; + Fence & operator=( Fence const & rhs ) = default; -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - Image & operator=( VkImage image ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Fence( Fence && rhs ) = default; + Fence & operator=( Fence && rhs ) = default; +#else + Fence( Fence && rhs ) VULKAN_HPP_NOEXCEPT : m_fence( VULKAN_HPP_NAMESPACE::exchange( rhs.m_fence, {} ) ) {} + + Fence & operator=( Fence && rhs ) VULKAN_HPP_NOEXCEPT { - m_image = image; + m_fence = VULKAN_HPP_NAMESPACE::exchange( rhs.m_fence, {} ); return *this; } #endif - Image & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_image = {}; - return *this; - } + VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( Image const & ) const = default; -#else - bool operator==( Image const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_image == rhs.m_image; - } + VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence ) VULKAN_HPP_NOEXCEPT : m_fence( fence ) {} - bool operator!=( Image const & rhs ) const VULKAN_HPP_NOEXCEPT +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + Fence & operator=( VkFence fence ) VULKAN_HPP_NOEXCEPT { - return m_image != rhs.m_image; + m_fence = fence; + return *this; } +#endif - bool operator<( Image const & rhs ) const 
VULKAN_HPP_NOEXCEPT + Fence & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT { - return m_image < rhs.m_image; + m_fence = {}; + return *this; } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const VULKAN_HPP_NOEXCEPT { - return m_image; + return m_fence; } explicit operator bool() const VULKAN_HPP_NOEXCEPT { - return m_image != VK_NULL_HANDLE; + return m_fence != VK_NULL_HANDLE; } bool operator!() const VULKAN_HPP_NOEXCEPT { - return m_image == VK_NULL_HANDLE; + return m_fence == VK_NULL_HANDLE; } private: - VkImage m_image = {}; + VkFence m_fence = {}; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::Image; + using Type = VULKAN_HPP_NAMESPACE::Fence; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::Image; + using Type = VULKAN_HPP_NAMESPACE::Fence; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> - struct isVulkanHandleType + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Fence; + }; +#endif + + template <> + struct isVulkanHandleType { static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - class AccelerationStructureNV + class PerformanceConfigurationINTEL { public: - using CType = VkAccelerationStructureNV; - using NativeType = VkAccelerationStructureNV; + using CType = VkPerformanceConfigurationINTEL; + using NativeType = VkPerformanceConfigurationINTEL; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; public: - VULKAN_HPP_CONSTEXPR AccelerationStructureNV() = default; - VULKAN_HPP_CONSTEXPR AccelerationStructureNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureNV( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT - : m_accelerationStructureNV( accelerationStructureNV ) + PerformanceConfigurationINTEL() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + PerformanceConfigurationINTEL( PerformanceConfigurationINTEL const & rhs ) = default; + PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + PerformanceConfigurationINTEL( PerformanceConfigurationINTEL && rhs ) = default; + PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL && rhs ) = default; +#else + PerformanceConfigurationINTEL( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT + : m_performanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::exchange( rhs.m_performanceConfigurationINTEL, {} ) ) { } -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - AccelerationStructureNV & operator=( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT + PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT { - m_accelerationStructureNV = accelerationStructureNV; + m_performanceConfigurationINTEL = VULKAN_HPP_NAMESPACE::exchange( rhs.m_performanceConfigurationINTEL, {} ); return *this; } #endif - 
AccelerationStructureNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT + : m_performanceConfigurationINTEL( performanceConfigurationINTEL ) { - m_accelerationStructureNV = {}; - return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( AccelerationStructureNV const & ) const = default; -#else - bool operator==( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_accelerationStructureNV == rhs.m_accelerationStructureNV; - } - - bool operator!=( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + PerformanceConfigurationINTEL & operator=( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT { - return m_accelerationStructureNV != rhs.m_accelerationStructureNV; + m_performanceConfigurationINTEL = performanceConfigurationINTEL; + return *this; } +#endif - bool operator<( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + PerformanceConfigurationINTEL & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT { - return m_accelerationStructureNV < rhs.m_accelerationStructureNV; + m_performanceConfigurationINTEL = {}; + return *this; } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureNV() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT { - return m_accelerationStructureNV; + return m_performanceConfigurationINTEL; } explicit operator bool() const VULKAN_HPP_NOEXCEPT { - return m_accelerationStructureNV != VK_NULL_HANDLE; + return m_performanceConfigurationINTEL != VK_NULL_HANDLE; } bool operator!() const VULKAN_HPP_NOEXCEPT { - return m_accelerationStructureNV == VK_NULL_HANDLE; + return m_performanceConfigurationINTEL == VK_NULL_HANDLE; } private: - VkAccelerationStructureNV m_accelerationStructureNV = {}; + VkPerformanceConfigurationINTEL m_performanceConfigurationINTEL = {}; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV; + using Type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV; + using Type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL; }; +#endif template <> - struct isVulkanHandleType + struct isVulkanHandleType { static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - class DescriptorUpdateTemplate + class QueryPool { public: - using CType = VkDescriptorUpdateTemplate; - using NativeType = VkDescriptorUpdateTemplate; + using CType = VkQueryPool; + using NativeType = VkQueryPool; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool; public: - VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() = default; - 
VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT - : m_descriptorUpdateTemplate( descriptorUpdateTemplate ) - { - } + QueryPool() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + QueryPool( QueryPool const & rhs ) = default; + QueryPool & operator=( QueryPool const & rhs ) = default; -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - DescriptorUpdateTemplate & operator=( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + QueryPool( QueryPool && rhs ) = default; + QueryPool & operator=( QueryPool && rhs ) = default; +#else + QueryPool( QueryPool && rhs ) VULKAN_HPP_NOEXCEPT : m_queryPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_queryPool, {} ) ) {} + + QueryPool & operator=( QueryPool && rhs ) VULKAN_HPP_NOEXCEPT { - m_descriptorUpdateTemplate = descriptorUpdateTemplate; + m_queryPool = VULKAN_HPP_NAMESPACE::exchange( rhs.m_queryPool, {} ); return *this; } #endif - DescriptorUpdateTemplate & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_descriptorUpdateTemplate = {}; - return *this; - } + VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DescriptorUpdateTemplate const & ) const = default; -#else - bool operator==( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate; - } + VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT : m_queryPool( queryPool ) {} - bool operator!=( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + QueryPool & operator=( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT { - return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate; + m_queryPool = queryPool; + return *this; } +#endif - bool operator<( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT + QueryPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT { - return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate; + m_queryPool = {}; + return *this; } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const VULKAN_HPP_NOEXCEPT { - return m_descriptorUpdateTemplate; + return m_queryPool; } explicit operator bool() const VULKAN_HPP_NOEXCEPT { - return m_descriptorUpdateTemplate != VK_NULL_HANDLE; + return m_queryPool != VK_NULL_HANDLE; } bool operator!() const VULKAN_HPP_NOEXCEPT { - return m_descriptorUpdateTemplate == VK_NULL_HANDLE; + return m_queryPool == VK_NULL_HANDLE; } private: - VkDescriptorUpdateTemplate m_descriptorUpdateTemplate = {}; + VkQueryPool m_queryPool = {}; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; + using Type = VULKAN_HPP_NAMESPACE::QueryPool; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; + using Type = VULKAN_HPP_NAMESPACE::QueryPool; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> - struct isVulkanHandleType + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::QueryPool; + }; +#endif + + template <> + struct isVulkanHandleType { static VULKAN_HPP_CONST_OR_CONSTEXPR 
bool value = true; }; - using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate; - class Event + class Buffer { public: - using CType = VkEvent; - using NativeType = VkEvent; + using CType = VkBuffer; + using NativeType = VkBuffer; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eEvent; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBuffer; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer; public: - VULKAN_HPP_CONSTEXPR Event() = default; - VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT : m_event( event ) {} + Buffer() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + Buffer( Buffer const & rhs ) = default; + Buffer & operator=( Buffer const & rhs ) = default; -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - Event & operator=( VkEvent event ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Buffer( Buffer && rhs ) = default; + Buffer & operator=( Buffer && rhs ) = default; +#else + Buffer( Buffer && rhs ) VULKAN_HPP_NOEXCEPT : m_buffer( VULKAN_HPP_NAMESPACE::exchange( rhs.m_buffer, {} ) ) {} + + Buffer & operator=( Buffer && rhs ) VULKAN_HPP_NOEXCEPT { - m_event = event; + m_buffer = VULKAN_HPP_NAMESPACE::exchange( rhs.m_buffer, {} ); return *this; } #endif - Event & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_event = {}; - return *this; - } + VULKAN_HPP_CONSTEXPR Buffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( Event const & ) const = default; -#else - bool operator==( Event const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_event == rhs.m_event; - } + VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT : m_buffer( buffer ) {} - bool operator!=( Event const & rhs ) const VULKAN_HPP_NOEXCEPT +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + Buffer & operator=( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT { - return m_event != rhs.m_event; + m_buffer = buffer; + return *this; } +#endif - bool operator<( Event const & rhs ) const VULKAN_HPP_NOEXCEPT + Buffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT { - return m_event < rhs.m_event; + m_buffer = {}; + return *this; } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const VULKAN_HPP_NOEXCEPT { - return m_event; + return m_buffer; } explicit operator bool() const VULKAN_HPP_NOEXCEPT { - return m_event != VK_NULL_HANDLE; + return m_buffer != VK_NULL_HANDLE; } bool operator!() const VULKAN_HPP_NOEXCEPT { - return m_event == VK_NULL_HANDLE; + return m_buffer == VK_NULL_HANDLE; } private: - VkEvent m_event = {}; + VkBuffer m_buffer = {}; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::Event; + using Type = VULKAN_HPP_NAMESPACE::Buffer; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::Event; + using Type = VULKAN_HPP_NAMESPACE::Buffer; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> - struct isVulkanHandleType + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Buffer; + }; +#endif + + template <> + struct 
isVulkanHandleType { static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - class AccelerationStructureKHR + class PipelineLayout { public: - using CType = VkAccelerationStructureKHR; - using NativeType = VkAccelerationStructureKHR; + using CType = VkPipelineLayout; + using NativeType = VkPipelineLayout; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout; public: - VULKAN_HPP_CONSTEXPR AccelerationStructureKHR() = default; - VULKAN_HPP_CONSTEXPR AccelerationStructureKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureKHR( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT - : m_accelerationStructureKHR( accelerationStructureKHR ) - { - } + PipelineLayout() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + PipelineLayout( PipelineLayout const & rhs ) = default; + PipelineLayout & operator=( PipelineLayout const & rhs ) = default; -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - AccelerationStructureKHR & operator=( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + PipelineLayout( PipelineLayout && rhs ) = default; + PipelineLayout & operator=( PipelineLayout && rhs ) = default; +#else + PipelineLayout( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT : m_pipelineLayout( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineLayout, {} ) ) {} + + PipelineLayout & operator=( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT { - m_accelerationStructureKHR = accelerationStructureKHR; + m_pipelineLayout = VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineLayout, {} ); return *this; } #endif - AccelerationStructureKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_accelerationStructureKHR = {}; - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( AccelerationStructureKHR const & ) const = default; -#else - bool operator==( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_accelerationStructureKHR == rhs.m_accelerationStructureKHR; - } + VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT : m_pipelineLayout( pipelineLayout ) {} - bool operator!=( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + PipelineLayout & operator=( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT { - return m_accelerationStructureKHR != rhs.m_accelerationStructureKHR; + m_pipelineLayout = pipelineLayout; + return *this; } +#endif - bool operator<( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + PipelineLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT { - return m_accelerationStructureKHR < rhs.m_accelerationStructureKHR; + m_pipelineLayout = {}; + return *this; } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureKHR() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT operator 
VkPipelineLayout() const VULKAN_HPP_NOEXCEPT { - return m_accelerationStructureKHR; + return m_pipelineLayout; } explicit operator bool() const VULKAN_HPP_NOEXCEPT { - return m_accelerationStructureKHR != VK_NULL_HANDLE; + return m_pipelineLayout != VK_NULL_HANDLE; } bool operator!() const VULKAN_HPP_NOEXCEPT { - return m_accelerationStructureKHR == VK_NULL_HANDLE; + return m_pipelineLayout == VK_NULL_HANDLE; } private: - VkAccelerationStructureKHR m_accelerationStructureKHR = {}; + VkPipelineLayout m_pipelineLayout = {}; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; + using Type = VULKAN_HPP_NAMESPACE::PipelineLayout; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; + using Type = VULKAN_HPP_NAMESPACE::PipelineLayout; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> - struct isVulkanHandleType + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineLayout; + }; +#endif + + template <> + struct isVulkanHandleType { static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - class CommandBuffer + class DescriptorSet { public: - using CType = VkCommandBuffer; - using NativeType = VkCommandBuffer; + using CType = VkDescriptorSet; + using NativeType = VkDescriptorSet; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet; public: - VULKAN_HPP_CONSTEXPR CommandBuffer() = default; - VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT : m_commandBuffer( commandBuffer ) {} + DescriptorSet() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + DescriptorSet( DescriptorSet const & rhs ) = default; + DescriptorSet & operator=( DescriptorSet const & rhs ) = default; - CommandBuffer & operator=( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DescriptorSet( DescriptorSet && rhs ) = default; + DescriptorSet & operator=( DescriptorSet && rhs ) = default; +#else + DescriptorSet( DescriptorSet && rhs ) VULKAN_HPP_NOEXCEPT : m_descriptorSet( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorSet, {} ) ) {} + + DescriptorSet & operator=( DescriptorSet && rhs ) VULKAN_HPP_NOEXCEPT { - m_commandBuffer = commandBuffer; + m_descriptorSet = VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorSet, {} ); return *this; } +#endif - CommandBuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT : m_descriptorSet( descriptorSet ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + DescriptorSet & operator=( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT { - m_commandBuffer = {}; + m_descriptorSet = descriptorSet; return *this; } +#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CommandBuffer const & ) const = default; -#else - bool operator==( CommandBuffer 
const & rhs ) const VULKAN_HPP_NOEXCEPT + DescriptorSet & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT { - return m_commandBuffer == rhs.m_commandBuffer; + m_descriptorSet = {}; + return *this; } - bool operator!=( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const VULKAN_HPP_NOEXCEPT { - return m_commandBuffer != rhs.m_commandBuffer; + return m_descriptorSet; } - bool operator<( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + explicit operator bool() const VULKAN_HPP_NOEXCEPT { - return m_commandBuffer < rhs.m_commandBuffer; + return m_descriptorSet != VK_NULL_HANDLE; } -#endif - //=== VK_VERSION_1_0 === - - template - VULKAN_HPP_NODISCARD Result begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet == VK_NULL_HANDLE; + } -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD Result end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + private: + VkDescriptorSet m_descriptorSet = {}; + }; -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSet; + }; - template - void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, - VULKAN_HPP_NAMESPACE::Pipeline pipeline, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSet; + }; - template - void setViewport( uint32_t firstViewport, - uint32_t viewportCount, - const VULKAN_HPP_NAMESPACE::Viewport * pViewports, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setViewport( uint32_t firstViewport, - VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSet; + }; +#endif - template - void setScissor( uint32_t firstScissor, - uint32_t scissorCount, - const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setScissor( uint32_t firstScissor, - VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; - template - void setLineWidth( float lineWidth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + class ImageView + { + public: + using CType = VkImageView; + using NativeType = VkImageView; - template - void setDepthBias( float depthBiasConstantFactor, - float depthBiasClamp, - float depthBiasSlopeFactor, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImageView; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView; - template - void setBlendConstants( const float blendConstants[4], Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + public: + ImageView() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + ImageView( ImageView const & rhs ) = default; + ImageView & operator=( ImageView const & rhs ) = default; - template - void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + ImageView( ImageView && rhs ) = default; + ImageView & operator=( ImageView && rhs ) = default; +#else + ImageView( ImageView && rhs ) VULKAN_HPP_NOEXCEPT : m_imageView( VULKAN_HPP_NAMESPACE::exchange( rhs.m_imageView, {} ) ) {} - template - void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, - uint32_t compareMask, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + ImageView & operator=( ImageView && rhs ) VULKAN_HPP_NOEXCEPT + { + m_imageView = VULKAN_HPP_NAMESPACE::exchange( rhs.m_imageView, {} ); + return *this; + } +#endif - template - void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, - uint32_t writeMask, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - template - void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, - uint32_t reference, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT : m_imageView( imageView ) {} - template - void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, - VULKAN_HPP_NAMESPACE::PipelineLayout layout, - uint32_t firstSet, - uint32_t descriptorSetCount, - const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, - uint32_t dynamicOffsetCount, - const uint32_t * pDynamicOffsets, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, - VULKAN_HPP_NAMESPACE::PipelineLayout layout, - uint32_t firstSet, - 
VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorSets, - VULKAN_HPP_NAMESPACE::ArrayProxy const & dynamicOffsets, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + ImageView & operator=( VkImageView imageView ) VULKAN_HPP_NOEXCEPT + { + m_imageView = imageView; + return *this; + } +#endif - template - void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::IndexType indexType, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + ImageView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_imageView = {}; + return *this; + } - template - void bindVertexBuffers( uint32_t firstBinding, - uint32_t bindingCount, - const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, - const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void bindVertexBuffers( uint32_t firstBinding, - VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, - VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const VULKAN_HPP_NOEXCEPT + { + return m_imageView; + } - template - void draw( uint32_t vertexCount, - uint32_t instanceCount, - uint32_t firstVertex, - uint32_t firstInstance, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_imageView != VK_NULL_HANDLE; + } - template - void drawIndexed( uint32_t indexCount, - uint32_t instanceCount, - uint32_t firstIndex, - int32_t vertexOffset, - uint32_t firstInstance, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_imageView == VK_NULL_HANDLE; + } - template - void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - uint32_t drawCount, - uint32_t stride, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + private: + VkImageView m_imageView = {}; + }; - template - void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - uint32_t drawCount, - uint32_t stride, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ImageView; + }; - template - void dispatch( uint32_t groupCountX, - uint32_t groupCountY, - uint32_t groupCountZ, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ImageView; + }; - template - void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ImageView; + }; +#endif - template - void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - uint32_t 
regionCount, - const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; - template - void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, - VULKAN_HPP_NAMESPACE::Image dstImage, - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, - uint32_t regionCount, - const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, - VULKAN_HPP_NAMESPACE::Image dstImage, - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, - VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + class Pipeline + { + public: + using CType = VkPipeline; + using NativeType = VkPipeline; - template - void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, - VULKAN_HPP_NAMESPACE::Image dstImage, - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, - uint32_t regionCount, - const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions, - VULKAN_HPP_NAMESPACE::Filter filter, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, - VULKAN_HPP_NAMESPACE::Image dstImage, - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, - VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, - VULKAN_HPP_NAMESPACE::Filter filter, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipeline; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline; - template - void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, - VULKAN_HPP_NAMESPACE::Image dstImage, - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, - uint32_t regionCount, - const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, - VULKAN_HPP_NAMESPACE::Image dstImage, - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, - VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + public: + Pipeline() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue 
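The hunks above and below apply the same mechanical change to every handle wrapper class (Buffer, PipelineLayout, DescriptorSet, ImageView, Pipeline, ShaderEXT, Image, and so on): the constexpr defaulted constructor is replaced by an empty noexcept default constructor (kept as a compiler workaround), copy operations are explicitly defaulted, and move operations exchange the wrapped C handle when VULKAN_HPP_HANDLES_MOVE_EXCHANGE is defined so the moved-from wrapper is left null; the raw-handle assignment operator is now gated on VULKAN_HPP_TYPESAFE_CONVERSION == 1, and an extra CppType specialization is added under VK_USE_64_BIT_PTR_DEFINES == 1. As a reading aid only, the sketch below shows the resulting wrapper shape in isolation, using a hypothetical ExampleHandle / VkExampleHandle / VK_EXAMPLE_NULL trio rather than any real Vulkan type; it is not part of the generated header or of this patch.

// Sketch only: illustrates the wrapper pattern produced by this diff for a
// hypothetical handle type; names below are stand-ins, not Vulkan symbols.
#include <cstdint>
#include <utility>

using VkExampleHandle = std::uint64_t;          // stand-in for a Vk* C handle
constexpr VkExampleHandle VK_EXAMPLE_NULL = 0;  // stand-in for VK_NULL_HANDLE

class ExampleHandle
{
public:
  ExampleHandle() noexcept {}  // empty body instead of "= default" (compiler workaround)
  ExampleHandle( ExampleHandle const & )             = default;
  ExampleHandle & operator=( ExampleHandle const & ) = default;

  // Move operations null out the source, mirroring the
  // VULKAN_HPP_HANDLES_MOVE_EXCHANGE branch of the diff.
  ExampleHandle( ExampleHandle && rhs ) noexcept : m_handle( std::exchange( rhs.m_handle, {} ) ) {}
  ExampleHandle & operator=( ExampleHandle && rhs ) noexcept
  {
    m_handle = std::exchange( rhs.m_handle, {} );
    return *this;
  }

  ExampleHandle( std::nullptr_t ) noexcept {}
  explicit ExampleHandle( VkExampleHandle handle ) noexcept : m_handle( handle ) {}

  ExampleHandle & operator=( std::nullptr_t ) noexcept
  {
    m_handle = {};
    return *this;
  }

  // Explicit conversion back to the C handle plus null checks against the
  // sentinel value, matching the generated operators.
  explicit operator VkExampleHandle() const noexcept { return m_handle; }
  explicit operator bool() const noexcept { return m_handle != VK_EXAMPLE_NULL; }
  bool     operator!() const noexcept { return m_handle == VK_EXAMPLE_NULL; }

private:
  VkExampleHandle m_handle = {};
};

In the generated header this shape is repeated once per handle, with the raw-handle assignment additionally guarded by VULKAN_HPP_TYPESAFE_CONVERSION == 1 and the CppType / isVulkanHandleType trait specializations emitted alongside each class, as the surrounding hunks show.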
+ Pipeline( Pipeline const & rhs ) = default; + Pipeline & operator=( Pipeline const & rhs ) = default; - template - void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - uint32_t regionCount, - const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Pipeline( Pipeline && rhs ) = default; + Pipeline & operator=( Pipeline && rhs ) = default; +#else + Pipeline( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT : m_pipeline( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipeline, {} ) ) {} - template - void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, - VULKAN_HPP_NAMESPACE::DeviceSize dataSize, - const void * pData, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, - VULKAN_HPP_NAMESPACE::ArrayProxy const & data, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + Pipeline & operator=( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT + { + m_pipeline = VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipeline, {} ); + return *this; + } +#endif - template - void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, - VULKAN_HPP_NAMESPACE::DeviceSize size, - uint32_t data, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - template - void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, - VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, - const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor, - uint32_t rangeCount, - const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, - VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, - const VULKAN_HPP_NAMESPACE::ClearColorValue & color, - VULKAN_HPP_NAMESPACE::ArrayProxy const & ranges, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT : m_pipeline( pipeline ) {} - template - void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, - VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, - const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil, - uint32_t rangeCount, - const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void 
clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, - VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, - const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, - VULKAN_HPP_NAMESPACE::ArrayProxy const & ranges, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + Pipeline & operator=( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT + { + m_pipeline = pipeline; + return *this; + } +#endif - template - void clearAttachments( uint32_t attachmentCount, - const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments, - uint32_t rectCount, - const VULKAN_HPP_NAMESPACE::ClearRect * pRects, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy const & attachments, - VULKAN_HPP_NAMESPACE::ArrayProxy const & rects, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + Pipeline & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipeline = {}; + return *this; + } - template - void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, - VULKAN_HPP_NAMESPACE::Image dstImage, - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, - uint32_t regionCount, - const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, - VULKAN_HPP_NAMESPACE::Image dstImage, - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, - VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline; + } - template - void setEvent( VULKAN_HPP_NAMESPACE::Event event, - VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline != VK_NULL_HANDLE; + } - template - void resetEvent( VULKAN_HPP_NAMESPACE::Event event, - VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline == VK_NULL_HANDLE; + } - template - void waitEvents( uint32_t eventCount, - const VULKAN_HPP_NAMESPACE::Event * pEvents, - VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, - VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, - uint32_t memoryBarrierCount, - const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, - uint32_t bufferMemoryBarrierCount, - const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, - uint32_t imageMemoryBarrierCount, - const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, - 
VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, - VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, - VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryBarriers, - VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferMemoryBarriers, - VULKAN_HPP_NAMESPACE::ArrayProxy const & imageMemoryBarriers, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + private: + VkPipeline m_pipeline = {}; + }; - template - void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, - VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, - VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, - uint32_t memoryBarrierCount, - const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, - uint32_t bufferMemoryBarrierCount, - const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, - uint32_t imageMemoryBarrierCount, - const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, - VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, - VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, - VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryBarriers, - VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferMemoryBarriers, - VULKAN_HPP_NAMESPACE::ArrayProxy const & imageMemoryBarriers, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Pipeline; + }; - template - void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t query, - VULKAN_HPP_NAMESPACE::QueryControlFlags flags, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Pipeline; + }; - template - void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t query, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Pipeline; + }; +#endif - template - void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; - template - void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, - VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t query, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - - template - void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, - VULKAN_HPP_NAMESPACE::DeviceSize stride, - VULKAN_HPP_NAMESPACE::QueryResultFlags flags, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + class ShaderEXT + { + public: + using CType = VkShaderEXT; + using NativeType = VkShaderEXT; - template - void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, - 
VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, - uint32_t offset, - uint32_t size, - const void * pValues, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, - VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, - uint32_t offset, - VULKAN_HPP_NAMESPACE::ArrayProxy const & values, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; - template - void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, - VULKAN_HPP_NAMESPACE::SubpassContents contents, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, - VULKAN_HPP_NAMESPACE::SubpassContents contents, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + public: + ShaderEXT() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + ShaderEXT( ShaderEXT const & rhs ) = default; + ShaderEXT & operator=( ShaderEXT const & rhs ) = default; - template - void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + ShaderEXT( ShaderEXT && rhs ) = default; + ShaderEXT & operator=( ShaderEXT && rhs ) = default; +#else + ShaderEXT( ShaderEXT && rhs ) VULKAN_HPP_NOEXCEPT : m_shaderEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_shaderEXT, {} ) ) {} - template - void endRenderPass( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + ShaderEXT & operator=( ShaderEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + m_shaderEXT = VULKAN_HPP_NAMESPACE::exchange( rhs.m_shaderEXT, {} ); + return *this; + } +#endif - template - void executeCommands( uint32_t commandBufferCount, - const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy const & commandBuffers, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_CONSTEXPR ShaderEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - //=== VK_VERSION_1_1 === + VULKAN_HPP_TYPESAFE_EXPLICIT ShaderEXT( VkShaderEXT shaderEXT ) VULKAN_HPP_NOEXCEPT : m_shaderEXT( shaderEXT ) {} - template - void setDeviceMask( uint32_t deviceMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + ShaderEXT & operator=( VkShaderEXT shaderEXT ) VULKAN_HPP_NOEXCEPT + { + m_shaderEXT = shaderEXT; + return *this; + } +#endif - template - void dispatchBase( uint32_t baseGroupX, - uint32_t baseGroupY, - uint32_t baseGroupZ, - uint32_t groupCountX, - uint32_t 
groupCountY, - uint32_t groupCountZ, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + ShaderEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_shaderEXT = {}; + return *this; + } - //=== VK_VERSION_1_2 === + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderEXT() const VULKAN_HPP_NOEXCEPT + { + return m_shaderEXT; + } - template - void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::Buffer countBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_shaderEXT != VK_NULL_HANDLE; + } - template - void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::Buffer countBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_shaderEXT == VK_NULL_HANDLE; + } - template - void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, - const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, - const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + private: + VkShaderEXT m_shaderEXT = {}; + }; - template - void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, - const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, - const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ShaderEXT; + }; - template - void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ShaderEXT; + }; +#endif - //=== VK_VERSION_1_3 === + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; - template - void setEvent2( VULKAN_HPP_NAMESPACE::Event event, - const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setEvent2( VULKAN_HPP_NAMESPACE::Event event, - const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + class Image + { + public: + using CType = VkImage; + using NativeType = VkImage; - template - void resetEvent2( VULKAN_HPP_NAMESPACE::Event event, - VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImage; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage; - template - void waitEvents2( uint32_t eventCount, - const VULKAN_HPP_NAMESPACE::Event * pEvents, - const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, - VULKAN_HPP_NAMESPACE::ArrayProxy const & dependencyInfos, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + public: + Image() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + Image( Image const & rhs ) = default; + Image & operator=( Image const & rhs ) = default; - template - void pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Image( Image && rhs ) = default; + Image & operator=( Image && rhs ) = default; +#else + Image( Image && rhs ) VULKAN_HPP_NOEXCEPT : m_image( VULKAN_HPP_NAMESPACE::exchange( rhs.m_image, {} ) ) {} - template - void writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, - VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t query, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + Image & operator=( Image && rhs ) VULKAN_HPP_NOEXCEPT + { + m_image = VULKAN_HPP_NAMESPACE::exchange( rhs.m_image, {} ); + return *this; + } +#endif - template - void copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_CONSTEXPR Image( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - template - void copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - 
template - void copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image ) VULKAN_HPP_NOEXCEPT : m_image( image ) {} - template - void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + Image & operator=( VkImage image ) VULKAN_HPP_NOEXCEPT + { + m_image = image; + return *this; + } +#endif - template - void copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + Image & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_image = {}; + return *this; + } - template - void blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const VULKAN_HPP_NOEXCEPT + { + return m_image; + } - template - void resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_image != VK_NULL_HANDLE; + } - template - void endRendering( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_image == VK_NULL_HANDLE; + } - template - void setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + private: + VkImage m_image = {}; + 
}; - template - void setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Image; + }; - template - void setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Image; + }; - template - void setViewportWithCount( uint32_t viewportCount, - const VULKAN_HPP_NAMESPACE::Viewport * pViewports, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Image; + }; +#endif - template - void setScissorWithCount( uint32_t scissorCount, - const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; - template - void bindVertexBuffers2( uint32_t firstBinding, - uint32_t bindingCount, - const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, - const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, - const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, - const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void bindVertexBuffers2( - uint32_t firstBinding, - VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, - VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, - VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - VULKAN_HPP_NAMESPACE::ArrayProxy const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + class AccelerationStructureNV + { + public: + using CType = VkAccelerationStructureNV; + using NativeType = VkAccelerationStructureNV; - template - void setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV; - template - void setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + public: + AccelerationStructureNV() VULKAN_HPP_NOEXCEPT{}; // = default - 
try to workaround a compiler issue + AccelerationStructureNV( AccelerationStructureNV const & rhs ) = default; + AccelerationStructureNV & operator=( AccelerationStructureNV const & rhs ) = default; - template - void setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + AccelerationStructureNV( AccelerationStructureNV && rhs ) = default; + AccelerationStructureNV & operator=( AccelerationStructureNV && rhs ) = default; +#else + AccelerationStructureNV( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_accelerationStructureNV( VULKAN_HPP_NAMESPACE::exchange( rhs.m_accelerationStructureNV, {} ) ) + { + } - template - void setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + AccelerationStructureNV & operator=( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureNV = VULKAN_HPP_NAMESPACE::exchange( rhs.m_accelerationStructureNV, {} ); + return *this; + } +#endif - template - void setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_CONSTEXPR AccelerationStructureNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - template - void setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, - VULKAN_HPP_NAMESPACE::StencilOp failOp, - VULKAN_HPP_NAMESPACE::StencilOp passOp, - VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, - VULKAN_HPP_NAMESPACE::CompareOp compareOp, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureNV( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT + : m_accelerationStructureNV( accelerationStructureNV ) + { + } - template - void setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + AccelerationStructureNV & operator=( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureNV = accelerationStructureNV; + return *this; + } +#endif - template - void setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + AccelerationStructureNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureNV = {}; + return *this; + } - template - void setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureNV() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV; + } - //=== VK_EXT_debug_marker === + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV != VK_NULL_HANDLE; + } - template - void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & 
markerInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV == VK_NULL_HANDLE; + } - template - void debugMarkerEndEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + private: + VkAccelerationStructureNV m_accelerationStructureNV = {}; + }; - template - void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV; + }; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_queue === + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV; + }; - template - void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV; + }; +#endif - template - void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; - template - void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + class OpticalFlowSessionNV + { + public: + using CType = VkOpticalFlowSessionNV; + using NativeType = VkOpticalFlowSessionNV; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_decode_queue === + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eOpticalFlowSessionNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; - template - void decodeVideoKHR( const 
VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pFrameInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & frameInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + public: + OpticalFlowSessionNV() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + OpticalFlowSessionNV( OpticalFlowSessionNV const & rhs ) = default; + OpticalFlowSessionNV & operator=( OpticalFlowSessionNV const & rhs ) = default; - //=== VK_EXT_transform_feedback === +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + OpticalFlowSessionNV( OpticalFlowSessionNV && rhs ) = default; + OpticalFlowSessionNV & operator=( OpticalFlowSessionNV && rhs ) = default; +#else + OpticalFlowSessionNV( OpticalFlowSessionNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_opticalFlowSessionNV( VULKAN_HPP_NAMESPACE::exchange( rhs.m_opticalFlowSessionNV, {} ) ) + { + } - template - void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, - uint32_t bindingCount, - const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, - const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, - const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, - VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, - VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, - VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes - VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + OpticalFlowSessionNV & operator=( OpticalFlowSessionNV && rhs ) VULKAN_HPP_NOEXCEPT + { + m_opticalFlowSessionNV = VULKAN_HPP_NAMESPACE::exchange( rhs.m_opticalFlowSessionNV, {} ); + return *this; + } +#endif - template - void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, - uint32_t counterBufferCount, - const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, - const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, - VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBuffers, - VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBufferOffsets - VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_CONSTEXPR OpticalFlowSessionNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - template - void endTransformFeedbackEXT( uint32_t firstCounterBuffer, - uint32_t counterBufferCount, - const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, - const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void endTransformFeedbackEXT( uint32_t firstCounterBuffer, - VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBuffers, - VULKAN_HPP_NAMESPACE::ArrayProxy const & 
counterBufferOffsets - VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_TYPESAFE_EXPLICIT OpticalFlowSessionNV( VkOpticalFlowSessionNV opticalFlowSessionNV ) VULKAN_HPP_NOEXCEPT + : m_opticalFlowSessionNV( opticalFlowSessionNV ) + { + } - template - void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t query, - VULKAN_HPP_NAMESPACE::QueryControlFlags flags, - uint32_t index, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + OpticalFlowSessionNV & operator=( VkOpticalFlowSessionNV opticalFlowSessionNV ) VULKAN_HPP_NOEXCEPT + { + m_opticalFlowSessionNV = opticalFlowSessionNV; + return *this; + } +#endif - template - void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t query, - uint32_t index, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + OpticalFlowSessionNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_opticalFlowSessionNV = {}; + return *this; + } - template - void drawIndirectByteCountEXT( uint32_t instanceCount, - uint32_t firstInstance, - VULKAN_HPP_NAMESPACE::Buffer counterBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, - uint32_t counterOffset, - uint32_t vertexStride, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkOpticalFlowSessionNV() const VULKAN_HPP_NOEXCEPT + { + return m_opticalFlowSessionNV; + } - //=== VK_NVX_binary_import === + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_opticalFlowSessionNV != VK_NULL_HANDLE; + } - template - void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_opticalFlowSessionNV == VK_NULL_HANDLE; + } - //=== VK_AMD_draw_indirect_count === + private: + VkOpticalFlowSessionNV m_opticalFlowSessionNV = {}; + }; - template - void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::Buffer countBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV; + }; - template - void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::Buffer countBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV; + }; +#endif - //=== VK_KHR_dynamic_rendering === + template <> + struct isVulkanHandleType + 
{ + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; - template - void beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + class DescriptorUpdateTemplate + { + public: + using CType = VkDescriptorUpdateTemplate; + using NativeType = VkDescriptorUpdateTemplate; - template - void endRenderingKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate; - //=== VK_KHR_device_group === + public: + DescriptorUpdateTemplate() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + DescriptorUpdateTemplate( DescriptorUpdateTemplate const & rhs ) = default; + DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate const & rhs ) = default; - template - void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DescriptorUpdateTemplate( DescriptorUpdateTemplate && rhs ) = default; + DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate && rhs ) = default; +#else + DescriptorUpdateTemplate( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT + : m_descriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} ) ) + { + } - template - void dispatchBaseKHR( uint32_t baseGroupX, - uint32_t baseGroupY, - uint32_t baseGroupZ, - uint32_t groupCountX, - uint32_t groupCountY, - uint32_t groupCountZ, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT + { + m_descriptorUpdateTemplate = VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} ); + return *this; + } +#endif - //=== VK_KHR_push_descriptor === + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - template - void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, - VULKAN_HPP_NAMESPACE::PipelineLayout layout, - uint32_t set, - uint32_t descriptorWriteCount, - const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, - VULKAN_HPP_NAMESPACE::PipelineLayout layout, - uint32_t set, - VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT + : m_descriptorUpdateTemplate( descriptorUpdateTemplate ) + 
{ + } - template - void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, - VULKAN_HPP_NAMESPACE::PipelineLayout layout, - uint32_t set, - const void * pData, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, - VULKAN_HPP_NAMESPACE::PipelineLayout layout, - uint32_t set, - DataType const & data, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + DescriptorUpdateTemplate & operator=( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT + { + m_descriptorUpdateTemplate = descriptorUpdateTemplate; + return *this; + } +#endif - //=== VK_EXT_conditional_rendering === + DescriptorUpdateTemplate & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorUpdateTemplate = {}; + return *this; + } - template - void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate; + } - template - void endConditionalRenderingEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate != VK_NULL_HANDLE; + } - //=== VK_NV_clip_space_w_scaling === + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate == VK_NULL_HANDLE; + } - template - void setViewportWScalingNV( uint32_t firstViewport, - uint32_t viewportCount, - const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setViewportWScalingNV( uint32_t firstViewport, - VULKAN_HPP_NAMESPACE::ArrayProxy const & viewportWScalings, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + private: + VkDescriptorUpdateTemplate m_descriptorUpdateTemplate = {}; + }; - //=== VK_EXT_discard_rectangles === + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; + }; - template - void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, - uint32_t discardRectangleCount, - const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, - VULKAN_HPP_NAMESPACE::ArrayProxy const & discardRectangles, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; + }; - //=== VK_KHR_create_renderpass2 === +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; + }; +#endif - template - void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, - const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, - const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; - template - void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, - const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, - const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate; - template - void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + class Event + { + public: + using CType = VkEvent; + using NativeType = VkEvent; - //=== VK_EXT_debug_utils === + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eEvent; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent; - template - void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + public: + Event() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + Event( Event const & rhs ) = default; + Event & operator=( Event const & rhs ) = default; - template - void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Event( Event && rhs ) = default; + Event & operator=( Event && rhs ) = default; +#else + Event( Event && rhs ) 
VULKAN_HPP_NOEXCEPT : m_event( VULKAN_HPP_NAMESPACE::exchange( rhs.m_event, {} ) ) {} - template - void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + Event & operator=( Event && rhs ) VULKAN_HPP_NOEXCEPT + { + m_event = VULKAN_HPP_NAMESPACE::exchange( rhs.m_event, {} ); + return *this; + } +#endif - //=== VK_EXT_sample_locations === + VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - template - void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT : m_event( event ) {} - //=== VK_KHR_acceleration_structure === +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + Event & operator=( VkEvent event ) VULKAN_HPP_NOEXCEPT + { + m_event = event; + return *this; + } +#endif - template - void buildAccelerationStructuresKHR( uint32_t infoCount, - const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, - const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void buildAccelerationStructuresKHR( - VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, - VULKAN_HPP_NAMESPACE::ArrayProxy const & pBuildRangeInfos, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + Event & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_event = {}; + return *this; + } - template - void buildAccelerationStructuresIndirectKHR( uint32_t infoCount, - const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, - const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses, - const uint32_t * pIndirectStrides, - const uint32_t * const * ppMaxPrimitiveCounts, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void buildAccelerationStructuresIndirectKHR( - VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, - VULKAN_HPP_NAMESPACE::ArrayProxy const & indirectDeviceAddresses, - VULKAN_HPP_NAMESPACE::ArrayProxy const & indirectStrides, - VULKAN_HPP_NAMESPACE::ArrayProxy const & pMaxPrimitiveCounts, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const VULKAN_HPP_NOEXCEPT + { + return m_event; + } - template - void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, - Dispatch const & 
d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_event != VK_NULL_HANDLE; + } - template - void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_event == VK_NULL_HANDLE; + } - template - void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + private: + VkEvent m_event = {}; + }; - template - void writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, - const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, - VULKAN_HPP_NAMESPACE::QueryType queryType, - VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t firstQuery, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void writeAccelerationStructuresPropertiesKHR( - VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, - VULKAN_HPP_NAMESPACE::QueryType queryType, - VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t firstQuery, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Event; + }; - //=== VK_NV_shading_rate_image === + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Event; + }; - template - void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, - VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Event; + }; +#endif - template - void setViewportShadingRatePaletteNV( uint32_t firstViewport, - uint32_t viewportCount, - const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setViewportShadingRatePaletteNV( uint32_t firstViewport, - VULKAN_HPP_NAMESPACE::ArrayProxy const & shadingRatePalettes, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; - template - void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, - uint32_t customSampleOrderCount, - const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, - VULKAN_HPP_NAMESPACE::ArrayProxy const & customSampleOrders, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + class AccelerationStructureKHR + { + public: + using CType = VkAccelerationStructureKHR; + using NativeType = VkAccelerationStructureKHR; - //=== VK_NV_ray_tracing === + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR; - template - void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo, - VULKAN_HPP_NAMESPACE::Buffer instanceData, - VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, - VULKAN_HPP_NAMESPACE::Bool32 update, - VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, - VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, - VULKAN_HPP_NAMESPACE::Buffer scratch, - VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info, - VULKAN_HPP_NAMESPACE::Buffer instanceData, - VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, - VULKAN_HPP_NAMESPACE::Bool32 update, - VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, - VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, - VULKAN_HPP_NAMESPACE::Buffer scratch, - VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, - VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, - VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + public: + AccelerationStructureKHR() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + AccelerationStructureKHR( AccelerationStructureKHR const & rhs ) = default; + AccelerationStructureKHR & operator=( AccelerationStructureKHR const & rhs ) = default; - template - void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, - VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, - VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, - VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, - 
VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, - VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, - VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, - uint32_t width, - uint32_t height, - uint32_t depth, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + AccelerationStructureKHR( AccelerationStructureKHR && rhs ) = default; + AccelerationStructureKHR & operator=( AccelerationStructureKHR && rhs ) = default; +#else + AccelerationStructureKHR( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_accelerationStructureKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_accelerationStructureKHR, {} ) ) + { + } - template - void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, - const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures, - VULKAN_HPP_NAMESPACE::QueryType queryType, - VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t firstQuery, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void writeAccelerationStructuresPropertiesNV( - VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, - VULKAN_HPP_NAMESPACE::QueryType queryType, - VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t firstQuery, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + AccelerationStructureKHR & operator=( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_accelerationStructureKHR, {} ); + return *this; + } +#endif - //=== VK_KHR_draw_indirect_count === + VULKAN_HPP_CONSTEXPR AccelerationStructureKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - template - void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::Buffer countBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureKHR( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT + : m_accelerationStructureKHR( accelerationStructureKHR ) + { + } - template - void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::Buffer countBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + AccelerationStructureKHR & operator=( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureKHR = accelerationStructureKHR; + return *this; + } +#endif - //=== VK_AMD_buffer_marker === + AccelerationStructureKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureKHR = {}; + return *this; + } - template - void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, - uint32_t marker, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureKHR() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR; + } - //=== VK_NV_mesh_shader === + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR != VK_NULL_HANDLE; + } - template - void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureKHR == VK_NULL_HANDLE; + } - template - void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - uint32_t drawCount, - uint32_t stride, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + private: + VkAccelerationStructureKHR m_accelerationStructureKHR = {}; + }; - template - void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::Buffer countBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; + }; - //=== VK_NV_scissor_exclusive === + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; + }; - template - void setExclusiveScissorNV( uint32_t firstExclusiveScissor, - uint32_t exclusiveScissorCount, - const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setExclusiveScissorNV( uint32_t firstExclusiveScissor, - VULKAN_HPP_NAMESPACE::ArrayProxy const & exclusiveScissors, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; + }; +#endif - //=== VK_NV_device_diagnostic_checkpoints === + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; - template - void setCheckpointNV( const void * pCheckpointMarker, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setCheckpointNV( CheckpointMarkerType const & checkpointMarker, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + class MicromapEXT + { + public: + using CType = VkMicromapEXT; + using NativeType = VkMicromapEXT; - //=== VK_INTEL_performance_query === + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eMicromapEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; - template - VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - 
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + public: + MicromapEXT() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + MicromapEXT( MicromapEXT const & rhs ) = default; + MicromapEXT & operator=( MicromapEXT const & rhs ) = default; - template - VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + MicromapEXT( MicromapEXT && rhs ) = default; + MicromapEXT & operator=( MicromapEXT && rhs ) = default; +#else + MicromapEXT( MicromapEXT && rhs ) VULKAN_HPP_NOEXCEPT : m_micromapEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_micromapEXT, {} ) ) {} - template - VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + MicromapEXT & operator=( MicromapEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + m_micromapEXT = VULKAN_HPP_NAMESPACE::exchange( rhs.m_micromapEXT, {} ); + return *this; + } +#endif - //=== VK_KHR_fragment_shading_rate === + VULKAN_HPP_CONSTEXPR MicromapEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - template - void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize, - const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize, - const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_TYPESAFE_EXPLICIT MicromapEXT( VkMicromapEXT micromapEXT ) VULKAN_HPP_NOEXCEPT : m_micromapEXT( micromapEXT ) {} - //=== VK_EXT_line_rasterization === +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + MicromapEXT & operator=( VkMicromapEXT micromapEXT ) VULKAN_HPP_NOEXCEPT + { + m_micromapEXT = micromapEXT; + return *this; + } +#endif - template - void setLineStippleEXT( uint32_t lineStippleFactor, - uint16_t lineStipplePattern, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + MicromapEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_micromapEXT = {}; + return *this; + } - //=== 
VK_EXT_extended_dynamic_state ===
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkMicromapEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_micromapEXT;
+    }
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_micromapEXT != VK_NULL_HANDLE;
+    }
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_micromapEXT == VK_NULL_HANDLE;
+    }
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
-                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+  private:
+    VkMicromapEXT m_micromapEXT = {};
+  };
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setViewportWithCountEXT( uint32_t viewportCount,
-                                  const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
-                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
-                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eMicromapEXT>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::MicromapEXT;
+  };
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setScissorWithCountEXT( uint32_t scissorCount,
-                                 const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
-                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
-                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#if ( VK_USE_64_BIT_PTR_DEFINES == 1 )
+  template <>
+  struct CppType<VkMicromapEXT, VK_NULL_HANDLE>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::MicromapEXT;
+  };
+#endif
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void bindVertexBuffers2EXT( uint32_t firstBinding,
-                                uint32_t bindingCount,
-                                const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
-                                const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
-                                const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
-                                const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
-                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void bindVertexBuffers2EXT(
-      uint32_t firstBinding,
-      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
-      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
-      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
-      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
-      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::MicromapEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable,
-                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+  class CommandBuffer
+  {
+  public:
+    using CType = VkCommandBuffer;
+    using NativeType = VkCommandBuffer;
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable,
-                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer;
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp,
-                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+  public:
+    CommandBuffer() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue
+    CommandBuffer( CommandBuffer const & rhs ) = default;
+    CommandBuffer & operator=( CommandBuffer const & rhs ) = default;
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
-                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE )
+    CommandBuffer( CommandBuffer && rhs ) = default;
+    CommandBuffer & operator=( CommandBuffer && rhs ) = default;
+#else
+    CommandBuffer( CommandBuffer && rhs ) VULKAN_HPP_NOEXCEPT : m_commandBuffer( VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandBuffer, {} ) ) {}
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable,
-                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    CommandBuffer & operator=( CommandBuffer && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      m_commandBuffer = VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandBuffer, {} );
+      return *this;
+    }
+#endif
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
-                          VULKAN_HPP_NAMESPACE::StencilOp failOp,
-                          VULKAN_HPP_NAMESPACE::StencilOp passOp,
-                          VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
-                          VULKAN_HPP_NAMESPACE::CompareOp compareOp,
-                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
-    //=== VK_NV_device_generated_commands ===
+    CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT : m_commandBuffer( commandBuffer ) {}
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
-                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,
-                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    CommandBuffer & operator=( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT
+    {
+      m_commandBuffer = commandBuffer;
+      return *this;
+    }
+
+    CommandBuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_commandBuffer = {};
+      return *this;
+    }
+
+    //=== VK_VERSION_1_0 ===
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
-                                     const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
-                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    VULKAN_HPP_NODISCARD Result begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo,
+                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
-                                     const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,
-                                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
-                                    VULKAN_HPP_NAMESPACE::Pipeline pipeline,
-                                    uint32_t groupIndex,
-                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-#if defined( VK_ENABLE_BETA_EXTENSIONS )
-    //=== VK_KHR_video_encode_queue ===
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo,
+                                                                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo,
-                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    VULKAN_HPP_NODISCARD Result end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo,
-                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VK_ENABLE_BETA_EXTENSIONS*/
-
-    //=== VK_KHR_synchronization2 ===
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
-                       const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
-                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    VULKAN_HPP_NODISCARD Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags,
+                                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
-                       const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
-                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    typename ResultValueType<void>::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+                                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
 #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
-                         VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask,
-                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+                       VULKAN_HPP_NAMESPACE::Pipeline pipeline,
+                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void waitEvents2KHR( uint32_t eventCount,
-                         const VULKAN_HPP_NAMESPACE::Event * pEvents,
-                         const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,
-                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    void setViewport( uint32_t firstViewport,
+                      uint32_t viewportCount,
+                      const
VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, - VULKAN_HPP_NAMESPACE::ArrayProxy const & dependencyInfos, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void setViewport( uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void setScissor( uint32_t firstScissor, + uint32_t scissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void setScissor( uint32_t firstScissor, + VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, - VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t query, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void setLineWidth( float lineWidth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, - uint32_t marker, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - - //=== VK_NV_fragment_shading_rate_enums === + void setDepthBias( float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - void setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, - const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - - //=== VK_EXT_mesh_shader === + void setBlendConstants( const float blendConstants[4], Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - void drawMeshTasksEXT( uint32_t groupCountX, - uint32_t groupCountY, - uint32_t groupCountZ, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - void drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - uint32_t drawCount, - uint32_t stride, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; + void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t compareMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - void drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::Buffer countBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t writeMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - //=== VK_KHR_copy_commands2 === + template + void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + uint32_t reference, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t * pDynamicOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorSets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dynamicOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void bindVertexBuffers( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void copyBufferToImage2KHR( const 
VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void bindVertexBuffers( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void draw( uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + void drawIndexed( uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template - void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template - void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void dispatch( uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - //=== VK_KHR_ray_tracing_pipeline === + template + void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * 
pHitShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, - uint32_t width, - uint32_t height, - uint32_t depth, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, - uint32_t width, - uint32_t height, - uint32_t depth, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, - VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, - VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - - //=== VK_EXT_vertex_input_dynamic_state === + void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void setVertexInputEXT( uint32_t vertexBindingDescriptionCount, - const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions, - uint32_t vertexAttributeDescriptionCount, - const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions, + VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void - setVertexInputEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexBindingDescriptions, - VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexAttributeDescriptions, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_HUAWEI_subpass_shading === + void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + VULKAN_HPP_NAMESPACE::Filter filter, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void subpassShadingHUAWEI( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - - //=== VK_HUAWEI_invocation_mask === - + void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, - VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - - //=== VK_EXT_extended_dynamic_state2 === + void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - + void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + uint32_t regionCount, + const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; 
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-    //=== VK_EXT_color_write_enable ===
+    void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue & color, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setColorWriteEnableEXT( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
-    //=== VK_KHR_ray_tracing_maintenance1 ===
-
-    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-
-    //=== VK_EXT_multi_draw ===
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
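// Editorial note, not part of the generated header or of this patch: a short sketch of
// the enhanced updateBuffer / clearColorImage overloads declared above. `cmd`,
// `uniformBuffer` and `image` are assumed to exist and to be usable as transfer
// destinations; the function name and values are illustrative only.
//
//   #include <array>
//   #include <vulkan/vulkan.hpp>
//
//   void recordClearAndUpdate( vk::CommandBuffer cmd, vk::Buffer uniformBuffer, vk::Image image )
//   {
//     // updateBuffer: the DataType overload takes the byte size from the ArrayProxy.
//     std::array<float, 4> tint = { 1.0f, 0.5f, 0.25f, 1.0f };
//     cmd.updateBuffer( uniformBuffer, 0, vk::ArrayProxy<const float>( tint ) );
//
//     // clearColorImage: clear the whole image, one mip level, one array layer.
//     vk::ImageSubresourceRange range( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 );
//     vk::ClearColorValue       clear( std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } );
//     cmd.clearColorImage( image, vk::ImageLayout::eTransferDstOptimal, clear, range );
//   }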
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawMultiEXT( uint32_t drawCount, const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    void clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect * pRects, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo, uint32_t instanceCount, uint32_t firstInstance, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    void clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawMultiIndexedEXT( uint32_t drawCount, const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, const int32_t * pVertexOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+    void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
 #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
     template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
-    void drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo, uint32_t instanceCount, uint32_t firstInstance, Optional<const int32_t> vertexOffset VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+    void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
-    operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_commandBuffer;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-    explicit operator bool() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_commandBuffer != VK_NULL_HANDLE;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-    bool operator!() const VULKAN_HPP_NOEXCEPT
-    {
-      return m_commandBuffer == VK_NULL_HANDLE;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event * pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
-  private:
-    VkCommandBuffer m_commandBuffer = {};
-  };
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
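// Editorial note, not part of the generated header or of this patch: a minimal sketch of
// the enhanced pipelineBarrier overload declared above, recording a layout transition for
// a color image. `cmd` and `image` are assumed to exist; the stage and access choices are
// illustrative only.
//
//   #include <vulkan/vulkan.hpp>
//
//   void transitionToTransferDst( vk::CommandBuffer cmd, vk::Image image )
//   {
//     vk::ImageMemoryBarrier barrier(
//       vk::AccessFlags(),                      // srcAccessMask: nothing to wait on
//       vk::AccessFlagBits::eTransferWrite,     // dstAccessMask
//       vk::ImageLayout::eUndefined,            // oldLayout
//       vk::ImageLayout::eTransferDstOptimal,   // newLayout
//       VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED,
//       image,
//       vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
//
//     // Enhanced-mode overload: the three ArrayProxy parameters replace the
//     // count/pointer pairs of the C-style signature; empty proxies pass nullptr.
//     cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe,
//                          vk::PipelineStageFlagBits::eTransfer,
//                          vk::DependencyFlags(),
//                          nullptr, nullptr, barrier );
//   }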
-  template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer>
-  {
-    using Type = VULKAN_HPP_NAMESPACE::CommandBuffer;
-  };
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-  template <>
-  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer>
-  {
-    using Type = VULKAN_HPP_NAMESPACE::CommandBuffer;
-  };
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-  template <>
-  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CommandBuffer>
-  {
-    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
-  };
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-  class DeviceMemory
-  {
-  public:
-    using CType =
VkDeviceMemory; - using NativeType = VkDeviceMemory; + template + void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory; + template + void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - public: - VULKAN_HPP_CONSTEXPR DeviceMemory() = default; - VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT : m_deviceMemory( deviceMemory ) {} + template + void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void * pValues, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + VULKAN_HPP_NAMESPACE::ArrayProxy const & values, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - DeviceMemory & operator=( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT - { - m_deviceMemory = deviceMemory; - return *this; - } -#endif + template + void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - DeviceMemory & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_deviceMemory = {}; - return *this; - } + template + void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceMemory const & ) const = default; -#else - bool operator==( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_deviceMemory == rhs.m_deviceMemory; - } + template + void endRenderPass( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - bool operator!=( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_deviceMemory != rhs.m_deviceMemory; - } + template + void executeCommands( uint32_t commandBufferCount, + const 
VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy const & commandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - bool operator<( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_deviceMemory < rhs.m_deviceMemory; - } -#endif + //=== VK_VERSION_1_1 === - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const VULKAN_HPP_NOEXCEPT - { - return m_deviceMemory; - } + template + void setDeviceMask( uint32_t deviceMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_deviceMemory != VK_NULL_HANDLE; - } + template + void dispatchBase( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_deviceMemory == VK_NULL_HANDLE; - } + //=== VK_VERSION_1_2 === - private: - VkDeviceMemory m_deviceMemory = {}; - }; + template + void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::DeviceMemory; - }; + template + void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::DeviceMemory; - }; - - template <> - struct isVulkanHandleType - { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; - }; + template + void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - class VideoSessionKHR - { - public: - using CType = VkVideoSessionKHR; - using NativeType = VkVideoSessionKHR; + template + void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + const 
VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + template + void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - public: - VULKAN_HPP_CONSTEXPR VideoSessionKHR() = default; - VULKAN_HPP_CONSTEXPR VideoSessionKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT VideoSessionKHR( VkVideoSessionKHR videoSessionKHR ) VULKAN_HPP_NOEXCEPT : m_videoSessionKHR( videoSessionKHR ) {} + //=== VK_VERSION_1_3 === -# if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - VideoSessionKHR & operator=( VkVideoSessionKHR videoSessionKHR ) VULKAN_HPP_NOEXCEPT - { - m_videoSessionKHR = videoSessionKHR; - return *this; - } -# endif + template + void setEvent2( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setEvent2( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - VideoSessionKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_videoSessionKHR = {}; - return *this; - } + template + void resetEvent2( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoSessionKHR const & ) const = default; -# else - bool operator==( VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_videoSessionKHR == rhs.m_videoSessionKHR; - } + template + void waitEvents2( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dependencyInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - bool operator!=( VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_videoSessionKHR != rhs.m_videoSessionKHR; - } + template + void pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - bool operator<( VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_videoSessionKHR < rhs.m_videoSessionKHR; - } -# endif + template + void writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkVideoSessionKHR() const VULKAN_HPP_NOEXCEPT - { - return m_videoSessionKHR; - } + template + void copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_videoSessionKHR != VK_NULL_HANDLE; - } + template + void copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_videoSessionKHR == VK_NULL_HANDLE; - } + template + void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - private: - VkVideoSessionKHR m_videoSessionKHR = {}; - }; + template + void copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::VideoSessionKHR; - }; + template + void blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template <> - struct isVulkanHandleType - { - static 
VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; - }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template + void resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - class DeferredOperationKHR - { - public: - using CType = VkDeferredOperationKHR; - using NativeType = VkDeferredOperationKHR; + template + void beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + template + void endRendering( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - public: - VULKAN_HPP_CONSTEXPR DeferredOperationKHR() = default; - VULKAN_HPP_CONSTEXPR DeferredOperationKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT DeferredOperationKHR( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT - : m_deferredOperationKHR( deferredOperationKHR ) - { - } + template + void setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - DeferredOperationKHR & operator=( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT - { - m_deferredOperationKHR = deferredOperationKHR; - return *this; - } -#endif + template + void setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - DeferredOperationKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_deferredOperationKHR = {}; - return *this; - } + template + void setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeferredOperationKHR const & ) const = default; -#else - bool operator==( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_deferredOperationKHR == rhs.m_deferredOperationKHR; - } + template + void setViewportWithCount( uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - bool 
operator!=( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_deferredOperationKHR != rhs.m_deferredOperationKHR; - } + template + void setScissorWithCount( uint32_t scissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - bool operator<( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_deferredOperationKHR < rhs.m_deferredOperationKHR; - } -#endif + template + void bindVertexBuffers2( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindVertexBuffers2( + uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + VULKAN_HPP_NAMESPACE::ArrayProxy const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeferredOperationKHR() const VULKAN_HPP_NOEXCEPT - { - return m_deferredOperationKHR; - } + template + void setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_deferredOperationKHR != VK_NULL_HANDLE; - } + template + void setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_deferredOperationKHR == VK_NULL_HANDLE; - } + template + void setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - private: - VkDeferredOperationKHR m_deferredOperationKHR = {}; - }; + template + void setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR; - }; + template + void setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template <> - struct isVulkanHandleType - { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; - }; + template + void setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#if defined( VK_USE_PLATFORM_FUCHSIA ) - class BufferCollectionFUCHSIA - { - public: - using CType = VkBufferCollectionFUCHSIA; - using NativeType = VkBufferCollectionFUCHSIA; + template + void setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA; + template + void setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - public: - VULKAN_HPP_CONSTEXPR BufferCollectionFUCHSIA() = default; - VULKAN_HPP_CONSTEXPR BufferCollectionFUCHSIA( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT BufferCollectionFUCHSIA( VkBufferCollectionFUCHSIA bufferCollectionFUCHSIA ) VULKAN_HPP_NOEXCEPT - : m_bufferCollectionFUCHSIA( bufferCollectionFUCHSIA ) - { - } + template + void setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - BufferCollectionFUCHSIA & operator=( VkBufferCollectionFUCHSIA bufferCollectionFUCHSIA ) VULKAN_HPP_NOEXCEPT - { - m_bufferCollectionFUCHSIA = bufferCollectionFUCHSIA; - return *this; - } -# endif + //=== VK_VERSION_1_4 === - BufferCollectionFUCHSIA & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_bufferCollectionFUCHSIA = {}; - return *this; - } + template + void setLineStipple( uint32_t lineStippleFactor, + uint16_t lineStipplePattern, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( BufferCollectionFUCHSIA const & ) const = default; -# else - bool operator==( BufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_bufferCollectionFUCHSIA == rhs.m_bufferCollectionFUCHSIA; - } + template + void bindIndexBuffer2( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - bool operator!=( BufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_bufferCollectionFUCHSIA != rhs.m_bufferCollectionFUCHSIA; - } + template + void pushDescriptorSet( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushDescriptorSet( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ - bool operator<( BufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_bufferCollectionFUCHSIA < rhs.m_bufferCollectionFUCHSIA; - } -# endif + template + void pushDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + const void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferCollectionFUCHSIA() const VULKAN_HPP_NOEXCEPT - { - return m_bufferCollectionFUCHSIA; - } + template + void setRenderingAttachmentLocations( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo * pLocationInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setRenderingAttachmentLocations( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_bufferCollectionFUCHSIA != VK_NULL_HANDLE; - } + template + void setRenderingInputAttachmentIndices( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setRenderingInputAttachmentIndices( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_bufferCollectionFUCHSIA == VK_NULL_HANDLE; - } + template + void bindDescriptorSets2( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo * pBindDescriptorSetsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindDescriptorSets2( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - private: - VkBufferCollectionFUCHSIA m_bufferCollectionFUCHSIA = {}; - }; + template + void pushConstants2( const VULKAN_HPP_NAMESPACE::PushConstantsInfo * pPushConstantsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushConstants2( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA; - }; + template + void pushDescriptorSet2( const 
VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo * pPushDescriptorSetInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushDescriptorSet2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA; - }; + template + void pushDescriptorSetWithTemplate2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushDescriptorSetWithTemplate2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template <> - struct isVulkanHandleType - { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; - }; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ + //=== VK_EXT_debug_marker === - class BufferView - { - public: - using CType = VkBufferView; - using NativeType = VkBufferView; + template + void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferView; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView; + template + void debugMarkerEndEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - public: - VULKAN_HPP_CONSTEXPR BufferView() = default; - VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT : m_bufferView( bufferView ) {} + template + void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - BufferView & operator=( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT - { - m_bufferView = bufferView; - return *this; - } -#endif + //=== VK_KHR_video_queue === - BufferView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_bufferView = {}; - return *this; - } + template + void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( BufferView const & ) const = default; -#else - bool operator==( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_bufferView == rhs.m_bufferView; - } + template + void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - bool operator!=( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_bufferView != rhs.m_bufferView; - } + template + void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - bool operator<( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_bufferView < rhs.m_bufferView; - } -#endif + //=== VK_KHR_video_decode_queue === - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const VULKAN_HPP_NOEXCEPT - { - return m_bufferView; - } + template + void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pDecodeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_bufferView != VK_NULL_HANDLE; - } + //=== VK_EXT_transform_feedback === - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_bufferView == VK_NULL_HANDLE; - } + template + void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - private: - VkBufferView m_bufferView = {}; - }; + template + void beginTransformFeedbackEXT( uint32_t 
firstCounterBuffer, + uint32_t counterBufferCount, + const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBuffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBufferOffsets + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::BufferView; - }; + template + void endTransformFeedbackEXT( uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endTransformFeedbackEXT( uint32_t firstCounterBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBuffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBufferOffsets + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::BufferView; - }; + template + void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + uint32_t index, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template <> - struct isVulkanHandleType - { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; - }; + template + void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + uint32_t index, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - class CommandPool - { - public: - using CType = VkCommandPool; - using NativeType = VkCommandPool; + template + void drawIndirectByteCountEXT( uint32_t instanceCount, + uint32_t firstInstance, + VULKAN_HPP_NAMESPACE::Buffer counterBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool; + //=== VK_NVX_binary_import === - public: - VULKAN_HPP_CONSTEXPR CommandPool() = default; - VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT : m_commandPool( commandPool ) {} + template + void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void cuLaunchKernelNVX( const 
VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - CommandPool & operator=( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT - { - m_commandPool = commandPool; - return *this; - } -#endif + //=== VK_AMD_draw_indirect_count === - CommandPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_commandPool = {}; - return *this; - } + template + void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CommandPool const & ) const = default; -#else - bool operator==( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_commandPool == rhs.m_commandPool; - } + template + void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - bool operator!=( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_commandPool != rhs.m_commandPool; - } + //=== VK_KHR_dynamic_rendering === - bool operator<( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_commandPool < rhs.m_commandPool; - } -#endif + template + void beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const VULKAN_HPP_NOEXCEPT - { - return m_commandPool; - } + template + void endRenderingKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_commandPool != VK_NULL_HANDLE; - } + //=== VK_KHR_device_group === - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_commandPool == VK_NULL_HANDLE; - } + template + void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - private: - VkCommandPool m_commandPool = {}; - }; + template + void dispatchBaseKHR( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::CommandPool; - }; + //=== VK_KHR_push_descriptor === - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::CommandPool; - }; + template + void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + uint32_t 
descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template <> - struct isVulkanHandleType - { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; - }; + template + void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + const void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - class PipelineCache - { - public: - using CType = VkPipelineCache; - using NativeType = VkPipelineCache; + //=== VK_EXT_conditional_rendering === - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache; + template + void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - public: - VULKAN_HPP_CONSTEXPR PipelineCache() = default; - VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT : m_pipelineCache( pipelineCache ) {} + template + void endConditionalRenderingEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - PipelineCache & operator=( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT - { - m_pipelineCache = pipelineCache; - return *this; - } -#endif + //=== VK_NV_clip_space_w_scaling === - PipelineCache & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_pipelineCache = {}; - return *this; - } + template + void setViewportWScalingNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewportWScalingNV( uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const 
& viewportWScalings, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineCache const & ) const = default; -#else - bool operator==( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipelineCache == rhs.m_pipelineCache; - } + //=== VK_EXT_discard_rectangles === - bool operator!=( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipelineCache != rhs.m_pipelineCache; - } + template + void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, + VULKAN_HPP_NAMESPACE::ArrayProxy const & discardRectangles, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - bool operator<( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipelineCache < rhs.m_pipelineCache; - } -#endif + template + void setDiscardRectangleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const VULKAN_HPP_NOEXCEPT - { - return m_pipelineCache; - } + template + void setDiscardRectangleModeEXT( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_pipelineCache != VK_NULL_HANDLE; - } + //=== VK_KHR_create_renderpass2 === - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_pipelineCache == VK_NULL_HANDLE; - } + template + void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - private: - VkPipelineCache m_pipelineCache = {}; - }; + template + void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::PipelineCache; - }; + template + void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::PipelineCache; - }; + //=== VK_EXT_debug_utils === - template <> - struct isVulkanHandleType - { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; - }; + template + void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - class CuFunctionNVX - { - public: - using CType = VkCuFunctionNVX; - using NativeType = VkCuFunctionNVX; + template + void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX; + template + void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - public: - VULKAN_HPP_CONSTEXPR CuFunctionNVX() = default; - VULKAN_HPP_CONSTEXPR CuFunctionNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT CuFunctionNVX( VkCuFunctionNVX cuFunctionNVX ) VULKAN_HPP_NOEXCEPT : m_cuFunctionNVX( cuFunctionNVX ) {} +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - CuFunctionNVX & operator=( VkCuFunctionNVX cuFunctionNVX ) VULKAN_HPP_NOEXCEPT - { - m_cuFunctionNVX = cuFunctionNVX; - return *this; - } -#endif + template + void initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - CuFunctionNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_cuFunctionNVX = {}; - return *this; - } + template + void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & 
countInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CuFunctionNVX const & ) const = default; -#else - bool operator==( CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_cuFunctionNVX == rhs.m_cuFunctionNVX; - } + template + void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - bool operator!=( CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_cuFunctionNVX != rhs.m_cuFunctionNVX; - } + template + void dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + VULKAN_HPP_NAMESPACE::DeviceAddress countInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - bool operator<( CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_cuFunctionNVX < rhs.m_cuFunctionNVX; - } -#endif + //=== VK_EXT_sample_locations === - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCuFunctionNVX() const VULKAN_HPP_NOEXCEPT - { - return m_cuFunctionNVX; - } + template + void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_cuFunctionNVX != VK_NULL_HANDLE; - } + //=== VK_KHR_acceleration_structure === - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_cuFunctionNVX == VK_NULL_HANDLE; - } + template + void buildAccelerationStructuresKHR( uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pBuildRangeInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - private: - VkCuFunctionNVX m_cuFunctionNVX = {}; - }; + template + void buildAccelerationStructuresIndirectKHR( uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::DeviceAddress * 
pIndirectDeviceAddresses, + const uint32_t * pIndirectStrides, + const uint32_t * const * ppMaxPrimitiveCounts, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void buildAccelerationStructuresIndirectKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & indirectDeviceAddresses, + VULKAN_HPP_NAMESPACE::ArrayProxy const & indirectStrides, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pMaxPrimitiveCounts, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::CuFunctionNVX; - }; + template + void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::CuFunctionNVX; - }; + template + void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template <> - struct isVulkanHandleType - { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; - }; + template + void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - class CuModuleNVX - { - public: - using CType = VkCuModuleNVX; - using NativeType = VkCuModuleNVX; + template + void writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void writeAccelerationStructuresPropertiesKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType 
objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX; + //=== VK_KHR_ray_tracing_pipeline === - public: - VULKAN_HPP_CONSTEXPR CuModuleNVX() = default; - VULKAN_HPP_CONSTEXPR CuModuleNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT CuModuleNVX( VkCuModuleNVX cuModuleNVX ) VULKAN_HPP_NOEXCEPT : m_cuModuleNVX( cuModuleNVX ) {} + template + void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - CuModuleNVX & operator=( VkCuModuleNVX cuModuleNVX ) VULKAN_HPP_NOEXCEPT - { - m_cuModuleNVX = cuModuleNVX; - return *this; - } -#endif + template + void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - CuModuleNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_cuModuleNVX = {}; - return *this; - } + template + void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CuModuleNVX const & ) const = default; -#else - bool operator==( CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_cuModuleNVX == rhs.m_cuModuleNVX; - } 
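The hunk above declares the vk::CommandBuffer entry points for VK_KHR_ray_tracing_pipeline (traceRaysKHR, traceRaysIndirectKHR, setRayTracingPipelineStackSizeKHR). As a minimal usage sketch of how these might be recorded through the reference-based overloads: the command buffer, pipeline, shader-binding-table addresses, stride, and output extent used below are illustrative assumptions, not part of this patch.

// Sketch only: assumes VK_KHR_ray_tracing_pipeline is enabled and the default
// dispatcher has been initialized (e.g. VULKAN_HPP_DEFAULT_DISPATCHER).
#include <vulkan/vulkan.hpp>

void recordRayTrace( vk::CommandBuffer   cmd,
                     vk::Pipeline        rayTracingPipeline,
                     vk::DeviceAddress   raygenAddr,
                     vk::DeviceAddress   missAddr,
                     vk::DeviceAddress   hitAddr,
                     vk::DeviceAddress   callableAddr,
                     vk::DeviceSize      handleStride,  // aligned shader-group handle size, assumed precomputed
                     uint32_t            width,
                     uint32_t            height )
{
  // Each table is described by a StridedDeviceAddressRegionKHR{ deviceAddress, stride, size }.
  vk::StridedDeviceAddressRegionKHR raygenRegion{ raygenAddr, handleStride, handleStride };
  vk::StridedDeviceAddressRegionKHR missRegion{ missAddr, handleStride, handleStride };
  vk::StridedDeviceAddressRegionKHR hitRegion{ hitAddr, handleStride, handleStride };
  vk::StridedDeviceAddressRegionKHR callableRegion{ callableAddr, handleStride, handleStride };

  cmd.bindPipeline( vk::PipelineBindPoint::eRayTracingKHR, rayTracingPipeline );

  // Only valid if the pipeline was created with the ray-tracing stack-size
  // dynamic state; the value here is illustrative.
  cmd.setRayTracingPipelineStackSizeKHR( 4096 );

  // Reference-based overload declared in the hunk above; the pointer-based
  // overload takes const StridedDeviceAddressRegionKHR * parameters instead.
  cmd.traceRaysKHR( raygenRegion, missRegion, hitRegion, callableRegion, width, height, 1 );
}

Note that the reference-based overloads shown here are the enhanced-mode variants and are compiled out when VULKAN_HPP_DISABLE_ENHANCED_MODE is defined, as the surrounding #ifndef guards in this hunk indicate.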
+ //=== VK_NV_shading_rate_image === - bool operator!=( CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_cuModuleNVX != rhs.m_cuModuleNVX; - } + template + void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - bool operator<( CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_cuModuleNVX < rhs.m_cuModuleNVX; - } -#endif + template + void setViewportShadingRatePaletteNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewportShadingRatePaletteNV( uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & shadingRatePalettes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCuModuleNVX() const VULKAN_HPP_NOEXCEPT - { - return m_cuModuleNVX; - } + template + void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + VULKAN_HPP_NAMESPACE::ArrayProxy const & customSampleOrders, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_cuModuleNVX != VK_NULL_HANDLE; - } + //=== VK_NV_ray_tracing === - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_cuModuleNVX == VK_NULL_HANDLE; - } + template + void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - private: - VkCuModuleNVX m_cuModuleNVX = {}; - }; + template + void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::CuModuleNVX; - }; + template + void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, + VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::CuModuleNVX; - }; + template + void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void writeAccelerationStructuresPropertiesNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template <> - struct isVulkanHandleType - { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; - }; + //=== VK_KHR_draw_indirect_count === - class DescriptorPool - { - public: - using CType = VkDescriptorPool; - using NativeType = VkDescriptorPool; + template + void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool; + template + void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - public: - VULKAN_HPP_CONSTEXPR DescriptorPool() = default; - VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT : m_descriptorPool( descriptorPool ) {} + //=== VK_AMD_buffer_marker === -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - 
DescriptorPool & operator=( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT - { - m_descriptorPool = descriptorPool; - return *this; - } -#endif + template + void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - DescriptorPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_descriptorPool = {}; - return *this; - } + template + void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DescriptorPool const & ) const = default; -#else - bool operator==( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorPool == rhs.m_descriptorPool; - } + //=== VK_NV_mesh_shader === - bool operator!=( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorPool != rhs.m_descriptorPool; - } + template + void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - bool operator<( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorPool < rhs.m_descriptorPool; - } -#endif + template + void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorPool; - } + template + void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorPool != VK_NULL_HANDLE; - } + //=== VK_NV_scissor_exclusive === - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorPool == VK_NULL_HANDLE; - } + template + void setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VULKAN_HPP_NAMESPACE::Bool32 * pExclusiveScissorEnables, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor, + VULKAN_HPP_NAMESPACE::ArrayProxy const & exclusiveScissorEnables, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - private: - VkDescriptorPool m_descriptorPool = {}; - }; + template + void setExclusiveScissorNV( uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setExclusiveScissorNV( uint32_t 
firstExclusiveScissor, + VULKAN_HPP_NAMESPACE::ArrayProxy const & exclusiveScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::DescriptorPool; - }; + //=== VK_NV_device_diagnostic_checkpoints === - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::DescriptorPool; - }; + template + void setCheckpointNV( const void * pCheckpointMarker, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setCheckpointNV( CheckpointMarkerType const & checkpointMarker, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template <> - struct isVulkanHandleType - { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; - }; + //=== VK_INTEL_performance_query === - class DescriptorSetLayout - { - public: - using CType = VkDescriptorSetLayout; - using NativeType = VkDescriptorSetLayout; + template + VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout; + template + VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - public: - VULKAN_HPP_CONSTEXPR DescriptorSetLayout() = default; - VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT - : m_descriptorSetLayout( descriptorSetLayout ) - { - } + template + VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if 
defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - DescriptorSetLayout & operator=( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT - { - m_descriptorSetLayout = descriptorSetLayout; - return *this; - } -#endif + //=== VK_KHR_fragment_shading_rate === - DescriptorSetLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_descriptorSetLayout = {}; - return *this; - } + template + void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DescriptorSetLayout const & ) const = default; -#else - bool operator==( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSetLayout == rhs.m_descriptorSetLayout; - } + //=== VK_KHR_dynamic_rendering_local_read === - bool operator!=( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSetLayout != rhs.m_descriptorSetLayout; - } + template + void setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo * pLocationInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - bool operator<( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSetLayout < rhs.m_descriptorSetLayout; - } -#endif + template + void setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSetLayout; - } + //=== VK_EXT_line_rasterization === - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSetLayout != VK_NULL_HANDLE; - } + template + void setLineStippleEXT( uint32_t lineStippleFactor, + uint16_t lineStipplePattern, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSetLayout == VK_NULL_HANDLE; - } + //=== VK_EXT_extended_dynamic_state === - private: - VkDescriptorSetLayout m_descriptorSetLayout = {}; - }; + template + void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
const VULKAN_HPP_NOEXCEPT; - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; - }; + template + void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; - }; + template + void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template <> - struct isVulkanHandleType - { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; - }; + template + void setViewportWithCountEXT( uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - class Framebuffer - { - public: - using CType = VkFramebuffer; - using NativeType = VkFramebuffer; + template + void setScissorWithCountEXT( uint32_t scissorCount, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer; + template + void bindVertexBuffers2EXT( uint32_t firstBinding, + uint32_t bindingCount, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, + const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindVertexBuffers2EXT( + uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + VULKAN_HPP_NAMESPACE::ArrayProxy const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - public: - VULKAN_HPP_CONSTEXPR Framebuffer() = default; - VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT : m_framebuffer( framebuffer ) {} + template + void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - Framebuffer & operator=( VkFramebuffer framebuffer 
) VULKAN_HPP_NOEXCEPT - { - m_framebuffer = framebuffer; - return *this; - } -#endif + template + void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - Framebuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_framebuffer = {}; - return *this; - } + template + void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( Framebuffer const & ) const = default; -#else - bool operator==( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_framebuffer == rhs.m_framebuffer; - } + template + void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - bool operator!=( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_framebuffer != rhs.m_framebuffer; - } + template + void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - bool operator<( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_framebuffer < rhs.m_framebuffer; - } -#endif + template + void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const VULKAN_HPP_NOEXCEPT - { - return m_framebuffer; - } + //=== VK_NV_device_generated_commands === - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_framebuffer != VK_NULL_HANDLE; - } + template + void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_framebuffer == VK_NULL_HANDLE; - } + template + void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - private: - VkFramebuffer m_framebuffer = {}; - }; + template + void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t groupIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::Framebuffer; - }; + //=== VK_EXT_depth_bias_control === - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::Framebuffer; - }; + template + void setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT * pDepthBiasInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template <> - struct isVulkanHandleType - { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; - }; + //=== VK_KHR_video_encode_queue === - class IndirectCommandsLayoutNV - { - public: - using CType = VkIndirectCommandsLayoutNV; - using NativeType = VkIndirectCommandsLayoutNV; + template + void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === - public: - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV() = default; - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNV( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT - : m_indirectCommandsLayoutNV( indirectCommandsLayoutNV ) - { - } + template + void cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV * pLaunchInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - IndirectCommandsLayoutNV & operator=( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT - { - m_indirectCommandsLayoutNV = indirectCommandsLayoutNV; - return *this; - } -#endif + //=== VK_KHR_synchronization2 === - IndirectCommandsLayoutNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_indirectCommandsLayoutNV = {}; - return *this; - } + template + void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( IndirectCommandsLayoutNV const & ) const = default; -#else - bool operator==( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutNV == rhs.m_indirectCommandsLayoutNV; - } + template + void resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - bool operator!=( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutNV != rhs.m_indirectCommandsLayoutNV; - } + template + void waitEvents2KHR( uint32_t eventCount, + const VULKAN_HPP_NAMESPACE::Event * pEvents, + const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dependencyInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - bool operator<( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutNV < rhs.m_indirectCommandsLayoutNV; - } -#endif + template + void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutNV; - } + template + void writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutNV != VK_NULL_HANDLE; - } + //=== VK_EXT_descriptor_buffer === - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutNV == VK_NULL_HANDLE; - } + template + void bindDescriptorBuffersEXT( uint32_t bufferCount, + const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT * pBindingInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindDescriptorBuffersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindingInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - private: - VkIndirectCommandsLayoutNV m_indirectCommandsLayoutNV = {}; - }; + template + void setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + uint32_t setCount, + const uint32_t * pBufferIndices, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, + 
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferIndices, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV; - }; + template + void bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template <> - struct isVulkanHandleType - { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; - }; + //=== VK_NV_fragment_shading_rate_enums === - class PrivateDataSlot - { - public: - using CType = VkPrivateDataSlot; - using NativeType = VkPrivateDataSlot; + template + void setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + //=== VK_EXT_mesh_shader === - public: - VULKAN_HPP_CONSTEXPR PrivateDataSlot() = default; - VULKAN_HPP_CONSTEXPR PrivateDataSlot( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT PrivateDataSlot( VkPrivateDataSlot privateDataSlot ) VULKAN_HPP_NOEXCEPT : m_privateDataSlot( privateDataSlot ) {} + template + void drawMeshTasksEXT( uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - PrivateDataSlot & operator=( VkPrivateDataSlot privateDataSlot ) VULKAN_HPP_NOEXCEPT - { - m_privateDataSlot = privateDataSlot; - return *this; - } -#endif + template + void drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - PrivateDataSlot & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_privateDataSlot = {}; - return *this; - } + template + void drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PrivateDataSlot const & ) const = default; -#else - bool operator==( PrivateDataSlot const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_privateDataSlot == rhs.m_privateDataSlot; - } + //=== VK_KHR_copy_commands2 === - bool 
operator!=( PrivateDataSlot const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_privateDataSlot != rhs.m_privateDataSlot; - } + template + void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - bool operator<( PrivateDataSlot const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_privateDataSlot < rhs.m_privateDataSlot; - } -#endif + template + void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPrivateDataSlot() const VULKAN_HPP_NOEXCEPT - { - return m_privateDataSlot; - } + template + void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_privateDataSlot != VK_NULL_HANDLE; - } + template + void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_privateDataSlot == VK_NULL_HANDLE; - } + template + void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - private: - VkPrivateDataSlot m_privateDataSlot = {}; - }; + template + void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template <> - struct 
CppType - { - using Type = VULKAN_HPP_NAMESPACE::PrivateDataSlot; - }; + //=== VK_EXT_vertex_input_dynamic_state === - template <> - struct isVulkanHandleType - { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; - }; - using PrivateDataSlotEXT = PrivateDataSlot; + template + void setVertexInputEXT( uint32_t vertexBindingDescriptionCount, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void + setVertexInputEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexBindingDescriptions, + VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexAttributeDescriptions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - class RenderPass - { - public: - using CType = VkRenderPass; - using NativeType = VkRenderPass; + //=== VK_HUAWEI_subpass_shading === - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass; + template + void subpassShadingHUAWEI( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - public: - VULKAN_HPP_CONSTEXPR RenderPass() = default; - VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT : m_renderPass( renderPass ) {} + //=== VK_HUAWEI_invocation_mask === -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - RenderPass & operator=( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT - { - m_renderPass = renderPass; - return *this; - } -#endif + template + void bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - RenderPass & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_renderPass = {}; - return *this; - } + //=== VK_EXT_extended_dynamic_state2 === -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RenderPass const & ) const = default; -#else - bool operator==( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_renderPass == rhs.m_renderPass; - } + template + void setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - bool operator!=( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_renderPass != rhs.m_renderPass; - } + template + void setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - bool operator<( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_renderPass < rhs.m_renderPass; - } -#endif + template + void setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const VULKAN_HPP_NOEXCEPT - 
{ - return m_renderPass; - } + template + void setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_renderPass != VK_NULL_HANDLE; - } + template + void setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_renderPass == VK_NULL_HANDLE; - } + //=== VK_EXT_color_write_enable === - private: - VkRenderPass m_renderPass = {}; - }; + template + void setColorWriteEnableEXT( uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & colorWriteEnables, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::RenderPass; - }; + //=== VK_KHR_ray_tracing_maintenance1 === - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::RenderPass; - }; + template + void traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template <> - struct isVulkanHandleType - { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; - }; + //=== VK_EXT_multi_draw === - class Sampler - { - public: - using CType = VkSampler; - using NativeType = VkSampler; + template + void drawMultiEXT( uint32_t drawCount, + const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy const & vertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSampler; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler; + template + void drawMultiIndexedEXT( uint32_t drawCount, + const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + const int32_t * pVertexOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy const & indexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + Optional vertexOffset VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - public: - VULKAN_HPP_CONSTEXPR Sampler() = default; - VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT 
-    VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT : m_sampler( sampler ) {}
+    //=== VK_EXT_opacity_micromap ===
-#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
-    Sampler & operator=( VkSampler sampler ) VULKAN_HPP_NOEXCEPT
-    {
-      m_sampler = sampler;
-      return *this;
-    }
-#endif
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildMicromapsEXT( uint32_t infoCount,
+                            const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
-    Sampler & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
-    {
-      m_sampler = {};
-      return *this;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo,
+                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info,
+                          Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
-#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
-    auto operator<=>( Sampler const & ) const = default;
-#else
-    bool operator==( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_sampler == rhs.m_sampler;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,
+                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info,
+                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
-    bool operator!=( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_sampler != rhs.m_sampler;
-    }
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,
+                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info,
+                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
-    bool operator<( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
-    {
-      return m_sampler < rhs.m_sampler;
-    }
-#endif
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeMicromapsPropertiesEXT( uint32_t micromapCount,
+                                      const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,
+                                      VULKAN_HPP_NAMESPACE::QueryType queryType,
+                                      VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                                      uint32_t firstQuery,
+                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
+                                      VULKAN_HPP_NAMESPACE::QueryType queryType,
+                                      VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                                      uint32_t firstQuery,
+                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
-    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const
VULKAN_HPP_NOEXCEPT - { - return m_sampler; - } + //=== VK_HUAWEI_cluster_culling_shader === - explicit operator bool() const VULKAN_HPP_NOEXCEPT - { - return m_sampler != VK_NULL_HANDLE; - } + template + void drawClusterHUAWEI( uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - bool operator!() const VULKAN_HPP_NOEXCEPT - { - return m_sampler == VK_NULL_HANDLE; - } + template + void drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - private: - VkSampler m_sampler = {}; - }; + //=== VK_NV_copy_memory_indirect === - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::Sampler; - }; + template + void copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template <> - struct CppType - { - using Type = VULKAN_HPP_NAMESPACE::Sampler; - }; + template + void copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers * pImageSubresources, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t stride, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageSubresources, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template <> - struct isVulkanHandleType - { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; - }; + //=== VK_NV_memory_decompression === - class SamplerYcbcrConversion - { - public: - using CType = VkSamplerYcbcrConversion; - using NativeType = VkSamplerYcbcrConversion; + template + void decompressMemoryNV( uint32_t decompressRegionCount, + const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV * pDecompressMemoryRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & decompressMemoryRegions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion; + template + void decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, + uint32_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - public: - VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() = default; - 
VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT - : m_samplerYcbcrConversion( samplerYcbcrConversion ) - { - } + //=== VK_NV_device_generated_commands_compute === -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - SamplerYcbcrConversion & operator=( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT - { - m_samplerYcbcrConversion = samplerYcbcrConversion; - return *this; - } -#endif + template + void updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - SamplerYcbcrConversion & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_samplerYcbcrConversion = {}; - return *this; - } + //=== VK_EXT_extended_dynamic_state3 === -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SamplerYcbcrConversion const & ) const = default; -#else - bool operator==( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_samplerYcbcrConversion == rhs.m_samplerYcbcrConversion; - } + template + void setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - bool operator!=( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_samplerYcbcrConversion != rhs.m_samplerYcbcrConversion; - } + template + void setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - bool operator<( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_samplerYcbcrConversion < rhs.m_samplerYcbcrConversion; - } -#endif + template + void setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT + template + void setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sampleMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setColorBlendEnableEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::Bool32 * pColorBlendEnables, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setColorBlendEnableEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendEnables, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setColorBlendEquationEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT * pColorBlendEquations, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setColorBlendEquationEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendEquations, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setColorWriteMaskEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ColorComponentFlags * pColorWriteMasks, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setColorWriteMaskEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorWriteMasks, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setRasterizationStreamEXT( uint32_t rasterizationStream, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setColorBlendAdvancedEXT( uint32_t firstAttachment, + uint32_t attachmentCount, + const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT * pColorBlendAdvanced, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setColorBlendAdvancedEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendAdvanced, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setLineRasterizationModeEXT( 
VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setViewportSwizzleNV( uint32_t firstViewport, + uint32_t viewportCount, + const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setViewportSwizzleNV( uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewportSwizzles, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setCoverageToColorLocationNV( uint32_t coverageToColorLocation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setCoverageModulationTableNV( uint32_t coverageModulationTableCount, + const float * pCoverageModulationTable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & coverageModulationTable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_optical_flow === + + template + void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV * pExecuteInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void 
opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_maintenance5 === + + template + void bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_shader_object === + + template + void bindShadersEXT( uint32_t stageCount, + const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits * pStages, + const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & stages, + VULKAN_HPP_NAMESPACE::ArrayProxy const & shaders, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode, + const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT * pDepthClampRange, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode, + Optional depthClampRange VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_cooperative_vector === + + template + void convertCooperativeVectorMatrixNV( uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::ConvertCooperativeVectorMatrixInfoNV * pInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void convertCooperativeVectorMatrixNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + + template + void setAttachmentFeedbackLoopEnableEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_line_rasterization === + + template + void setLineStippleKHR( uint32_t lineStippleFactor, + uint16_t lineStipplePattern, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_maintenance6 === + + template + void bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo * pBindDescriptorSetsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfo * 
pPushConstantsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo * pPushDescriptorSetInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void bindDescriptorBufferEmbeddedSamplers2EXT( + const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT * pBindDescriptorBufferEmbeddedSamplersInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindDescriptorBufferEmbeddedSamplers2EXT( + const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_cluster_acceleration_structure === + + template + void buildClusterAccelerationStructureIndirectNV( const VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureCommandsInfoNV * pCommandInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void buildClusterAccelerationStructureIndirectNV( const VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureCommandsInfoNV & commandInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_partitioned_acceleration_structure === + + template + void buildPartitionedAccelerationStructuresNV( const VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureInfoNV * pBuildInfo, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void buildPartitionedAccelerationStructuresNV( const VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureInfoNV & buildInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_device_generated_commands === + + template + void preprocessGeneratedCommandsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT * pGeneratedCommandsInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void preprocessGeneratedCommandsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT * pGeneratedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT { - return m_samplerYcbcrConversion; + return m_commandBuffer; } explicit operator bool() const VULKAN_HPP_NOEXCEPT { - return m_samplerYcbcrConversion != VK_NULL_HANDLE; + return m_commandBuffer != VK_NULL_HANDLE; } bool operator!() const VULKAN_HPP_NOEXCEPT { - return m_samplerYcbcrConversion == VK_NULL_HANDLE; + return m_commandBuffer == VK_NULL_HANDLE; } private: - VkSamplerYcbcrConversion m_samplerYcbcrConversion = {}; + VkCommandBuffer m_commandBuffer = {}; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; + using Type = VULKAN_HPP_NAMESPACE::CommandBuffer; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; + using Type = VULKAN_HPP_NAMESPACE::CommandBuffer; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> - struct isVulkanHandleType + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::CommandBuffer; + }; +#endif + + template <> + struct isVulkanHandleType { static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - using SamplerYcbcrConversionKHR = SamplerYcbcrConversion; - class ShaderModule + class DeviceMemory { public: - using CType = VkShaderModule; - using NativeType = VkShaderModule; + using CType = VkDeviceMemory; + using NativeType = VkDeviceMemory; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule; + 
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory; public: - VULKAN_HPP_CONSTEXPR ShaderModule() = default; - VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT : m_shaderModule( shaderModule ) {} + DeviceMemory() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + DeviceMemory( DeviceMemory const & rhs ) = default; + DeviceMemory & operator=( DeviceMemory const & rhs ) = default; -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - ShaderModule & operator=( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DeviceMemory( DeviceMemory && rhs ) = default; + DeviceMemory & operator=( DeviceMemory && rhs ) = default; +#else + DeviceMemory( DeviceMemory && rhs ) VULKAN_HPP_NOEXCEPT : m_deviceMemory( VULKAN_HPP_NAMESPACE::exchange( rhs.m_deviceMemory, {} ) ) {} + + DeviceMemory & operator=( DeviceMemory && rhs ) VULKAN_HPP_NOEXCEPT { - m_shaderModule = shaderModule; + m_deviceMemory = VULKAN_HPP_NAMESPACE::exchange( rhs.m_deviceMemory, {} ); return *this; } #endif - ShaderModule & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_shaderModule = {}; - return *this; - } + VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ShaderModule const & ) const = default; -#else - bool operator==( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_shaderModule == rhs.m_shaderModule; - } + VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT : m_deviceMemory( deviceMemory ) {} - bool operator!=( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + DeviceMemory & operator=( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT { - return m_shaderModule != rhs.m_shaderModule; + m_deviceMemory = deviceMemory; + return *this; } +#endif - bool operator<( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT + DeviceMemory & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT { - return m_shaderModule < rhs.m_shaderModule; + m_deviceMemory = {}; + return *this; } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const VULKAN_HPP_NOEXCEPT { - return m_shaderModule; + return m_deviceMemory; } explicit operator bool() const VULKAN_HPP_NOEXCEPT { - return m_shaderModule != VK_NULL_HANDLE; + return m_deviceMemory != VK_NULL_HANDLE; } bool operator!() const VULKAN_HPP_NOEXCEPT { - return m_shaderModule == VK_NULL_HANDLE; + return m_deviceMemory == VK_NULL_HANDLE; } private: - VkShaderModule m_shaderModule = {}; + VkDeviceMemory m_deviceMemory = {}; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::ShaderModule; + using Type = VULKAN_HPP_NAMESPACE::DeviceMemory; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::ShaderModule; + using Type = VULKAN_HPP_NAMESPACE::DeviceMemory; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> - struct isVulkanHandleType + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DeviceMemory; + }; +#endif + + template <> + struct isVulkanHandleType { static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - class ValidationCacheEXT + class VideoSessionKHR { public: - using CType = VkValidationCacheEXT; - using NativeType = 
VkValidationCacheEXT; + using CType = VkVideoSessionKHR; + using NativeType = VkVideoSessionKHR; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; public: - VULKAN_HPP_CONSTEXPR ValidationCacheEXT() = default; - VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT : m_validationCacheEXT( validationCacheEXT ) + VideoSessionKHR() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + VideoSessionKHR( VideoSessionKHR const & rhs ) = default; + VideoSessionKHR & operator=( VideoSessionKHR const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + VideoSessionKHR( VideoSessionKHR && rhs ) = default; + VideoSessionKHR & operator=( VideoSessionKHR && rhs ) = default; +#else + VideoSessionKHR( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT : m_videoSessionKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_videoSessionKHR, {} ) ) {} + + VideoSessionKHR & operator=( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT { + m_videoSessionKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_videoSessionKHR, {} ); + return *this; } +#endif -#if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - ValidationCacheEXT & operator=( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR VideoSessionKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT VideoSessionKHR( VkVideoSessionKHR videoSessionKHR ) VULKAN_HPP_NOEXCEPT : m_videoSessionKHR( videoSessionKHR ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + VideoSessionKHR & operator=( VkVideoSessionKHR videoSessionKHR ) VULKAN_HPP_NOEXCEPT { - m_validationCacheEXT = validationCacheEXT; + m_videoSessionKHR = videoSessionKHR; return *this; } #endif - ValidationCacheEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + VideoSessionKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT { - m_validationCacheEXT = {}; + m_videoSessionKHR = {}; return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ValidationCacheEXT const & ) const = default; + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkVideoSessionKHR() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionKHR == VK_NULL_HANDLE; + } + + private: + VkVideoSessionKHR m_videoSessionKHR = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::VideoSessionKHR; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::VideoSessionKHR; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DeferredOperationKHR + { + public: + using CType = VkDeferredOperationKHR; + using NativeType = VkDeferredOperationKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType 
objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + DeferredOperationKHR() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + DeferredOperationKHR( DeferredOperationKHR const & rhs ) = default; + DeferredOperationKHR & operator=( DeferredOperationKHR const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DeferredOperationKHR( DeferredOperationKHR && rhs ) = default; + DeferredOperationKHR & operator=( DeferredOperationKHR && rhs ) = default; #else - bool operator==( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + DeferredOperationKHR( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_deferredOperationKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_deferredOperationKHR, {} ) ) + { + } + + DeferredOperationKHR & operator=( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT { - return m_validationCacheEXT == rhs.m_validationCacheEXT; + m_deferredOperationKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_deferredOperationKHR, {} ); + return *this; } +#endif - bool operator!=( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DeferredOperationKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DeferredOperationKHR( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT + : m_deferredOperationKHR( deferredOperationKHR ) { - return m_validationCacheEXT != rhs.m_validationCacheEXT; } - bool operator<( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + DeferredOperationKHR & operator=( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT { - return m_validationCacheEXT < rhs.m_validationCacheEXT; + m_deferredOperationKHR = deferredOperationKHR; + return *this; } #endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const VULKAN_HPP_NOEXCEPT + DeferredOperationKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT { - return m_validationCacheEXT; + m_deferredOperationKHR = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeferredOperationKHR() const VULKAN_HPP_NOEXCEPT + { + return m_deferredOperationKHR; } explicit operator bool() const VULKAN_HPP_NOEXCEPT { - return m_validationCacheEXT != VK_NULL_HANDLE; + return m_deferredOperationKHR != VK_NULL_HANDLE; } bool operator!() const VULKAN_HPP_NOEXCEPT { - return m_validationCacheEXT == VK_NULL_HANDLE; + return m_deferredOperationKHR == VK_NULL_HANDLE; } private: - VkValidationCacheEXT m_validationCacheEXT = {}; + VkDeferredOperationKHR m_deferredOperationKHR = {}; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + using Type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + using Type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR; }; +#endif template <> - struct isVulkanHandleType + struct isVulkanHandleType { static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - class VideoSessionParametersKHR +#if defined( VK_USE_PLATFORM_FUCHSIA ) + class BufferCollectionFUCHSIA { public: - using CType = VkVideoSessionParametersKHR; - using NativeType = VkVideoSessionParametersKHR; + using CType 
= VkBufferCollectionFUCHSIA; + using NativeType = VkBufferCollectionFUCHSIA; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA; public: - VULKAN_HPP_CONSTEXPR VideoSessionParametersKHR() = default; - VULKAN_HPP_CONSTEXPR VideoSessionParametersKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - VULKAN_HPP_TYPESAFE_EXPLICIT VideoSessionParametersKHR( VkVideoSessionParametersKHR videoSessionParametersKHR ) VULKAN_HPP_NOEXCEPT - : m_videoSessionParametersKHR( videoSessionParametersKHR ) + BufferCollectionFUCHSIA() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + BufferCollectionFUCHSIA( BufferCollectionFUCHSIA const & rhs ) = default; + BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA const & rhs ) = default; + +# if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + BufferCollectionFUCHSIA( BufferCollectionFUCHSIA && rhs ) = default; + BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA && rhs ) = default; +# else + BufferCollectionFUCHSIA( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT + : m_bufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::exchange( rhs.m_bufferCollectionFUCHSIA, {} ) ) { } -# if defined( VULKAN_HPP_TYPESAFE_CONVERSION ) - VideoSessionParametersKHR & operator=( VkVideoSessionParametersKHR videoSessionParametersKHR ) VULKAN_HPP_NOEXCEPT + BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT { - m_videoSessionParametersKHR = videoSessionParametersKHR; + m_bufferCollectionFUCHSIA = VULKAN_HPP_NAMESPACE::exchange( rhs.m_bufferCollectionFUCHSIA, {} ); return *this; } # endif - VideoSessionParametersKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_videoSessionParametersKHR = {}; - return *this; - } + VULKAN_HPP_CONSTEXPR BufferCollectionFUCHSIA( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoSessionParametersKHR const & ) const = default; -# else - bool operator==( VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT BufferCollectionFUCHSIA( VkBufferCollectionFUCHSIA bufferCollectionFUCHSIA ) VULKAN_HPP_NOEXCEPT + : m_bufferCollectionFUCHSIA( bufferCollectionFUCHSIA ) { - return m_videoSessionParametersKHR == rhs.m_videoSessionParametersKHR; } - bool operator!=( VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +# if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + BufferCollectionFUCHSIA & operator=( VkBufferCollectionFUCHSIA bufferCollectionFUCHSIA ) VULKAN_HPP_NOEXCEPT { - return m_videoSessionParametersKHR != rhs.m_videoSessionParametersKHR; + m_bufferCollectionFUCHSIA = bufferCollectionFUCHSIA; + return *this; } +# endif - bool operator<( VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + BufferCollectionFUCHSIA & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT { - return m_videoSessionParametersKHR < rhs.m_videoSessionParametersKHR; + m_bufferCollectionFUCHSIA = {}; + return *this; } -# endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkVideoSessionParametersKHR() const 
VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferCollectionFUCHSIA() const VULKAN_HPP_NOEXCEPT { - return m_videoSessionParametersKHR; + return m_bufferCollectionFUCHSIA; } explicit operator bool() const VULKAN_HPP_NOEXCEPT { - return m_videoSessionParametersKHR != VK_NULL_HANDLE; + return m_bufferCollectionFUCHSIA != VK_NULL_HANDLE; } bool operator!() const VULKAN_HPP_NOEXCEPT { - return m_videoSessionParametersKHR == VK_NULL_HANDLE; + return m_bufferCollectionFUCHSIA == VK_NULL_HANDLE; } private: - VkVideoSessionParametersKHR m_videoSessionParametersKHR = {}; + VkBufferCollectionFUCHSIA m_bufferCollectionFUCHSIA = {}; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR; + using Type = VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA; }; template <> - struct isVulkanHandleType + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA; + }; + +# if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA; + }; +# endif + + template <> + struct isVulkanHandleType { static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - class Queue + class BufferView { public: - using CType = VkQueue; - using NativeType = VkQueue; + using CType = VkBufferView; + using NativeType = VkBufferView; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueue; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferView; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView; public: - VULKAN_HPP_CONSTEXPR Queue() = default; - VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - Queue( VkQueue queue ) VULKAN_HPP_NOEXCEPT : m_queue( queue ) {} + BufferView() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + BufferView( BufferView const & rhs ) = default; + BufferView & operator=( BufferView const & rhs ) = default; - Queue & operator=( VkQueue queue ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + BufferView( BufferView && rhs ) = default; + BufferView & operator=( BufferView && rhs ) = default; +#else + BufferView( BufferView && rhs ) VULKAN_HPP_NOEXCEPT : m_bufferView( VULKAN_HPP_NAMESPACE::exchange( rhs.m_bufferView, {} ) ) {} + + BufferView & operator=( BufferView && rhs ) VULKAN_HPP_NOEXCEPT { - m_queue = queue; + m_bufferView = VULKAN_HPP_NAMESPACE::exchange( rhs.m_bufferView, {} ); return *this; } +#endif - Queue & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT : m_bufferView( bufferView ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + BufferView & operator=( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT { - m_queue = {}; + m_bufferView = bufferView; return *this; } +#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( Queue const & ) const = default; -#else - bool operator==( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT + BufferView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT { - return 
m_queue == rhs.m_queue; + m_bufferView = {}; + return *this; } - bool operator!=( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const VULKAN_HPP_NOEXCEPT { - return m_queue != rhs.m_queue; + return m_bufferView; } - bool operator<( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT + explicit operator bool() const VULKAN_HPP_NOEXCEPT { - return m_queue < rhs.m_queue; + return m_bufferView != VK_NULL_HANDLE; } -#endif - - //=== VK_VERSION_1_0 === - template - VULKAN_HPP_NODISCARD Result submit( uint32_t submitCount, - const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits, - VULKAN_HPP_NAMESPACE::Fence fence, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - submit( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, - VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD Result waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_NODISCARD Result bindSparse( uint32_t bindInfoCount, - const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo, - VULKAN_HPP_NAMESPACE::Fence fence, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfo, - VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_VERSION_1_3 === + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView == VK_NULL_HANDLE; + } - template - VULKAN_HPP_NODISCARD Result submit2( uint32_t submitCount, - const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, - VULKAN_HPP_NAMESPACE::Fence fence, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - submit2( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, - VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + private: + VkBufferView m_bufferView = {}; + }; - //=== VK_KHR_swapchain === + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::BufferView; + }; - template - VULKAN_HPP_NODISCARD Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; 
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::BufferView; + }; - //=== VK_EXT_debug_utils === +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::BufferView; + }; +#endif - template - void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; - template - void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + class CommandPool + { + public: + using CType = VkCommandPool; + using NativeType = VkCommandPool; - template - void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool; - //=== VK_NV_device_diagnostic_checkpoints === + public: + CommandPool() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + CommandPool( CommandPool const & rhs ) = default; + CommandPool & operator=( CommandPool const & rhs ) = default; - template - void getCheckpointDataNV( uint32_t * pCheckpointDataCount, - VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD std::vector - getCheckpointDataNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = CheckpointDataNVAllocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD std::vector - getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + CommandPool( CommandPool && rhs ) = default; + CommandPool & operator=( CommandPool && rhs ) = default; +#else + CommandPool( CommandPool && rhs ) VULKAN_HPP_NOEXCEPT : m_commandPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandPool, {} ) ) {} - //=== VK_INTEL_performance_query === + CommandPool & operator=( CommandPool && rhs ) VULKAN_HPP_NOEXCEPT + { + m_commandPool = VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandPool, {} ); + return *this; + } +#endif -#ifdef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD Result setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - //=== VK_KHR_synchronization2 === + VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT : m_commandPool( commandPool ) {} - template - VULKAN_HPP_NODISCARD Result submit2KHR( uint32_t submitCount, - const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, - VULKAN_HPP_NAMESPACE::Fence fence, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, - VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + CommandPool & operator=( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT + { + m_commandPool = commandPool; + return *this; + } +#endif - template - void getCheckpointData2NV( uint32_t * pCheckpointDataCount, - VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD std::vector - getCheckpointData2NV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = CheckpointData2NVAllocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD std::vector - getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + CommandPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_commandPool = {}; + return *this; + } - operator VkQueue() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const VULKAN_HPP_NOEXCEPT { - return m_queue; + return m_commandPool; } explicit operator bool() const VULKAN_HPP_NOEXCEPT { - return m_queue != VK_NULL_HANDLE; + return m_commandPool != VK_NULL_HANDLE; } bool operator!() const VULKAN_HPP_NOEXCEPT { - return m_queue == VK_NULL_HANDLE; + return m_commandPool == VK_NULL_HANDLE; } private: - VkQueue m_queue = {}; + VkCommandPool m_commandPool = {}; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::Queue; + using Type = VULKAN_HPP_NAMESPACE::CommandPool; }; template <> - struct CppType + struct CppType { - using Type = VULKAN_HPP_NAMESPACE::Queue; + using Type = VULKAN_HPP_NAMESPACE::CommandPool; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> - struct isVulkanHandleType + struct CppType { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = 
true; + using Type = VULKAN_HPP_NAMESPACE::CommandPool; }; +#endif -#ifndef VULKAN_HPP_NO_SMART_HANDLE - class Device; - template - class UniqueHandleTraits - { - public: - using deleter = ObjectDestroy; - }; - using UniqueAccelerationStructureKHR = UniqueHandle; - template - class UniqueHandleTraits - { - public: - using deleter = ObjectDestroy; - }; - using UniqueAccelerationStructureNV = UniqueHandle; - template - class UniqueHandleTraits - { - public: - using deleter = ObjectDestroy; - }; - using UniqueBuffer = UniqueHandle; -# if defined( VK_USE_PLATFORM_FUCHSIA ) - template - class UniqueHandleTraits - { - public: - using deleter = ObjectDestroy; - }; - using UniqueBufferCollectionFUCHSIA = UniqueHandle; -# endif /*VK_USE_PLATFORM_FUCHSIA*/ - template - class UniqueHandleTraits - { - public: - using deleter = ObjectDestroy; - }; - using UniqueBufferView = UniqueHandle; - template - class UniqueHandleTraits - { - public: - using deleter = PoolFree; - }; - using UniqueCommandBuffer = UniqueHandle; - template - class UniqueHandleTraits + template <> + struct isVulkanHandleType { - public: - using deleter = ObjectDestroy; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - using UniqueCommandPool = UniqueHandle; - template - class UniqueHandleTraits + + class PipelineCache { public: - using deleter = ObjectDestroy; - }; - using UniqueCuFunctionNVX = UniqueHandle; - template - class UniqueHandleTraits - { + using CType = VkPipelineCache; + using NativeType = VkPipelineCache; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache; + public: - using deleter = ObjectDestroy; - }; - using UniqueCuModuleNVX = UniqueHandle; - template - class UniqueHandleTraits - { - public: - using deleter = ObjectDestroy; + PipelineCache() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + PipelineCache( PipelineCache const & rhs ) = default; + PipelineCache & operator=( PipelineCache const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + PipelineCache( PipelineCache && rhs ) = default; + PipelineCache & operator=( PipelineCache && rhs ) = default; +#else + PipelineCache( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT : m_pipelineCache( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineCache, {} ) ) {} + + PipelineCache & operator=( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT + { + m_pipelineCache = VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineCache, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT : m_pipelineCache( pipelineCache ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + PipelineCache & operator=( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT + { + m_pipelineCache = pipelineCache; + return *this; + } +#endif + + PipelineCache & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipelineCache = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache == 
VK_NULL_HANDLE; + } + + private: + VkPipelineCache m_pipelineCache = {}; }; - using UniqueDeferredOperationKHR = UniqueHandle; - template - class UniqueHandleTraits + + template <> + struct CppType { - public: - using deleter = ObjectDestroy; + using Type = VULKAN_HPP_NAMESPACE::PipelineCache; }; - using UniqueDescriptorPool = UniqueHandle; - template - class UniqueHandleTraits + + template <> + struct CppType { - public: - using deleter = PoolFree; + using Type = VULKAN_HPP_NAMESPACE::PipelineCache; }; - using UniqueDescriptorSet = UniqueHandle; - template - class UniqueHandleTraits + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType { - public: - using deleter = ObjectDestroy; + using Type = VULKAN_HPP_NAMESPACE::PipelineCache; }; - using UniqueDescriptorSetLayout = UniqueHandle; - template - class UniqueHandleTraits +#endif + + template <> + struct isVulkanHandleType { - public: - using deleter = ObjectDestroy; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - using UniqueDescriptorUpdateTemplate = UniqueHandle; - using UniqueDescriptorUpdateTemplateKHR = UniqueHandle; - template - class UniqueHandleTraits + + class CuFunctionNVX { public: - using deleter = ObjectFree; + using CType = VkCuFunctionNVX; + using NativeType = VkCuFunctionNVX; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX; + + public: + CuFunctionNVX() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + CuFunctionNVX( CuFunctionNVX const & rhs ) = default; + CuFunctionNVX & operator=( CuFunctionNVX const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + CuFunctionNVX( CuFunctionNVX && rhs ) = default; + CuFunctionNVX & operator=( CuFunctionNVX && rhs ) = default; +#else + CuFunctionNVX( CuFunctionNVX && rhs ) VULKAN_HPP_NOEXCEPT : m_cuFunctionNVX( VULKAN_HPP_NAMESPACE::exchange( rhs.m_cuFunctionNVX, {} ) ) {} + + CuFunctionNVX & operator=( CuFunctionNVX && rhs ) VULKAN_HPP_NOEXCEPT + { + m_cuFunctionNVX = VULKAN_HPP_NAMESPACE::exchange( rhs.m_cuFunctionNVX, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR CuFunctionNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT CuFunctionNVX( VkCuFunctionNVX cuFunctionNVX ) VULKAN_HPP_NOEXCEPT : m_cuFunctionNVX( cuFunctionNVX ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + CuFunctionNVX & operator=( VkCuFunctionNVX cuFunctionNVX ) VULKAN_HPP_NOEXCEPT + { + m_cuFunctionNVX = cuFunctionNVX; + return *this; + } +#endif + + CuFunctionNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_cuFunctionNVX = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCuFunctionNVX() const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_cuFunctionNVX == VK_NULL_HANDLE; + } + + private: + VkCuFunctionNVX m_cuFunctionNVX = {}; }; - using UniqueDeviceMemory = UniqueHandle; - template - class UniqueHandleTraits + + template <> + struct CppType { - public: - using deleter = ObjectDestroy; + using Type = VULKAN_HPP_NAMESPACE::CuFunctionNVX; }; - using UniqueEvent = UniqueHandle; - template - class UniqueHandleTraits + + template <> + 
struct CppType { - public: - using deleter = ObjectDestroy; + using Type = VULKAN_HPP_NAMESPACE::CuFunctionNVX; }; - using UniqueFence = UniqueHandle; - template - class UniqueHandleTraits + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType { - public: - using deleter = ObjectDestroy; + using Type = VULKAN_HPP_NAMESPACE::CuFunctionNVX; }; - using UniqueFramebuffer = UniqueHandle; - template - class UniqueHandleTraits +#endif + + template <> + struct isVulkanHandleType { - public: - using deleter = ObjectDestroy; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - using UniqueImage = UniqueHandle; - template - class UniqueHandleTraits + + class CuModuleNVX { public: - using deleter = ObjectDestroy; + using CType = VkCuModuleNVX; + using NativeType = VkCuModuleNVX; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX; + + public: + CuModuleNVX() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + CuModuleNVX( CuModuleNVX const & rhs ) = default; + CuModuleNVX & operator=( CuModuleNVX const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + CuModuleNVX( CuModuleNVX && rhs ) = default; + CuModuleNVX & operator=( CuModuleNVX && rhs ) = default; +#else + CuModuleNVX( CuModuleNVX && rhs ) VULKAN_HPP_NOEXCEPT : m_cuModuleNVX( VULKAN_HPP_NAMESPACE::exchange( rhs.m_cuModuleNVX, {} ) ) {} + + CuModuleNVX & operator=( CuModuleNVX && rhs ) VULKAN_HPP_NOEXCEPT + { + m_cuModuleNVX = VULKAN_HPP_NAMESPACE::exchange( rhs.m_cuModuleNVX, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR CuModuleNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT CuModuleNVX( VkCuModuleNVX cuModuleNVX ) VULKAN_HPP_NOEXCEPT : m_cuModuleNVX( cuModuleNVX ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + CuModuleNVX & operator=( VkCuModuleNVX cuModuleNVX ) VULKAN_HPP_NOEXCEPT + { + m_cuModuleNVX = cuModuleNVX; + return *this; + } +#endif + + CuModuleNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_cuModuleNVX = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCuModuleNVX() const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_cuModuleNVX == VK_NULL_HANDLE; + } + + private: + VkCuModuleNVX m_cuModuleNVX = {}; }; - using UniqueImageView = UniqueHandle; - template - class UniqueHandleTraits + + template <> + struct CppType { - public: - using deleter = ObjectDestroy; + using Type = VULKAN_HPP_NAMESPACE::CuModuleNVX; }; - using UniqueIndirectCommandsLayoutNV = UniqueHandle; - template - class UniqueHandleTraits + + template <> + struct CppType { - public: - using deleter = ObjectDestroy; + using Type = VULKAN_HPP_NAMESPACE::CuModuleNVX; }; - using UniquePipeline = UniqueHandle; - template - class UniqueHandleTraits + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType { - public: - using deleter = ObjectDestroy; + using Type = VULKAN_HPP_NAMESPACE::CuModuleNVX; }; - using UniquePipelineCache = UniqueHandle; - template - class UniqueHandleTraits +#endif + + template <> + struct isVulkanHandleType { - public: - using deleter = ObjectDestroy; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - using UniquePipelineLayout = UniqueHandle; - template - class UniqueHandleTraits + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + class CudaFunctionNV { public: - using deleter = ObjectDestroy; + using CType = VkCudaFunctionNV; + using NativeType = VkCudaFunctionNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCudaFunctionNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCudaFunctionNV; + + public: + CudaFunctionNV() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + CudaFunctionNV( CudaFunctionNV const & rhs ) = default; + CudaFunctionNV & operator=( CudaFunctionNV const & rhs ) = default; + +# if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + CudaFunctionNV( CudaFunctionNV && rhs ) = default; + CudaFunctionNV & operator=( CudaFunctionNV && rhs ) = default; +# else + CudaFunctionNV( CudaFunctionNV && rhs ) VULKAN_HPP_NOEXCEPT : m_cudaFunctionNV( VULKAN_HPP_NAMESPACE::exchange( rhs.m_cudaFunctionNV, {} ) ) {} + + CudaFunctionNV & operator=( CudaFunctionNV && rhs ) VULKAN_HPP_NOEXCEPT + { + m_cudaFunctionNV = VULKAN_HPP_NAMESPACE::exchange( rhs.m_cudaFunctionNV, {} ); + return *this; + } +# endif + + VULKAN_HPP_CONSTEXPR CudaFunctionNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT CudaFunctionNV( VkCudaFunctionNV cudaFunctionNV ) VULKAN_HPP_NOEXCEPT : m_cudaFunctionNV( cudaFunctionNV ) {} + +# if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + CudaFunctionNV & operator=( VkCudaFunctionNV cudaFunctionNV ) VULKAN_HPP_NOEXCEPT + { + m_cudaFunctionNV = cudaFunctionNV; + return *this; + } +# endif + + CudaFunctionNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_cudaFunctionNV = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCudaFunctionNV() const VULKAN_HPP_NOEXCEPT + { + return m_cudaFunctionNV; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_cudaFunctionNV != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_cudaFunctionNV == VK_NULL_HANDLE; + } + + private: + VkCudaFunctionNV m_cudaFunctionNV = {}; }; - using UniquePrivateDataSlot = UniqueHandle; - using UniquePrivateDataSlotEXT = UniqueHandle; - template - class UniqueHandleTraits + + template <> + struct CppType { - public: - using deleter = ObjectDestroy; + using Type = VULKAN_HPP_NAMESPACE::CudaFunctionNV; }; - using UniqueQueryPool = UniqueHandle; - template - class UniqueHandleTraits + + template <> + struct CppType { - public: - using deleter = ObjectDestroy; + using Type = VULKAN_HPP_NAMESPACE::CudaFunctionNV; }; - using UniqueRenderPass = UniqueHandle; - template - class UniqueHandleTraits + +# if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType { - public: - using deleter = ObjectDestroy; + using Type = VULKAN_HPP_NAMESPACE::CudaFunctionNV; }; - using UniqueSampler = UniqueHandle; - template - class UniqueHandleTraits +# endif + + template <> + struct isVulkanHandleType { - public: - using deleter = ObjectDestroy; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - using UniqueSamplerYcbcrConversion = UniqueHandle; - using UniqueSamplerYcbcrConversionKHR = UniqueHandle; - template - class UniqueHandleTraits +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + class CudaModuleNV { public: - using 
deleter = ObjectDestroy; + using CType = VkCudaModuleNV; + using NativeType = VkCudaModuleNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCudaModuleNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCudaModuleNV; + + public: + CudaModuleNV() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + CudaModuleNV( CudaModuleNV const & rhs ) = default; + CudaModuleNV & operator=( CudaModuleNV const & rhs ) = default; + +# if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + CudaModuleNV( CudaModuleNV && rhs ) = default; + CudaModuleNV & operator=( CudaModuleNV && rhs ) = default; +# else + CudaModuleNV( CudaModuleNV && rhs ) VULKAN_HPP_NOEXCEPT : m_cudaModuleNV( VULKAN_HPP_NAMESPACE::exchange( rhs.m_cudaModuleNV, {} ) ) {} + + CudaModuleNV & operator=( CudaModuleNV && rhs ) VULKAN_HPP_NOEXCEPT + { + m_cudaModuleNV = VULKAN_HPP_NAMESPACE::exchange( rhs.m_cudaModuleNV, {} ); + return *this; + } +# endif + + VULKAN_HPP_CONSTEXPR CudaModuleNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT CudaModuleNV( VkCudaModuleNV cudaModuleNV ) VULKAN_HPP_NOEXCEPT : m_cudaModuleNV( cudaModuleNV ) {} + +# if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + CudaModuleNV & operator=( VkCudaModuleNV cudaModuleNV ) VULKAN_HPP_NOEXCEPT + { + m_cudaModuleNV = cudaModuleNV; + return *this; + } +# endif + + CudaModuleNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_cudaModuleNV = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCudaModuleNV() const VULKAN_HPP_NOEXCEPT + { + return m_cudaModuleNV; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_cudaModuleNV != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_cudaModuleNV == VK_NULL_HANDLE; + } + + private: + VkCudaModuleNV m_cudaModuleNV = {}; }; - using UniqueSemaphore = UniqueHandle; - template - class UniqueHandleTraits + + template <> + struct CppType { - public: - using deleter = ObjectDestroy; + using Type = VULKAN_HPP_NAMESPACE::CudaModuleNV; }; - using UniqueShaderModule = UniqueHandle; - template - class UniqueHandleTraits + + template <> + struct CppType { - public: - using deleter = ObjectDestroy; + using Type = VULKAN_HPP_NAMESPACE::CudaModuleNV; }; - using UniqueSwapchainKHR = UniqueHandle; - template - class UniqueHandleTraits + +# if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType { - public: - using deleter = ObjectDestroy; + using Type = VULKAN_HPP_NAMESPACE::CudaModuleNV; }; - using UniqueValidationCacheEXT = UniqueHandle; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - template - class UniqueHandleTraits +# endif + + template <> + struct isVulkanHandleType { - public: - using deleter = ObjectDestroy; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - using UniqueVideoSessionKHR = UniqueHandle; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - template - class UniqueHandleTraits +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + class DescriptorPool { public: - using deleter = ObjectDestroy; + using CType = VkDescriptorPool; + using NativeType = VkDescriptorPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT 
debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool; + + public: + DescriptorPool() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + DescriptorPool( DescriptorPool const & rhs ) = default; + DescriptorPool & operator=( DescriptorPool const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DescriptorPool( DescriptorPool && rhs ) = default; + DescriptorPool & operator=( DescriptorPool && rhs ) = default; +#else + DescriptorPool( DescriptorPool && rhs ) VULKAN_HPP_NOEXCEPT : m_descriptorPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) ) {} + + DescriptorPool & operator=( DescriptorPool && rhs ) VULKAN_HPP_NOEXCEPT + { + m_descriptorPool = VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorPool, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT : m_descriptorPool( descriptorPool ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + DescriptorPool & operator=( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT + { + m_descriptorPool = descriptorPool; + return *this; + } +#endif + + DescriptorPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorPool = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool == VK_NULL_HANDLE; + } + + private: + VkDescriptorPool m_descriptorPool = {}; }; - using UniqueVideoSessionParametersKHR = UniqueHandle; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ - class Device - { - public: - using CType = VkDevice; - using NativeType = VkDevice; + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorPool; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorPool; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorPool; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DescriptorSetLayout + { + public: + using CType = VkDescriptorSetLayout; + using NativeType = VkDescriptorSetLayout; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout; + + public: + DescriptorSetLayout() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + DescriptorSetLayout( DescriptorSetLayout const & rhs ) = default; + DescriptorSetLayout & operator=( DescriptorSetLayout const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DescriptorSetLayout( DescriptorSetLayout && rhs ) = default; + DescriptorSetLayout & operator=( DescriptorSetLayout && rhs ) = default; +#else + DescriptorSetLayout( DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT + : m_descriptorSetLayout( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorSetLayout, {} ) ) + { + } + + DescriptorSetLayout & operator=( 
DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSetLayout = VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorSetLayout, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT + : m_descriptorSetLayout( descriptorSetLayout ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + DescriptorSetLayout & operator=( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSetLayout = descriptorSetLayout; + return *this; + } +#endif + + DescriptorSetLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSetLayout = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout == VK_NULL_HANDLE; + } + + private: + VkDescriptorSetLayout m_descriptorSetLayout = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Framebuffer + { + public: + using CType = VkFramebuffer; + using NativeType = VkFramebuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer; + + public: + Framebuffer() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + Framebuffer( Framebuffer const & rhs ) = default; + Framebuffer & operator=( Framebuffer const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Framebuffer( Framebuffer && rhs ) = default; + Framebuffer & operator=( Framebuffer && rhs ) = default; +#else + Framebuffer( Framebuffer && rhs ) VULKAN_HPP_NOEXCEPT : m_framebuffer( VULKAN_HPP_NAMESPACE::exchange( rhs.m_framebuffer, {} ) ) {} + + Framebuffer & operator=( Framebuffer && rhs ) VULKAN_HPP_NOEXCEPT + { + m_framebuffer = VULKAN_HPP_NAMESPACE::exchange( rhs.m_framebuffer, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT : m_framebuffer( framebuffer ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + Framebuffer & operator=( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT + { + m_framebuffer = framebuffer; + return *this; + } +#endif + + Framebuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_framebuffer = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return 
m_framebuffer == VK_NULL_HANDLE; + } + + private: + VkFramebuffer m_framebuffer = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Framebuffer; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Framebuffer; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Framebuffer; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class IndirectCommandsLayoutEXT + { + public: + using CType = VkIndirectCommandsLayoutEXT; + using NativeType = VkIndirectCommandsLayoutEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + IndirectCommandsLayoutEXT() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + IndirectCommandsLayoutEXT( IndirectCommandsLayoutEXT const & rhs ) = default; + IndirectCommandsLayoutEXT & operator=( IndirectCommandsLayoutEXT const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + IndirectCommandsLayoutEXT( IndirectCommandsLayoutEXT && rhs ) = default; + IndirectCommandsLayoutEXT & operator=( IndirectCommandsLayoutEXT && rhs ) = default; +#else + IndirectCommandsLayoutEXT( IndirectCommandsLayoutEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_indirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectCommandsLayoutEXT, {} ) ) + { + } + + IndirectCommandsLayoutEXT & operator=( IndirectCommandsLayoutEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutEXT = VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectCommandsLayoutEXT, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutEXT( VkIndirectCommandsLayoutEXT indirectCommandsLayoutEXT ) VULKAN_HPP_NOEXCEPT + : m_indirectCommandsLayoutEXT( indirectCommandsLayoutEXT ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + IndirectCommandsLayoutEXT & operator=( VkIndirectCommandsLayoutEXT indirectCommandsLayoutEXT ) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutEXT = indirectCommandsLayoutEXT; + return *this; + } +#endif + + IndirectCommandsLayoutEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutEXT = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutEXT() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutEXT == VK_NULL_HANDLE; + } + + private: + VkIndirectCommandsLayoutEXT m_indirectCommandsLayoutEXT = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class IndirectCommandsLayoutNV + { + public: + using CType = VkIndirectCommandsLayoutNV; + using 
NativeType = VkIndirectCommandsLayoutNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + IndirectCommandsLayoutNV() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + IndirectCommandsLayoutNV( IndirectCommandsLayoutNV const & rhs ) = default; + IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + IndirectCommandsLayoutNV( IndirectCommandsLayoutNV && rhs ) = default; + IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV && rhs ) = default; +#else + IndirectCommandsLayoutNV( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_indirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectCommandsLayoutNV, {} ) ) + { + } + + IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutNV = VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectCommandsLayoutNV, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNV( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT + : m_indirectCommandsLayoutNV( indirectCommandsLayoutNV ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + IndirectCommandsLayoutNV & operator=( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutNV = indirectCommandsLayoutNV; + return *this; + } +#endif + + IndirectCommandsLayoutNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutNV = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutNV == VK_NULL_HANDLE; + } + + private: + VkIndirectCommandsLayoutNV m_indirectCommandsLayoutNV = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class IndirectExecutionSetEXT + { + public: + using CType = VkIndirectExecutionSetEXT; + using NativeType = VkIndirectExecutionSetEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectExecutionSetEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + IndirectExecutionSetEXT() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + IndirectExecutionSetEXT( IndirectExecutionSetEXT const & rhs ) = default; + IndirectExecutionSetEXT & operator=( IndirectExecutionSetEXT const & rhs ) = default; + +#if !defined( 
VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + IndirectExecutionSetEXT( IndirectExecutionSetEXT && rhs ) = default; + IndirectExecutionSetEXT & operator=( IndirectExecutionSetEXT && rhs ) = default; +#else + IndirectExecutionSetEXT( IndirectExecutionSetEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_indirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectExecutionSetEXT, {} ) ) + { + } + + IndirectExecutionSetEXT & operator=( IndirectExecutionSetEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + m_indirectExecutionSetEXT = VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectExecutionSetEXT, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR IndirectExecutionSetEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT IndirectExecutionSetEXT( VkIndirectExecutionSetEXT indirectExecutionSetEXT ) VULKAN_HPP_NOEXCEPT + : m_indirectExecutionSetEXT( indirectExecutionSetEXT ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + IndirectExecutionSetEXT & operator=( VkIndirectExecutionSetEXT indirectExecutionSetEXT ) VULKAN_HPP_NOEXCEPT + { + m_indirectExecutionSetEXT = indirectExecutionSetEXT; + return *this; + } +#endif + + IndirectExecutionSetEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_indirectExecutionSetEXT = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectExecutionSetEXT() const VULKAN_HPP_NOEXCEPT + { + return m_indirectExecutionSetEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_indirectExecutionSetEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_indirectExecutionSetEXT == VK_NULL_HANDLE; + } + + private: + VkIndirectExecutionSetEXT m_indirectExecutionSetEXT = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class PrivateDataSlot + { + public: + using CType = VkPrivateDataSlot; + using NativeType = VkPrivateDataSlot; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + PrivateDataSlot() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + PrivateDataSlot( PrivateDataSlot const & rhs ) = default; + PrivateDataSlot & operator=( PrivateDataSlot const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + PrivateDataSlot( PrivateDataSlot && rhs ) = default; + PrivateDataSlot & operator=( PrivateDataSlot && rhs ) = default; +#else + PrivateDataSlot( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT : m_privateDataSlot( VULKAN_HPP_NAMESPACE::exchange( rhs.m_privateDataSlot, {} ) ) {} + + PrivateDataSlot & operator=( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT + { + m_privateDataSlot = VULKAN_HPP_NAMESPACE::exchange( rhs.m_privateDataSlot, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR PrivateDataSlot( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PrivateDataSlot( VkPrivateDataSlot privateDataSlot ) VULKAN_HPP_NOEXCEPT : m_privateDataSlot( privateDataSlot ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + 
PrivateDataSlot & operator=( VkPrivateDataSlot privateDataSlot ) VULKAN_HPP_NOEXCEPT
+    {
+      m_privateDataSlot = privateDataSlot;
+      return *this;
+    }
+#endif
+
+    PrivateDataSlot & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_privateDataSlot = {};
+      return *this;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPrivateDataSlot() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_privateDataSlot;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_privateDataSlot != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_privateDataSlot == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPrivateDataSlot m_privateDataSlot = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot, PrivateDataSlot>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::PrivateDataSlot;
+  };
+
+#if ( VK_USE_64_BIT_PTR_DEFINES == 1 )
+  template <>
+  struct CppType<VkPrivateDataSlot, PrivateDataSlot>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::PrivateDataSlot;
+  };
+#endif
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  using PrivateDataSlotEXT = PrivateDataSlot;
+
+  class RenderPass
+  {
+  public:
+    using CType      = VkRenderPass;
+    using NativeType = VkRenderPass;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass;
+
+  public:
+    RenderPass() VULKAN_HPP_NOEXCEPT{};  // = default - try to workaround a compiler issue
+    RenderPass( RenderPass const & rhs )             = default;
+    RenderPass & operator=( RenderPass const & rhs ) = default;
+
+#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE )
+    RenderPass( RenderPass && rhs )             = default;
+    RenderPass & operator=( RenderPass && rhs ) = default;
+#else
+    RenderPass( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT : m_renderPass( VULKAN_HPP_NAMESPACE::exchange( rhs.m_renderPass, {} ) ) {}
+
+    RenderPass & operator=( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      m_renderPass = VULKAN_HPP_NAMESPACE::exchange( rhs.m_renderPass, {} );
+      return *this;
+    }
+#endif
+
+    VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT : m_renderPass( renderPass ) {}
+
+#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 )
+    RenderPass & operator=( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT
+    {
+      m_renderPass = renderPass;
+      return *this;
+    }
+#endif
+
+    RenderPass & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_renderPass = {};
+      return *this;
+    }
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkRenderPass m_renderPass = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass, RenderPass>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::RenderPass;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass, RenderPass>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::RenderPass;
+  };
+
+#if ( VK_USE_64_BIT_PTR_DEFINES == 1 )
+  template <>
+  struct CppType<VkRenderPass, RenderPass>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::RenderPass;
+  };
+#endif
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::RenderPass>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class Sampler
+  {
+  public:
+    using CType      = VkSampler;
+    using NativeType = VkSampler;
+
+    static
VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSampler; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler; + + public: + Sampler() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + Sampler( Sampler const & rhs ) = default; + Sampler & operator=( Sampler const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Sampler( Sampler && rhs ) = default; + Sampler & operator=( Sampler && rhs ) = default; +#else + Sampler( Sampler && rhs ) VULKAN_HPP_NOEXCEPT : m_sampler( VULKAN_HPP_NAMESPACE::exchange( rhs.m_sampler, {} ) ) {} + + Sampler & operator=( Sampler && rhs ) VULKAN_HPP_NOEXCEPT + { + m_sampler = VULKAN_HPP_NAMESPACE::exchange( rhs.m_sampler, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT : m_sampler( sampler ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + Sampler & operator=( VkSampler sampler ) VULKAN_HPP_NOEXCEPT + { + m_sampler = sampler; + return *this; + } +#endif + + Sampler & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_sampler = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const VULKAN_HPP_NOEXCEPT + { + return m_sampler; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_sampler != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_sampler == VK_NULL_HANDLE; + } + + private: + VkSampler m_sampler = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Sampler; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Sampler; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Sampler; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class SamplerYcbcrConversion + { + public: + using CType = VkSamplerYcbcrConversion; + using NativeType = VkSamplerYcbcrConversion; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion; + + public: + SamplerYcbcrConversion() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + SamplerYcbcrConversion( SamplerYcbcrConversion const & rhs ) = default; + SamplerYcbcrConversion & operator=( SamplerYcbcrConversion const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + SamplerYcbcrConversion( SamplerYcbcrConversion && rhs ) = default; + SamplerYcbcrConversion & operator=( SamplerYcbcrConversion && rhs ) = default; +#else + SamplerYcbcrConversion( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT + : m_samplerYcbcrConversion( VULKAN_HPP_NAMESPACE::exchange( rhs.m_samplerYcbcrConversion, {} ) ) + { + } + + SamplerYcbcrConversion & operator=( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT + { + m_samplerYcbcrConversion = VULKAN_HPP_NAMESPACE::exchange( rhs.m_samplerYcbcrConversion, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t 
) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT + : m_samplerYcbcrConversion( samplerYcbcrConversion ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + SamplerYcbcrConversion & operator=( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT + { + m_samplerYcbcrConversion = samplerYcbcrConversion; + return *this; + } +#endif + + SamplerYcbcrConversion & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_samplerYcbcrConversion = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_samplerYcbcrConversion == VK_NULL_HANDLE; + } + + private: + VkSamplerYcbcrConversion m_samplerYcbcrConversion = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + using SamplerYcbcrConversionKHR = SamplerYcbcrConversion; + + class ShaderModule + { + public: + using CType = VkShaderModule; + using NativeType = VkShaderModule; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule; + + public: + ShaderModule() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + ShaderModule( ShaderModule const & rhs ) = default; + ShaderModule & operator=( ShaderModule const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + ShaderModule( ShaderModule && rhs ) = default; + ShaderModule & operator=( ShaderModule && rhs ) = default; +#else + ShaderModule( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT : m_shaderModule( VULKAN_HPP_NAMESPACE::exchange( rhs.m_shaderModule, {} ) ) {} + + ShaderModule & operator=( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT + { + m_shaderModule = VULKAN_HPP_NAMESPACE::exchange( rhs.m_shaderModule, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT : m_shaderModule( shaderModule ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + ShaderModule & operator=( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT + { + m_shaderModule = shaderModule; + return *this; + } +#endif + + ShaderModule & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_shaderModule = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule == VK_NULL_HANDLE; + } + + private: + VkShaderModule 
m_shaderModule = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ShaderModule; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ShaderModule; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ShaderModule; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class ValidationCacheEXT + { + public: + using CType = VkValidationCacheEXT; + using NativeType = VkValidationCacheEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT; + + public: + ValidationCacheEXT() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + ValidationCacheEXT( ValidationCacheEXT const & rhs ) = default; + ValidationCacheEXT & operator=( ValidationCacheEXT const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + ValidationCacheEXT( ValidationCacheEXT && rhs ) = default; + ValidationCacheEXT & operator=( ValidationCacheEXT && rhs ) = default; +#else + ValidationCacheEXT( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT : m_validationCacheEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_validationCacheEXT, {} ) ) + { + } + + ValidationCacheEXT & operator=( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + m_validationCacheEXT = VULKAN_HPP_NAMESPACE::exchange( rhs.m_validationCacheEXT, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT : m_validationCacheEXT( validationCacheEXT ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + ValidationCacheEXT & operator=( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT + { + m_validationCacheEXT = validationCacheEXT; + return *this; + } +#endif + + ValidationCacheEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_validationCacheEXT = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_validationCacheEXT == VK_NULL_HANDLE; + } + + private: + VkValidationCacheEXT m_validationCacheEXT = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class VideoSessionParametersKHR + { + public: + using CType = VkVideoSessionParametersKHR; + using NativeType = VkVideoSessionParametersKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + VideoSessionParametersKHR() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + VideoSessionParametersKHR( VideoSessionParametersKHR const & rhs ) = default; + VideoSessionParametersKHR & operator=( VideoSessionParametersKHR const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + VideoSessionParametersKHR( VideoSessionParametersKHR && rhs ) = default; + VideoSessionParametersKHR & operator=( VideoSessionParametersKHR && rhs ) = default; +#else + VideoSessionParametersKHR( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_videoSessionParametersKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_videoSessionParametersKHR, {} ) ) + { + } + + VideoSessionParametersKHR & operator=( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionParametersKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_videoSessionParametersKHR, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR VideoSessionParametersKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT VideoSessionParametersKHR( VkVideoSessionParametersKHR videoSessionParametersKHR ) VULKAN_HPP_NOEXCEPT + : m_videoSessionParametersKHR( videoSessionParametersKHR ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + VideoSessionParametersKHR & operator=( VkVideoSessionParametersKHR videoSessionParametersKHR ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionParametersKHR = videoSessionParametersKHR; + return *this; + } +#endif + + VideoSessionParametersKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionParametersKHR = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkVideoSessionParametersKHR() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParametersKHR == VK_NULL_HANDLE; + } + + private: + VkVideoSessionParametersKHR m_videoSessionParametersKHR = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class PipelineBinaryKHR + { + public: + using CType = VkPipelineBinaryKHR; + using NativeType = VkPipelineBinaryKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineBinaryKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + PipelineBinaryKHR() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + PipelineBinaryKHR( PipelineBinaryKHR const & rhs ) = default; + PipelineBinaryKHR & operator=( PipelineBinaryKHR const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + PipelineBinaryKHR( PipelineBinaryKHR && rhs ) = default; + PipelineBinaryKHR & operator=( PipelineBinaryKHR && rhs ) = default; +#else + PipelineBinaryKHR( PipelineBinaryKHR && rhs ) VULKAN_HPP_NOEXCEPT : m_pipelineBinaryKHR( 
VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineBinaryKHR, {} ) ) {} + + PipelineBinaryKHR & operator=( PipelineBinaryKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + m_pipelineBinaryKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineBinaryKHR, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR PipelineBinaryKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PipelineBinaryKHR( VkPipelineBinaryKHR pipelineBinaryKHR ) VULKAN_HPP_NOEXCEPT : m_pipelineBinaryKHR( pipelineBinaryKHR ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + PipelineBinaryKHR & operator=( VkPipelineBinaryKHR pipelineBinaryKHR ) VULKAN_HPP_NOEXCEPT + { + m_pipelineBinaryKHR = pipelineBinaryKHR; + return *this; + } +#endif + + PipelineBinaryKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipelineBinaryKHR = {}; + return *this; + } + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineBinaryKHR() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineBinaryKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineBinaryKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineBinaryKHR == VK_NULL_HANDLE; + } + + private: + VkPipelineBinaryKHR m_pipelineBinaryKHR = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineBinaryKHR; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineBinaryKHR; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Queue + { + public: + using CType = VkQueue; + using NativeType = VkQueue; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueue; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue; + + public: + Queue() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + Queue( Queue const & rhs ) = default; + Queue & operator=( Queue const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Queue( Queue && rhs ) = default; + Queue & operator=( Queue && rhs ) = default; +#else + Queue( Queue && rhs ) VULKAN_HPP_NOEXCEPT : m_queue( VULKAN_HPP_NAMESPACE::exchange( rhs.m_queue, {} ) ) {} + + Queue & operator=( Queue && rhs ) VULKAN_HPP_NOEXCEPT + { + m_queue = VULKAN_HPP_NAMESPACE::exchange( rhs.m_queue, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + Queue( VkQueue queue ) VULKAN_HPP_NOEXCEPT : m_queue( queue ) {} + + Queue & operator=( VkQueue queue ) VULKAN_HPP_NOEXCEPT + { + m_queue = queue; + return *this; + } + + Queue & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_queue = {}; + return *this; + } + + //=== VK_VERSION_1_0 === + + template + VULKAN_HPP_NODISCARD Result submit( uint32_t submitCount, + const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits, + VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + submit( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindSparse( uint32_t bindInfoCount,
+                                            const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo,
+                                            VULKAN_HPP_NAMESPACE::Fence fence,
+                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+      bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo,
+                  VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_VERSION_1_3 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result submit2( uint32_t submitCount,
+                                         const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,
+                                         VULKAN_HPP_NAMESPACE::Fence fence,
+                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+      submit2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
+               VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_KHR_swapchain ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo,
+                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo,
+                                                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_EXT_debug_utils ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
+                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
+                                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_NV_device_diagnostic_checkpoints ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getCheckpointDataNV( uint32_t * pCheckpointDataCount,
+                              VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData,
+                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <
+      typename CheckpointDataNVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::CheckpointDataNV>,
+      typename Dispatch                  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+      typename std::enable_if<std::is_same<typename CheckpointDataNVAllocator::value_type, VULKAN_HPP_NAMESPACE::CheckpointDataNV>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator>
+      getCheckpointDataNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <
+      typename CheckpointDataNVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::CheckpointDataNV>,
+      typename Dispatch                  = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+      typename std::enable_if<std::is_same<typename CheckpointDataNVAllocator::value_type, VULKAN_HPP_NAMESPACE::CheckpointDataNV>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator>
+      getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getCheckpointData2NV( uint32_t * pCheckpointDataCount,
+                               VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData,
+                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <
+      typename CheckpointData2NVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::CheckpointData2NV>,
+      typename Dispatch                   = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+      typename std::enable_if<std::is_same<typename CheckpointData2NVAllocator::value_type, VULKAN_HPP_NAMESPACE::CheckpointData2NV>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator>
+      getCheckpointData2NV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+    template <
+      typename CheckpointData2NVAllocator = std::allocator<VULKAN_HPP_NAMESPACE::CheckpointData2NV>,
+      typename Dispatch                   = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE,
+      typename std::enable_if<std::is_same<typename CheckpointData2NVAllocator::value_type, VULKAN_HPP_NAMESPACE::CheckpointData2NV>::value, int>::type = 0>
+    VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator>
+      getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_INTEL_performance_query ===
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
+                                                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+      setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
+                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    //=== VK_KHR_synchronization2 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result submit2KHR( uint32_t submitCount,
+                                            const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,
+                                            VULKAN_HPP_NAMESPACE::Fence fence,
+                                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+      submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
+                  VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    //=== VK_NV_low_latency2 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV * pQueueTypeInfo,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void notifyOutOfBandNV( const
VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    operator VkQueue() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkQueue m_queue = {};
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eQueue>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Queue;
+  };
+
+  template <>
+  struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Queue;
+  };
+
+#if ( VK_USE_64_BIT_PTR_DEFINES == 1 )
+  template <>
+  struct CppType<VkQueue, VK_NULL_HANDLE>
+  {
+    using Type = VULKAN_HPP_NAMESPACE::Queue;
+  };
+#endif
+
+  template <>
+  struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Queue>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+  };
+
+  class Device
+  {
+  public:
+    using CType      = VkDevice;
+    using NativeType = VkDevice;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDevice;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
+      VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice;
+
+  public:
+    Device() VULKAN_HPP_NOEXCEPT{};  // = default - try to workaround a compiler issue
+    Device( Device const & rhs )             = default;
+    Device & operator=( Device const & rhs ) = default;
+
+#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE )
+    Device( Device && rhs )             = default;
+    Device & operator=( Device && rhs ) = default;
+#else
+    Device( Device && rhs ) VULKAN_HPP_NOEXCEPT : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) {}
+
+    Device & operator=( Device && rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      m_device = VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} );
+      return *this;
+    }
+#endif
+
+    VULKAN_HPP_CONSTEXPR Device( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {}
+
+    Device( VkDevice device ) VULKAN_HPP_NOEXCEPT : m_device( device ) {}
+
+    Device & operator=( VkDevice device ) VULKAN_HPP_NOEXCEPT
+    {
+      m_device = device;
+      return *this;
+    }
+
+    Device & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_device = {};
+      return *this;
+    }
+
+    //=== VK_VERSION_1_0 ===
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    PFN_vkVoidFunction getProcAddr( const char * pName, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PFN_VoidFunction getProcAddr( const std::string & name,
+                                                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueue( uint32_t                      queueFamilyIndex,
+                   uint32_t                      queueIndex,
+                   VULKAN_HPP_NAMESPACE::Queue * pQueue,
+                   Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue
+      getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo *  pAllocateInfo,
+                                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                                                VULKAN_HPP_NAMESPACE::DeviceMemory *              pMemory,
+                                                Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type
+      allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo &          allocateInfo,
+                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type
+      allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo &          allocateInfo,
+                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                            Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory                memory,
+                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                     Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void( free )( VULKAN_HPP_NAMESPACE::DeviceMemory                memory,
+                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void( free )( VULKAN_HPP_NAMESPACE::DeviceMemory                         memory,
+                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory   memory,
+                                           VULKAN_HPP_NAMESPACE::DeviceSize     offset,
+                                           VULKAN_HPP_NAMESPACE::DeviceSize     size,
+                                           VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,
+                                           void **                              ppData,
+                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD typename ResultValueType<void *>::type mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory   memory,
+                                                                           VULKAN_HPP_NAMESPACE::DeviceSize     offset,
+                                                                           VULKAN_HPP_NAMESPACE::DeviceSize     size,
+                                                                           VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+                                                                           Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result flushMappedMemoryRanges( uint32_t                                        memoryRangeCount,
+                                                         const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
+                                                         Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+      flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
+                               Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result invalidateMappedMemoryRanges( uint32_t                                        memoryRangeCount,
+                                                              const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
+                                                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+      invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
+                                    Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+                              VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes,
+                              Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize
+      getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer       buffer,
+                                                  VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+                                                  VULKAN_HPP_NAMESPACE::DeviceSize   memoryOffset,
+                                                  Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+      bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer       buffer,
+                        VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+                        VULKAN_HPP_NAMESPACE::DeviceSize   memoryOffset,
+                        Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD Result bindImageMemory( VULKAN_HPP_NAMESPACE::Image        image,
+                                                 VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+                                                 VULKAN_HPP_NAMESPACE::DeviceSize   memoryOffset,
+                                                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type
+      bindImageMemory( VULKAN_HPP_NAMESPACE::Image        image,
+                       VULKAN_HPP_NAMESPACE::DeviceMemory memory,
+                       VULKAN_HPP_NAMESPACE::DeviceSize   memoryOffset,
+                       Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer               buffer,
+                                      VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
+                                      Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NODISCARD
VULKAN_HPP_NAMESPACE::MemoryRequirements + getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements + getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, + SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createFenceUnique( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void 
destroy( VULKAN_HPP_NAMESPACE::Fence fence, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Fence fence, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result resetFences( uint32_t fenceCount, + const VULKAN_HPP_NAMESPACE::Fence * pFences, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy const & fences, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result waitForFences( uint32_t fenceCount, + const VULKAN_HPP_NAMESPACE::Fence * pFences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy const & fences, + VULKAN_HPP_NAMESPACE::Bool32 waitAll, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + 
Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Event * pEvent, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createEventUnique( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyEvent( VULKAN_HPP_NAMESPACE::Event event VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::Event event, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result setEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT 
) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type setEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result resetEvent( VULKAN_HPP_NAMESPACE::Event event, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_NODISCARD Result createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDevice; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice; + template + void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - public: - VULKAN_HPP_CONSTEXPR Device() = default; - VULKAN_HPP_CONSTEXPR Device( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} - Device( VkDevice device ) VULKAN_HPP_NOEXCEPT : m_device( device ) {} + template + VULKAN_HPP_NODISCARD Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t 
firstQuery, + uint32_t queryCount, + size_t dataSize, + void * pData, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - Device & operator=( VkDevice device ) VULKAN_HPP_NOEXCEPT - { - m_device = device; - return *this; - } + template + VULKAN_HPP_NODISCARD Result createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Buffer * pBuffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createBufferUnique( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - Device & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT - { - m_device = {}; - return *this; - } + template + void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( Device const & ) const = default; -#else - bool operator==( Device const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_device == rhs.m_device; - } + template + void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( 
VULKAN_HPP_NAMESPACE::Buffer buffer, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - bool operator!=( Device const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_device != rhs.m_device; - } + template + VULKAN_HPP_NODISCARD Result createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::BufferView * pView, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - bool operator<( Device const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_device < rhs.m_device; - } -#endif + template + void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_VERSION_1_0 === + template + void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - PFN_vkVoidFunction getProcAddr( const char * pName, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Image * pImage, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, + Optional 
allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createImageUnique( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + void destroyImage( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyImage( VULKAN_HPP_NAMESPACE::Image image VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + void destroy( VULKAN_HPP_NAMESPACE::Image image, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getQueue( uint32_t queueFamilyIndex, - uint32_t queueIndex, - VULKAN_HPP_NAMESPACE::Queue * pQueue, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue - getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout + getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD Result waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#else + VULKAN_HPP_NODISCARD Result createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ImageView * pView, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename 
ResultValueType::type waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createImageViewUnique( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createShaderModuleUnique( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void( free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void( free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::DeviceSize size, - VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, - void ** ppData, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::DeviceSize size, - VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - + VULKAN_HPP_NODISCARD typename ResultValueType::type + createPipelineCache( const 
VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE template - void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result flushMappedMemoryRanges( uint32_t memoryRangeCount, - const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, - const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, - VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + size_t * pDataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize - 
getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD Result bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceMemory memory, - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceMemory memory, - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD Result bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, - VULKAN_HPP_NAMESPACE::DeviceMemory memory, - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#else - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, - VULKAN_HPP_NAMESPACE::DeviceMemory memory, - VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, + uint32_t srcCacheCount, + const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements - getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, - VULKAN_HPP_NAMESPACE::MemoryRequirements * 
pMemoryRequirements, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements - getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - + VULKAN_HPP_NODISCARD ResultValue + createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template - void getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, - uint32_t * pSparseMemoryRequirementCount, - VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD std::vector - getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = 
SparseImageMemoryRequirementsAllocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD std::vector - getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, - SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD ResultValue> + createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::Fence * pFence, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD ResultValue + createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE + template >, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD 
ResultValue, PipelineAllocator>> + createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createFenceUnique( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD ResultValue> + createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::Fence fence, + void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::Fence fence, + void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result resetFences( uint32_t fenceCount, - const VULKAN_HPP_NAMESPACE::Fence * pFences, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - typename ResultValueType::type resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy const & fences, 
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createPipelineLayoutUnique( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#else + void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result waitForFences( uint32_t fenceCount, - const VULKAN_HPP_NAMESPACE::Fence * pFences, - VULKAN_HPP_NAMESPACE::Bool32 waitAll, - uint64_t timeout, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy const & fences, - VULKAN_HPP_NAMESPACE::Bool32 waitAll, - uint64_t timeout, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Sampler * pSampler, + 
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD typename ResultValueType::type + createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createSamplerUnique( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::Event * pEvent, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD typename ResultValueType::type + createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createEventUnique( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyEvent( VULKAN_HPP_NAMESPACE::Event event VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::Event event, + void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::Event event, + void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#else + VULKAN_HPP_NODISCARD Result createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createDescriptorPoolUnique( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD Result setEvent( VULKAN_HPP_NAMESPACE::Event event, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#else + void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type setEvent( VULKAN_HPP_NAMESPACE::Event event, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD Result resetEvent( VULKAN_HPP_NAMESPACE::Event event, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + Result resetDescriptorPool( 
VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #else template - typename ResultValueType::type resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + void resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_NODISCARD Result createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, + DescriptorSetAllocator & descriptorSetAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template < + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename DescriptorSetAllocator = std::allocator>, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, DescriptorSetAllocator>>::type + allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename DescriptorSetAllocator = std::allocator>, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, DescriptorSetAllocator>>::type + allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, + 
DescriptorSetAllocator & descriptorSetAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + Result freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + Result( free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void( free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorSets, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount, - size_t dataSize, - void * pData, - VULKAN_HPP_NAMESPACE::DeviceSize stride, - VULKAN_HPP_NAMESPACE::QueryResultFlags flags, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void updateDescriptorSets( uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + uint32_t descriptorCopyCount, + const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD ResultValue> - getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount, - size_t dataSize, - VULKAN_HPP_NAMESPACE::DeviceSize stride, - VULKAN_HPP_NAMESPACE::QueryResultFlags flags 
VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template - VULKAN_HPP_NODISCARD ResultValue getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount, - VULKAN_HPP_NAMESPACE::DeviceSize stride, - VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorCopies, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::Buffer * pBuffer, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD typename ResultValueType::type + createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createBufferUnique( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createFramebufferUnique( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, + void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, + void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo, + VULKAN_HPP_NODISCARD Result createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::BufferView * pView, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, + VULKAN_HPP_NODISCARD typename ResultValueType::type + createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createRenderPassUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, + void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + 
void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, + void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::Image * pImage, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D + getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createImageUnique( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, + VULKAN_HPP_NODISCARD Result createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyImage( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyImage( VULKAN_HPP_NAMESPACE::Image image VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::Image image, + void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::Image image, + void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, - VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, + CommandBufferAllocator & commandBufferAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template < + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename CommandBufferAllocator = std::allocator>, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, CommandBufferAllocator>>::type + 
allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename CommandBufferAllocator = std::allocator>, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, CommandBufferAllocator>>::type + allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, + CommandBufferAllocator & commandBufferAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout - getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::ArrayProxy const & commandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void( free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + uint32_t commandBufferCount, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void( free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::ArrayProxy const & commandBuffers, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_1 === + + template + VULKAN_HPP_NODISCARD Result bindBufferMemory2( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result bindImageMemory2( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getGroupPeerMemoryFeatures( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * 
pPeerMemoryFeatures, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + getGroupPeerMemoryFeatures( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::ImageView * pView, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createImageViewUnique( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getImageSparseMemoryRequirements2( const 
VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template - void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo, + VULKAN_HPP_NAMESPACE::Queue * pQueue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createShaderModule( const 
VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD typename ResultValueType::type + createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createShaderModuleUnique( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createSamplerYcbcrConversionUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache, - Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD typename ResultValueType::type + createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createDescriptorUpdateTemplateUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( 
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - size_t * pDataSize, - void * pData, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD typename ResultValueType>::type - getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = Uint8_tAllocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type - getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - Uint8_tAllocator & uint8_tAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + DataType const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, - uint32_t srcCacheCount, - const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template - VULKAN_HPP_NODISCARD Result createGraphicsPipelines( 
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - uint32_t createInfoCount, - const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD ResultValue> - createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B0 = PipelineAllocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD ResultValue> - createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - PipelineAllocator & pipelineAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template - VULKAN_HPP_NODISCARD ResultValue - createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> - VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> - createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template >, - typename B0 = PipelineAllocator, - typename std::enable_if>::value, int>::type = 0> - VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> - createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - PipelineAllocator & pipelineAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template - VULKAN_HPP_NODISCARD ResultValue> - createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + //=== VK_VERSION_1_2 === template - VULKAN_HPP_NODISCARD Result createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - uint32_t createInfoCount, - const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD ResultValue> - createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B0 = PipelineAllocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD ResultValue> - createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - PipelineAllocator & pipelineAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template - VULKAN_HPP_NODISCARD ResultValue - createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD typename ResultValueType::type + createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> - VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> - createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template >, - typename B0 = PipelineAllocator, - typename std::enable_if>::value, int>::type = 0> - VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> - createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - PipelineAllocator & pipelineAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template - VULKAN_HPP_NODISCARD ResultValue> - createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + 
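    // Usage sketch (illustrative only, not part of the generated header): with enhanced mode and
    // exceptions enabled, the VK_VERSION_1_2 wrappers declared above can be driven roughly like this,
    // assuming a valid vk::Device `device`, a vk::QueryPool `queryPool`, and a `queryCount` obtained
    // elsewhere (formats/layouts below are placeholder example values):
    //
    //   vk::AttachmentDescription2 color( {}, vk::Format::eB8G8R8A8Unorm, vk::SampleCountFlagBits::e1,
    //                                     vk::AttachmentLoadOp::eClear, vk::AttachmentStoreOp::eStore,
    //                                     vk::AttachmentLoadOp::eDontCare, vk::AttachmentStoreOp::eDontCare,
    //                                     vk::ImageLayout::eUndefined, vk::ImageLayout::ePresentSrcKHR );
    //   vk::AttachmentReference2  colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal, vk::ImageAspectFlagBits::eColor );
    //   vk::SubpassDescription2   subpass( {}, vk::PipelineBindPoint::eGraphics, 0, 0, nullptr, 1, &colorRef );
    //   vk::RenderPassCreateInfo2 renderPassInfo( {}, 1, &color, 1, &subpass );
    //   vk::RenderPass renderPass = device.createRenderPass2( renderPassInfo );  // enhanced mode: throws vk::SystemError on failure
    //   device.resetQueryPool( queryPool, 0, queryCount );                       // host-side query pool reset, no command buffer needed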
template + VULKAN_HPP_NODISCARD Result getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + uint64_t * pValue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createPipelineLayoutUnique( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + DeviceAddress getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::DeviceAddress getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_3 === template - VULKAN_HPP_NODISCARD Result createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::Sampler * pSampler, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD typename ResultValueType::type + createPrivateDataSlot( const 
VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createSamplerUnique( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createPrivateDataSlotUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, + void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, + void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + typename ResultValueType::type setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_NODISCARD Result createDescriptorSetLayout( const 
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD uint64_t getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VERSION_1_4 === + + template + VULKAN_HPP_NODISCARD Result mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo * pMemoryMapInfo, + void ** ppData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE + VULKAN_HPP_NODISCARD typename ResultValueType::type mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createDescriptorPoolUnique( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, - Optional allocator 
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD Result unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo * pMemoryUnmapInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo * pRenderingAreaInfo, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D + getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo * pInfo, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, - VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#else + void getImageSubresourceLayout2( VULKAN_HPP_NAMESPACE::Image image, + const 
VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, - VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getImageSubresourceLayout2( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayout2( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo, - VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo * pCopyMemoryToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD typename ResultValueType>::type - allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B0 = DescriptorSetAllocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type - allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, - DescriptorSetAllocator & descriptorSetAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> - VULKAN_HPP_NODISCARD typename ResultValueType, DescriptorSetAllocator>>::type - allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template >, - typename B0 = DescriptorSetAllocator, - typename std::enable_if>::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType, DescriptorSetAllocator>>::type - allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, - DescriptorSetAllocator & descriptorSetAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - 
Result freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, - uint32_t descriptorSetCount, - const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo * pCopyImageToMemoryInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, - VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorSets, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - Result( free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, - uint32_t descriptorSetCount, - const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo * pCopyImageToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void( free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, - VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorSets, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void updateDescriptorSets( uint32_t descriptorWriteCount, - const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, - uint32_t descriptorCopyCount, - const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result transitionImageLayout( uint32_t transitionCount, + const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo * pTransitions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites, - VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorCopies, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + transitionImageLayout( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_swapchain === template - VULKAN_HPP_NODISCARD Result createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks 
* pAllocator, - VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD typename ResultValueType::type + createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createFramebufferUnique( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional allocator 
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint32_t * pSwapchainImageCount, + VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createRenderPassUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getSwapchainImagesKHR( + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t timeout, + VULKAN_HPP_NAMESPACE::Semaphore semaphore, + VULKAN_HPP_NAMESPACE::Fence fence, + uint32_t * pImageIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD ResultValue acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t timeout, + VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + getGroupPresentCapabilitiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, - VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D - getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo, + uint32_t * pImageIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD ResultValue acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_display_swapchain === template - void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createSharedSwapchainsKHR( uint32_t swapchainCount, + const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + SwapchainKHRAllocator & swapchainKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template - void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template < + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename SwapchainKHRAllocator = std::allocator>, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, SwapchainKHRAllocator>>::type + createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename SwapchainKHRAllocator = std::allocator>, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, SwapchainKHRAllocator>>::type + createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + SwapchainKHRAllocator & swapchainKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_debug_marker === template - void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD Result resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, - VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#else + VULKAN_HPP_NODISCARD Result debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - typename ResultValueType::type resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, - VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_video_queue === template - VULKAN_HPP_NODISCARD Result allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo, - VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD typename ResultValueType>::type - allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B0 = CommandBufferAllocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type - allocateCommandBuffers( const 
VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, - CommandBufferAllocator & commandBufferAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> - VULKAN_HPP_NODISCARD typename ResultValueType, CommandBufferAllocator>>::type - allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template >, - typename B0 = CommandBufferAllocator, - typename std::enable_if>::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType, CommandBufferAllocator>>::type - allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, - CommandBufferAllocator & commandBufferAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, - uint32_t commandBufferCount, - const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, - VULKAN_HPP_NAMESPACE::ArrayProxy const & commandBuffers, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void( free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, - uint32_t commandBufferCount, - const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void( free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, - VULKAN_HPP_NAMESPACE::ArrayProxy const & commandBuffers, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_VERSION_1_1 === + void destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result bindBufferMemory2( uint32_t bindInfoCount, - const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + uint32_t * pMemoryRequirementsCount, + VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result bindImageMemory2( uint32_t bindInfoCount, - const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + uint32_t bindSessionMemoryInfoCount, + const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bindSessionMemoryInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getGroupPeerMemoryFeatures( uint32_t heapIndex, - uint32_t localDeviceIndex, - uint32_t remoteDeviceIndex, - VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD 
Result createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags - getGroupPeerMemoryFeatures( uint32_t heapIndex, - uint32_t localDeviceIndex, - uint32_t remoteDeviceIndex, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createVideoSessionParametersKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, - VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 - getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template - VULKAN_HPP_NODISCARD StructureChain - getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, - VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; 
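    // Usage sketch (illustrative only, not part of the generated header): a typical VK_KHR_video_queue
    // setup with enhanced mode and exceptions enabled, assuming a valid vk::Device `device` and
    // already-populated vk::VideoSessionCreateInfoKHR `sessionCreateInfo` and
    // vk::VideoSessionParametersCreateInfoKHR `parametersCreateInfo`:
    //
    //   vk::VideoSessionKHR videoSession = device.createVideoSessionKHR( sessionCreateInfo );
    //   std::vector<vk::VideoSessionMemoryRequirementsKHR> requirements =
    //     device.getVideoSessionMemoryRequirementsKHR( videoSession );
    //   std::vector<vk::BindVideoSessionMemoryInfoKHR> bindInfos;  // one entry per reported requirement
    //   // ... allocate suitable vk::DeviceMemory for each requirement and append a matching bindInfo ...
    //   device.bindVideoSessionMemoryKHR( videoSession, bindInfos );
    //   parametersCreateInfo.videoSession = videoSession;
    //   vk::VideoSessionParametersKHR parameters = device.createVideoSessionParametersKHR( parametersCreateInfo );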
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 - getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template - VULKAN_HPP_NODISCARD StructureChain - getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, - uint32_t * pSparseMemoryRequirementCount, - VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD std::vector - getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = SparseImageMemoryRequirements2Allocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD std::vector - getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, - SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, - VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NVX_binary_import === template - void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo, - VULKAN_HPP_NAMESPACE::Queue * pQueue, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD 
VULKAN_HPP_NAMESPACE::Queue getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD typename ResultValueType::type + createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createSamplerYcbcrConversionUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createDescriptorUpdateTemplateUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroyCuFunctionNVX( 
VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NVX_image_view_handle === template - void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, - const void * pData, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, - DataType const & data, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, - VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + uint64_t getImageViewHandle64NVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport - getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + uint64_t getImageViewHandle64NVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template - VULKAN_HPP_NODISCARD StructureChain - getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
VULKAN_HPP_NODISCARD typename ResultValueType::type + getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_AMD_shader_info === + + template + VULKAN_HPP_NODISCARD Result getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + size_t * pInfoSize, + void * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + + template + VULKAN_HPP_NODISCARD Result getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, + HANDLE * pHandle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_VERSION_1_2 === + //=== VK_KHR_device_group === template - VULKAN_HPP_NODISCARD Result createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, + uint32_t 
localDeviceIndex, + uint32_t remoteDeviceIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_maintenance1 === + template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, + VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === template - void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, - uint64_t * pValue, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + VULKAN_HPP_NODISCARD Result getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type getMemoryWin32HandlePropertiesKHR( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === template - VULKAN_HPP_NODISCARD Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, - uint64_t timeout, + VULKAN_HPP_NODISCARD Result getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo, + int * pFd, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, - uint64_t timeout, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; 
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + int fd, + VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type getMemoryFdPropertiesKHR( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + + template + VULKAN_HPP_NODISCARD Result importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - DeviceAddress getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + VULKAN_HPP_NODISCARD Result getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NAMESPACE::DeviceAddress getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === template - uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_VERSION_1_3 === + //=== VK_KHR_descriptor_update_template === template - VULKAN_HPP_NODISCARD Result createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD typename ResultValueType::type + createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createPrivateDataSlotUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename 
ResultValueType>::type + createDescriptorUpdateTemplateKHRUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + DataType const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD Result setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, - uint64_t objectHandle, - VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, - uint64_t data, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#else - template - typename ResultValueType::type setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, - uint64_t objectHandle, - VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, - uint64_t data, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + //=== VK_EXT_display_control === template - void getPrivateData( 
VULKAN_HPP_NAMESPACE::ObjectType objectType, - uint64_t objectHandle, - VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, - uint64_t * pData, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD uint64_t getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, - uint64_t objectHandle, - VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + typename ResultValueType::type displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, - VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * pFence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 - getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template - VULKAN_HPP_NODISCARD StructureChain - getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, - VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Fence * 
pFence, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 - getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template - VULKAN_HPP_NODISCARD StructureChain - getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, - uint32_t * pSparseMemoryRequirementCount, - VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, + uint64_t * pCounterValue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD std::vector - getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = SparseImageMemoryRequirements2Allocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD std::vector - getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, - SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + VULKAN_HPP_NODISCARD typename ResultValueType::type getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_KHR_swapchain === + //=== VK_GOOGLE_display_timing === template - VULKAN_HPP_NODISCARD Result createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::SwapchainKHR * 
pSwapchain, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE + VULKAN_HPP_NODISCARD typename ResultValueType::type + getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD Result getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint32_t * pPresentationTimingCount, + VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_hdr_metadata === template - void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void setHdrMetadataEXT( uint32_t swapchainCount, + const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, + const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & swapchains, + 
VULKAN_HPP_NAMESPACE::ArrayProxy const & metadata, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_create_renderpass2 === template - void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_shared_presentable_image === +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD Result getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - uint32_t * pSwapchainImageCount, - VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, + VULKAN_HPP_NODISCARD Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD typename ResultValueType>::type - getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = ImageAllocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getSwapchainImagesKHR( - VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#else + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + template - VULKAN_HPP_NODISCARD Result acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - uint64_t timeout, - VULKAN_HPP_NAMESPACE::Semaphore semaphore, - VULKAN_HPP_NAMESPACE::Fence fence, - uint32_t * pImageIndex, - 
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + VULKAN_HPP_NODISCARD Result importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD ResultValue acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - uint64_t timeout, - VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + VULKAN_HPP_NODISCARD Result getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - getGroupPresentCapabilitiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === template - VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo, - uint32_t * pImageIndex, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo, + int * pFd, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD ResultValue acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_KHR_display_swapchain === + //=== VK_KHR_performance_query === template - VULKAN_HPP_NODISCARD Result createSharedSwapchainsKHR( uint32_t swapchainCount, - const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B0 = SwapchainKHRAllocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - SwapchainKHRAllocator & swapchainKHRAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> - VULKAN_HPP_NODISCARD typename ResultValueType, SwapchainKHRAllocator>>::type - createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template >, - typename B0 = SwapchainKHRAllocator, - typename std::enable_if>::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType, SwapchainKHRAllocator>>::type - createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - SwapchainKHRAllocator & swapchainKHRAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; 
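    // NOTE (editorial, illustrative only — not part of the generated header): typical use of the
    // profiling-lock pair declared in this VK_KHR_performance_query section, assuming enhanced mode
    // with exceptions enabled and the default dispatcher; `device` and the timeout value are
    // assumptions of this sketch:
    //
    //   vk::AcquireProfilingLockInfoKHR lockInfo{};
    //   lockInfo.timeout = UINT64_MAX;               // block until the lock is granted
    //   device.acquireProfilingLockKHR( lockInfo );  // must be held while performance-query command buffers are recorded
    //   // ... record and submit command buffers containing performance queries ...
    //   device.releaseProfilingLockKHR();            // release once profiling submissions are finished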
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void releaseProfilingLockKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - //=== VK_EXT_debug_marker === + //=== VK_EXT_debug_utils === template - VULKAN_HPP_NODISCARD Result debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, + VULKAN_HPP_NODISCARD Result setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo, + setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_queue === + setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template - VULKAN_HPP_NODISCARD Result createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif 
/*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === template - void destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result + getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer * buffer, + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo, + struct AHardwareBuffer ** pBuffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_NODISCARD Result getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, - uint32_t * pMemoryRequirementsCount, - VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD - typename ResultValueType>::type - getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = VideoSessionMemoryRequirementsKHRAllocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD - typename ResultValueType>::type - getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, - VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - template - VULKAN_HPP_NODISCARD Result bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, - uint32_t bindSessionMemoryInfoCount, - const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, - VULKAN_HPP_NAMESPACE::ArrayProxy const & bindSessionMemoryInfos, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === template - VULKAN_HPP_NODISCARD Result createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD ResultValue + createExecutionGraphPipelineAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT 
) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE + template >, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createExecutionGraphPipelinesAMDXUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createExecutionGraphPipelinesAMDXUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createVideoSessionParametersKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - VULKAN_HPP_NODISCARD Result updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, - const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, - const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD ResultValue> createExecutionGraphPipelineAMDXUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result + getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX * pSizeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + 
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getExecutionGraphPipelineNodeIndexAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX * pNodeInfo, + uint32_t * pNodeIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + getExecutionGraphPipelineNodeIndexAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - //=== VK_NVX_binary_import === + //=== VK_KHR_get_memory_requirements2 === template - VULKAN_HPP_NODISCARD Result createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo, - const 
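// ---------------------------------------------------------------------------
// Illustrative usage sketch (editorial, not part of the generated header):
// querying image memory requirements through the VK_KHR_get_memory_requirements2
// overloads declared above, once as a plain struct and once as a structure
// chain that also pulls in the dedicated-allocation hint. Assumes `device`
// and `image` are valid handles; identifiers are placeholders.
// ---------------------------------------------------------------------------
#include <vulkan/vulkan.hpp>

bool prefersDedicatedAllocation( vk::Device device, vk::Image image )
{
  // Plain query: returns vk::MemoryRequirements2 by value.
  vk::MemoryRequirements2 reqs =
      device.getImageMemoryRequirements2KHR( vk::ImageMemoryRequirementsInfo2( image ) );
  (void)reqs.memoryRequirements.size;

  // Chained query: MemoryDedicatedRequirements is linked into the pNext chain.
  auto chain = device.getImageMemoryRequirements2KHR<vk::MemoryRequirements2,
                                                     vk::MemoryDedicatedRequirements>(
      vk::ImageMemoryRequirementsInfo2( image ) );
  return chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation == VK_TRUE;
}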
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== 
VK_KHR_acceleration_structure === template - void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createAccelerationStructureKHRUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, + void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_NVX_image_view_handle === +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pBuildRangeInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, - VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_AMD_shader_info === + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, - VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, - VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, - size_t * pInfoSize, - void * pInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD typename ResultValueType>::type - getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, - VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, - 
VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = Uint8_tAllocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type - getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, - VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, - VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, - Uint8_tAllocator & uint8_tAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_external_memory_win32 === - - template - VULKAN_HPP_NODISCARD Result getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, - HANDLE * pHandle, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_device_group === + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, - uint32_t localDeviceIndex, - uint32_t remoteDeviceIndex, - VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags - getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, - uint32_t localDeviceIndex, - uint32_t remoteDeviceIndex, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_KHR_maintenance1 === - - template - void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, - VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_memory_win32 === - - template - VULKAN_HPP_NODISCARD Result getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, - HANDLE * pHandle, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD typename ResultValueType::type - getMemoryWin32HandleKHR( const 
VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, - HANDLE handle, - VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD typename ResultValueType::type getMemoryWin32HandlePropertiesKHR( - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_memory_fd === + VULKAN_HPP_NODISCARD Result + writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, + const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + void * pData, + size_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type writeAccelerationStructuresPropertiesKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType::type writeAccelerationStructuresPropertyKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo, - int * pFd, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + DeviceAddress getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NAMESPACE::DeviceAddress + getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - 
VULKAN_HPP_NODISCARD Result getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, - int fd, - VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo, + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type getMemoryFdPropertiesKHR( - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_semaphore_win32 === + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + void getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo, + const uint32_t * pMaxPrimitiveCounts, + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR getAccelerationStructureBuildSizesKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, + VULKAN_HPP_NAMESPACE::ArrayProxy const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_ray_tracing_pipeline === template - VULKAN_HPP_NODISCARD Result getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, - HANDLE * pHandle, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + VULKAN_HPP_NODISCARD Result createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const 
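// ---------------------------------------------------------------------------
// Illustrative usage sketch (editorial, not part of the generated header):
// querying scratch and storage sizes for an acceleration-structure build via
// getAccelerationStructureBuildSizesKHR declared above. A minimal sketch that
// assumes `buildInfo` already describes the geometry layout (type, mode,
// geometryCount and pGeometries) and that `primitiveCount` matches it;
// identifiers are placeholders.
// ---------------------------------------------------------------------------
#include <vulkan/vulkan.hpp>

vk::AccelerationStructureBuildSizesInfoKHR
  queryBuildSizes( vk::Device                                            device,
                   const vk::AccelerationStructureBuildGeometryInfoKHR & buildInfo,
                   uint32_t                                              primitiveCount )
{
  // One count per geometry in buildInfo; a single uint32_t converts to the ArrayProxy.
  return device.getAccelerationStructureBuildSizesKHR(
      vk::AccelerationStructureBuildTypeKHR::eDevice, buildInfo, primitiveCount );
}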
VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template - VULKAN_HPP_NODISCARD typename ResultValueType::type - getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_semaphore_fd === + VULKAN_HPP_NODISCARD ResultValue + createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue> + createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE 
*/ template - VULKAN_HPP_NODISCARD Result importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType::type getRayTracingShaderGroupHandleKHR( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, - int * pFd, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result + getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType::type getRayTracingCaptureReplayShaderGroupHandleKHR( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template - VULKAN_HPP_NODISCARD typename ResultValueType::type getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + DeviceSize getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t group, + VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - //=== VK_KHR_descriptor_update_template === + //=== VK_KHR_sampler_ycbcr_conversion === template - VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplateKHR( const 
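// ---------------------------------------------------------------------------
// Illustrative usage sketch (editorial, not part of the generated header):
// fetching all shader-group handles of a ray-tracing pipeline with the
// getRayTracingShaderGroupHandlesKHR overload declared above. Assumes
// `handleSize` comes from the physical device's ray-tracing pipeline
// properties (shaderGroupHandleSize) and `groupCount` matches the pipeline;
// identifiers are placeholders.
// ---------------------------------------------------------------------------
#include <cstdint>
#include <vector>
#include <vulkan/vulkan.hpp>

std::vector<uint8_t> fetchGroupHandles( vk::Device   device,
                                        vk::Pipeline pipeline,
                                        uint32_t     groupCount,
                                        uint32_t     handleSize )
{
  // Enhanced-mode overload: sizes the vector for us and throws on error.
  return device.getRayTracingShaderGroupHandlesKHR<uint8_t>(
      pipeline, /*firstGroup=*/0, groupCount, size_t( groupCount ) * handleSize );
}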
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD typename ResultValueType::type + createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createDescriptorUpdateTemplateKHRUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, - Optional allocator - VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createSamplerYcbcrConversionKHRUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_bind_memory2 === template - void 
updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, - const void * pData, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result bindBufferMemory2KHR( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, - DataType const & data, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_EXT_display_control === + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, - const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result bindImageMemory2KHR( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - typename ResultValueType::type displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, - const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_image_drm_format_modifier === template - VULKAN_HPP_NODISCARD Result registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::Fence * pFence, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d 
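// ---------------------------------------------------------------------------
// Illustrative usage sketch (editorial, not part of the generated header):
// binding buffer memory through the VK_KHR_bind_memory2 overload declared
// above; the ArrayProxy parameter accepts a single BindBufferMemoryInfo as
// well as a whole batch. Assumes the handles are valid; identifiers are
// placeholders.
// ---------------------------------------------------------------------------
#include <vulkan/vulkan.hpp>

void bindBuffer( vk::Device device, vk::Buffer buffer, vk::DeviceMemory memory )
{
  vk::BindBufferMemoryInfo bindInfo( buffer, memory, /*memoryOffset=*/0 );
  device.bindBufferMemory2KHR( bindInfo );  // throws vk::SystemError on failure
}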
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template - VULKAN_HPP_NODISCARD Result registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, - const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::Fence * pFence, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_validation_cache === + + template + VULKAN_HPP_NODISCARD Result createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, - const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD typename ResultValueType::type + createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, - const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createValidationCacheEXTUnique( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, - uint64_t * pCounterValue, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_GOOGLE_display_timing === + void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - uint32_t * pPresentationTimingCount, - VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD typename ResultValueType>::type - getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = PastPresentationTimingGOOGLEAllocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type - getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_EXT_hdr_metadata === + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void setHdrMetadataEXT( uint32_t swapchainCount, - const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, - const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + size_t * pDataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - void setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & swapchains, - VULKAN_HPP_NAMESPACE::ArrayProxy const & metadata, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_KHR_create_renderpass2 === + //=== VK_NV_ray_tracing === template - VULKAN_HPP_NODISCARD Result createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD typename ResultValueType::type + createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createAccelerationStructureNVUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* 
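// ---------------------------------------------------------------------------
// Illustrative usage sketch (editorial, not part of the generated header):
// creating a validation cache and reading its serialized blob back with the
// VK_EXT_validation_cache overloads declared above. Assumes smart handles and
// enhanced mode are enabled; identifiers are placeholders.
// ---------------------------------------------------------------------------
#include <cstdint>
#include <vector>
#include <vulkan/vulkan.hpp>

std::vector<uint8_t> snapshotValidationCache( vk::Device device )
{
  vk::UniqueValidationCacheEXT cache =
      device.createValidationCacheEXTUnique( vk::ValidationCacheCreateInfoEXT() );
  // ... create shader modules / pipelines with the cache attached ...
  return device.getValidationCacheDataEXT( *cache );  // serialized cache contents
}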
VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_KHR_shared_presentable_image === + template + void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#else + void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_fence_win32 === + template + void getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR + getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + VULKAN_HPP_NODISCARD Result bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, + const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename 
ResultValueType::type - importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + bindAccelerationStructureMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, - HANDLE * pHandle, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + VULKAN_HPP_NODISCARD Result createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template - VULKAN_HPP_NODISCARD typename ResultValueType::type - getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_fence_fd === + VULKAN_HPP_NODISCARD ResultValue + createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + 
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue> + createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType::type getRayTracingShaderGroupHandleNV( + VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD Result getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + size_t dataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getAccelerationStructureHandleNV( + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD Result getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo, - int * pFd, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + VULKAN_HPP_NODISCARD Result compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t shader, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else template - VULKAN_HPP_NODISCARD typename ResultValueType::type 
getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - //=== VK_KHR_performance_query === + //=== VK_KHR_maintenance3 === template - VULKAN_HPP_NODISCARD Result acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - void releaseProfilingLockKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_EXT_debug_utils === + //=== VK_EXT_external_memory_host === template - VULKAN_HPP_NODISCARD Result setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_calibrated_timestamps === template - VULKAN_HPP_NODISCARD Result setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getCalibratedTimestampsEXT( uint32_t timestampCount, + const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, uint64_t>>::type + getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, uint64_t>>::type + getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, + Uint64_tAllocator & uint64_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo, + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_ANDROID_external_memory_android_hardware_buffer === + //=== VK_KHR_timeline_semaphore === template - VULKAN_HPP_NODISCARD Result - getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer * buffer, - VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + VULKAN_HPP_NODISCARD Result getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, + uint64_t * pValue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo, - struct AHardwareBuffer ** pBuffer, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + VULKAN_HPP_NODISCARD Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, + uint64_t timeout, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - - //=== VK_KHR_get_memory_requirements2 === + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, - VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 - getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template - VULKAN_HPP_NODISCARD StructureChain - getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_INTEL_performance_query === template - void getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, - VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 - getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template - VULKAN_HPP_NODISCARD StructureChain - getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 
template - void getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, - uint32_t * pSparseMemoryRequirementCount, - VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD std::vector - getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = SparseImageMemoryRequirements2Allocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD std::vector - getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, - SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_KHR_acceleration_structure === + void uninitializePerformanceApiINTEL( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD Result createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD typename ResultValueType::type + acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createAccelerationStructureKHRUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType>::type + acquirePerformanceConfigurationINTELUnique( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyAccelerationStructureKHR( 
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + VULKAN_HPP_NODISCARD Result releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else template - void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + VULKAN_HPP_NODISCARD Result release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else template - void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_NODISCARD Result buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - uint32_t infoCount, - const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos, - const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, + VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result buildAccelerationStructuresKHR( - VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, - VULKAN_HPP_NAMESPACE::ArrayProxy const & pBuildRangeInfos, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; 
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_AMD_display_native_hdr === template - VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, + VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_buffer_device_address === + + template + DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NAMESPACE::DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_present_wait === + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t presentId, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + uint64_t presentId, + uint64_t timeout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + VULKAN_HPP_NODISCARD Result acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# else template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result - copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD Result copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - const 
VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + VULKAN_HPP_NODISCARD Result releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# else template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result - copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template - VULKAN_HPP_NODISCARD Result - writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, - const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures, - VULKAN_HPP_NAMESPACE::QueryType queryType, - size_t dataSize, - void * pData, - size_t stride, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD typename ResultValueType>::type writeAccelerationStructuresPropertiesKHR( - VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, - VULKAN_HPP_NAMESPACE::QueryType queryType, - size_t dataSize, - size_t stride, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template - VULKAN_HPP_NODISCARD typename ResultValueType::type writeAccelerationStructuresPropertyKHR( - VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, - VULKAN_HPP_NAMESPACE::QueryType queryType, - size_t stride, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_buffer_device_address === template - DeviceAddress getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + DeviceAddress getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NAMESPACE::DeviceAddress - getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info, - Dispatch 
const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NAMESPACE::DeviceAddress getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo, - VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR - getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, - const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo, - const uint32_t * pMaxPrimitiveCounts, - VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR getAccelerationStructureBuildSizesKHR( - VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, - const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, - VULKAN_HPP_NAMESPACE::ArrayProxy const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_KHR_sampler_ycbcr_conversion === + //=== VK_EXT_host_query_reset === template - VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t 
queryCount,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+
+ //=== VK_KHR_deferred_host_operations ===
+
+ template
+ VULKAN_HPP_NODISCARD Result createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template
- VULKAN_HPP_NODISCARD typename ResultValueType::type
- createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
- Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+ VULKAN_HPP_NODISCARD typename ResultValueType::type
+ createDeferredOperationKHR( Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template
- VULKAN_HPP_NODISCARD typename ResultValueType>::type
- createSamplerYcbcrConversionKHRUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
- Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
-# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+ VULKAN_HPP_NODISCARD typename ResultValueType>::type
+ createDeferredOperationKHRUnique( Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
+# endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template
- void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
- const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template
- void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
- Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
-#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
- //=== VK_KHR_bind_memory2 ===
+ void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
+ Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
template
- VULKAN_HPP_NODISCARD Result bindBufferMemory2KHR( uint32_t bindInfoCount,
- const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
- Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
+ void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
+ const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
+ Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef
VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + uint32_t getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD Result getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD Result bindImageMemory2KHR( uint32_t bindInfoCount, - const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + VULKAN_HPP_NODISCARD Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - //=== VK_EXT_image_drm_format_modifier === + //=== VK_KHR_pipeline_executable_properties === template - VULKAN_HPP_NODISCARD Result getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, - VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo, + uint32_t * pExecutableCount, + VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD typename ResultValueType::type - getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getPipelineExecutablePropertiesKHR( const 
VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, + PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_EXT_validation_cache === + template + VULKAN_HPP_NODISCARD Result getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pStatisticCount, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, + PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result + getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pInternalRepresentationCount, + VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getPipelineExecutableInternalRepresentationsKHR( + const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, + PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_host_image_copy === + template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE + VULKAN_HPP_NODISCARD Result copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo * pCopyMemoryToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createValidationCacheEXTUnique( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo * pCopyImageToMemoryInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo * pCopyImageToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, - uint32_t srcCacheCount, - const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches, + VULKAN_HPP_NODISCARD Result transitionImageLayoutEXT( uint32_t transitionCount, + const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo * pTransitions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches, + transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, - size_t * pDataSize, - void * pData, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD typename ResultValueType>::type - getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = Uint8_tAllocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type - getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, - Uint8_tAllocator & uint8_tAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_NV_ray_tracing === + //=== VK_KHR_map_memory2 === template - VULKAN_HPP_NODISCARD Result createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfo * pMemoryMapInfo, + void ** ppData, + 
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createAccelerationStructureNVUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo * pMemoryUnmapInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + typename ResultValueType::type unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_swapchain_maintenance1 === template - void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT * pReleaseInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + typename ResultValueType::type releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_device_generated_commands === template - void getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo, - 
VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR - getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain - getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, - const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - bindAccelerationStructureMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createIndirectCommandsLayoutNVUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - uint32_t 
createInfoCount, - const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD ResultValue> - createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B0 = PipelineAllocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD ResultValue> - createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - PipelineAllocator & pipelineAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template - VULKAN_HPP_NODISCARD ResultValue - createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> - VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> - createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template >, - typename B0 = PipelineAllocator, - typename std::enable_if>::value, int>::type = 0> - VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> - createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - PipelineAllocator & pipelineAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template - VULKAN_HPP_NODISCARD ResultValue> - createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * 
pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_private_data === template - VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, - uint32_t firstGroup, - uint32_t groupCount, - size_t dataSize, - void * pData, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD typename ResultValueType>::type - getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, - uint32_t firstGroup, - uint32_t groupCount, - size_t dataSize, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template - VULKAN_HPP_NODISCARD typename ResultValueType::type getRayTracingShaderGroupHandleNV( - VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createPrivateDataSlotEXTUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, - size_t dataSize, - void * pData, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD typename ResultValueType>::type getAccelerationStructureHandleNV( - VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template - VULKAN_HPP_NODISCARD typename ResultValueType::type - getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV 
accelerationStructure, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD Result compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, - uint32_t shader, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #else template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + typename ResultValueType::type setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - //=== VK_KHR_maintenance3 === + template + void getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD uint64_t getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_video_encode_queue === template - void getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, - VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, + VULKAN_HPP_NODISCARD Result + getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR * pVideoSessionParametersInfo, + VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR * pFeedbackInfo, + size_t * pDataSize, + void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport - getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template - VULKAN_HPP_NODISCARD StructureChain - getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename 
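// A minimal usage sketch for the VK_EXT_private_data functions declared above: create a
// slot, attach a 64-bit payload to a buffer, read it back, and destroy the slot. Assumes
// exceptions-enabled enhanced mode and that the extension (or the Vulkan 1.3 private-data
// feature) is enabled on `device`; `buffer` and `payload` are assumed inputs.
#include <vulkan/vulkan.hpp>

uint64_t roundTripPrivateData( vk::Device device, vk::Buffer buffer, uint64_t payload )
{
  vk::PrivateDataSlot slot = device.createPrivateDataSlotEXT( vk::PrivateDataSlotCreateInfo{} );

  // Object handles are passed as raw uint64_t values together with their object type.
  uint64_t objectHandle = uint64_t( VkBuffer( buffer ) );
  device.setPrivateDataEXT( vk::ObjectType::eBuffer, objectHandle, slot, payload );

  uint64_t readBack = device.getPrivateDataEXT( vk::ObjectType::eBuffer, objectHandle, slot );

  device.destroyPrivateDataSlotEXT( slot );
  return readBack;
}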
std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>>::type + getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>>::type + getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, std::vector>>::type + getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, std::vector>>::type + getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_EXT_external_memory_host === +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === template - VULKAN_HPP_NODISCARD Result getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, - const void * pHostPointer, - VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + VULKAN_HPP_NODISCARD Result createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CudaModuleNV * pModule, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, - const void * pHostPointer, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + createCudaModuleNV( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createCudaModuleNVUnique( const VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_EXT_calibrated_timestamps === + template + 
VULKAN_HPP_NODISCARD Result getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + size_t * pCacheSize, + void * pCacheData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getCudaModuleCacheNV( + VULKAN_HPP_NAMESPACE::CudaModuleNV module, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result getCalibratedTimestampsEXT( uint32_t timestampCount, - const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos, - uint64_t * pTimestamps, - uint64_t * pMaxDeviation, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD typename ResultValueType, uint64_t>>::type - getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B0 = Uint64_tAllocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType, uint64_t>>::type - getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, - Uint64_tAllocator & uint64_tAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD Result createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::CudaFunctionNV * pFunction, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + createCudaFunctionNV( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createCudaFunctionNVUnique( const VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_KHR_timeline_semaphore === + template + void destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, - uint64_t * pValue, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + void destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroy( VULKAN_HPP_NAMESPACE::CudaModuleNV module, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, - uint64_t timeout, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + void destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, - uint64_t timeout, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroyCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + void destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroy( VULKAN_HPP_NAMESPACE::CudaFunctionNV function, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + 
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - //=== VK_INTEL_performance_query === +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === template - VULKAN_HPP_NODISCARD Result initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + void exportMetalObjectsEXT( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT + exportMetalObjectsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + exportMetalObjectsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ - template - void uninitializePerformanceApiINTEL( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_descriptor_buffer === template - VULKAN_HPP_NODISCARD Result acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, - VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, + VULKAN_HPP_NAMESPACE::DeviceSize * pLayoutSizeInBytes, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - acquirePerformanceConfigurationINTELUnique( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize + getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD Result releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; 
-#else + void getDescriptorSetLayoutBindingOffsetEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, + uint32_t binding, + VULKAN_HPP_NAMESPACE::DeviceSize * pOffset, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize + getDescriptorSetLayoutBindingOffsetEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, + uint32_t binding, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD Result release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#else + void getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT * pDescriptorInfo, + size_t dataSize, + void * pDescriptor, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, + size_t dataSize, + void * pDescriptor, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD DescriptorType getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, - VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result + getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT * pInfo, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD typename ResultValueType::type - getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_AMD_display_native_hdr === + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, - VULKAN_HPP_NAMESPACE::Bool32 
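// A minimal usage sketch for the VK_EXT_descriptor_buffer queries declared above: query
// the size of a descriptor set layout, the offset of one of its bindings, and write a
// uniform-buffer descriptor into host memory. The parameters `uniformBufferDescriptorSize`
// (normally taken from PhysicalDeviceDescriptorBufferPropertiesEXT), `bufferAddress` and
// `range` are assumed inputs; exceptions-enabled enhanced mode is assumed.
#include <vulkan/vulkan.hpp>
#include <cstdint>
#include <vector>

std::vector<uint8_t> writeUniformBufferDescriptor( vk::Device              device,
                                                   vk::DescriptorSetLayout layout,
                                                   uint32_t                binding,
                                                   vk::DeviceAddress       bufferAddress,
                                                   vk::DeviceSize          range,
                                                   size_t                  uniformBufferDescriptorSize )
{
  vk::DeviceSize layoutSize    = device.getDescriptorSetLayoutSizeEXT( layout );
  vk::DeviceSize bindingOffset = device.getDescriptorSetLayoutBindingOffsetEXT( layout, binding );

  std::vector<uint8_t> descriptorData( static_cast<size_t>( layoutSize ) );

  vk::DescriptorAddressInfoEXT addressInfo{};
  addressInfo.address = bufferAddress;
  addressInfo.range   = range;

  vk::DescriptorGetInfoEXT getInfo{};
  getInfo.type                = vk::DescriptorType::eUniformBuffer;
  getInfo.data.pUniformBuffer = &addressInfo;

  // Write the descriptor at the binding's offset within the layout-sized blob.
  device.getDescriptorEXT( getInfo, uniformBufferDescriptorSize, descriptorData.data() + bindingOffset );
  return descriptorData;
}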
localDimmingEnable, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT * pInfo, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_EXT_buffer_device_address === + template + VULKAN_HPP_NODISCARD Result + getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT * pInfo, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result + getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT * pInfo, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template - VULKAN_HPP_NAMESPACE::DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD Result + getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_KHR_present_wait === + //=== VK_EXT_device_fault === -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD Result waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - uint64_t presentId, - uint64_t timeout, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#else template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - uint64_t 
presentId, - uint64_t timeout, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD Result getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts, + VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_EXT_full_screen_exclusive === + template + VULKAN_HPP_NODISCARD Result getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD Result acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# else + VULKAN_HPP_NODISCARD Result + getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === -# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD Result releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# else + VULKAN_HPP_NODISCARD Result + importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, + VULKAN_HPP_NODISCARD Result getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - //=== VK_KHR_buffer_device_address === +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === template - DeviceAddress getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NAMESPACE::DeviceAddress getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - + VULKAN_HPP_NODISCARD Result createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + VULKAN_HPP_NODISCARD typename ResultValueType::type + createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE template - uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createBufferCollectionFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template 
- uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_EXT_host_query_reset === - + VULKAN_HPP_NODISCARD Result + setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - - //=== VK_KHR_deferred_host_operations === + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createDeferredOperationKHR( Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE + VULKAN_HPP_NODISCARD Result + setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createDeferredOperationKHRUnique( Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + void destroyBufferCollectionFUCHSIA( 
VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + void destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, + void destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - template - uint32_t getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD Result getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#else - template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result - getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#else + VULKAN_HPP_NODISCARD Result getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - //=== VK_KHR_pipeline_executable_properties === + //=== VK_HUAWEI_subpass_shading === template - VULKAN_HPP_NODISCARD Result getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo, - uint32_t * pExecutableCount, - VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result + getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, + VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD - typename ResultValueType>::type - getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = PipelineExecutablePropertiesKHRAllocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD - typename ResultValueType>::type - getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, - PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template - VULKAN_HPP_NODISCARD Result getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, - uint32_t * pStatisticCount, - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD - typename ResultValueType>::type - getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = PipelineExecutableStatisticKHRAllocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD - typename ResultValueType>::type - getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, - PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_external_memory_rdma === template - VULKAN_HPP_NODISCARD Result - getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo, - uint32_t * pInternalRepresentationCount, - VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, + VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD typename ResultValueType< - std::vector>::type - getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = PipelineExecutableInternalRepresentationKHRAllocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType< - std::vector>::type - getPipelineExecutableInternalRepresentationsKHR( - const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, - PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_NV_device_generated_commands === + //=== VK_EXT_pipeline_properties === template - void getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo, - VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT * pPipelineInfo, + VULKAN_HPP_NAMESPACE::BaseOutStructure * pPipelineProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 - getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template - VULKAN_HPP_NODISCARD StructureChain - getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_opacity_micromap === template - VULKAN_HPP_NODISCARD Result createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV * pIndirectCommandsLayout, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromap, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createIndirectCommandsLayoutNVUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createMicromapEXTUnique( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + void destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, + void destroy( 
VULKAN_HPP_NAMESPACE::MicromapEXT micromap, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_EXT_private_data === + template + VULKAN_HPP_NODISCARD Result buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createPrivateDataSlotEXTUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD Result copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD Result setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, - uint64_t objectHandle, - VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, - uint64_t data, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#else + VULKAN_HPP_NODISCARD Result writeMicromapsPropertiesEXT( uint32_t micromapCount, + const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + void * pData, + size_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template - typename ResultValueType::type setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, - uint64_t objectHandle, - VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, - uint64_t data, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT * pVersionInfo, + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, - uint64_t objectHandle, - 
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, - uint64_t * pData, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pBuildInfo, + VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT * pSizeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD uint64_t getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, - uint64_t objectHandle, - VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT + getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_objects === + //=== VK_EXT_pageable_device_local_memory === template - void exportMetalObjectsEXT( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + void setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory, + float priority, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_maintenance4 === + template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT - exportMetalObjectsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain - exportMetalObjectsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - - //=== VK_EXT_image_compression_control === + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT * pSubresource, - VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT * pLayout, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * 
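// Illustrative usage sketch (not part of the generated header): driving the enhanced-mode
// VK_EXT_opacity_micromap wrappers declared above. Assumes <vulkan/vulkan.hpp> with the default
// namespace alias `vk`, exceptions enabled, the default dispatcher, a device with the extension
// enabled, and build/create infos populated elsewhere.
inline void micromapUsageSketch( vk::Device device )
{
  vk::MicromapBuildInfoEXT buildInfo{};  // type, usage counts and data pointers filled elsewhere
  vk::MicromapBuildSizesInfoEXT sizes =
    device.getMicromapBuildSizesEXT( vk::AccelerationStructureBuildTypeKHR::eDevice, buildInfo );

  vk::MicromapCreateInfoEXT createInfo{};  // backing buffer/offset sized from `sizes` elsewhere
  vk::MicromapEXT micromap = device.createMicromapEXT( createInfo );  // throws vk::SystemError on failure

  device.destroyMicromapEXT( micromap );
  (void)sizes;
}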
pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT - getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain - getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_KHR_ray_tracing_pipeline === + template + void getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, + SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_VALVE_descriptor_set_host_mapping === template - VULKAN_HPP_NODISCARD Result createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - uint32_t createInfoCount, - const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE * pBindingReference, + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE * pHostMapping, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = 
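// Illustrative usage sketch (not generated): VK_KHR_maintenance4 lets memory requirements be
// queried from a create-info alone, before the buffer exists. Assumes exceptions enabled and
// the default dispatcher; the pCreateInfo member is assumed to mirror the C struct
// VkDeviceBufferMemoryRequirements.
inline vk::MemoryRequirements2 bufferRequirementsSketch( vk::Device device,
                                                         const vk::BufferCreateInfo & bufferCreateInfo )
{
  vk::DeviceBufferMemoryRequirements info{};
  info.pCreateInfo = &bufferCreateInfo;                  // assumed member name, as in the C API
  return device.getBufferMemoryRequirementsKHR( info );  // noexcept wrapper, returns by value
}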
VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD ResultValue> - createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B0 = PipelineAllocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD ResultValue> - createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - PipelineAllocator & pipelineAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template - VULKAN_HPP_NODISCARD ResultValue - createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> - VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> - createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template >, - typename B0 = PipelineAllocator, - typename std::enable_if>::value, int>::type = 0> - VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> - createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - Optional allocator, - PipelineAllocator & pipelineAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE + getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template - VULKAN_HPP_NODISCARD ResultValue> - createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, - const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + void ** ppData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD void * getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_device_generated_commands_compute === template - VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, - uint32_t firstGroup, - uint32_t groupCount, - size_t dataSize, - void * pData, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD typename ResultValueType>::type - getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, - uint32_t firstGroup, - uint32_t groupCount, - size_t dataSize, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template - VULKAN_HPP_NODISCARD typename ResultValueType::type getRayTracingShaderGroupHandleKHR( - VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result - getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, - uint32_t firstGroup, - uint32_t groupCount, - size_t dataSize, - void * pData, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + DeviceAddress getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD typename ResultValueType>::type - getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, - uint32_t firstGroup, - uint32_t groupCount, - size_t dataSize, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template - VULKAN_HPP_NODISCARD typename ResultValueType::type getRayTracingCaptureReplayShaderGroupHandleKHR( - VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + VULKAN_HPP_NAMESPACE::DeviceAddress getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== 
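// Illustrative usage sketch (not generated): VK_VALVE_descriptor_set_host_mapping exposes a raw
// host pointer into a descriptor set's storage through the wrapper above; the extension is
// primarily intended for translation layers. Assumes the extension is enabled on `device`.
inline void * descriptorSetHostMappingSketch( vk::Device device, vk::DescriptorSet descriptorSet )
{
  return device.getDescriptorSetHostMappingVALVE( descriptorSet );  // noexcept, returns the mapped pointer
}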
VK_EXT_shader_module_identifier === + + template + void getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT + getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - DeviceSize getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, - uint32_t group, - VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT + getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_memory === + //=== VK_NV_optical_flow === template - VULKAN_HPP_NODISCARD Result getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, - zx_handle_t * pZirconHandle, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + VULKAN_HPP_NODISCARD Result createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV * pSession, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createOpticalFlowSessionNVUnique( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result - getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, - zx_handle_t 
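// Illustrative usage sketch (not generated): VK_EXT_shader_module_identifier returns a compact
// identifier that can later stand in for full SPIR-V when creating pipelines. Both wrappers
// above are noexcept and return the identifier by value in enhanced mode. Assumes exceptions
// enabled and the default dispatcher.
inline vk::ShaderModuleIdentifierEXT identifierSketch( vk::Device device, vk::ShaderModule module )
{
  return device.getShaderModuleIdentifierEXT( module );
}
inline vk::ShaderModuleIdentifierEXT identifierFromCreateInfoSketch( vk::Device device,
                                                                     const vk::ShaderModuleCreateInfo & createInfo )
{
  return device.getShaderModuleCreateInfoIdentifierEXT( createInfo );  // no VkShaderModule needed
}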
zirconHandle, - VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + void destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, - zx_handle_t zirconHandle, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_FUCHSIA*/ + void destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_semaphore === + template + void destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD Result - importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + VULKAN_HPP_NODISCARD Result bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, + VULKAN_HPP_NAMESPACE::ImageView view, + VULKAN_HPP_NAMESPACE::ImageLayout layout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#else template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, + VULKAN_HPP_NAMESPACE::ImageView view, + VULKAN_HPP_NAMESPACE::ImageLayout layout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + //=== VK_KHR_maintenance5 === template - VULKAN_HPP_NODISCARD Result getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, - zx_handle_t * pZirconHandle, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# 
ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + void getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo * pRenderingAreaInfo, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_FUCHSIA*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D + getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ -#if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_buffer_collection === + template + void getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo * pInfo, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + void getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# ifndef VULKAN_HPP_NO_SMART_HANDLE + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
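// Illustrative usage sketch (not generated): the VK_KHR_maintenance5 queries above return their
// results by value in enhanced mode. Assumes exceptions enabled and the default dispatcher;
// the RenderingAreaInfo / ImageSubresource2 contents are placeholders filled elsewhere.
inline void maintenance5Sketch( vk::Device device, vk::Image image )
{
  vk::RenderingAreaInfo areaInfo{};     // attachment formats and view mask filled elsewhere
  vk::Extent2D granularity = device.getRenderingAreaGranularityKHR( areaInfo );

  vk::ImageSubresource2 subresource{};  // aspect mask, mip level, array layer filled elsewhere
  vk::SubresourceLayout2 layout = device.getImageSubresourceLayout2KHR( image, subresource );
  (void)granularity;
  (void)layout;
}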
VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_AMD_anti_lag === + template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - createBufferCollectionFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo, + void antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_EXT_shader_object === + + template + VULKAN_HPP_NODISCARD Result createShadersEXT( uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + ShaderEXTAllocator & shaderEXTAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue + createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, ShaderEXTAllocator>> + createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename std::enable_if>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, ShaderEXTAllocator>> + createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + ShaderEXTAllocator & shaderEXTAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue> + createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result - setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, - const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo, - Dispatch 
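// Illustrative usage sketch (not generated): VK_AMD_anti_lag is driven by the single update
// call declared just above. Assumes the AntiLagDataAMD contents (mode, optional presentation
// info) are filled elsewhere; the wrapper is noexcept.
inline void antiLagSketch( vk::Device device )
{
  vk::AntiLagDataAMD data{};        // mode / presentation info filled elsewhere
  device.antiLagUpdateAMD( data );  // typically called around input sampling and present
}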
const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + void destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, - const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result - setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, - const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + void destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, - const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + VULKAN_HPP_NODISCARD Result getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, + size_t * pDataSize, + void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getShaderBinaryDataEXT( + VULKAN_HPP_NAMESPACE::ShaderEXT shader, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d 
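// Illustrative usage sketch (not generated): VK_EXT_shader_object creation above returns a
// ResultValue rather than throwing on every non-success code, because an incompatible shader
// binary can be reported as a non-error result. Assumes exceptions enabled and a
// ShaderCreateInfoEXT populated elsewhere (stage, code, entry point).
inline void shaderObjectSketch( vk::Device device, const vk::ShaderCreateInfoEXT & createInfo )
{
  auto created = device.createShaderEXT( createInfo );  // ResultValue<vk::ShaderEXT>
  if ( created.result == vk::Result::eSuccess )
  {
    std::vector<uint8_t> binary = device.getShaderBinaryDataEXT( created.value );  // cacheable blob
    device.destroyShaderEXT( created.value );
    (void)binary;
  }
}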
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_pipeline_binary === + template - void destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, - Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD Result createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR * pBinaries, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template < + typename PipelineBinaryKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename PipelineBinaryKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional allocator, + PipelineBinaryKHRAllocator & pipelineBinaryKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >, + typename std::enable_if< + std::is_same>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineBinaryKHRAllocator>> + createPipelineBinariesKHRUnique( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename std::enable_if< + std::is_same>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineBinaryKHRAllocator>> + createPipelineBinariesKHRUnique( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional allocator, + PipelineBinaryKHRAllocator & pipelineBinaryKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyPipelineBinaryKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPipelineBinaryKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + void destroy( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch 
const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, + void destroy( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, - VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + VULKAN_HPP_NODISCARD Result getPipelineKeyKHR( const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR * pPipelineCreateInfo, + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineKey, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_USE_PLATFORM_FUCHSIA*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + getPipelineKeyKHR( Optional pipelineCreateInfo VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_HUAWEI_subpass_shading === + template + VULKAN_HPP_NODISCARD Result getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR * pInfo, + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineBinaryKey, + size_t * pPipelineBinaryDataSize, + void * pPipelineBinaryData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>>::type + getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>>::type + getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + Result releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR * pInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR & info, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d 
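// Illustrative usage sketch (not generated): VK_KHR_pipeline_binary extraction with the
// wrappers above: create binaries, then read back each key/blob pair for an on-disk cache.
// Assumes exceptions enabled; the PipelineBinaryCreateInfoKHR (which pipeline or keys the
// binaries come from) is set up elsewhere, and the pipelineBinary member of
// PipelineBinaryDataInfoKHR is assumed to mirror the C struct.
inline void pipelineBinarySketch( vk::Device device, const vk::PipelineBinaryCreateInfoKHR & createInfo )
{
  auto created = device.createPipelineBinariesKHR( createInfo );  // ResultValue: may report ePipelineBinaryMissingKHR
  if ( created.result != vk::Result::eSuccess )
    return;
  for ( vk::PipelineBinaryKHR binary : created.value )
  {
    vk::PipelineBinaryDataInfoKHR dataInfo{};
    dataInfo.pipelineBinary = binary;                               // assumed member name, as in the C API
    auto keyAndData = device.getPipelineBinaryDataKHR( dataInfo );  // pair of PipelineBinaryKeyKHR and byte vector
    device.destroyPipelineBinaryKHR( binary );
    (void)keyAndData;
  }
}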
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_QCOM_tile_properties === template - VULKAN_HPP_NODISCARD Result - getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, - VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + uint32_t * pPropertiesCount, + VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template < + typename TilePropertiesQCOMAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename TilePropertiesQCOMAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, + TilePropertiesQCOMAllocator & tilePropertiesQCOMAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + Result getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, + VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD ResultValue - getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::TilePropertiesQCOM + getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_NV_external_memory_rdma === + //=== VK_NV_cooperative_vector === + + template + VULKAN_HPP_NODISCARD Result convertCooperativeVectorMatrixNV( const VULKAN_HPP_NAMESPACE::ConvertCooperativeVectorMatrixInfoNV * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result convertCooperativeVectorMatrixNV( const VULKAN_HPP_NAMESPACE::ConvertCooperativeVectorMatrixInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_low_latency2 === + + template + VULKAN_HPP_NODISCARD Result setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV * pSleepModeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type 
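// Illustrative usage sketch (not generated): the VK_QCOM_tile_properties queries above return
// the tiler's tile layout, either per framebuffer or for a dynamic-rendering setup. Assumes
// exceptions enabled; the RenderingInfo contents are placeholders filled elsewhere.
inline void tilePropertiesSketch( vk::Device device, vk::Framebuffer framebuffer )
{
  std::vector<vk::TilePropertiesQCOM> tileProps = device.getFramebufferTilePropertiesQCOM( framebuffer );

  vk::RenderingInfo renderingInfo{};  // color/depth attachments filled elsewhere
  vk::TilePropertiesQCOM dynamicProps = device.getDynamicRenderingTilePropertiesQCOM( renderingInfo );
  (void)tileProps;
  (void)dynamicProps;
}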
setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + Result latencySleepNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV * pSleepInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void latencySleepNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV * pLatencyMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV * pLatencyMarkerInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template < + typename LatencyTimingsFrameReportNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename LatencyTimingsFrameReportNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + LatencyTimingsFrameReportNVAllocator & latencyTimingsFrameReportNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + + template + VULKAN_HPP_NODISCARD Result getScreenBufferPropertiesQNX( const struct _screen_buffer * buffer, + VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_calibrated_timestamps === template - VULKAN_HPP_NODISCARD Result 
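// Illustrative usage sketch (not generated): a per-frame VK_NV_low_latency2 flow with the
// wrappers above. Assumes exceptions enabled, a swapchain created with low-latency support,
// and that the sleep-mode / sleep / marker infos carry the frame's present ID (filled
// elsewhere).
inline void lowLatencySketch( vk::Device device, vk::SwapchainKHR swapchain )
{
  vk::LatencySleepModeInfoNV sleepMode{};  // low-latency mode and boost flags filled elsewhere
  device.setLatencySleepModeNV( swapchain, sleepMode );  // enhanced wrapper throws on failure

  vk::LatencySleepInfoNV sleepInfo{};      // signal semaphore and value filled elsewhere
  device.latencySleepNV( swapchain, sleepInfo );         // requests the driver-managed frame delay

  std::vector<vk::LatencyTimingsFrameReportNV> timings = device.getLatencyTimingsNV( swapchain );
  (void)timings;
}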
getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, - VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result getCalibratedTimestampsKHR( uint32_t timestampCount, + const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, uint64_t>>::type + getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType, uint64_t>>::type + getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, + Uint64_tAllocator & uint64_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template - VULKAN_HPP_NODISCARD typename ResultValueType::type - getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCalibratedTimestampKHR( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_EXT_pipeline_properties === + //=== VK_NV_cluster_acceleration_structure === template - VULKAN_HPP_NODISCARD Result getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT * pPipelineInfo, - VULKAN_HPP_NAMESPACE::BaseOutStructure * pPipelineProperties, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getClusterAccelerationStructureBuildSizesNV( const VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV * pInfo, + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type - getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR + getClusterAccelerationStructureBuildSizesNV( const VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_EXT_pageable_device_local_memory === + //=== VK_NV_partitioned_acceleration_structure === template - void setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory, - float priority, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void 
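// Illustrative usage sketch (not generated): VK_KHR_calibrated_timestamps as declared above.
// The single-info overload returns the timestamp together with the maximum deviation.
// Assumes exceptions enabled; the timeDomain member is assumed to mirror the C struct
// VkCalibratedTimestampInfoKHR.
inline void calibratedTimestampSketch( vk::Device device )
{
  vk::CalibratedTimestampInfoKHR info{};
  info.timeDomain = vk::TimeDomainKHR::eDevice;                           // assumed member/enum, as in the C API
  auto timestampAndDeviation = device.getCalibratedTimestampKHR( info );  // std::pair<uint64_t, uint64_t>
  uint64_t deviceTicks  = timestampAndDeviation.first;
  uint64_t maxDeviation = timestampAndDeviation.second;
  (void)deviceTicks;
  (void)maxDeviation;
}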
getPartitionedAccelerationStructuresBuildSizesNV( const VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV * pInfo, + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR + getPartitionedAccelerationStructuresBuildSizesNV( const VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_KHR_maintenance4 === + //=== VK_EXT_device_generated_commands === template - void getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, - VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 - getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain - getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, - VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createIndirectCommandsLayoutEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT * pIndirectCommandsLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 - getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template - VULKAN_HPP_NODISCARD StructureChain - getImageMemoryRequirementsKHR( const 
VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD typename ResultValueType::type + createIndirectCommandsLayoutEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createIndirectCommandsLayoutEXTUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, - uint32_t * pSparseMemoryRequirementCount, - VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroyIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD std::vector - getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = SparseImageMemoryRequirements2Allocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD std::vector - getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, - SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_VALVE_descriptor_set_host_mapping === + template + void destroyIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE * pBindingReference, - VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE * pHostMapping, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE - 
getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, - void ** ppData, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result createIndirectExecutionSetEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT * pIndirectExecutionSet, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD void * getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_EXT_shader_module_identifier === + VULKAN_HPP_NODISCARD typename ResultValueType::type + createIndirectExecutionSetEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createIndirectExecutionSetEXTUnique( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, - VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroyIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT - getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void destroyIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * 
pCreateInfo, - VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void destroy( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT - getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - //=== VK_QCOM_tile_properties === + void destroy( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, - uint32_t * pPropertiesCount, - VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void updateIndirectExecutionSetPipelineEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT * pExecutionSetWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD typename ResultValueType>::type - getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = TilePropertiesQCOMAllocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type - getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, - TilePropertiesQCOMAllocator & tilePropertiesQCOMAllocator, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template + void updateIndirectExecutionSetPipelineEXT( + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - Result getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, - VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void updateIndirectExecutionSetShaderEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetShaderEXT * pExecutionSetWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD 
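
// Illustrative usage sketch (not part of the generated declarations): a minimal flow through the
// VK_EXT_device_generated_commands device-level calls shown above. The create-info contents are
// elided because they depend on the token stream being generated, and the field names used on
// GeneratedCommandsMemoryRequirementsInfoEXT (indirectExecutionSet, indirectCommandsLayout,
// maxSequenceCount) are assumptions based on the extension's C structs, not something spelled
// out in this diff.
#include <vulkan/vulkan.hpp>

static void sketchDeviceGeneratedCommands( vk::Device device )
{
  // Describe the layout of the command tokens the GPU will consume.
  vk::IndirectCommandsLayoutCreateInfoEXT layoutCreateInfo{ /* tokens, stride, pipeline layout, ... */ };
  vk::UniqueIndirectCommandsLayoutEXT commandsLayout = device.createIndirectCommandsLayoutEXTUnique( layoutCreateInfo );

  // Describe the set of pipelines (or shader objects) the generated commands may select.
  vk::IndirectExecutionSetCreateInfoEXT executionSetCreateInfo{ /* type + pipeline/shader info, ... */ };
  vk::UniqueIndirectExecutionSetEXT executionSet = device.createIndirectExecutionSetEXTUnique( executionSetCreateInfo );

  // Query how much preprocess/scratch memory command generation will need.
  vk::GeneratedCommandsMemoryRequirementsInfoEXT memoryInfo{};
  memoryInfo.indirectExecutionSet   = *executionSet;    // assumed field name
  memoryInfo.indirectCommandsLayout = *commandsLayout;  // assumed field name
  memoryInfo.maxSequenceCount       = 1024;             // assumed field name
  vk::MemoryRequirements2 requirements = device.getGeneratedCommandsMemoryRequirementsEXT( memoryInfo );
  (void)requirements;  // size a preprocess buffer from requirements.memoryRequirements
}
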
VULKAN_HPP_NAMESPACE::TilePropertiesQCOM - getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + void updateIndirectExecutionSetShaderEXT( + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + + template + VULKAN_HPP_NODISCARD Result getMemoryMetalHandleEXT( const VULKAN_HPP_NAMESPACE::MemoryGetMetalHandleInfoEXT * pGetMetalHandleInfo, + void ** pHandle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getMemoryMetalHandleEXT( const VULKAN_HPP_NAMESPACE::MemoryGetMetalHandleInfoEXT & getMetalHandleInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getMemoryMetalHandlePropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + const void * pHandle, + VULKAN_HPP_NAMESPACE::MemoryMetalHandlePropertiesEXT * pMemoryMetalHandleProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getMemoryMetalHandlePropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, + HandleType const & handle, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ operator VkDevice() const VULKAN_HPP_NOEXCEPT { @@ -11391,6 +15448,14 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::Device; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Device; + }; +#endif + template <> struct isVulkanHandleType { @@ -11408,11 +15473,28 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR; public: - VULKAN_HPP_CONSTEXPR DisplayModeKHR() = default; - VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + DisplayModeKHR() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + DisplayModeKHR( DisplayModeKHR const & rhs ) = default; + DisplayModeKHR & operator=( DisplayModeKHR const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DisplayModeKHR( DisplayModeKHR && rhs ) = default; + DisplayModeKHR & operator=( DisplayModeKHR && rhs ) = default; +#else + DisplayModeKHR( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT : m_displayModeKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_displayModeKHR, {} ) ) {} + + DisplayModeKHR & operator=( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + m_displayModeKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_displayModeKHR, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT : m_displayModeKHR( displayModeKHR ) {} -#if 
defined( VULKAN_HPP_TYPESAFE_CONVERSION ) +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) DisplayModeKHR & operator=( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT { m_displayModeKHR = displayModeKHR; @@ -11426,25 +15508,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DisplayModeKHR const & ) const = default; -#else - bool operator==( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayModeKHR == rhs.m_displayModeKHR; - } - - bool operator!=( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayModeKHR != rhs.m_displayModeKHR; - } - - bool operator<( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayModeKHR < rhs.m_displayModeKHR; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const VULKAN_HPP_NOEXCEPT { return m_displayModeKHR; @@ -11476,21 +15539,19 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> - struct isVulkanHandleType + struct CppType { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR; }; +#endif -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - class UniqueHandleTraits + template <> + struct isVulkanHandleType { - public: - using deleter = ObjectDestroy; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - using UniqueDevice = UniqueHandle; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ class PhysicalDevice { @@ -11503,8 +15564,25 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice; public: - VULKAN_HPP_CONSTEXPR PhysicalDevice() = default; + PhysicalDevice() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + PhysicalDevice( PhysicalDevice const & rhs ) = default; + PhysicalDevice & operator=( PhysicalDevice const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + PhysicalDevice( PhysicalDevice && rhs ) = default; + PhysicalDevice & operator=( PhysicalDevice && rhs ) = default; +#else + PhysicalDevice( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT : m_physicalDevice( VULKAN_HPP_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ) {} + + PhysicalDevice & operator=( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT + { + m_physicalDevice = VULKAN_HPP_NAMESPACE::exchange( rhs.m_physicalDevice, {} ); + return *this; + } +#endif + VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + PhysicalDevice( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT : m_physicalDevice( physicalDevice ) {} PhysicalDevice & operator=( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT @@ -11519,25 +15597,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevice const & ) const = default; -#else - bool operator==( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_physicalDevice == rhs.m_physicalDevice; - } - - bool operator!=( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_physicalDevice != rhs.m_physicalDevice; - } - - bool operator<( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_physicalDevice < rhs.m_physicalDevice; - } -#endif - //=== VK_VERSION_1_0 === template @@ -11547,7 +15606,7 @@ namespace VULKAN_HPP_NAMESPACE template VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures( Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, @@ -11557,7 +15616,7 @@ namespace VULKAN_HPP_NAMESPACE template VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD Result getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, @@ -11576,7 +15635,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties, @@ -11585,7 +15644,7 @@ namespace VULKAN_HPP_NAMESPACE template VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, @@ -11593,17 +15652,19 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = QueueFamilyPropertiesAllocator, - typename std::enable_if::value, int>::type = 0> + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties, @@ -11612,7 +15673,7 @@ namespace VULKAN_HPP_NAMESPACE template VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD Result createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo, @@ -11631,8 +15692,8 @@ namespace VULKAN_HPP_NAMESPACE createDeviceUnique( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template 
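
// Illustrative usage sketch: the core VK_VERSION_1_0 physical-device queries declared above
// (getProperties, getQueueFamilyProperties) used to pick a queue family with graphics support.
// Selection policy and error handling are deliberately minimal.
#include <vulkan/vulkan.hpp>
#include <optional>

static std::optional<uint32_t> findGraphicsQueueFamily( vk::PhysicalDevice physicalDevice )
{
  vk::PhysicalDeviceProperties properties = physicalDevice.getProperties();
  (void)properties;  // e.g. filter on properties.deviceType or properties.limits here

  std::vector<vk::QueueFamilyProperties> queueFamilies = physicalDevice.getQueueFamilyProperties();
  for ( uint32_t i = 0; i < queueFamilies.size(); ++i )
  {
    if ( queueFamilies[i].queueFlags & vk::QueueFlagBits::eGraphics )
    {
      return i;
    }
  }
  return std::nullopt;
}
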
VULKAN_HPP_NODISCARD Result enumerateDeviceExtensionProperties( const char * pLayerName, @@ -11640,36 +15701,39 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename ExtensionPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateDeviceExtensionProperties( Optional layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = ExtensionPropertiesAllocator, - typename std::enable_if::value, int>::type = 0> + template < + typename ExtensionPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateDeviceExtensionProperties( Optional layerName, ExtensionPropertiesAllocator & extensionPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD Result enumerateDeviceLayerProperties( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateDeviceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = LayerPropertiesAllocator, - typename std::enable_if::value, int>::type = 0> + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, @@ -11681,8 +15745,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename SparseImageFormatPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, @@ -11690,10 +15757,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT 
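
// Illustrative usage sketch: with exceptions enabled, the enhanced-mode overload of
// enumerateDeviceExtensionProperties declared above returns the full vector directly,
// so checking whether a device extension is available is a simple scan.
#include <vulkan/vulkan.hpp>
#include <cstring>

static bool deviceSupportsExtension( vk::PhysicalDevice physicalDevice, char const * extensionName )
{
  std::vector<vk::ExtensionProperties> extensions = physicalDevice.enumerateDeviceExtensionProperties();
  for ( auto const & extension : extensions )
  {
    if ( std::strcmp( extension.extensionName.data(), extensionName ) == 0 )
    {
      return true;
    }
  }
  return false;
}
// e.g. deviceSupportsExtension( physicalDevice, VK_KHR_SWAPCHAIN_EXTENSION_NAME )
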
) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = SparseImageFormatPropertiesAllocator, - typename std::enable_if::value, int>::type = 0> + template < + typename SparseImageFormatPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, @@ -11702,7 +15770,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageTiling tiling, SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_VERSION_1_1 === @@ -11714,8 +15782,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, @@ -11725,8 +15794,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, @@ -11737,9 +15807,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD Result getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, @@ -11751,10 +15821,10 @@ namespace VULKAN_HPP_NAMESPACE getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template - VULKAN_HPP_NODISCARD typename ResultValueType>::type + VULKAN_HPP_NODISCARD typename ResultValueType>::type getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d 
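
// Illustrative usage sketch: the StructureChain-returning getFeatures2 overload declared above
// queries the core feature struct together with a chained feature struct in a single call; here
// the chained element is PhysicalDeviceVulkan12Features.
#include <vulkan/vulkan.hpp>

static bool supportsTimelineSemaphores( vk::PhysicalDevice physicalDevice )
{
  auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan12Features>();
  return chain.get<vk::PhysicalDeviceVulkan12Features>().timelineSemaphore == VK_TRUE;
}
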
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, @@ -11762,29 +15832,31 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = QueueFamilyProperties2Allocator, - typename std::enable_if::value, int>::type = 0> + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = StructureChainAllocator, - typename std::enable_if::value, int>::type = 0> + typename StructureChainAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, @@ -11794,9 +15866,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, @@ -11805,19 +15877,23 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = SparseImageFormatProperties2Allocator, - typename 
std::enable_if::value, int>::type = 0> + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, @@ -11828,7 +15904,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, @@ -11839,7 +15915,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, @@ -11850,7 +15926,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_VERSION_1_3 === @@ -11860,17 +15936,21 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getToolProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = PhysicalDeviceToolPropertiesAllocator, - typename std::enable_if::value, int>::type = 0> + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getToolProperties( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_KHR_surface === @@ -11883,7 +15963,7 @@ namespace VULKAN_HPP_NAMESPACE template VULKAN_HPP_NODISCARD typename ResultValueType::type getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD Result getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, @@ -11893,7 +15973,7 @@ namespace VULKAN_HPP_NAMESPACE template VULKAN_HPP_NODISCARD typename ResultValueType::type getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD Result getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, @@ -11901,20 +15981,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename SurfaceFormatKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = SurfaceFormatKHRAllocator, - typename std::enable_if::value, int>::type = 0> + template < + typename SurfaceFormatKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD Result getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, @@ -11922,19 +16004,20 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = PresentModeKHRAllocator, - typename std::enable_if::value, int>::type = 0> + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, PresentModeKHRAllocator & presentModeKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_KHR_swapchain === @@ -11944,16 +16027,17 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Rect2D * pRects, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; 
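
// Illustrative usage sketch: gathering everything the VK_KHR_surface queries above provide ahead
// of swapchain creation, using the enhanced-mode overloads that return their results by value
// (throwing on error in the default configuration).
#include <vulkan/vulkan.hpp>

struct SurfaceSupport
{
  vk::SurfaceCapabilitiesKHR        capabilities;
  std::vector<vk::SurfaceFormatKHR> formats;
  std::vector<vk::PresentModeKHR>   presentModes;
};

static SurfaceSupport querySurfaceSupport( vk::PhysicalDevice physicalDevice, vk::SurfaceKHR surface )
{
  SurfaceSupport support;
  support.capabilities = physicalDevice.getSurfaceCapabilitiesKHR( surface );
  support.formats      = physicalDevice.getSurfaceFormatsKHR( surface );
  support.presentModes = physicalDevice.getSurfacePresentModesKHR( surface );
  return support;
}
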
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = Rect2DAllocator, - typename std::enable_if::value, int>::type = 0> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_KHR_display === @@ -11963,35 +16047,41 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = DisplayPropertiesKHRAllocator, - typename std::enable_if::value, int>::type = 0> + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD Result getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename DisplayPlanePropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlanePropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = DisplayPlanePropertiesKHRAllocator, - typename std::enable_if::value, int>::type = 0> + template < + typename DisplayPlanePropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, @@ -11999,16 
+16089,17 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = DisplayKHRAllocator, - typename std::enable_if::value, int>::type = 0> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD Result getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, @@ -12016,19 +16107,23 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename DisplayModePropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = DisplayModePropertiesKHRAllocator, - typename std::enable_if::value, int>::type = 0> + template < + typename DisplayModePropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD Result createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, @@ -12050,8 +16145,8 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, @@ -12062,7 +16157,7 @@ namespace VULKAN_HPP_NAMESPACE template VULKAN_HPP_NODISCARD typename ResultValueType::type getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t 
planeIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #if defined( VK_USE_PLATFORM_XLIB_KHR ) //=== VK_KHR_xlib_surface === @@ -12078,7 +16173,7 @@ namespace VULKAN_HPP_NAMESPACE Display & dpy, VisualID visualID, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_XLIB_KHR*/ #if defined( VK_USE_PLATFORM_XCB_KHR ) @@ -12095,7 +16190,7 @@ namespace VULKAN_HPP_NAMESPACE xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_XCB_KHR*/ #if defined( VK_USE_PLATFORM_WAYLAND_KHR ) @@ -12110,7 +16205,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ #if defined( VK_USE_PLATFORM_WIN32_KHR ) @@ -12120,45 +16215,62 @@ namespace VULKAN_HPP_NAMESPACE Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_queue === template VULKAN_HPP_NODISCARD Result getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile, VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD typename ResultValueType::type getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template - VULKAN_HPP_NODISCARD typename ResultValueType>::type + VULKAN_HPP_NODISCARD typename ResultValueType>::type getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD Result getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, uint32_t * pVideoFormatPropertyCount, VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template < + typename VideoFormatPropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d 
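
// Illustrative usage sketch: querying decode capabilities for an AV1 main-profile video stream
// through the VK_KHR_video_queue entry point declared above. The profile-struct field names
// (videoCodecOperation, chromaSubsampling, lumaBitDepth, chromaBitDepth, stdProfile,
// filmGrainSupport) are assumptions based on the video-queue and AV1-decode extension structs,
// not something spelled out in this diff.
#include <vulkan/vulkan.hpp>
#include <vk_video/vulkan_video_codec_av1std.h>  // usually pulled in transitively by the Vulkan headers

static vk::VideoCapabilitiesKHR queryAv1DecodeCapabilities( vk::PhysicalDevice physicalDevice )
{
  vk::VideoDecodeAV1ProfileInfoKHR av1Profile{};
  av1Profile.stdProfile       = STD_VIDEO_AV1_PROFILE_MAIN;
  av1Profile.filmGrainSupport = VK_FALSE;

  vk::VideoProfileInfoKHR profile{};
  profile.pNext               = &av1Profile;
  profile.videoCodecOperation = vk::VideoCodecOperationFlagBitsKHR::eDecodeAv1;
  profile.chromaSubsampling   = vk::VideoChromaSubsamplingFlagBitsKHR::e420;
  profile.lumaBitDepth        = vk::VideoComponentBitDepthFlagBitsKHR::e8;
  profile.chromaBitDepth      = vk::VideoComponentBitDepthFlagBitsKHR::e8;

  return physicalDevice.getVideoCapabilitiesKHR( profile );
}
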
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = VideoFormatPropertiesKHRAllocator, - typename std::enable_if::value, int>::type = 0> + template < + typename VideoFormatPropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + StructureChainAllocator & structureChainAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_NV_external_memory_capabilities === @@ -12181,7 +16293,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_KHR_get_physical_device_properties2 === @@ -12193,8 +16305,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, @@ -12204,8 +16317,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void getFormatProperties2KHR( 
VULKAN_HPP_NAMESPACE::Format format, @@ -12216,9 +16330,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD Result getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, @@ -12230,10 +16344,10 @@ namespace VULKAN_HPP_NAMESPACE getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template - VULKAN_HPP_NODISCARD typename ResultValueType>::type + VULKAN_HPP_NODISCARD typename ResultValueType>::type getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount, @@ -12241,29 +16355,31 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = QueueFamilyProperties2Allocator, - typename std::enable_if::value, int>::type = 0> + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = StructureChainAllocator, - typename std::enable_if::value, int>::type = 0> + typename StructureChainAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, @@ -12273,9 +16389,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD 
VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, @@ -12284,19 +16400,23 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = SparseImageFormatProperties2Allocator, - typename std::enable_if::value, int>::type = 0> + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_KHR_external_memory_capabilities === @@ -12309,7 +16429,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_KHR_external_semaphore_capabilities === @@ -12322,7 +16442,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_EXT_direct_mode_display === @@ -12345,7 +16465,7 @@ namespace VULKAN_HPP_NAMESPACE template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD Result getRandROutputDisplayEXT( Display * dpy, @@ -12360,8 +16480,8 @@ namespace VULKAN_HPP_NAMESPACE template VULKAN_HPP_NODISCARD typename ResultValueType>::type getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif 
/*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ //=== VK_EXT_display_surface_counter === @@ -12374,7 +16494,7 @@ namespace VULKAN_HPP_NAMESPACE template VULKAN_HPP_NODISCARD typename ResultValueType::type getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_KHR_external_fence_capabilities === @@ -12387,7 +16507,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_KHR_performance_query === @@ -12401,7 +16521,11 @@ namespace VULKAN_HPP_NAMESPACE #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , typename PerformanceCounterDescriptionKHRAllocator = std::allocator, - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value && + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType, std::vector>>::type @@ -12409,11 +16533,10 @@ namespace VULKAN_HPP_NAMESPACE template , typename PerformanceCounterDescriptionKHRAllocator = std::allocator, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = PerformanceCounterKHRAllocator, - typename B2 = PerformanceCounterDescriptionKHRAllocator, - typename std::enable_if::value && - std::is_same::value, - int>::type = 0> + typename std::enable_if< + std::is_same::value && + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType, std::vector>>::type @@ -12421,7 +16544,7 @@ namespace VULKAN_HPP_NAMESPACE PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, @@ -12432,7 +16555,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD uint32_t getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_KHR_get_surface_capabilities2 === @@ -12446,10 +16569,10 @@ namespace VULKAN_HPP_NAMESPACE getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template - VULKAN_HPP_NODISCARD typename ResultValueType>::type + VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
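
// Illustrative usage sketch: with exceptions enabled, the VK_KHR_performance_query enumeration
// above returns the counter and counter-description vectors as a pair; the two vectors pair up
// element by element.
#include <vulkan/vulkan.hpp>
#include <string>
#include <vector>

static std::vector<std::string> listPerformanceCounterNames( vk::PhysicalDevice physicalDevice, uint32_t queueFamilyIndex )
{
  auto [counters, descriptions] = physicalDevice.enumerateQueueFamilyPerformanceQueryCountersKHR( queueFamilyIndex );
  (void)counters;  // counters[i].unit / .scope describe the value reported for descriptions[i]

  std::vector<std::string> names;
  names.reserve( descriptions.size() );
  for ( auto const & description : descriptions )
  {
    names.emplace_back( description.name.data() );  // name is a fixed-size, null-terminated char array
  }
  return names;
}
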
const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD Result getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, @@ -12457,35 +16580,37 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename SurfaceFormat2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = SurfaceFormat2KHRAllocator, - typename std::enable_if::value, int>::type = 0> + template < + typename SurfaceFormat2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = StructureChainAllocator, - typename std::enable_if::value, int>::type = 0> + typename StructureChainAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, StructureChainAllocator & structureChainAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_KHR_get_display_properties2 === @@ -12495,35 +16620,41 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = DisplayProperties2KHRAllocator, - typename std::enable_if::value, int>::type = 0> + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, 
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD Result getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename DisplayPlaneProperties2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlaneProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = DisplayPlaneProperties2KHRAllocator, - typename std::enable_if::value, int>::type = 0> + template < + typename DisplayPlaneProperties2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD Result getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, @@ -12531,19 +16662,37 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename DisplayModeProperties2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = DisplayModeProperties2KHRAllocator, - typename std::enable_if::value, int>::type = 0> + template < + typename DisplayModeProperties2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayModeProperties2KHR( 
VULKAN_HPP_NAMESPACE::DisplayKHR display, + StructureChainAllocator & structureChainAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo, @@ -12554,7 +16703,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD typename ResultValueType::type getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_EXT_sample_locations === @@ -12567,25 +16716,26 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_EXT_calibrated_timestamps === template VULKAN_HPP_NODISCARD Result getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount, - VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains, + VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD typename ResultValueType>::type + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type getCalibrateableTimeDomainsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template , + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = TimeDomainEXTAllocator, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD typename ResultValueType>::type - getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCalibrateableTimeDomainsEXT( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_KHR_fragment_shading_rate === @@ -12595,19 +16745,23 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getFragmentShadingRatesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = PhysicalDeviceFragmentShadingRateKHRAllocator, - typename std::enable_if::value, int>::type = 0> + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getFragmentShadingRatesKHR( 
PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_EXT_tooling_info === @@ -12617,17 +16771,21 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getToolPropertiesEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = PhysicalDeviceToolPropertiesAllocator, - typename std::enable_if::value, int>::type = 0> + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getToolPropertiesEXT( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_NV_cooperative_matrix === @@ -12637,19 +16795,23 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getCooperativeMatrixPropertiesNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = CooperativeMatrixPropertiesNVAllocator, - typename std::enable_if::value, int>::type = 0> + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_NV_coverage_reduction_mode === @@ -12660,19 +16822,23 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = FramebufferMixedSamplesCombinationNVAllocator, - typename std::enable_if::value, int>::type = 0> + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSupportedFramebufferMixedSamplesCombinationsNV( FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_EXT_full_screen_exclusive === @@ -12683,19 +16849,20 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = PresentModeKHRAllocator, - typename std::enable_if::value, int>::type = 0> + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, PresentModeKHRAllocator & presentModeKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_EXT_acquire_drm_display === @@ -12708,7 +16875,7 @@ namespace VULKAN_HPP_NAMESPACE #else template typename ResultValueType::type - acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -12724,8 +16891,26 @@ namespace VULKAN_HPP_NAMESPACE template VULKAN_HPP_NODISCARD typename ResultValueType>::type getDrmDisplayEXTUnique( int32_t drmFd, uint32_t connectorId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_video_encode_queue === + + template + VULKAN_HPP_NODISCARD Result + getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo, + VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_acquire_winrt_display === @@ -12752,8 +16937,8 @@ namespace 
VULKAN_HPP_NAMESPACE template VULKAN_HPP_NODISCARD typename ResultValueType>::type getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) @@ -12768,7 +16953,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ #if defined( VK_USE_PLATFORM_SCREEN_QNX ) @@ -12783,9 +16968,139 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + //=== VK_NV_optical_flow === + + template + VULKAN_HPP_NODISCARD Result getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo, + uint32_t * pFormatCount, + VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV * pImageFormatProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, + OpticalFlowImageFormatPropertiesNVAllocator & opticalFlowImageFormatPropertiesNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_cooperative_vector === + + template + VULKAN_HPP_NODISCARD Result getCooperativeVectorPropertiesNV( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::CooperativeVectorPropertiesNV * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getCooperativeVectorPropertiesNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getCooperativeVectorPropertiesNV( CooperativeVectorPropertiesNVAllocator 
& cooperativeVectorPropertiesNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_cooperative_matrix === + + template + VULKAN_HPP_NODISCARD Result getCooperativeMatrixPropertiesKHR( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getCooperativeMatrixPropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> + VULKAN_HPP_NODISCARD + typename ResultValueType>::type + getCooperativeMatrixPropertiesKHR( CooperativeMatrixPropertiesKHRAllocator & cooperativeMatrixPropertiesKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_calibrated_timestamps === + + template + VULKAN_HPP_NODISCARD Result getCalibrateableTimeDomainsKHR( uint32_t * pTimeDomainCount, + VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCalibrateableTimeDomainsKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getCalibrateableTimeDomainsKHR( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_cooperative_matrix2 === + + template + VULKAN_HPP_NODISCARD Result + getCooperativeMatrixFlexibleDimensionsPropertiesNV( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template < + typename CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getCooperativeMatrixFlexibleDimensionsPropertiesNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getCooperativeMatrixFlexibleDimensionsPropertiesNV( + CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator & cooperativeMatrixFlexibleDimensionsPropertiesNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ + operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT { return m_physicalDevice; @@ -12817,36 +17132,19 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> - struct isVulkanHandleType + struct CppType { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice; }; +#endif -#ifndef VULKAN_HPP_NO_SMART_HANDLE - class Instance; - template - class UniqueHandleTraits - { - public: - using deleter = ObjectDestroy; - }; - using UniqueDebugReportCallbackEXT = UniqueHandle; - template - class UniqueHandleTraits - { - public: - using deleter = ObjectDestroy; - }; - using UniqueDebugUtilsMessengerEXT = UniqueHandle; - template - class UniqueHandleTraits + template <> + struct isVulkanHandleType { - public: - using deleter = ObjectDestroy; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - using UniqueSurfaceKHR = UniqueHandle; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ class Instance { @@ -12859,8 +17157,25 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance; public: - VULKAN_HPP_CONSTEXPR Instance() = default; + Instance() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + Instance( Instance const & rhs ) = default; + Instance & operator=( Instance const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Instance( Instance && rhs ) = default; + Instance & operator=( Instance && rhs ) = default; +#else + Instance( Instance && rhs ) VULKAN_HPP_NOEXCEPT : m_instance( VULKAN_HPP_NAMESPACE::exchange( rhs.m_instance, {} ) ) {} + + Instance & operator=( Instance && rhs ) VULKAN_HPP_NOEXCEPT + { + m_instance = VULKAN_HPP_NAMESPACE::exchange( rhs.m_instance, {} ); + return *this; + } +#endif + VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + Instance( VkInstance instance ) VULKAN_HPP_NOEXCEPT : m_instance( instance ) {} Instance & operator=( VkInstance instance ) VULKAN_HPP_NOEXCEPT @@ -12875,25 +17190,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( Instance const & ) const = default; -#else - bool operator==( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_instance == rhs.m_instance; - } - - bool operator!=( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_instance != rhs.m_instance; - } - - bool operator<( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_instance < rhs.m_instance; - } -#endif - //=== VK_VERSION_1_0 === template @@ -12903,30 +17199,32 @@ namespace VULKAN_HPP_NAMESPACE template void destroy( Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD Result enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDevices( 
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = PhysicalDeviceAllocator, - typename std::enable_if::value, int>::type = 0> + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template PFN_vkVoidFunction getProcAddr( const char * pName, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_NAMESPACE::PFN_VoidFunction getProcAddr( const std::string & name, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_VERSION_1_1 === @@ -12936,19 +17234,23 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDeviceGroups( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = PhysicalDeviceGroupPropertiesAllocator, - typename std::enable_if::value, int>::type = 0> + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_KHR_surface === @@ -12961,7 +17263,7 @@ namespace VULKAN_HPP_NAMESPACE void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, @@ -12972,7 +17274,7 @@ namespace VULKAN_HPP_NAMESPACE void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_KHR_display === @@ -12993,8 +17295,8 @@ namespace VULKAN_HPP_NAMESPACE createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #if defined( VK_USE_PLATFORM_XLIB_KHR ) //=== VK_KHR_xlib_surface === @@ -13016,8 +17318,8 @@ namespace VULKAN_HPP_NAMESPACE createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_XLIB_KHR*/ #if defined( VK_USE_PLATFORM_XCB_KHR ) @@ -13040,8 +17342,8 @@ namespace VULKAN_HPP_NAMESPACE createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_XCB_KHR*/ #if defined( VK_USE_PLATFORM_WAYLAND_KHR ) @@ -13064,8 +17366,8 @@ namespace VULKAN_HPP_NAMESPACE createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ #if defined( VK_USE_PLATFORM_ANDROID_KHR ) @@ -13088,8 +17390,8 @@ namespace VULKAN_HPP_NAMESPACE createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined( VK_USE_PLATFORM_WIN32_KHR ) @@ -13112,8 +17414,8 @@ namespace VULKAN_HPP_NAMESPACE createWin32SurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_EXT_debug_report === @@ -13135,8 +17437,8 @@ namespace VULKAN_HPP_NAMESPACE createDebugReportCallbackEXTUnique( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, @@ -13147,7 +17449,7 @@ namespace VULKAN_HPP_NAMESPACE void 
destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, @@ -13158,7 +17460,7 @@ namespace VULKAN_HPP_NAMESPACE void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, @@ -13179,7 +17481,7 @@ namespace VULKAN_HPP_NAMESPACE const std::string & layerPrefix, const std::string & message, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_stream_descriptor_surface === @@ -13201,8 +17503,8 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_GGP*/ #if defined( VK_USE_PLATFORM_VI_NN ) @@ -13225,8 +17527,8 @@ namespace VULKAN_HPP_NAMESPACE createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_VI_NN*/ //=== VK_KHR_device_group_creation === @@ -13237,19 +17539,23 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDeviceGroupsKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = PhysicalDeviceGroupPropertiesAllocator, - typename std::enable_if::value, int>::type = 0> + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #if defined( VK_USE_PLATFORM_IOS_MVK ) //=== VK_MVK_ios_surface === @@ -13271,8 +17577,8 @@ namespace 
VULKAN_HPP_NAMESPACE createIOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_IOS_MVK*/ #if defined( VK_USE_PLATFORM_MACOS_MVK ) @@ -13295,8 +17601,8 @@ namespace VULKAN_HPP_NAMESPACE createMacOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_MACOS_MVK*/ //=== VK_EXT_debug_utils === @@ -13318,8 +17624,8 @@ namespace VULKAN_HPP_NAMESPACE createDebugUtilsMessengerEXTUnique( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, @@ -13330,7 +17636,7 @@ namespace VULKAN_HPP_NAMESPACE void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, @@ -13341,7 +17647,7 @@ namespace VULKAN_HPP_NAMESPACE void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, @@ -13354,7 +17660,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_imagepipe_surface === @@ -13376,8 +17682,8 @@ namespace VULKAN_HPP_NAMESPACE createImagePipeSurfaceFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ 
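// --------------------------------------------------------------------------------------------
// Editorial note (illustrative sketch, not part of the generated patch): the enhanced-mode,
// smart-handle overloads declared above (e.g. createDebugUtilsMessengerEXTUnique) return a
// UniqueHandle that destroys the underlying object automatically. The helper names
// `debugCallback` and `installDebugMessenger` below are assumptions made for this example; it
// presumes an instance created with VK_EXT_debug_utils enabled and a dispatcher that has loaded
// the extension entry points.
#include <cstdio>
#include <vulkan/vulkan.hpp>

static VKAPI_ATTR VkBool32 VKAPI_CALL debugCallback( VkDebugUtilsMessageSeverityFlagBitsEXT /*severity*/,
                                                     VkDebugUtilsMessageTypeFlagsEXT /*types*/,
                                                     const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData,
                                                     void * /*pUserData*/ )
{
  // Forward validation/debug messages to stderr; returning VK_FALSE lets the triggering call continue.
  std::fprintf( stderr, "%s\n", pCallbackData->pMessage );
  return VK_FALSE;
}

void installDebugMessenger( vk::Instance instance )
{
  vk::DebugUtilsMessengerCreateInfoEXT createInfo(
    {},
    vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning | vk::DebugUtilsMessageSeverityFlagBitsEXT::eError,
    vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral | vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation,
    &debugCallback );
  // The returned UniqueHandle calls vkDestroyDebugUtilsMessengerEXT when it leaves this scope.
  vk::UniqueDebugUtilsMessengerEXT messenger = instance.createDebugUtilsMessengerEXTUnique( createInfo );
}
// --------------------------------------------------------------------------------------------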
#endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) @@ -13400,8 +17706,8 @@ namespace VULKAN_HPP_NAMESPACE createMetalSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_METAL_EXT*/ //=== VK_EXT_headless_surface === @@ -13423,8 +17729,8 @@ namespace VULKAN_HPP_NAMESPACE createHeadlessSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) //=== VK_EXT_directfb_surface === @@ -13446,8 +17752,8 @@ namespace VULKAN_HPP_NAMESPACE createDirectFBSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ #if defined( VK_USE_PLATFORM_SCREEN_QNX ) @@ -13470,8 +17776,8 @@ namespace VULKAN_HPP_NAMESPACE createScreenSurfaceQNXUnique( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ operator VkInstance() const VULKAN_HPP_NOEXCEPT @@ -13505,6 +17811,14 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::Instance; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::Instance; + }; +#endif + template <> struct isVulkanHandleType { @@ -13513,16 +17827,6 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_VERSION_1_0 === -#ifndef VULKAN_HPP_NO_SMART_HANDLE - template - class UniqueHandleTraits - { - public: - using deleter = ObjectDestroy; - }; - using UniqueInstance = UniqueHandle; -#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ - template VULKAN_HPP_NODISCARD Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, @@ -13540,8 +17844,8 @@ namespace VULKAN_HPP_NAMESPACE createInstanceUnique( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); -# endif /*VULKAN_HPP_NO_SMART_HANDLE*/ -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD Result enumerateInstanceExtensionProperties( const char * pLayerName, @@ 
-13549,36 +17853,39 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename ExtensionPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = ExtensionPropertiesAllocator, - typename std::enable_if::value, int>::type = 0> + template < + typename ExtensionPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, ExtensionPropertiesAllocator & extensionPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_NODISCARD Result enumerateInstanceLayerProperties( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateInstanceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename B1 = LayerPropertiesAllocator, - typename std::enable_if::value, int>::type = 0> + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_VERSION_1_1 === @@ -13588,7 +17895,57 @@ namespace VULKAN_HPP_NAMESPACE #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD typename ResultValueType::type enumerateInstanceVersion( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); -#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + // operators to compare vk::-handles +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + template ::value, int>::type = 0> + auto operator<=>( T const & lhs, T const & rhs ) + { + return static_cast( lhs ) <=> static_cast( rhs ); + } +#else + template ::value, int>::type = 0> + bool operator==( T const & lhs, T const & rhs ) + { + return static_cast( lhs ) == static_cast( rhs ); + } + + template ::value, int>::type = 0> + bool operator!=( T const & lhs, T const & rhs ) + { + return static_cast( lhs ) != static_cast( rhs ); + } + + template ::value, int>::type = 0> + bool operator<( T const & lhs, T const & rhs ) + { + return static_cast( lhs ) < static_cast( rhs ); + } +#endif + 
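// --------------------------------------------------------------------------------------------
// Editorial note (illustrative sketch, not part of the generated patch): the free comparison
// operators added in this block are constrained via isVulkanHandleType<T>, so every vk:: handle
// compares by its underlying Vk* value and can be tested against nullptr. The function name
// `describeHandles` is an assumption made for this example.
#include <cassert>
#include <vulkan/vulkan.hpp>

void describeHandles( vk::PhysicalDevice a, vk::PhysicalDevice b )
{
  if ( a == b )
  {
    // Both wrappers refer to the same underlying VkPhysicalDevice.
  }

  vk::Instance empty;              // a default-constructed handle wraps VK_NULL_HANDLE
  assert( empty == nullptr );      // uses the nullptr overloads declared in this block
  assert( !( empty != nullptr ) ); // consistent with operator!=
}
// --------------------------------------------------------------------------------------------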
+ template ::value, int>::type = 0> + bool operator==( T const & v, std::nullptr_t ) + { + return !v; + } + + template ::value, int>::type = 0> + bool operator==( std::nullptr_t, T const & v ) + { + return !v; + } + + template ::value, int>::type = 0> + bool operator!=( T const & v, std::nullptr_t ) + { + return !!v; + } + template ::value, int>::type = 0> + bool operator!=( std::nullptr_t, T const & v ) + { + return !!v; + } } // namespace VULKAN_HPP_NAMESPACE #endif diff --git a/src/libraries/vulkanheaders/vulkan_hash.hpp b/src/libraries/vulkanheaders/vulkan_hash.hpp index 7833e3e87..353d2f61e 100644 --- a/src/libraries/vulkanheaders/vulkan_hash.hpp +++ b/src/libraries/vulkanheaders/vulkan_hash.hpp @@ -1,4 +1,4 @@ -// Copyright 2015-2022 The Khronos Group Inc. +// Copyright 2015-2025 The Khronos Group Inc. // // SPDX-License-Identifier: Apache-2.0 OR MIT // @@ -340,7 +340,6 @@ namespace std } }; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_queue === template <> @@ -360,7 +359,6 @@ namespace std return std::hash{}( static_cast( videoSessionParametersKHR ) ); } }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ //=== VK_NVX_binary_import === @@ -459,6 +457,28 @@ namespace std } }; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaModuleNV const & cudaModuleNV ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( cudaModuleNV ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaFunctionNV const & cudaFunctionNV ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( cudaFunctionNV ) ); + } + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_buffer_collection === @@ -472,6 +492,70 @@ namespace std }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ + //=== VK_EXT_opacity_micromap === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapEXT const & micromapEXT ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( micromapEXT ) ); + } + }; + + //=== VK_NV_optical_flow === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV const & opticalFlowSessionNV ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( opticalFlowSessionNV ) ); + } + }; + + //=== VK_EXT_shader_object === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderEXT const & shaderEXT ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( shaderEXT ) ); + } + }; + + //=== VK_KHR_pipeline_binary === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR const & pipelineBinaryKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( pipelineBinaryKHR ) ); + } + }; + + //=== VK_EXT_device_generated_commands === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT const & indirectCommandsLayoutEXT ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( indirectCommandsLayoutEXT ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT const & indirectExecutionSetEXT ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( indirectExecutionSetEXT ) ); + } + }; + #if 14 <= VULKAN_HPP_CPP_VERSION //====================================== //=== HASH structures for 
structures === @@ -529,6 +613,21 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT const & accelerationStructureCaptureDescriptorDataInfoEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCaptureDescriptorDataInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCaptureDescriptorDataInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCaptureDescriptorDataInfoEXT.accelerationStructure ); + VULKAN_HPP_HASH_COMBINE( seed, accelerationStructureCaptureDescriptorDataInfoEXT.accelerationStructureNV ); + return seed; + } + }; + template <> struct hash { @@ -782,6 +881,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapUsageEXT const & micromapUsageEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, micromapUsageEXT.count ); + VULKAN_HPP_HASH_COMBINE( seed, micromapUsageEXT.subdivisionLevel ); + VULKAN_HPP_HASH_COMBINE( seed, micromapUsageEXT.format ); + return seed; + } + }; + template <> struct hash { @@ -916,6 +1028,23 @@ namespace std }; # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatResolvePropertiesANDROID const & androidHardwareBufferFormatResolvePropertiesANDROID ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatResolvePropertiesANDROID.sType ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatResolvePropertiesANDROID.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, androidHardwareBufferFormatResolvePropertiesANDROID.colorAttachmentFormat ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + # if defined( VK_USE_PLATFORM_ANDROID_KHR ) template <> struct hash @@ -964,6 +1093,35 @@ namespace std }; # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD const & antiLagPresentationInfoAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, antiLagPresentationInfoAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, antiLagPresentationInfoAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, antiLagPresentationInfoAMD.stage ); + VULKAN_HPP_HASH_COMBINE( seed, antiLagPresentationInfoAMD.frameIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AntiLagDataAMD const & antiLagDataAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, antiLagDataAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, antiLagDataAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, antiLagDataAMD.mode ); + VULKAN_HPP_HASH_COMBINE( seed, antiLagDataAMD.maxFPS ); + VULKAN_HPP_HASH_COMBINE( seed, antiLagDataAMD.pPresentationInfo ); + return seed; + } + }; + template <> struct hash { @@ -1219,6 +1377,41 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT const & bindDescriptorBufferEmbeddedSamplersInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorBufferEmbeddedSamplersInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
bindDescriptorBufferEmbeddedSamplersInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorBufferEmbeddedSamplersInfoEXT.stageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorBufferEmbeddedSamplersInfoEXT.layout ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorBufferEmbeddedSamplersInfoEXT.set ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo const & bindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.stageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.layout ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.firstSet ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.descriptorSetCount ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.pDescriptorSets ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.dynamicOffsetCount ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.pDynamicOffsets ); + return seed; + } + }; + template <> struct hash { @@ -1301,6 +1494,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandEXT const & bindIndexBufferIndirectCommandEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindIndexBufferIndirectCommandEXT.bufferAddress ); + VULKAN_HPP_HASH_COMBINE( seed, bindIndexBufferIndirectCommandEXT.size ); + VULKAN_HPP_HASH_COMBINE( seed, bindIndexBufferIndirectCommandEXT.indexType ); + return seed; + } + }; + template <> struct hash { @@ -1314,6 +1520,30 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindMemoryStatus const & bindMemoryStatus ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindMemoryStatus.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindMemoryStatus.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindMemoryStatus.pResult ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV const & bindPipelineIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindPipelineIndirectCommandNV.pipelineAddress ); + return seed; + } + }; + template <> struct hash { @@ -1456,6 +1686,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandEXT const & bindVertexBufferIndirectCommandEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindVertexBufferIndirectCommandEXT.bufferAddress ); + VULKAN_HPP_HASH_COMBINE( seed, bindVertexBufferIndirectCommandEXT.size ); + VULKAN_HPP_HASH_COMBINE( seed, bindVertexBufferIndirectCommandEXT.stride ); + return seed; + } + }; + template <> struct hash { @@ -1469,7 +1712,6 @@ namespace std } }; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> struct hash { @@ -1485,7 +1727,19 @@ namespace std return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BlitImageCubicWeightsInfoQCOM const & blitImageCubicWeightsInfoQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
blitImageCubicWeightsInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, blitImageCubicWeightsInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, blitImageCubicWeightsInfoQCOM.cubicWeights ); + return seed; + } + }; template <> struct hash @@ -1542,6 +1796,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT const & bufferCaptureDescriptorDataInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferCaptureDescriptorDataInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCaptureDescriptorDataInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferCaptureDescriptorDataInfoEXT.buffer ); + return seed; + } + }; + # if defined( VK_USE_PLATFORM_FUCHSIA ) template <> struct hash @@ -1840,6 +2107,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfo const & bufferUsageFlags2CreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfo.usage ); + return seed; + } + }; + template <> struct hash { @@ -1858,14 +2138,77 @@ namespace std }; template <> - struct hash + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV const & stridedDeviceAddressNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, stridedDeviceAddressNV.startAddress ); + VULKAN_HPP_HASH_COMBINE( seed, stridedDeviceAddressNV.strideInBytes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureIndirectCommandNV const & + buildPartitionedAccelerationStructureIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, buildPartitionedAccelerationStructureIndirectCommandNV.opType ); + VULKAN_HPP_HASH_COMBINE( seed, buildPartitionedAccelerationStructureIndirectCommandNV.argCount ); + VULKAN_HPP_HASH_COMBINE( seed, buildPartitionedAccelerationStructureIndirectCommandNV.argData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV const & partitionedAccelerationStructureInstancesInputNV ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureInstancesInputNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureInstancesInputNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureInstancesInputNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureInstancesInputNV.instanceCount ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureInstancesInputNV.maxInstancePerPartitionCount ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureInstancesInputNV.partitionCount ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureInstancesInputNV.maxInstanceInGlobalPartitionCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureInfoNV const & buildPartitionedAccelerationStructureInfoNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
buildPartitionedAccelerationStructureInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, buildPartitionedAccelerationStructureInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, buildPartitionedAccelerationStructureInfoNV.input ); + VULKAN_HPP_HASH_COMBINE( seed, buildPartitionedAccelerationStructureInfoNV.srcAccelerationStructureData ); + VULKAN_HPP_HASH_COMBINE( seed, buildPartitionedAccelerationStructureInfoNV.dstAccelerationStructureData ); + VULKAN_HPP_HASH_COMBINE( seed, buildPartitionedAccelerationStructureInfoNV.scratchData ); + VULKAN_HPP_HASH_COMBINE( seed, buildPartitionedAccelerationStructureInfoNV.srcInfos ); + VULKAN_HPP_HASH_COMBINE( seed, buildPartitionedAccelerationStructureInfoNV.srcInfosCount ); + return seed; + } + }; + + template <> + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT const & calibratedTimestampInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR const & calibratedTimestampInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, calibratedTimestampInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, calibratedTimestampInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, calibratedTimestampInfoEXT.timeDomain ); + VULKAN_HPP_HASH_COMBINE( seed, calibratedTimestampInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, calibratedTimestampInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, calibratedTimestampInfoKHR.timeDomain ); return seed; } }; @@ -1924,104 +2267,314 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV const & coarseSampleLocationNV ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & + clusterAccelerationStructureBuildClustersBottomLevelInfoNV ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, coarseSampleLocationNV.pixelX ); - VULKAN_HPP_HASH_COMBINE( seed, coarseSampleLocationNV.pixelY ); - VULKAN_HPP_HASH_COMBINE( seed, coarseSampleLocationNV.sample ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildClustersBottomLevelInfoNV.clusterReferencesCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildClustersBottomLevelInfoNV.clusterReferencesStride ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildClustersBottomLevelInfoNV.clusterReferences ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV const & coarseSampleOrderCustomNV ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & + clusterAccelerationStructureGeometryIndexAndGeometryFlagsNV ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.shadingRate ); - VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.sampleCount ); - VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.sampleLocationCount ); - VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.pSampleLocations ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureGeometryIndexAndGeometryFlagsNV.geometryIndex ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureGeometryIndexAndGeometryFlagsNV.reserved ); + VULKAN_HPP_HASH_COMBINE( seed, 
clusterAccelerationStructureGeometryIndexAndGeometryFlagsNV.geometryFlags ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & commandBufferAllocateInfo ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureBuildTriangleClusterInfoNV const & + clusterAccelerationStructureBuildTriangleClusterInfoNV ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.sType ); - VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.commandPool ); - VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.level ); - VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.commandBufferCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.clusterID ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.clusterFlags ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.triangleCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.vertexCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.positionTruncateBitCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.indexType ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapIndexType ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.baseGeometryIndexAndGeometryFlags ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.indexBufferStride ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.vertexBufferStride ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.geometryIndexAndFlagsBufferStride ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapIndexBufferStride ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.indexBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.vertexBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.geometryIndexAndFlagsBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapArray ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapIndexBuffer ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const & commandBufferInheritanceInfo ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & + clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.sType ); - VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.renderPass ); - VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.subpass ); - VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.framebuffer ); - 
VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.occlusionQueryEnable ); - VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.queryFlags ); - VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.pipelineStatistics ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.clusterID ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.clusterFlags ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.triangleCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.vertexCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.positionTruncateBitCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.indexType ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.opacityMicromapIndexType ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.baseGeometryIndexAndGeometryFlags ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.indexBufferStride ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.vertexBufferStride ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.geometryIndexAndFlagsBufferStride ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.opacityMicromapIndexBufferStride ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.indexBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.vertexBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.geometryIndexAndFlagsBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.opacityMicromapArray ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.opacityMicromapIndexBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.instantiationBoundingBoxLimit ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const & commandBufferBeginInfo ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClustersBottomLevelInputNV const & + clusterAccelerationStructureClustersBottomLevelInputNV ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.sType ); - VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.flags ); - VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.pInheritanceInfo ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureClustersBottomLevelInputNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureClustersBottomLevelInputNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureClustersBottomLevelInputNV.maxTotalClusterCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureClustersBottomLevelInputNV.maxClusterCountPerAccelerationStructure ); return seed; } }; template <> - 
struct hash + struct hash { std::size_t - operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const & commandBufferInheritanceConditionalRenderingInfoEXT ) const + operator()( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTriangleClusterInputNV const & clusterAccelerationStructureTriangleClusterInputNV ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceConditionalRenderingInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceConditionalRenderingInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceConditionalRenderingInfoEXT.conditionalRenderingEnable ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureTriangleClusterInputNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureTriangleClusterInputNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureTriangleClusterInputNV.vertexFormat ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureTriangleClusterInputNV.maxGeometryIndexValue ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureTriangleClusterInputNV.maxClusterUniqueGeometryCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureTriangleClusterInputNV.maxClusterTriangleCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureTriangleClusterInputNV.maxClusterVertexCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureTriangleClusterInputNV.maxTotalTriangleCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureTriangleClusterInputNV.maxTotalVertexCount ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureTriangleClusterInputNV.minPositionTruncateBitCount ); return seed; } }; template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM const & commandBufferInheritanceRenderPassTransformInfoQCOM ) const + std::size_t operator()( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureMoveObjectsInputNV const & clusterAccelerationStructureMoveObjectsInputNV ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderPassTransformInfoQCOM.sType ); - VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderPassTransformInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureMoveObjectsInputNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureMoveObjectsInputNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureMoveObjectsInputNV.type ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureMoveObjectsInputNV.noMoveOverlap ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureMoveObjectsInputNV.maxMovedBytes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const & stridedDeviceAddressRegionKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, stridedDeviceAddressRegionKHR.deviceAddress ); + VULKAN_HPP_HASH_COMBINE( seed, stridedDeviceAddressRegionKHR.stride ); + VULKAN_HPP_HASH_COMBINE( seed, stridedDeviceAddressRegionKHR.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInstantiateClusterInfoNV const & + clusterAccelerationStructureInstantiateClusterInfoNV ) const VULKAN_HPP_NOEXCEPT + { + 
std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureInstantiateClusterInfoNV.clusterIdOffset ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureInstantiateClusterInfoNV.geometryIndexOffset ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureInstantiateClusterInfoNV.reserved ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureInstantiateClusterInfoNV.clusterTemplateAddress ); + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureInstantiateClusterInfoNV.vertexBuffer ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureMoveObjectsInfoNV const & clusterAccelerationStructureMoveObjectsInfoNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, clusterAccelerationStructureMoveObjectsInfoNV.srcAccelerationStructure ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV const & coarseSampleLocationNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, coarseSampleLocationNV.pixelX ); + VULKAN_HPP_HASH_COMBINE( seed, coarseSampleLocationNV.pixelY ); + VULKAN_HPP_HASH_COMBINE( seed, coarseSampleLocationNV.sample ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV const & coarseSampleOrderCustomNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.shadingRate ); + VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.sampleCount ); + VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.sampleLocationCount ); + VULKAN_HPP_HASH_COMBINE( seed, coarseSampleOrderCustomNV.pSampleLocations ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT const & colorBlendAdvancedEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.advancedBlendOp ); + VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.srcPremultiplied ); + VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.dstPremultiplied ); + VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.blendOverlap ); + VULKAN_HPP_HASH_COMBINE( seed, colorBlendAdvancedEXT.clampResults ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT const & colorBlendEquationEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.srcColorBlendFactor ); + VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.dstColorBlendFactor ); + VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.colorBlendOp ); + VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.srcAlphaBlendFactor ); + VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.dstAlphaBlendFactor ); + VULKAN_HPP_HASH_COMBINE( seed, colorBlendEquationEXT.alphaBlendOp ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & commandBufferAllocateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.commandPool ); + 
VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.level ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferAllocateInfo.commandBufferCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const & commandBufferInheritanceInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.renderPass ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.subpass ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.framebuffer ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.occlusionQueryEnable ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.queryFlags ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceInfo.pipelineStatistics ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const & commandBufferBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferBeginInfo.pInheritanceInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const & commandBufferInheritanceConditionalRenderingInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceConditionalRenderingInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceConditionalRenderingInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceConditionalRenderingInfoEXT.conditionalRenderingEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM const & commandBufferInheritanceRenderPassTransformInfoQCOM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderPassTransformInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderPassTransformInfoQCOM.pNext ); VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderPassTransformInfoQCOM.transform ); VULKAN_HPP_HASH_COMBINE( seed, commandBufferInheritanceRenderPassTransformInfoQCOM.renderArea ); return seed; @@ -2172,6 +2725,21 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV const & computePipelineIndirectBufferInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.deviceAddress ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.size ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.pipelineDeviceAddressCaptureReplay ); + return seed; + } + }; + template <> struct hash { @@ -2201,6 +2769,50 @@ namespace std } }; + template <> + struct hash + { + std::size_t 
operator()( + VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV const & cooperativeMatrixFlexibleDimensionsPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.MGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.NGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.KGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.AType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.BType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.CType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.ResultType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.saturatingAccumulation ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.scope ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.workgroupInvocations ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR const & cooperativeMatrixPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.MSize ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.NSize ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.KSize ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.AType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.BType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.CType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.ResultType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.saturatingAccumulation ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixPropertiesKHR.scope ); + return seed; + } + }; + template <> struct hash { @@ -2221,6 +2833,24 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CooperativeVectorPropertiesNV const & cooperativeVectorPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cooperativeVectorPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeVectorPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeVectorPropertiesNV.inputType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeVectorPropertiesNV.inputInterpretation ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeVectorPropertiesNV.matrixInterpretation ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeVectorPropertiesNV.biasInterpretation ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeVectorPropertiesNV.resultType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeVectorPropertiesNV.transpose ); + return seed; + } + }; + template <> struct hash { @@ -2353,6 +2983,139 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyImageToImageInfo const & copyImageToImageInfo ) const 
VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.srcImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.srcImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.dstImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.dstImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.pRegions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageToMemoryCopy const & imageToMemoryCopy ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.pHostPointer ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.memoryRowLength ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.memoryImageHeight ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.imageSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.imageOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.imageExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo const & copyImageToMemoryInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfo.srcImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfo.srcImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfo.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfo.pRegions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV const & copyMemoryIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryIndirectCommandNV.srcAddress ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryIndirectCommandNV.dstAddress ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryIndirectCommandNV.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV const & copyMemoryToImageIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.srcAddress ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.bufferRowLength ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.bufferImageHeight ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.imageSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.imageOffset ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageIndirectCommandNV.imageExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryToImageCopy const & memoryToImageCopy ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.sType ); + 
VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.pHostPointer ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.memoryRowLength ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.memoryImageHeight ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.imageSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.imageOffset ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.imageExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo const & copyMemoryToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfo.dstImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfo.dstImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfo.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfo.pRegions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT const & copyMicromapInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.src ); + VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.dst ); + VULKAN_HPP_HASH_COMBINE( seed, copyMicromapInfoEXT.mode ); + return seed; + } + }; + template <> struct hash { @@ -2408,23 +3171,97 @@ namespace std } }; -# if defined( VK_USE_PLATFORM_WIN32_KHR ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const & d3D12FenceSubmitInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::CuModuleTexturingModeCreateInfoNVX const & cuModuleTexturingModeCreateInfoNVX ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.waitSemaphoreValuesCount ); - VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.pWaitSemaphoreValues ); - VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.signalSemaphoreValuesCount ); - VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.pSignalSemaphoreValues ); + VULKAN_HPP_HASH_COMBINE( seed, cuModuleTexturingModeCreateInfoNVX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cuModuleTexturingModeCreateInfoNVX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cuModuleTexturingModeCreateInfoNVX.use64bitTexturing ); return seed; } }; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & cudaFunctionCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cudaFunctionCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cudaFunctionCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cudaFunctionCreateInfoNV.module ); + for ( const char * p = cudaFunctionCreateInfoNV.pName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + 
+# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV const & cudaLaunchInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.function ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.gridDimX ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.gridDimY ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.gridDimZ ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.blockDimX ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.blockDimY ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.blockDimZ ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.sharedMemBytes ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.paramCount ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.pParams ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.extraCount ); + VULKAN_HPP_HASH_COMBINE( seed, cudaLaunchInfoNV.pExtras ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & cudaModuleCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cudaModuleCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cudaModuleCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cudaModuleCreateInfoNV.dataSize ); + VULKAN_HPP_HASH_COMBINE( seed, cudaModuleCreateInfoNV.pData ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const & d3D12FenceSubmitInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.waitSemaphoreValuesCount ); + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.pWaitSemaphoreValues ); + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.signalSemaphoreValuesCount ); + VULKAN_HPP_HASH_COMBINE( seed, d3D12FenceSubmitInfoKHR.pSignalSemaphoreValues ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ template <> struct hash @@ -2596,6 +3433,21 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV const & decompressMemoryRegionNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.srcAddress ); + VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.dstAddress ); + VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.compressedSize ); + VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.decompressedSize ); + VULKAN_HPP_HASH_COMBINE( seed, decompressMemoryRegionNV.decompressionMethod ); + return seed; + } + }; + template <> struct hash { @@ -2710,6 +3562,90 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT const & depthBiasInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, depthBiasInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, depthBiasInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
depthBiasInfoEXT.depthBiasConstantFactor ); + VULKAN_HPP_HASH_COMBINE( seed, depthBiasInfoEXT.depthBiasClamp ); + VULKAN_HPP_HASH_COMBINE( seed, depthBiasInfoEXT.depthBiasSlopeFactor ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DepthBiasRepresentationInfoEXT const & depthBiasRepresentationInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, depthBiasRepresentationInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, depthBiasRepresentationInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, depthBiasRepresentationInfoEXT.depthBiasRepresentation ); + VULKAN_HPP_HASH_COMBINE( seed, depthBiasRepresentationInfoEXT.depthBiasExact ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DepthClampRangeEXT const & depthClampRangeEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, depthClampRangeEXT.minDepthClamp ); + VULKAN_HPP_HASH_COMBINE( seed, depthClampRangeEXT.maxDepthClamp ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT const & descriptorAddressInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorAddressInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorAddressInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorAddressInfoEXT.address ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorAddressInfoEXT.range ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorAddressInfoEXT.format ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT const & descriptorBufferBindingInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingInfoEXT.address ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingInfoEXT.usage ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT const & + descriptorBufferBindingPushDescriptorBufferHandleEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingPushDescriptorBufferHandleEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingPushDescriptorBufferHandleEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, descriptorBufferBindingPushDescriptorBufferHandleEXT.buffer ); + return seed; + } + }; + template <> struct hash { @@ -2945,6 +3881,22 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT const & deviceAddressBindingCallbackDataEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.baseAddress ); + VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.size ); + VULKAN_HPP_HASH_COMBINE( seed, deviceAddressBindingCallbackDataEXT.bindingType ); + return seed; + } + }; + template 
<> struct hash { @@ -3113,6 +4065,94 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT const & deviceFaultAddressInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultAddressInfoEXT.addressType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultAddressInfoEXT.reportedAddress ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultAddressInfoEXT.addressPrecision ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT const & deviceFaultCountsEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.addressInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.vendorInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultCountsEXT.vendorBinarySize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT const & deviceFaultVendorInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorInfoEXT.description[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorInfoEXT.vendorFaultCode ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorInfoEXT.vendorFaultData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT const & deviceFaultInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pNext ); + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.description[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pAddressInfos ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pVendorInfos ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultInfoEXT.pVendorBinaryData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT const & deviceFaultVendorBinaryHeaderVersionOneEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.headerSize ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.headerVersion ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.vendorID ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.deviceID ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.driverVersion ); + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.pipelineCacheUUID[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.applicationNameOffset ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.applicationVersion ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.engineNameOffset ); + VULKAN_HPP_HASH_COMBINE( seed, deviceFaultVendorBinaryHeaderVersionOneEXT.engineVersion ); + VULKAN_HPP_HASH_COMBINE( seed, 
deviceFaultVendorBinaryHeaderVersionOneEXT.apiVersion ); + return seed; + } + }; + template <> struct hash { @@ -3271,6 +4311,33 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresource2 const & imageSubresource2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2.imageSubresource ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo const & deviceImageSubresourceInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfo.pCreateInfo ); + VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfo.pSubresource ); + return seed; + } + }; + template <> struct hash { @@ -3317,6 +4384,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DevicePipelineBinaryInternalCacheControlKHR const & devicePipelineBinaryInternalCacheControlKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, devicePipelineBinaryInternalCacheControlKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, devicePipelineBinaryInternalCacheControlKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, devicePipelineBinaryInternalCacheControlKHR.disableInternalCache ); + return seed; + } + }; + template <> struct hash { @@ -3331,15 +4412,14 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR const & deviceQueueGlobalPriorityCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfo const & deviceQueueGlobalPriorityCreateInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfoKHR.globalPriority ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfo.globalPriority ); return seed; } }; @@ -3359,6 +4439,49 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM const & deviceQueueShaderCoreControlCreateInfoARM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueShaderCoreControlCreateInfoARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueShaderCoreControlCreateInfoARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueShaderCoreControlCreateInfoARM.shaderCoreCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG const & directDriverLoadingInfoLUNARG ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingInfoLUNARG.sType ); + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingInfoLUNARG.pNext ); + 
VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingInfoLUNARG.flags ); + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingInfoLUNARG.pfnGetInstanceProcAddr ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG const & directDriverLoadingListLUNARG ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingListLUNARG.sType ); + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingListLUNARG.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingListLUNARG.mode ); + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingListLUNARG.driverCount ); + VULKAN_HPP_HASH_COMBINE( seed, directDriverLoadingListLUNARG.pDrivers ); + return seed; + } + }; + # if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) template <> struct hash @@ -3453,6 +4576,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeStereoPropertiesNV const & displayModeStereoPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayModeStereoPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeStereoPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeStereoPropertiesNV.hdmi3DSupported ); + return seed; + } + }; + template <> struct hash { @@ -3619,6 +4755,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoCreateInfoNV const & displaySurfaceStereoCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceStereoCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceStereoCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceStereoCreateInfoNV.stereoType ); + return seed; + } + }; + template <> struct hash { @@ -3648,6 +4797,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DrawIndirectCountIndirectCommandEXT const & drawIndirectCountIndirectCommandEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCountIndirectCommandEXT.bufferAddress ); + VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCountIndirectCommandEXT.stride ); + VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCountIndirectCommandEXT.commandCount ); + return seed; + } + }; + template <> struct hash { @@ -3741,26 +4903,79 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const & exportFenceCreateInfo ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const & pipelineLibraryCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, exportFenceCreateInfo.sType ); - VULKAN_HPP_HASH_COMBINE( seed, exportFenceCreateInfo.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, exportFenceCreateInfo.handleTypes ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.libraryCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pLibraries ); return seed; } }; -# if defined( VK_USE_PLATFORM_WIN32_KHR ) +# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( 
VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const & exportFenceWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & executionGraphPipelineCreateInfoAMDX ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.flags ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.stageCount ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.pStages ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.pLibraryInfo ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.layout ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.basePipelineHandle ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.basePipelineIndex ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX const & executionGraphPipelineScratchSizeAMDX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.minSize ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.maxSize ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.sizeGranularity ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const & exportFenceCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportFenceCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, exportFenceCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, exportFenceCreateInfo.handleTypes ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const & exportFenceWin32HandleInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.sType ); VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.pNext ); VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.pAttributes ); VULKAN_HPP_HASH_COMBINE( seed, exportFenceWin32HandleInfoKHR.dwAccess ); @@ -4058,6 +5273,21 @@ namespace std }; # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalFormatQNX const & externalFormatQNX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, externalFormatQNX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, externalFormatQNX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, externalFormatQNX.externalFormat ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + template <> struct hash { @@ -4100,6 +5330,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t 
operator()( VULKAN_HPP_NAMESPACE::ExternalMemoryAcquireUnmodifiedEXT const & externalMemoryAcquireUnmodifiedEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, externalMemoryAcquireUnmodifiedEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, externalMemoryAcquireUnmodifiedEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, externalMemoryAcquireUnmodifiedEXT.acquireUnmodifiedMemory );
+      return seed;
+    }
+  };
+
   template <>
   struct hash
   {
@@ -4267,6 +5510,27 @@
     }
   };
 
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::FrameBoundaryEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::FrameBoundaryEXT const & frameBoundaryEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.flags );
+      VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.frameID );
+      VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.imageCount );
+      VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.pImages );
+      VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.bufferCount );
+      VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.pBuffers );
+      VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.tagName );
+      VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.tagSize );
+      VULKAN_HPP_HASH_COMBINE( seed, frameBoundaryEXT.pTag );
+      return seed;
+    }
+  };
+
   template <>
   struct hash
   {
@@ -4335,6 +5599,28 @@
     }
   };
 
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT const & generatedCommandsInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.shaderStages );
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.indirectExecutionSet );
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.indirectCommandsLayout );
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.indirectAddress );
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.indirectAddressSize );
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.preprocessAddress );
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.preprocessSize );
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.maxSequenceCount );
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.sequenceCountAddress );
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.maxDrawCount );
+      return seed;
+    }
+  };
+
   template <>
   struct hash
   {
@@ -4372,6 +5658,23 @@
     }
   };
 
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT const & generatedCommandsMemoryRequirementsInfoEXT ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoEXT.indirectExecutionSet );
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoEXT.indirectCommandsLayout );
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoEXT.maxSequenceCount );
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoEXT.maxDrawCount );
+      return seed;
+    }
+  };
+
   template <>
   struct hash
   {
@@ -4389,6 +5692,73 @@
     }
   };
 
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::GeneratedCommandsPipelineInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::GeneratedCommandsPipelineInfoEXT const & generatedCommandsPipelineInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsPipelineInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsPipelineInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsPipelineInfoEXT.pipeline );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::GeneratedCommandsShaderInfoEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::GeneratedCommandsShaderInfoEXT const & generatedCommandsShaderInfoEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsShaderInfoEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsShaderInfoEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsShaderInfoEXT.shaderCount );
+      VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsShaderInfoEXT.pShaders );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV const & latencyTimingsFrameReportNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.presentID );
+      VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.inputSampleTimeUs );
+      VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.simStartTimeUs );
+      VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.simEndTimeUs );
+      VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.renderSubmitStartTimeUs );
+      VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.renderSubmitEndTimeUs );
+      VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.presentStartTimeUs );
+      VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.presentEndTimeUs );
+      VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.driverStartTimeUs );
+      VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.driverEndTimeUs );
+      VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.osRenderQueueStartTimeUs );
+      VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.osRenderQueueEndTimeUs );
+      VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.gpuRenderStartTimeUs );
+      VULKAN_HPP_HASH_COMBINE( seed, latencyTimingsFrameReportNV.gpuRenderEndTimeUs );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV const & getLatencyMarkerInfoNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, getLatencyMarkerInfoNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, getLatencyMarkerInfoNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, getLatencyMarkerInfoNV.timingCount );
+      VULKAN_HPP_HASH_COMBINE( seed, getLatencyMarkerInfoNV.pTimings );
+      return seed;
+    }
+  };
+
   template <>
   struct hash
   {
@@ -4721,6 +6091,20 @@
     }
   };
 
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::HdrVividDynamicMetadataHUAWEI>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::HdrVividDynamicMetadataHUAWEI const & hdrVividDynamicMetadataHUAWEI ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, hdrVividDynamicMetadataHUAWEI.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, hdrVividDynamicMetadataHUAWEI.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, hdrVividDynamicMetadataHUAWEI.dynamicMetadataSize );
+      VULKAN_HPP_HASH_COMBINE( seed,
hdrVividDynamicMetadataHUAWEI.pDynamicMetadata ); + return seed; + } + }; + template <> struct hash { @@ -4734,6 +6118,36 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQuery const & hostImageCopyDevicePerformanceQuery ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQuery.sType ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQuery.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQuery.optimalDeviceAccess ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQuery.identicalMemoryLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo const & hostImageLayoutTransitionInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfo.image ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfo.oldLayout ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfo.newLayout ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfo.subresourceRange ); + return seed; + } + }; + # if defined( VK_USE_PLATFORM_IOS_MVK ) template <> struct hash @@ -4750,6 +6164,19 @@ namespace std }; # endif /*VK_USE_PLATFORM_IOS_MVK*/ + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageAlignmentControlCreateInfoMESA const & imageAlignmentControlCreateInfoMESA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageAlignmentControlCreateInfoMESA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageAlignmentControlCreateInfoMESA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageAlignmentControlCreateInfoMESA.maximumRequestedAlignment ); + return seed; + } + }; + template <> struct hash { @@ -4770,6 +6197,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT const & imageCaptureDescriptorDataInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageCaptureDescriptorDataInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageCaptureDescriptorDataInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageCaptureDescriptorDataInfoEXT.image ); + return seed; + } + }; + template <> struct hash { @@ -5058,19 +6498,6 @@ namespace std } }; - template <> - struct hash - { - std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresource2EXT const & imageSubresource2EXT ) const VULKAN_HPP_NOEXCEPT - { - std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2EXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2EXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2EXT.imageSubresource ); - return seed; - } - }; - template <> struct hash { @@ -5111,6 +6538,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT const & imageViewCaptureDescriptorDataInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageViewCaptureDescriptorDataInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewCaptureDescriptorDataInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
imageViewCaptureDescriptorDataInfoEXT.imageView ); + return seed; + } + }; + template <> struct hash { @@ -5172,6 +6613,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageViewSlicedCreateInfoEXT const & imageViewSlicedCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageViewSlicedCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewSlicedCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewSlicedCreateInfoEXT.sliceOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageViewSlicedCreateInfoEXT.sliceCount ); + return seed; + } + }; + template <> struct hash { @@ -5280,6 +6735,22 @@ namespace std } }; +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportMemoryMetalHandleInfoEXT const & importMemoryMetalHandleInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importMemoryMetalHandleInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryMetalHandleInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryMetalHandleInfoEXT.handleType ); + VULKAN_HPP_HASH_COMBINE( seed, importMemoryMetalHandleInfoEXT.handle ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + # if defined( VK_USE_PLATFORM_WIN32_KHR ) template <> struct hash @@ -5390,6 +6861,21 @@ namespace std }; # endif /*VK_USE_PLATFORM_METAL_EXT*/ +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImportScreenBufferInfoQNX const & importScreenBufferInfoQNX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, importScreenBufferInfoQNX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, importScreenBufferInfoQNX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, importScreenBufferInfoQNX.buffer ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + template <> struct hash { @@ -5444,6 +6930,82 @@ namespace std }; # endif /*VK_USE_PLATFORM_FUCHSIA*/ + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsExecutionSetTokenEXT const & indirectCommandsExecutionSetTokenEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsExecutionSetTokenEXT.type ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsExecutionSetTokenEXT.shaderStages ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsIndexBufferTokenEXT const & indirectCommandsIndexBufferTokenEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsIndexBufferTokenEXT.mode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PushConstantRange const & pushConstantRange ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.stageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.offset ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsPushConstantTokenEXT const & indirectCommandsPushConstantTokenEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsPushConstantTokenEXT.updateRange ); + return seed; + } + }; + + 
template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsVertexBufferTokenEXT const & indirectCommandsVertexBufferTokenEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsVertexBufferTokenEXT.vertexBindingUnit ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT const & indirectCommandsLayoutCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoEXT.shaderStages ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoEXT.indirectStride ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoEXT.pipelineLayout ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoEXT.tokenCount ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoEXT.pTokens ); + return seed; + } + }; + template <> struct hash { @@ -5488,20 +7050,67 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const & initializePerformanceApiInfoINTEL ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT const & indirectExecutionSetPipelineInfoEXT ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, initializePerformanceApiInfoINTEL.sType ); - VULKAN_HPP_HASH_COMBINE( seed, initializePerformanceApiInfoINTEL.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, initializePerformanceApiInfoINTEL.pUserData ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetPipelineInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetPipelineInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetPipelineInfoEXT.initialPipeline ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetPipelineInfoEXT.maxPipelineCount ); return seed; } }; template <> - struct hash + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT const & indirectExecutionSetShaderLayoutInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderLayoutInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderLayoutInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderLayoutInfoEXT.setLayoutCount ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderLayoutInfoEXT.pSetLayouts ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT const & indirectExecutionSetShaderInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderInfoEXT.shaderCount ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderInfoEXT.pInitialShaders ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderInfoEXT.pSetLayoutInfos ); + VULKAN_HPP_HASH_COMBINE( seed, 
indirectExecutionSetShaderInfoEXT.maxShaderCount ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderInfoEXT.pushConstantRangeCount ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderInfoEXT.pPushConstantRanges ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const & initializePerformanceApiInfoINTEL ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, initializePerformanceApiInfoINTEL.sType ); + VULKAN_HPP_HASH_COMBINE( seed, initializePerformanceApiInfoINTEL.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, initializePerformanceApiInfoINTEL.pUserData ); + return seed; + } + }; + + template <> + struct hash { std::size_t operator()( VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference const & inputAttachmentAspectReference ) const VULKAN_HPP_NOEXCEPT { @@ -5543,6 +7152,62 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::LatencySleepInfoNV const & latencySleepInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, latencySleepInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, latencySleepInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, latencySleepInfoNV.signalSemaphore ); + VULKAN_HPP_HASH_COMBINE( seed, latencySleepInfoNV.value ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV const & latencySleepModeInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, latencySleepModeInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, latencySleepModeInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, latencySleepModeInfoNV.lowLatencyMode ); + VULKAN_HPP_HASH_COMBINE( seed, latencySleepModeInfoNV.lowLatencyBoost ); + VULKAN_HPP_HASH_COMBINE( seed, latencySleepModeInfoNV.minimumIntervalUs ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::LatencySubmissionPresentIdNV const & latencySubmissionPresentIdNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, latencySubmissionPresentIdNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, latencySubmissionPresentIdNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, latencySubmissionPresentIdNV.presentID ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::LatencySurfaceCapabilitiesNV const & latencySurfaceCapabilitiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, latencySurfaceCapabilitiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, latencySurfaceCapabilitiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, latencySurfaceCapabilitiesNV.presentModeCount ); + VULKAN_HPP_HASH_COMBINE( seed, latencySurfaceCapabilitiesNV.pPresentModes ); + return seed; + } + }; + template <> struct hash { @@ -5563,6 +7228,41 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::LayerSettingEXT const & layerSettingEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + for ( const char * p = layerSettingEXT.pLayerName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + for ( const char * p = layerSettingEXT.pSettingName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, layerSettingEXT.type ); + VULKAN_HPP_HASH_COMBINE( seed, 
layerSettingEXT.valueCount ); + VULKAN_HPP_HASH_COMBINE( seed, layerSettingEXT.pValues ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::LayerSettingsCreateInfoEXT const & layerSettingsCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, layerSettingsCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, layerSettingsCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, layerSettingsCreateInfoEXT.settingCount ); + VULKAN_HPP_HASH_COMBINE( seed, layerSettingsCreateInfoEXT.pSettings ); + return seed; + } + }; + # if defined( VK_USE_PLATFORM_MACOS_MVK ) template <> struct hash @@ -5636,6 +7336,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryBarrierAccessFlags3KHR const & memoryBarrierAccessFlags3KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrierAccessFlags3KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrierAccessFlags3KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrierAccessFlags3KHR.srcAccessMask3 ); + VULKAN_HPP_HASH_COMBINE( seed, memoryBarrierAccessFlags3KHR.dstAccessMask3 ); + return seed; + } + }; + template <> struct hash { @@ -5707,6 +7421,22 @@ namespace std } }; +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryGetMetalHandleInfoEXT const & memoryGetMetalHandleInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryGetMetalHandleInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetMetalHandleInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetMetalHandleInfoEXT.memory ); + VULKAN_HPP_HASH_COMBINE( seed, memoryGetMetalHandleInfoEXT.handleType ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + template <> struct hash { @@ -5778,6 +7508,50 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryMapInfo const & memoryMapInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfo.memory ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfo.offset ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfo.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryMapPlacedInfoEXT const & memoryMapPlacedInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryMapPlacedInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMapPlacedInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMapPlacedInfoEXT.pPlacedAddress ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryMetalHandlePropertiesEXT const & memoryMetalHandlePropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryMetalHandlePropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMetalHandlePropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMetalHandlePropertiesEXT.memoryTypeBits ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + template <> struct hash { @@ -5843,6 +7617,20 @@ namespace std 
} }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryUnmapInfo const & memoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryUnmapInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryUnmapInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryUnmapInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, memoryUnmapInfo.memory ); + return seed; + } + }; + # if defined( VK_USE_PLATFORM_WIN32_KHR ) template <> struct hash @@ -5889,6 +7677,65 @@ namespace std }; # endif /*VK_USE_PLATFORM_METAL_EXT*/ + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT const & micromapBuildSizesInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.micromapSize ); + VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.buildScratchSize ); + VULKAN_HPP_HASH_COMBINE( seed, micromapBuildSizesInfoEXT.discardable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & micromapCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.createFlags ); + VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.buffer ); + VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.offset ); + VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.size ); + VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.type ); + VULKAN_HPP_HASH_COMBINE( seed, micromapCreateInfoEXT.deviceAddress ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapTriangleEXT const & micromapTriangleEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, micromapTriangleEXT.dataOffset ); + VULKAN_HPP_HASH_COMBINE( seed, micromapTriangleEXT.subdivisionLevel ); + VULKAN_HPP_HASH_COMBINE( seed, micromapTriangleEXT.format ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT const & micromapVersionInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, micromapVersionInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, micromapVersionInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, micromapVersionInfoEXT.pVersionData ); + return seed; + } + }; + template <> struct hash { @@ -5956,6 +7803,22 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & multiviewPerViewRenderAreasRenderPassBeginInfoQCOM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewRenderAreasRenderPassBeginInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewRenderAreasRenderPassBeginInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewRenderAreasRenderPassBeginInfoQCOM.perViewRenderAreaCount ); + VULKAN_HPP_HASH_COMBINE( seed, multiviewPerViewRenderAreasRenderPassBeginInfoQCOM.pPerViewRenderAreas ); + return seed; + } + }; + template <> struct hash { @@ -5983,153 +7846,326 @@ namespace std 
}; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE const & pastPresentationTimingGOOGLE ) const VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT const & opaqueCaptureDescriptorDataCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.presentID ); - VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.desiredPresentTime ); - VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.actualPresentTime ); - VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.earliestPresentTime ); - VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.presentMargin ); + VULKAN_HPP_HASH_COMBINE( seed, opaqueCaptureDescriptorDataCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, opaqueCaptureDescriptorDataCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, opaqueCaptureDescriptorDataCreateInfoEXT.opaqueCaptureDescriptorData ); return seed; } }; template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & performanceConfigurationAcquireInfoINTEL ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV const & opticalFlowExecuteInfoNV ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, performanceConfigurationAcquireInfoINTEL.sType ); - VULKAN_HPP_HASH_COMBINE( seed, performanceConfigurationAcquireInfoINTEL.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, performanceConfigurationAcquireInfoINTEL.type ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.flags ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowExecuteInfoNV.pRegions ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const & performanceCounterDescriptionKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV const & opticalFlowImageFormatInfoNV ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.flags ); - for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) - { - VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.name[i] ); - } - for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) - { - VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.category[i] ); - } - for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) - { - VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.description[i] ); - } + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatInfoNV.usage ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const & performanceCounterKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV const & opticalFlowImageFormatPropertiesNV ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.unit ); - VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.scope ); - VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.storage ); - for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) - { - VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.uuid[i] ); - } + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowImageFormatPropertiesNV.format ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const & performanceMarkerInfoINTEL ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & opticalFlowSessionCreateInfoNV ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, performanceMarkerInfoINTEL.sType ); - VULKAN_HPP_HASH_COMBINE( seed, performanceMarkerInfoINTEL.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, performanceMarkerInfoINTEL.marker ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.width ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.height ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.imageFormat ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.flowVectorFormat ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.costFormat ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.outputGridSize ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.hintGridSize ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.performanceLevel ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreateInfoNV.flags ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const & performanceOverrideInfoINTEL ) const VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV const & opticalFlowSessionCreatePrivateDataInfoNV ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.sType ); - VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.type ); - VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.enable ); - VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.parameter ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.id ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.size ); + VULKAN_HPP_HASH_COMBINE( seed, opticalFlowSessionCreatePrivateDataInfoNV.pPrivateData ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( 
VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const & performanceQuerySubmitInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV const & outOfBandQueueTypeInfoNV ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, performanceQuerySubmitInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, performanceQuerySubmitInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, performanceQuerySubmitInfoKHR.counterPassIndex ); + VULKAN_HPP_HASH_COMBINE( seed, outOfBandQueueTypeInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, outOfBandQueueTypeInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, outOfBandQueueTypeInfoNV.queueType ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const & performanceStreamMarkerInfoINTEL ) const VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureFlagsNV const & partitionedAccelerationStructureFlagsNV ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, performanceStreamMarkerInfoINTEL.sType ); - VULKAN_HPP_HASH_COMBINE( seed, performanceStreamMarkerInfoINTEL.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, performanceStreamMarkerInfoINTEL.marker ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureFlagsNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureFlagsNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureFlagsNV.enablePartitionTranslation ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const & physicalDevice16BitStorageFeatures ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureUpdateInstanceDataNV const & + partitionedAccelerationStructureUpdateInstanceDataNV ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.storageBuffer16BitAccess ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.uniformAndStorageBuffer16BitAccess ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.storagePushConstant16 ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.storageInputOutput16 ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureUpdateInstanceDataNV.instanceIndex ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureUpdateInstanceDataNV.instanceContributionToHitGroupIndex ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureUpdateInstanceDataNV.accelerationStructure ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT const & physicalDevice4444FormatsFeaturesEXT ) const VULKAN_HPP_NOEXCEPT - { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureWriteInstanceDataNV const & partitionedAccelerationStructureWriteInstanceDataNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureWriteInstanceDataNV.transform ); + for ( size_t i = 0; i < 6; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, 
partitionedAccelerationStructureWriteInstanceDataNV.explicitAABB[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureWriteInstanceDataNV.instanceID ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureWriteInstanceDataNV.instanceMask ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureWriteInstanceDataNV.instanceContributionToHitGroupIndex ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureWriteInstanceDataNV.instanceFlags ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureWriteInstanceDataNV.instanceIndex ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureWriteInstanceDataNV.partitionIndex ); + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureWriteInstanceDataNV.accelerationStructure ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureWritePartitionTranslationDataNV const & + partitionedAccelerationStructureWritePartitionTranslationDataNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureWritePartitionTranslationDataNV.partitionIndex ); + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, partitionedAccelerationStructureWritePartitionTranslationDataNV.partitionTranslation[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE const & pastPresentationTimingGOOGLE ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.presentID ); + VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.desiredPresentTime ); + VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.actualPresentTime ); + VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.earliestPresentTime ); + VULKAN_HPP_HASH_COMBINE( seed, pastPresentationTimingGOOGLE.presentMargin ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & performanceConfigurationAcquireInfoINTEL ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, performanceConfigurationAcquireInfoINTEL.sType ); + VULKAN_HPP_HASH_COMBINE( seed, performanceConfigurationAcquireInfoINTEL.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, performanceConfigurationAcquireInfoINTEL.type ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const & performanceCounterDescriptionKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.flags ); + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.name[i] ); + } + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.category[i] ); + } + for ( size_t i = 0; i < VK_MAX_DESCRIPTION_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterDescriptionKHR.description[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const & performanceCounterKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.unit ); + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.scope ); + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.storage ); + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, performanceCounterKHR.uuid[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const & performanceMarkerInfoINTEL ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, performanceMarkerInfoINTEL.sType ); + VULKAN_HPP_HASH_COMBINE( seed, performanceMarkerInfoINTEL.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, performanceMarkerInfoINTEL.marker ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const & performanceOverrideInfoINTEL ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.sType ); + VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.type ); + VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.enable ); + VULKAN_HPP_HASH_COMBINE( seed, performanceOverrideInfoINTEL.parameter ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const & performanceQuerySubmitInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, performanceQuerySubmitInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, performanceQuerySubmitInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, performanceQuerySubmitInfoKHR.counterPassIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const & performanceStreamMarkerInfoINTEL ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, performanceStreamMarkerInfoINTEL.sType ); + VULKAN_HPP_HASH_COMBINE( seed, performanceStreamMarkerInfoINTEL.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, performanceStreamMarkerInfoINTEL.marker ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const & physicalDevice16BitStorageFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.storageBuffer16BitAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.uniformAndStorageBuffer16BitAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.storagePushConstant16 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevice16BitStorageFeatures.storageInputOutput16 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT const & physicalDevice4444FormatsFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { std::size_t seed = 0; 
VULKAN_HPP_HASH_COMBINE( seed, physicalDevice4444FormatsFeaturesEXT.sType ); VULKAN_HPP_HASH_COMBINE( seed, physicalDevice4444FormatsFeaturesEXT.pNext ); @@ -6206,6 +8242,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT const & physicalDeviceAddressBindingReportFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAddressBindingReportFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAddressBindingReportFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAddressBindingReportFeaturesEXT.reportAddressBinding ); + return seed; + } + }; + template <> struct hash { @@ -6220,6 +8270,33 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAntiLagFeaturesAMD const & physicalDeviceAntiLagFeaturesAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAntiLagFeaturesAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAntiLagFeaturesAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAntiLagFeaturesAMD.antiLag ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & + physicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT.attachmentFeedbackLoopDynamicState ); + return seed; + } + }; + template <> struct hash { @@ -6315,111 +8392,354 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const & physicalDeviceCoherentMemoryFeaturesAMD ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterAccelerationStructureFeaturesNV const & + physicalDeviceClusterAccelerationStructureFeaturesNV ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoherentMemoryFeaturesAMD.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoherentMemoryFeaturesAMD.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoherentMemoryFeaturesAMD.deviceCoherentMemory ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructureFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructureFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructureFeaturesNV.clusterAccelerationStructure ); return seed; } }; template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT const & physicalDeviceColorWriteEnableFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterAccelerationStructurePropertiesNV const & + physicalDeviceClusterAccelerationStructurePropertiesNV ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceColorWriteEnableFeaturesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceColorWriteEnableFeaturesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceColorWriteEnableFeaturesEXT.colorWriteEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructurePropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructurePropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructurePropertiesNV.maxVerticesPerCluster ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructurePropertiesNV.maxTrianglesPerCluster ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructurePropertiesNV.clusterScratchByteAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructurePropertiesNV.clusterByteAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructurePropertiesNV.clusterTemplateByteAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructurePropertiesNV.clusterBottomLevelByteAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructurePropertiesNV.clusterTemplateBoundsByteAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterAccelerationStructurePropertiesNV.maxClusterGeometryIndex ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const & physicalDeviceComputeShaderDerivativesFeaturesNV ) + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & physicalDeviceClusterCullingShaderFeaturesHUAWEI ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesNV.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesNV.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesNV.computeDerivativeGroupQuads ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesNV.computeDerivativeGroupLinear ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderFeaturesHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderFeaturesHUAWEI.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderFeaturesHUAWEI.clustercullingShader ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderFeaturesHUAWEI.multiviewClusterCullingShader ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const & physicalDeviceConditionalRenderingFeaturesEXT ) const + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & physicalDeviceClusterCullingShaderPropertiesHUAWEI ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.conditionalRendering ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.inheritedConditionalRendering ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderPropertiesHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderPropertiesHUAWEI.pNext ); + for ( size_t i = 0; i < 3; ++i 
) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderPropertiesHUAWEI.maxWorkGroupCount[i] ); + } + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderPropertiesHUAWEI.maxWorkGroupSize[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderPropertiesHUAWEI.maxOutputClusterCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderPropertiesHUAWEI.indirectBufferOffsetAlignment ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const & - physicalDeviceConservativeRasterizationPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & physicalDeviceClusterCullingShaderVrsFeaturesHUAWEI ) const + VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.primitiveOverestimationSize ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.maxExtraPrimitiveOverestimationSize ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.extraPrimitiveOverestimationSizeGranularity ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.primitiveUnderestimation ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.conservativePointAndLineRasterization ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.degenerateTrianglesRasterized ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.degenerateLinesRasterized ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.fullyCoveredFragmentShaderInputVariable ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.conservativeRasterizationPostDepthCoverage ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderVrsFeaturesHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderVrsFeaturesHUAWEI.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceClusterCullingShaderVrsFeaturesHUAWEI.clusterShadingRate ); return seed; } }; template <> - struct hash + struct hash { std::size_t - operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const & physicalDeviceCooperativeMatrixFeaturesNV ) const VULKAN_HPP_NOEXCEPT + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const & physicalDeviceCoherentMemoryFeaturesAMD ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.cooperativeMatrix ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.cooperativeMatrixRobustBufferAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoherentMemoryFeaturesAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCoherentMemoryFeaturesAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceCoherentMemoryFeaturesAMD.deviceCoherentMemory );
       return seed;
     }
   };
 
   template <>
-  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT>
   {
-    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const & physicalDeviceCooperativeMatrixPropertiesNV ) const
-      VULKAN_HPP_NOEXCEPT
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT const & physicalDeviceColorWriteEnableFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
     {
       std::size_t seed = 0;
-      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesNV.sType );
-      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesNV.pNext );
-      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesNV.cooperativeMatrixSupportedStages );
-      return seed;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceColorWriteEnableFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceColorWriteEnableFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceColorWriteEnableFeaturesEXT.colorWriteEnable );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCommandBufferInheritanceFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCommandBufferInheritanceFeaturesNV const & physicalDeviceCommandBufferInheritanceFeaturesNV )
+      const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCommandBufferInheritanceFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCommandBufferInheritanceFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCommandBufferInheritanceFeaturesNV.commandBufferInheritance );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & physicalDeviceComputeShaderDerivativesFeaturesKHR )
+      const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesKHR.computeDerivativeGroupQuads );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesKHR.computeDerivativeGroupLinear );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesPropertiesKHR>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & physicalDeviceComputeShaderDerivativesPropertiesKHR ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesPropertiesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesPropertiesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesPropertiesKHR.meshAndTaskShaderDerivatives );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const & physicalDeviceConditionalRenderingFeaturesEXT ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.conditionalRendering );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConditionalRenderingFeaturesEXT.inheritedConditionalRendering );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const &
+                              physicalDeviceConservativeRasterizationPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.primitiveOverestimationSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.maxExtraPrimitiveOverestimationSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.extraPrimitiveOverestimationSizeGranularity );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.primitiveUnderestimation );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.conservativePointAndLineRasterization );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.degenerateTrianglesRasterized );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.degenerateLinesRasterized );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.fullyCoveredFragmentShaderInputVariable );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceConservativeRasterizationPropertiesEXT.conservativeRasterizationPostDepthCoverage );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2FeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2FeaturesNV const & physicalDeviceCooperativeMatrix2FeaturesNV ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixWorkgroupScope );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixFlexibleDimensions );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixReductions );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixConversions );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixPerElementOperations );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixTensorAddressing );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixBlockLoads );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2PropertiesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2PropertiesNV const & physicalDeviceCooperativeMatrix2PropertiesNV ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2PropertiesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2PropertiesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2PropertiesNV.cooperativeMatrixWorkgroupScopeMaxWorkgroupSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2PropertiesNV.cooperativeMatrixFlexibleDimensionsMaxDimension );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2PropertiesNV.cooperativeMatrixWorkgroupScopeReservedSharedMemory );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesKHR const & physicalDeviceCooperativeMatrixFeaturesKHR ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesKHR.cooperativeMatrix );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesKHR.cooperativeMatrixRobustBufferAccess );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const & physicalDeviceCooperativeMatrixFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.cooperativeMatrix );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixFeaturesNV.cooperativeMatrixRobustBufferAccess );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesKHR>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesKHR const & physicalDeviceCooperativeMatrixPropertiesKHR ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesKHR.cooperativeMatrixSupportedStages );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const & physicalDeviceCooperativeMatrixPropertiesNV ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrixPropertiesNV.cooperativeMatrixSupportedStages );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeVectorFeaturesNV>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeVectorFeaturesNV const & physicalDeviceCooperativeVectorFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeVectorFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeVectorFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeVectorFeaturesNV.cooperativeVector );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeVectorFeaturesNV.cooperativeVectorTraining );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeVectorPropertiesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeVectorPropertiesNV const & physicalDeviceCooperativeVectorPropertiesNV ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeVectorPropertiesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeVectorPropertiesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE(
seed, physicalDeviceCooperativeVectorPropertiesNV.cooperativeVectorSupportedStages );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeVectorPropertiesNV.cooperativeVectorTrainingFloat16Accumulation );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeVectorPropertiesNV.cooperativeVectorTrainingFloat32Accumulation );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeVectorPropertiesNV.maxCooperativeVectorComponents );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV const & physicalDeviceCopyMemoryIndirectFeaturesNV ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectFeaturesNV.indirectCopy );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV const & physicalDeviceCopyMemoryIndirectPropertiesNV ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectPropertiesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectPropertiesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCopyMemoryIndirectPropertiesNV.supportedQueues );
+      return seed;
     }
   };
@@ -6451,6 +8771,66 @@ namespace std
     }
   };
 
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicClampFeaturesQCOM>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicClampFeaturesQCOM const & physicalDeviceCubicClampFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCubicClampFeaturesQCOM.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCubicClampFeaturesQCOM.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCubicClampFeaturesQCOM.cubicRangeClamp );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicWeightsFeaturesQCOM>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicWeightsFeaturesQCOM const & physicalDeviceCubicWeightsFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCubicWeightsFeaturesQCOM.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCubicWeightsFeaturesQCOM.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCubicWeightsFeaturesQCOM.selectableCubicWeights );
+      return seed;
+    }
+  };
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV const & physicalDeviceCudaKernelLaunchFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchFeaturesNV.cudaKernelLaunchFeatures );
+      return seed;
+    }
+  };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+# if defined( VK_ENABLE_BETA_EXTENSIONS )
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV const & physicalDeviceCudaKernelLaunchPropertiesNV ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchPropertiesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchPropertiesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchPropertiesNV.computeCapabilityMinor );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCudaKernelLaunchPropertiesNV.computeCapabilityMajor );
+      return seed;
+    }
+  };
+# endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
   template <>
   struct hash
   {
@@ -6495,15 +8875,46 @@ namespace std
   };
 
   template <>
-  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthBiasControlFeaturesEXT>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthBiasControlFeaturesEXT const & physicalDeviceDepthBiasControlFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthBiasControlFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthBiasControlFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthBiasControlFeaturesEXT.depthBiasControl );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthBiasControlFeaturesEXT.leastRepresentableValueForceUnormRepresentation );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthBiasControlFeaturesEXT.floatRepresentation );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthBiasControlFeaturesEXT.depthBiasExact );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampControlFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampControlFeaturesEXT const & physicalDeviceDepthClampControlFeaturesEXT ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampControlFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampControlFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampControlFeaturesEXT.depthClampControl );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesKHR>
   {
-    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT const & physicalDeviceDepthClampZeroOneFeaturesEXT ) const
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesKHR const & physicalDeviceDepthClampZeroOneFeaturesKHR ) const
       VULKAN_HPP_NOEXCEPT
     {
       std::size_t seed = 0;
-      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampZeroOneFeaturesEXT.sType );
-      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampZeroOneFeaturesEXT.pNext );
-      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampZeroOneFeaturesEXT.depthClampZeroOne );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampZeroOneFeaturesKHR.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampZeroOneFeaturesKHR.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampZeroOneFeaturesKHR.depthClampZeroOne );
       return seed;
     }
   };
@@ -6553,6 +8964,83 @@ namespace std
     }
   };
 
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const &
+                              physicalDeviceDescriptorBufferDensityMapPropertiesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferDensityMapPropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferDensityMapPropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferDensityMapPropertiesEXT.combinedImageSamplerDensityMapDescriptorSize );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferFeaturesEXT>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferFeaturesEXT const & physicalDeviceDescriptorBufferFeaturesEXT ) const VULKAN_HPP_NOEXCEPT
+    {
+
      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferFeaturesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferFeaturesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferFeaturesEXT.descriptorBuffer );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferFeaturesEXT.descriptorBufferCaptureReplay );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferFeaturesEXT.descriptorBufferImageLayoutIgnored );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferFeaturesEXT.descriptorBufferPushDescriptors );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT const & physicalDeviceDescriptorBufferPropertiesEXT ) const
+      VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.combinedImageSamplerDescriptorSingleArray );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.bufferlessPushDescriptors );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.allowSamplerImageViewPostSubmitCreation );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.descriptorBufferOffsetAlignment );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxDescriptorBufferBindings );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxResourceDescriptorBufferBindings );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxSamplerDescriptorBufferBindings );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxEmbeddedImmutableSamplerBindings );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxEmbeddedImmutableSamplers );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.bufferCaptureReplayDescriptorDataSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.imageCaptureReplayDescriptorDataSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.imageViewCaptureReplayDescriptorDataSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.samplerCaptureReplayDescriptorDataSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.accelerationStructureCaptureReplayDescriptorDataSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.samplerDescriptorSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.combinedImageSamplerDescriptorSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.sampledImageDescriptorSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.storageImageDescriptorSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.uniformTexelBufferDescriptorSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.robustUniformTexelBufferDescriptorSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.storageTexelBufferDescriptorSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.robustStorageTexelBufferDescriptorSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.uniformBufferDescriptorSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.robustUniformBufferDescriptorSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.storageBufferDescriptorSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.robustStorageBufferDescriptorSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.inputAttachmentDescriptorSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.accelerationStructureDescriptorSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxSamplerDescriptorBufferRange );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.maxResourceDescriptorBufferRange );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.samplerDescriptorBufferAddressSpaceSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.resourceDescriptorBufferAddressSpaceSize );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorBufferPropertiesEXT.descriptorBufferAddressSpaceSize );
+      return seed;
+    }
+  };
+
   template <>
   struct hash
   {
@@ -6623,10 +9111,24 @@ namespace std
   };
 
   template <>
-  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV>
   {
-    std::size_t
-      operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & physicalDeviceDescriptorSetHostMappingFeaturesVALVE ) const
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const &
+                              physicalDeviceDescriptorPoolOverallocationFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorPoolOverallocationFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorPoolOverallocationFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDescriptorPoolOverallocationFeaturesNV.descriptorPoolOverallocation );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>
+  {
+    std::size_t
+      operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & physicalDeviceDescriptorSetHostMappingFeaturesVALVE ) const
       VULKAN_HPP_NOEXCEPT
     {
       std::size_t seed = 0;
@@ -6637,6 +9139,37 @@ namespace std
     }
   };
 
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const &
+                              physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV ) const VULKAN_HPP_NOEXCEPT
+    {
+      std::size_t seed = 0;
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.sType );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.pNext );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.deviceGeneratedCompute );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.deviceGeneratedComputePipelines );
+      VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.deviceGeneratedComputeCaptureReplay );
+      return seed;
+    }
+  };
+
+  template <>
+  struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT>
+  {
+    std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & physicalDeviceDeviceGeneratedCommandsFeaturesEXT )
+      const VULKAN_HPP_NOEXCEPT
+    {
+
std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesEXT.deviceGeneratedCommands ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesEXT.dynamicGeneratedPipelineLayout ); + return seed; + } + }; + template <> struct hash { @@ -6651,6 +9184,32 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & physicalDeviceDeviceGeneratedCommandsPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.maxIndirectPipelineCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.maxIndirectShaderObjectCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.maxIndirectSequenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.maxIndirectCommandsTokenCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.maxIndirectCommandsTokenOffset ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.maxIndirectCommandsIndirectStride ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.supportedIndirectCommandsInputModes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.supportedIndirectCommandsShaderStages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.supportedIndirectCommandsShaderStagesPipelineBinding ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.supportedIndirectCommandsShaderStagesShaderBinding ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.deviceGeneratedCommandsTransformFeedback ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.deviceGeneratedCommandsMultiDrawIndirectCount ); + return seed; + } + }; + template <> struct hash { @@ -6715,6 +9274,38 @@ namespace std } }; +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapFeaturesNV const & physicalDeviceDisplacementMicromapFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDisplacementMicromapFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDisplacementMicromapFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDisplacementMicromapFeaturesNV.displacementMicromap ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapPropertiesNV const & physicalDeviceDisplacementMicromapPropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDisplacementMicromapPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( 
seed, physicalDeviceDisplacementMicromapPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDisplacementMicromapPropertiesNV.maxDisplacementMicromapSubdivisionLevel ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template <> struct hash { @@ -6769,6 +9360,34 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingLocalReadFeatures const & physicalDeviceDynamicRenderingLocalReadFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingLocalReadFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingLocalReadFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingLocalReadFeatures.dynamicRenderingLocalRead ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & + physicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT.dynamicRenderingUnusedAttachments ); + return seed; + } + }; + template <> struct hash { @@ -6799,6 +9418,64 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT const & physicalDeviceExtendedDynamicState3FeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3TessellationDomainOrigin ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3DepthClampEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3PolygonMode ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3RasterizationSamples ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3SampleMask ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3AlphaToCoverageEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3AlphaToOneEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3LogicOpEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorBlendEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorBlendEquation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorWriteMask ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3RasterizationStream ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ConservativeRasterizationMode ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ExtraPrimitiveOverestimationSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3DepthClipEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3SampleLocationsEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ColorBlendAdvanced ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ProvokingVertexMode ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3LineRasterizationMode ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3LineStippleEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3DepthClipNegativeOneToOne ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ViewportWScalingEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ViewportSwizzle ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageToColorEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageToColorLocation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageModulationMode ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageModulationTableEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageModulationTable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3CoverageReductionMode ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3RepresentativeFragmentTestEnable ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3FeaturesEXT.extendedDynamicState3ShadingRateImageEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT const & physicalDeviceExtendedDynamicState3PropertiesEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3PropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3PropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedDynamicState3PropertiesEXT.dynamicPrimitiveTopologyUnrestricted ); + return seed; + } + }; + template <> struct hash { @@ -6813,6 +9490,37 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & physicalDeviceExtendedSparseAddressSpaceFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedSparseAddressSpaceFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedSparseAddressSpaceFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceExtendedSparseAddressSpaceFeaturesNV.extendedSparseAddressSpace ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & + physicalDeviceExtendedSparseAddressSpacePropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedSparseAddressSpacePropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedSparseAddressSpacePropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedSparseAddressSpacePropertiesNV.extendedSparseAddressSpaceSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedSparseAddressSpacePropertiesNV.extendedSparseImageUsageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExtendedSparseAddressSpacePropertiesNV.extendedSparseBufferUsageFlags ); + return seed; + } + }; + template <> struct hash { @@ -6841,6 +9549,41 @@ namespace std } }; +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolveFeaturesANDROID const & physicalDeviceExternalFormatResolveFeaturesANDROID ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolveFeaturesANDROID.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolveFeaturesANDROID.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolveFeaturesANDROID.externalFormatResolve ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolvePropertiesANDROID const & + physicalDeviceExternalFormatResolvePropertiesANDROID ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolvePropertiesANDROID.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolvePropertiesANDROID.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolvePropertiesANDROID.nullColorAttachmentWithExternalFormatResolve ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolvePropertiesANDROID.externalFormatResolveChromaOffsetX ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalFormatResolvePropertiesANDROID.externalFormatResolveChromaOffsetY ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + template <> struct hash { @@ -6883,6 +9626,23 @@ namespace std } }; +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & physicalDeviceExternalMemoryScreenBufferFeaturesQNX ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryScreenBufferFeaturesQNX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryScreenBufferFeaturesQNX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceExternalMemoryScreenBufferFeaturesQNX.screenBufferImport ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + template <> struct hash { @@ -6896,6 +9656,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT const & physicalDeviceFaultFeaturesEXT ) const 
VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.deviceFault ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFaultFeaturesEXT.deviceFaultVendorBinary ); + return seed; + } + }; + template <> struct hash { @@ -7168,15 +9942,29 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & physicalDeviceGlobalPriorityQueryFeaturesKHR ) const - VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceFrameBoundaryFeaturesEXT const & physicalDeviceFrameBoundaryFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFrameBoundaryFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFrameBoundaryFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceFrameBoundaryFeaturesEXT.frameBoundary ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeatures const & physicalDeviceGlobalPriorityQueryFeatures ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGlobalPriorityQueryFeaturesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGlobalPriorityQueryFeaturesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGlobalPriorityQueryFeaturesKHR.globalPriorityQuery ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGlobalPriorityQueryFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGlobalPriorityQueryFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGlobalPriorityQueryFeatures.globalPriorityQuery ); return seed; } }; @@ -7229,6 +10017,54 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceHdrVividFeaturesHUAWEI const & physicalDeviceHdrVividFeaturesHUAWEI ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHdrVividFeaturesHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHdrVividFeaturesHUAWEI.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHdrVividFeaturesHUAWEI.hdrVivid ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeatures const & physicalDeviceHostImageCopyFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyFeatures.hostImageCopy ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyProperties const & physicalDeviceHostImageCopyProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyProperties.copySrcLayoutCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyProperties.pCopySrcLayouts ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceHostImageCopyProperties.copyDstLayoutCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyProperties.pCopyDstLayouts ); + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyProperties.optimalTilingLayoutUUID[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyProperties.identicalMemoryTypeRequirements ); + return seed; + } + }; + template <> struct hash { @@ -7283,6 +10119,34 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceImageAlignmentControlFeaturesMESA const & physicalDeviceImageAlignmentControlFeaturesMESA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageAlignmentControlFeaturesMESA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageAlignmentControlFeaturesMESA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageAlignmentControlFeaturesMESA.imageAlignmentControl ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageAlignmentControlPropertiesMESA const & physicalDeviceImageAlignmentControlPropertiesMESA ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageAlignmentControlPropertiesMESA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageAlignmentControlPropertiesMESA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageAlignmentControlPropertiesMESA.supportedImageAlignmentMask ); + return seed; + } + }; + template <> struct hash { @@ -7345,6 +10209,34 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2FeaturesQCOM const & physicalDeviceImageProcessing2FeaturesQCOM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessing2FeaturesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessing2FeaturesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessing2FeaturesQCOM.textureBlockMatch2 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2PropertiesQCOM const & physicalDeviceImageProcessing2PropertiesQCOM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessing2PropertiesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessing2PropertiesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageProcessing2PropertiesQCOM.maxBlockMatchWindow ); + return seed; + } + }; + template <> struct hash { @@ -7392,6 +10284,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & physicalDeviceImageSlicedViewOf3DFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageSlicedViewOf3DFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageSlicedViewOf3DFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageSlicedViewOf3DFeaturesEXT.imageSlicedViewOf3D ); + return seed; + } + }; + template <> struct hash { @@ -7435,15 +10341,14 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t - operator()( 
VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT const & physicalDeviceIndexTypeUint8FeaturesEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8Features const & physicalDeviceIndexTypeUint8Features ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8FeaturesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8FeaturesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8FeaturesEXT.indexTypeUint8 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8Features.indexTypeUint8 ); return seed; } }; @@ -7510,15 +10415,36 @@ namespace std }; template <> - struct hash + struct hash { std::size_t - operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT const & physicalDeviceLegacyDitheringFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR const & physicalDeviceLayeredApiPropertiesKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.legacyDithering ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesKHR.vendorID ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesKHR.deviceID ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesKHR.layeredAPI ); + for ( size_t i = 0; i < VK_MAX_PHYSICAL_DEVICE_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesKHR.deviceName[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesListKHR const & physicalDeviceLayeredApiPropertiesListKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesListKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesListKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesListKHR.layeredApiCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesListKHR.pLayeredApis ); return seed; } }; @@ -7658,34 +10584,157 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT const & physicalDeviceLineRasterizationFeaturesEXT ) const - VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const & physicalDeviceSparseProperties ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.rectangularLines ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.bresenhamLines ); - VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceLineRasterizationFeaturesEXT.smoothLines ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.stippledRectangularLines ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.stippledBresenhamLines ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesEXT.stippledSmoothLines ); - return seed; - } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard2DBlockShape ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard2DMultisampleBlockShape ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard3DBlockShape ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyAlignedMipSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyNonResidentStrict ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const & physicalDeviceProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.apiVersion ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.driverVersion ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.vendorID ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceID ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceType ); + for ( size_t i = 0; i < VK_MAX_PHYSICAL_DEVICE_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceName[i] ); + } + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.pipelineCacheUUID[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.limits ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.sparseProperties ); + return seed; + } }; template <> - struct hash + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const & physicalDeviceProperties2 ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.properties ); + return seed; + } + }; + + template <> + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const & physicalDeviceLineRasterizationPropertiesEXT ) const + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiVulkanPropertiesKHR const & physicalDeviceLayeredApiVulkanPropertiesKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationPropertiesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationPropertiesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationPropertiesEXT.lineSubPixelPrecisionBits ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiVulkanPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiVulkanPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiVulkanPropertiesKHR.properties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredDriverPropertiesMSFT const & physicalDeviceLayeredDriverPropertiesMSFT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( 
seed, physicalDeviceLayeredDriverPropertiesMSFT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredDriverPropertiesMSFT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredDriverPropertiesMSFT.underlyingAPI ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT const & physicalDeviceLegacyDitheringFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.legacyDithering ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & physicalDeviceLegacyVertexAttributesFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyVertexAttributesFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyVertexAttributesFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyVertexAttributesFeaturesEXT.legacyVertexAttributes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & physicalDeviceLegacyVertexAttributesPropertiesEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyVertexAttributesPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyVertexAttributesPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyVertexAttributesPropertiesEXT.nativeUnalignedPerformance ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeatures const & physicalDeviceLineRasterizationFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.rectangularLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.bresenhamLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.smoothLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.stippledRectangularLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.stippledBresenhamLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.stippledSmoothLines ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationProperties const & physicalDeviceLineRasterizationProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationProperties.lineSubPixelPrecisionBits ); return seed; } }; @@ -7744,6 +10793,144 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5Features const & physicalDeviceMaintenance5Features ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Features.maintenance5 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5Properties const & physicalDeviceMaintenance5Properties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Properties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Properties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Properties.earlyFragmentMultisampleCoverageAfterSampleCounting ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Properties.earlyFragmentSampleMaskTestBeforeSampleCounting ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Properties.depthStencilSwizzleOneSupport ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Properties.polygonModePointSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Properties.nonStrictSinglePixelWideLinesUseParallelogram ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Properties.nonStrictWideLinesUseParallelogram ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6Features const & physicalDeviceMaintenance6Features ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Features.maintenance6 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6Properties const & physicalDeviceMaintenance6Properties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Properties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Properties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Properties.blockTexelViewCompatibleMultipleLayers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Properties.maxCombinedImageSamplerDescriptorCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Properties.fragmentShadingRateClampCombinerInputs ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance7FeaturesKHR const & physicalDeviceMaintenance7FeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7FeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7FeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7FeaturesKHR.maintenance7 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance7PropertiesKHR const & physicalDeviceMaintenance7PropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceMaintenance7PropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.robustFragmentShadingRateAttachmentAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.separateDepthStencilAttachmentAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.maxDescriptorSetTotalUniformBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.maxDescriptorSetTotalStorageBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.maxDescriptorSetTotalBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.maxDescriptorSetUpdateAfterBindTotalBuffersDynamic ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance8FeaturesKHR const & physicalDeviceMaintenance8FeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance8FeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance8FeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance8FeaturesKHR.maintenance8 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedFeaturesEXT const & physicalDeviceMapMemoryPlacedFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedFeaturesEXT.memoryMapPlaced ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedFeaturesEXT.memoryMapRangePlaced ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedFeaturesEXT.memoryUnmapReserve ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedPropertiesEXT const & physicalDeviceMapMemoryPlacedPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMapMemoryPlacedPropertiesEXT.minPlacedMemoryMapAlignment ); + return seed; + } + }; + template <> struct hash { @@ -7765,6 +10952,35 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV const & physicalDeviceMemoryDecompressionFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionFeaturesNV.memoryDecompression ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV const & 
physicalDeviceMemoryDecompressionPropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionPropertiesNV.decompressionMethods ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMemoryDecompressionPropertiesNV.maxDecompressionIndirectCount ); + return seed; + } + }; + template <> struct hash { @@ -7996,6 +11212,35 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & + physicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM.multiviewPerViewRenderAreas ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & physicalDeviceMultiviewPerViewViewportsFeaturesQCOM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewViewportsFeaturesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewViewportsFeaturesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMultiviewPerViewViewportsFeaturesQCOM.multiviewPerViewViewports ); + return seed; + } + }; + template <> struct hash { @@ -8025,84 +11270,257 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & physicalDeviceNonSeamlessCubeMapFeaturesEXT ) const + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferFeaturesEXT const & physicalDeviceNestedCommandBufferFeaturesEXT ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNonSeamlessCubeMapFeaturesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNonSeamlessCubeMapFeaturesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNonSeamlessCubeMapFeaturesEXT.nonSeamlessCubeMap ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNestedCommandBufferFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNestedCommandBufferFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNestedCommandBufferFeaturesEXT.nestedCommandBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNestedCommandBufferFeaturesEXT.nestedCommandBufferRendering ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNestedCommandBufferFeaturesEXT.nestedCommandBufferSimultaneousUse ); return seed; } }; template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const & physicalDevicePCIBusInfoPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferPropertiesEXT const & physicalDeviceNestedCommandBufferPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, 
physicalDevicePCIBusInfoPropertiesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciDomain ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciBus ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciDevice ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciFunction ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNestedCommandBufferPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNestedCommandBufferPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNestedCommandBufferPropertiesEXT.maxCommandBufferNestingLevel ); return seed; } }; template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & physicalDevicePageableDeviceLocalMemoryFeaturesEXT ) const + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & physicalDeviceNonSeamlessCubeMapFeaturesEXT ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePageableDeviceLocalMemoryFeaturesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePageableDeviceLocalMemoryFeaturesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePageableDeviceLocalMemoryFeaturesEXT.pageableDeviceLocalMemory ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNonSeamlessCubeMapFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNonSeamlessCubeMapFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceNonSeamlessCubeMapFeaturesEXT.nonSeamlessCubeMap ); return seed; } }; template <> - struct hash + struct hash { std::size_t - operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const & physicalDevicePerformanceQueryFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT const & physicalDeviceOpacityMicromapFeaturesEXT ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.performanceCounterQueryPools ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.performanceCounterMultipleQueryPools ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.micromap ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.micromapCaptureReplay ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapFeaturesEXT.micromapHostCommands ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const & physicalDevicePerformanceQueryPropertiesKHR ) const + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT const & physicalDeviceOpacityMicromapPropertiesEXT ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryPropertiesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryPropertiesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, 
physicalDevicePerformanceQueryPropertiesKHR.allowCommandBufferQueryCopies ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.maxOpacity2StateSubdivisionLevel ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpacityMicromapPropertiesEXT.maxOpacity4StateSubdivisionLevel ); return seed; } }; template <> - struct hash + struct hash { - std::size_t + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV const & physicalDeviceOpticalFlowFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowFeaturesNV.opticalFlow ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV const & physicalDeviceOpticalFlowPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.supportedOutputGridSizes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.supportedHintGridSizes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.hintSupported ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.costSupported ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.bidirectionalFlowSupported ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.globalFlowSupported ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.minWidth ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.minHeight ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.maxWidth ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.maxHeight ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceOpticalFlowPropertiesNV.maxNumRegionsOfInterest ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const & physicalDevicePCIBusInfoPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciDomain ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciBus ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciDevice ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePCIBusInfoPropertiesEXT.pciFunction ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & physicalDevicePageableDeviceLocalMemoryFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePageableDeviceLocalMemoryFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDevicePageableDeviceLocalMemoryFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePageableDeviceLocalMemoryFeaturesEXT.pageableDeviceLocalMemory ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & + physicalDevicePartitionedAccelerationStructureFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePartitionedAccelerationStructureFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePartitionedAccelerationStructureFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePartitionedAccelerationStructureFeaturesNV.partitionedAccelerationStructure ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & + physicalDevicePartitionedAccelerationStructurePropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePartitionedAccelerationStructurePropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePartitionedAccelerationStructurePropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePartitionedAccelerationStructurePropertiesNV.maxPartitionCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePerStageDescriptorSetFeaturesNV const & physicalDevicePerStageDescriptorSetFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerStageDescriptorSetFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerStageDescriptorSetFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerStageDescriptorSetFeaturesNV.perStageDescriptorSet ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerStageDescriptorSetFeaturesNV.dynamicPipelineLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const & physicalDevicePerformanceQueryFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.performanceCounterQueryPools ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryFeaturesKHR.performanceCounterMultipleQueryPools ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const & physicalDevicePerformanceQueryPropertiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePerformanceQueryPropertiesKHR.allowCommandBufferQueryCopies ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineBinaryFeaturesKHR const & physicalDevicePipelineBinaryFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryFeaturesKHR.sType 
); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryFeaturesKHR.pipelineBinaries ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineBinaryPropertiesKHR const & physicalDevicePipelineBinaryPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryPropertiesKHR.pipelineBinaryInternalCache ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryPropertiesKHR.pipelineBinaryInternalCacheControl ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryPropertiesKHR.pipelineBinaryPrefersInternalCache ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryPropertiesKHR.pipelineBinaryPrecompiledInternalCache ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryPropertiesKHR.pipelineBinaryCompressedData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures const & physicalDevicePipelineCreationCacheControlFeatures ) const VULKAN_HPP_NOEXCEPT { @@ -8128,6 +11546,34 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & + physicalDevicePipelineLibraryGroupHandlesFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineLibraryGroupHandlesFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineLibraryGroupHandlesFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineLibraryGroupHandlesFeaturesEXT.pipelineLibraryGroupHandles ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineOpacityMicromapFeaturesARM const & physicalDevicePipelineOpacityMicromapFeaturesARM ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineOpacityMicromapFeaturesARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineOpacityMicromapFeaturesARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineOpacityMicromapFeaturesARM.pipelineOpacityMicromap ); + return seed; + } + }; + template <> struct hash { @@ -8143,32 +11589,46 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT const & physicalDevicePipelineRobustnessFeaturesEXT ) const + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeatures const & physicalDevicePipelineProtectedAccessFeatures ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeaturesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeaturesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeaturesEXT.pipelineRobustness ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineProtectedAccessFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineProtectedAccessFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDevicePipelineProtectedAccessFeatures.pipelineProtectedAccess ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeatures const & physicalDevicePipelineRobustnessFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeatures.pipelineRobustness ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT const & physicalDevicePipelineRobustnessPropertiesEXT ) const + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessProperties const & physicalDevicePipelineRobustnessProperties ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.defaultRobustnessStorageBuffers ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.defaultRobustnessUniformBuffers ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.defaultRobustnessVertexInputs ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.defaultRobustnessImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessProperties.defaultRobustnessStorageBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessProperties.defaultRobustnessUniformBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessProperties.defaultRobustnessVertexInputs ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessProperties.defaultRobustnessImages ); return seed; } }; @@ -8233,6 +11693,20 @@ namespace std }; # endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV const & physicalDevicePresentBarrierFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentBarrierFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentBarrierFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentBarrierFeaturesNV.presentBarrier ); + return seed; + } + }; + template <> struct hash { @@ -8246,6 +11720,21 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & physicalDevicePresentModeFifoLatestReadyFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentModeFifoLatestReadyFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentModeFifoLatestReadyFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentModeFifoLatestReadyFeaturesEXT.presentModeFifoLatestReady ); + return seed; + } + }; + template <> struct hash { @@ -8303,59 +11792,6 @@ namespace std } }; - template 
<> - struct hash - { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const & physicalDeviceSparseProperties ) const VULKAN_HPP_NOEXCEPT - { - std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard2DBlockShape ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard2DMultisampleBlockShape ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard3DBlockShape ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyAlignedMipSize ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyNonResidentStrict ); - return seed; - } - }; - - template <> - struct hash - { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const & physicalDeviceProperties ) const VULKAN_HPP_NOEXCEPT - { - std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.apiVersion ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.driverVersion ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.vendorID ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceID ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceType ); - for ( size_t i = 0; i < VK_MAX_PHYSICAL_DEVICE_NAME_SIZE; ++i ) - { - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceName[i] ); - } - for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) - { - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.pipelineCacheUUID[i] ); - } - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.limits ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.sparseProperties ); - return seed; - } - }; - - template <> - struct hash - { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const & physicalDeviceProperties2 ) const VULKAN_HPP_NOEXCEPT - { - std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.properties ); - return seed; - } - }; - template <> struct hash { @@ -8415,15 +11851,15 @@ namespace std }; template <> - struct hash + struct hash { std::size_t - operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const & physicalDevicePushDescriptorPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorProperties const & physicalDevicePushDescriptorProperties ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorPropertiesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorPropertiesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorPropertiesKHR.maxPushDescriptors ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorProperties.maxPushDescriptors ); return seed; } }; @@ -8458,6 +11894,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRawAccessChainsFeaturesNV const & physicalDeviceRawAccessChainsFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRawAccessChainsFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceRawAccessChainsFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRawAccessChainsFeaturesNV.shaderRawAccessChains ); + return seed; + } + }; + template <> struct hash { @@ -8471,6 +11921,50 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & physicalDeviceRayTracingInvocationReorderFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderFeaturesNV.rayTracingInvocationReorder ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & + physicalDeviceRayTracingInvocationReorderPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingInvocationReorderPropertiesNV.rayTracingInvocationReorderReorderingHint ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & + physicalDeviceRayTracingLinearSweptSpheresFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingLinearSweptSpheresFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingLinearSweptSpheresFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingLinearSweptSpheresFeaturesNV.spheres ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingLinearSweptSpheresFeaturesNV.linearSweptSpheres ); + return seed; + } + }; + template <> struct hash { @@ -8540,6 +12034,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & physicalDeviceRayTracingPositionFetchFeaturesKHR ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPositionFetchFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPositionFetchFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingPositionFetchFeaturesKHR.rayTracingPositionFetch ); + return seed; + } + }; + template <> struct hash { @@ -8560,6 +12068,63 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingValidationFeaturesNV const & physicalDeviceRayTracingValidationFeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingValidationFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingValidationFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRayTracingValidationFeaturesNV.rayTracingValidation ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & physicalDeviceRelaxedLineRasterizationFeaturesIMG ) + const 
VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRelaxedLineRasterizationFeaturesIMG.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRelaxedLineRasterizationFeaturesIMG.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRelaxedLineRasterizationFeaturesIMG.relaxedLineRasterization ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedFeaturesARM const & physicalDeviceRenderPassStripedFeaturesARM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRenderPassStripedFeaturesARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRenderPassStripedFeaturesARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRenderPassStripedFeaturesARM.renderPassStriped ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedPropertiesARM const & physicalDeviceRenderPassStripedPropertiesARM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRenderPassStripedPropertiesARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRenderPassStripedPropertiesARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRenderPassStripedPropertiesARM.renderPassStripeGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceRenderPassStripedPropertiesARM.maxRenderPassStripes ); + return seed; + } + }; + template <> struct hash { @@ -8669,6 +12234,34 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM const & physicalDeviceSchedulingControlsFeaturesARM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsFeaturesARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsFeaturesARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsFeaturesARM.schedulingControls ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM const & physicalDeviceSchedulingControlsPropertiesARM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsPropertiesARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsPropertiesARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSchedulingControlsPropertiesARM.schedulingControlsFlags ); + return seed; + } + }; + template <> struct hash { @@ -8683,6 +12276,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & physicalDeviceShaderAtomicFloat16VectorFeaturesNV ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat16VectorFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat16VectorFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderAtomicFloat16VectorFeaturesNV.shaderFloat16VectorAtomics ); + return seed; + } + }; + template <> struct hash { @@ -8762,6 +12369,36 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & 
physicalDeviceShaderCoreBuiltinsFeaturesARM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsFeaturesARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsFeaturesARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsFeaturesARM.shaderCoreBuiltins ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & physicalDeviceShaderCoreBuiltinsPropertiesARM ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.shaderCoreMask ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.shaderCoreCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCoreBuiltinsPropertiesARM.shaderWarpsPerCore ); + return seed; + } + }; + template <> struct hash { @@ -8804,6 +12441,22 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesARM const & physicalDeviceShaderCorePropertiesARM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesARM.pixelRate ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesARM.texelRate ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderCorePropertiesARM.fmaRate ); + return seed; + } + }; + template <> struct hash { @@ -8846,6 +12499,62 @@ namespace std } }; +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX const & physicalDeviceShaderEnqueueFeaturesAMDX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.shaderEnqueue ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.shaderMeshEnqueue ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX const & physicalDeviceShaderEnqueuePropertiesAMDX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphDepth ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphShaderOutputNodes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphShaderPayloadSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphShaderPayloadCount ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceShaderEnqueuePropertiesAMDX.executionGraphDispatchAddressAlignment ); + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphWorkgroupCount[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphWorkgroups ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderExpectAssumeFeatures const & physicalDeviceShaderExpectAssumeFeatures ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderExpectAssumeFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderExpectAssumeFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderExpectAssumeFeatures.shaderExpectAssume ); + return seed; + } + }; + template <> struct hash { @@ -8861,6 +12570,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloatControls2Features const & physicalDeviceShaderFloatControls2Features ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloatControls2Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloatControls2Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloatControls2Features.shaderFloatControls2 ); + return seed; + } + }; + template <> struct hash { @@ -8963,6 +12686,21 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & physicalDeviceShaderMaximalReconvergenceFeaturesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderMaximalReconvergenceFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderMaximalReconvergenceFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderMaximalReconvergenceFeaturesKHR.shaderMaximalReconvergence ); + return seed; + } + }; + template <> struct hash { @@ -8994,6 +12732,81 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT const & physicalDeviceShaderObjectFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectFeaturesEXT.shaderObject ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT const & physicalDeviceShaderObjectPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.pNext ); + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.shaderBinaryUUID[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderObjectPropertiesEXT.shaderBinaryVersion ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderQuadControlFeaturesKHR const & 
physicalDeviceShaderQuadControlFeaturesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderQuadControlFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderQuadControlFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderQuadControlFeaturesKHR.shaderQuadControl ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & + physicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR.shaderRelaxedExtendedInstruction ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & physicalDeviceShaderReplicatedCompositesFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderReplicatedCompositesFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderReplicatedCompositesFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderReplicatedCompositesFeaturesEXT.shaderReplicatedComposites ); + return seed; + } + }; + template <> struct hash { @@ -9037,6 +12850,21 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupRotateFeatures const & physicalDeviceShaderSubgroupRotateFeatures ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupRotateFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupRotateFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupRotateFeatures.shaderSubgroupRotate ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupRotateFeatures.shaderSubgroupRotateClustered ); + return seed; + } + }; + template <> struct hash { @@ -9065,6 +12893,38 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT const & physicalDeviceShaderTileImageFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.shaderTileImageColorReadAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.shaderTileImageDepthReadAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImageFeaturesEXT.shaderTileImageStencilReadAccess ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT const & physicalDeviceShaderTileImagePropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceShaderTileImagePropertiesEXT.shaderTileImageCoherentReadAccelerated ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.shaderTileImageReadSampleFromPixelRateInvocation ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderTileImagePropertiesEXT.shaderTileImageReadFromHelperInvocation ); + return seed; + } + }; + template <> struct hash { @@ -9216,6 +13076,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & physicalDeviceSwapchainMaintenance1FeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSwapchainMaintenance1FeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSwapchainMaintenance1FeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSwapchainMaintenance1FeaturesEXT.swapchainMaintenance1 ); + return seed; + } + }; + template <> struct hash { @@ -9414,16 +13288,31 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( - VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & physicalDeviceVertexAttributeDivisorFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeatures const & physicalDeviceVertexAttributeDivisorFeatures ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeatures.vertexAttributeInstanceRateDivisor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeatures.vertexAttributeInstanceRateZeroDivisor ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorProperties const & physicalDeviceVertexAttributeDivisorProperties ) const + VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeaturesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeaturesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeaturesEXT.vertexAttributeInstanceRateDivisor ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeaturesEXT.vertexAttributeInstanceRateZeroDivisor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorProperties.maxVertexAttribDivisor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorProperties.supportsNonZeroFirstInstance ); return seed; } }; @@ -9443,20 +13332,94 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & physicalDeviceVertexInputDynamicStateFeaturesEXT ) - const VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & physicalDeviceVertexAttributeRobustnessFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexInputDynamicStateFeaturesEXT.sType ); - 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexInputDynamicStateFeaturesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexInputDynamicStateFeaturesEXT.vertexInputDynamicState ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeRobustnessFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeRobustnessFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeRobustnessFeaturesEXT.vertexAttributeRobustness ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & physicalDeviceVertexInputDynamicStateFeaturesEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexInputDynamicStateFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexInputDynamicStateFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexInputDynamicStateFeaturesEXT.vertexInputDynamicState ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeAV1FeaturesKHR const & physicalDeviceVideoEncodeAV1FeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeAV1FeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeAV1FeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeAV1FeaturesKHR.videoEncodeAV1 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR const & videoProfileInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.videoCodecOperation ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.chromaSubsampling ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.lumaBitDepth ); + VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.chromaBitDepth ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & physicalDeviceVideoEncodeQualityLevelInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeQualityLevelInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeQualityLevelInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeQualityLevelInfoKHR.pVideoProfile ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeQualityLevelInfoKHR.qualityLevel ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & physicalDeviceVideoEncodeQuantizationMapFeaturesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeQuantizationMapFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeQuantizationMapFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeQuantizationMapFeaturesKHR.videoEncodeQuantizationMap ); return seed; } }; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> struct hash { @@ -9469,7 +13432,34 @@ namespace std return seed; } }; -# endif 
/*VK_ENABLE_BETA_EXTENSIONS*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance1FeaturesKHR const & physicalDeviceVideoMaintenance1FeaturesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoMaintenance1FeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoMaintenance1FeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoMaintenance1FeaturesKHR.videoMaintenance1 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance2FeaturesKHR const & physicalDeviceVideoMaintenance2FeaturesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoMaintenance2FeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoMaintenance2FeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoMaintenance2FeaturesKHR.videoMaintenance2 ); + return seed; + } + }; template <> struct hash @@ -9744,6 +13734,79 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Features const & physicalDeviceVulkan14Features ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.globalPriorityQuery ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.shaderSubgroupRotate ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.shaderSubgroupRotateClustered ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.shaderFloatControls2 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.shaderExpectAssume ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.rectangularLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.bresenhamLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.smoothLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.stippledRectangularLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.stippledBresenhamLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.stippledSmoothLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.vertexAttributeInstanceRateDivisor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.vertexAttributeInstanceRateZeroDivisor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.indexTypeUint8 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.dynamicRenderingLocalRead ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.maintenance5 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.maintenance6 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.pipelineProtectedAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.pipelineRobustness ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.hostImageCopy ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.pushDescriptor ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Properties const & physicalDeviceVulkan14Properties ) const VULKAN_HPP_NOEXCEPT + { 
+ std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.lineSubPixelPrecisionBits ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.maxVertexAttribDivisor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.supportsNonZeroFirstInstance ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.maxPushDescriptors ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.dynamicRenderingLocalReadDepthStencilAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.dynamicRenderingLocalReadMultisampledAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.earlyFragmentMultisampleCoverageAfterSampleCounting ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.earlyFragmentSampleMaskTestBeforeSampleCounting ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.depthStencilSwizzleOneSupport ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.polygonModePointSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.nonStrictSinglePixelWideLinesUseParallelogram ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.nonStrictWideLinesUseParallelogram ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.blockTexelViewCompatibleMultipleLayers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.maxCombinedImageSamplerDescriptorCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.fragmentShadingRateClampCombinerInputs ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.defaultRobustnessStorageBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.defaultRobustnessUniformBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.defaultRobustnessVertexInputs ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.defaultRobustnessImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.copySrcLayoutCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.pCopySrcLayouts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.copyDstLayoutCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.pCopyDstLayouts ); + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.optimalTilingLayoutUUID[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.identicalMemoryTypeRequirements ); + return seed; + } + }; + template <> struct hash { @@ -9791,6 +13854,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrDegammaFeaturesQCOM const & physicalDeviceYcbcrDegammaFeaturesQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcrDegammaFeaturesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcrDegammaFeaturesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceYcbcrDegammaFeaturesQCOM.ycbcrDegamma ); + return seed; + } + }; + template <> struct hash { @@ -9820,6 +13897,116 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR const & pipelineBinaryKeyKHR ) const 
VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryKeyKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryKeyKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryKeyKHR.keySize ); + for ( size_t i = 0; i < VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryKeyKHR.key[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR const & pipelineBinaryDataKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryDataKHR.dataSize ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryDataKHR.pData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR const & pipelineBinaryKeysAndDataKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryKeysAndDataKHR.binaryCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryKeysAndDataKHR.pPipelineBinaryKeys ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryKeysAndDataKHR.pPipelineBinaryData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR const & pipelineCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateInfoKHR.pNext ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR const & pipelineBinaryCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryCreateInfoKHR.pKeysAndDataInfo ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryCreateInfoKHR.pipeline ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryCreateInfoKHR.pPipelineCreateInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR const & pipelineBinaryDataInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryDataInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryDataInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryDataInfoKHR.pipelineBinary ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR const & pipelineBinaryHandlesInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryHandlesInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryHandlesInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryHandlesInfoKHR.pipelineBinaryCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryHandlesInfoKHR.pPipelineBinaries ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryInfoKHR const & pipelineBinaryInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryInfoKHR.binaryCount ); + 
VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryInfoKHR.pPipelineBinaries ); + return seed; + } + }; + template <> struct hash { @@ -9945,6 +14132,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfo const & pipelineCreateFlags2CreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfo.flags ); + return seed; + } + }; + template <> struct hash { @@ -10087,27 +14287,28 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineInfoKHR const & pipelineInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV const & pipelineIndirectDeviceAddressInfoNV ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, pipelineInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineInfoKHR.pipeline ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineIndirectDeviceAddressInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineIndirectDeviceAddressInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineIndirectDeviceAddressInfoNV.pipelineBindPoint ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineIndirectDeviceAddressInfoNV.pipeline ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PushConstantRange const & pushConstantRange ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineInfoKHR const & pipelineInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.stageFlags ); - VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.offset ); - VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.size ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineInfoKHR.pipeline ); return seed; } }; @@ -10129,20 +14330,6 @@ namespace std } }; - template <> - struct hash - { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const & pipelineLibraryCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT - { - std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.libraryCount ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pLibraries ); - return seed; - } - }; - template <> struct hash { @@ -10192,18 +14379,18 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT const & pipelineRasterizationLineStateCreateInfoEXT ) const - VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfo const & pipelineRasterizationLineStateCreateInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.lineRasterizationMode ); - 
VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.stippledLineEnable ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.lineStippleFactor ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoEXT.lineStipplePattern ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfo.lineRasterizationMode ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfo.stippledLineEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfo.lineStippleFactor ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfo.lineStipplePattern ); return seed; } }; @@ -10284,17 +14471,17 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT const & pipelineRobustnessCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfo const & pipelineRobustnessCreateInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.storageBuffers ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.uniformBuffers ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.vertexInputs ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.images ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfo.storageBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfo.uniformBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfo.vertexInputs ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfo.images ); return seed; } }; @@ -10329,6 +14516,26 @@ namespace std } }; +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX const & pipelineShaderStageNodeCreateInfoAMDX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageNodeCreateInfoAMDX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageNodeCreateInfoAMDX.pNext ); + for ( const char * p = pipelineShaderStageNodeCreateInfoAMDX.pName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageNodeCreateInfoAMDX.index ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template <> struct hash { @@ -10358,29 +14565,28 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT const & vertexInputBindingDivisorDescriptionEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription const & vertexInputBindingDivisorDescription ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDivisorDescriptionEXT.binding ); - VULKAN_HPP_HASH_COMBINE( seed, 
vertexInputBindingDivisorDescriptionEXT.divisor ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDivisorDescription.binding ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDivisorDescription.divisor ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT const & pipelineVertexInputDivisorStateCreateInfoEXT ) const - VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfo const & pipelineVertexInputDivisorStateCreateInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfoEXT.vertexBindingDivisorCount ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfoEXT.pVertexBindingDivisors ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfo.vertexBindingDivisorCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfo.pVertexBindingDivisors ); return seed; } }; @@ -10402,6 +14608,21 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportDepthClampControlCreateInfoEXT const & pipelineViewportDepthClampControlCreateInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClampControlCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClampControlCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClampControlCreateInfoEXT.depthClampMode ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClampControlCreateInfoEXT.pDepthClampRange ); + return seed; + } + }; + template <> struct hash { @@ -10656,50 +14877,147 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & queryPoolCreateInfo ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PushConstantsInfo const & pushConstantsInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.sType ); - VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.flags ); - VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.queryType ); - VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.queryCount ); - VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.pipelineStatistics ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfo.layout ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfo.stageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfo.offset ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfo.size ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfo.pValues ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const & queryPoolPerformanceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::WriteDescriptorSet const & writeDescriptorSet ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.queueFamilyIndex ); - VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.counterIndexCount ); - VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.pCounterIndices ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.sType ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.dstSet ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.dstBinding ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.dstArrayElement ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.descriptorCount ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.descriptorType ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pImageInfo ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pBufferInfo ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pTexelBufferView ); return seed; } }; template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL const & queryPoolPerformanceQueryCreateInfoINTEL ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo const & pushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceQueryCreateInfoINTEL.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfo.stageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfo.layout ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfo.set ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfo.descriptorWriteCount ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfo.pDescriptorWrites ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo const & pushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfo.descriptorUpdateTemplate ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfo.layout ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfo.set ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfo.pData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::QueryLowLatencySupportNV const & queryLowLatencySupportNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queryLowLatencySupportNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queryLowLatencySupportNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, queryLowLatencySupportNV.pQueriedLowLatencyData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & queryPoolCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
queryPoolCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.queryType ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.queryCount ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolCreateInfo.pipelineStatistics ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const & queryPoolPerformanceCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.queueFamilyIndex ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.counterIndexCount ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceCreateInfoKHR.pCounterIndices ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL const & queryPoolPerformanceQueryCreateInfoINTEL ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceQueryCreateInfoINTEL.sType ); VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceQueryCreateInfoINTEL.pNext ); VULKAN_HPP_HASH_COMBINE( seed, queryPoolPerformanceQueryCreateInfoINTEL.performanceCountersSampling ); return seed; } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::QueryPoolVideoEncodeFeedbackCreateInfoKHR const & queryPoolVideoEncodeFeedbackCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, queryPoolVideoEncodeFeedbackCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolVideoEncodeFeedbackCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, queryPoolVideoEncodeFeedbackCreateInfoKHR.encodeFeedbackFlags ); + return seed; + } + }; + template <> struct hash { @@ -10727,18 +15045,17 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR const & queueFamilyGlobalPriorityPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityProperties const & queueFamilyGlobalPriorityProperties ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityPropertiesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityPropertiesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityPropertiesKHR.priorityCount ); - for ( size_t i = 0; i < VK_MAX_GLOBAL_PRIORITY_SIZE_KHR; ++i ) + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityProperties.priorityCount ); + for ( size_t i = 0; i < VK_MAX_GLOBAL_PRIORITY_SIZE; ++i ) { - VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityPropertiesKHR.priorities[i] ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityProperties.priorities[i] ); } return seed; } @@ -10771,7 +15088,6 @@ namespace std } }; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> struct hash { @@ -10785,9 +15101,7 @@ namespace std return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if 
defined( VK_ENABLE_BETA_EXTENSIONS ) template <> struct hash { @@ -10800,7 +15114,20 @@ namespace std return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RayTracingPipelineClusterAccelerationStructureCreateInfoNV const & + rayTracingPipelineClusterAccelerationStructureCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineClusterAccelerationStructureCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineClusterAccelerationStructureCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, rayTracingPipelineClusterAccelerationStructureCreateInfoNV.allowClusterAccelerationStructure ); + return seed; + } + }; template <> struct hash @@ -10908,6 +15235,34 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR const & releaseCapturedPipelineDataInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, releaseCapturedPipelineDataInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, releaseCapturedPipelineDataInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, releaseCapturedPipelineDataInfoKHR.pipeline ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT const & releaseSwapchainImagesInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, releaseSwapchainImagesInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, releaseSwapchainImagesInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, releaseSwapchainImagesInfoEXT.swapchain ); + VULKAN_HPP_HASH_COMBINE( seed, releaseSwapchainImagesInfoEXT.imageIndexCount ); + VULKAN_HPP_HASH_COMBINE( seed, releaseSwapchainImagesInfoEXT.pImageIndices ); + return seed; + } + }; + template <> struct hash { @@ -11173,6 +15528,63 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM const & renderPassStripeInfoARM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeInfoARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeInfoARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeInfoARM.stripeArea ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassStripeBeginInfoARM const & renderPassStripeBeginInfoARM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeBeginInfoARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeBeginInfoARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeBeginInfoARM.stripeInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeBeginInfoARM.pStripeInfos ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo const & semaphoreSubmitInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.semaphore ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.value ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.stageMask ); + VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.deviceIndex ); + return seed; + } + }; + + 
template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderPassStripeSubmitInfoARM const & renderPassStripeSubmitInfoARM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeSubmitInfoARM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeSubmitInfoARM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeSubmitInfoARM.stripeSemaphoreInfoCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderPassStripeSubmitInfoARM.pStripeSemaphoreInfos ); + return seed; + } + }; + template <> struct hash { @@ -11216,6 +15628,37 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingAreaInfo const & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfo.viewMask ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfo.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfo.pColorAttachmentFormats ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfo.depthAttachmentFormat ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfo.stencilAttachmentFormat ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo const & renderingAttachmentLocationInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderingAttachmentLocationInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAttachmentLocationInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAttachmentLocationInfo.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAttachmentLocationInfo.pColorAttachmentLocations ); + return seed; + } + }; + template <> struct hash { @@ -11267,6 +15710,22 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo const & renderingInputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderingInputAttachmentIndexInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInputAttachmentIndexInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInputAttachmentIndexInfo.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInputAttachmentIndexInfo.pColorAttachmentInputIndices ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInputAttachmentIndexInfo.pDepthInputAttachmentIndex ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInputAttachmentIndexInfo.pStencilInputAttachmentIndex ); + return seed; + } + }; + template <> struct hash { @@ -11285,6 +15744,21 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SamplerBlockMatchWindowCreateInfoQCOM const & samplerBlockMatchWindowCreateInfoQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerBlockMatchWindowCreateInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerBlockMatchWindowCreateInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerBlockMatchWindowCreateInfoQCOM.windowExtent ); + VULKAN_HPP_HASH_COMBINE( seed, samplerBlockMatchWindowCreateInfoQCOM.windowCompareMode ); + return seed; + } + }; + template <> struct hash { @@ -11300,6 +15774,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT const & samplerCaptureDescriptorDataInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerCaptureDescriptorDataInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCaptureDescriptorDataInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCaptureDescriptorDataInfoEXT.sampler ); + return seed; + } + }; + template <> struct hash { @@ -11328,6 +15815,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerCubicWeightsCreateInfoQCOM const & samplerCubicWeightsCreateInfoQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerCubicWeightsCreateInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCubicWeightsCreateInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerCubicWeightsCreateInfoQCOM.cubicWeights ); + return seed; + } + }; + template <> struct hash { @@ -11388,6 +15888,60 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & samplerYcbcrConversionYcbcrDegammaCreateInfoQCOM ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionYcbcrDegammaCreateInfoQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionYcbcrDegammaCreateInfoQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionYcbcrDegammaCreateInfoQCOM.enableYDegamma ); + VULKAN_HPP_HASH_COMBINE( seed, samplerYcbcrConversionYcbcrDegammaCreateInfoQCOM.enableCbCrDegamma ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ScreenBufferFormatPropertiesQNX const & screenBufferFormatPropertiesQNX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.format ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.externalFormat ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.screenUsage ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.formatFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.samplerYcbcrConversionComponents ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.suggestedYcbcrModel ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.suggestedYcbcrRange ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.suggestedXChromaOffset ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferFormatPropertiesQNX.suggestedYChromaOffset ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX const & screenBufferPropertiesQNX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, screenBufferPropertiesQNX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferPropertiesQNX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferPropertiesQNX.allocationSize ); + VULKAN_HPP_HASH_COMBINE( seed, screenBufferPropertiesQNX.memoryTypeBits ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + # if defined( 
VK_USE_PLATFORM_SCREEN_QNX ) template <> struct hash @@ -11478,22 +16032,6 @@ namespace std } }; - template <> - struct hash - { - std::size_t operator()( VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo const & semaphoreSubmitInfo ) const VULKAN_HPP_NOEXCEPT - { - std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.sType ); - VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.semaphore ); - VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.value ); - VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.stageMask ); - VULKAN_HPP_HASH_COMBINE( seed, semaphoreSubmitInfo.deviceIndex ); - return seed; - } - }; - template <> struct hash { @@ -11525,53 +16063,112 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV const & setStateFlagsIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT const & setDescriptorBufferOffsetsInfoEXT ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, setStateFlagsIndirectCommandNV.data ); + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.stageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.layout ); + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.firstSet ); + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.setCount ); + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.pBufferIndices ); + VULKAN_HPP_HASH_COMBINE( seed, setDescriptorBufferOffsetsInfoEXT.pOffsets ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & shaderModuleCreateInfo ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV const & setLatencyMarkerInfoNV ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.sType ); - VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.flags ); - VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.codeSize ); - VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.pCode ); + VULKAN_HPP_HASH_COMBINE( seed, setLatencyMarkerInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, setLatencyMarkerInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, setLatencyMarkerInfoNV.presentID ); + VULKAN_HPP_HASH_COMBINE( seed, setLatencyMarkerInfoNV.marker ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT const & shaderModuleIdentifierEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV const & setStateFlagsIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.identifierSize ); - for ( size_t i = 0; i < VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT; ++i ) - { - VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.identifier[i] ); - } + VULKAN_HPP_HASH_COMBINE( seed, 
setStateFlagsIndirectCommandNV.data ); return seed; } }; template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const & shaderModuleValidationCacheCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & shaderCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.stage ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.nextStage ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.codeType ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.codeSize ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pCode ); + for ( const char * p = shaderCreateInfoEXT.pName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.setLayoutCount ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pSetLayouts ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pushConstantRangeCount ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pPushConstantRanges ); + VULKAN_HPP_HASH_COMBINE( seed, shaderCreateInfoEXT.pSpecializationInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & shaderModuleCreateInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.codeSize ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleCreateInfo.pCode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT const & shaderModuleIdentifierEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.identifierSize ); + for ( size_t i = 0; i < VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, shaderModuleIdentifierEXT.identifier[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const & shaderModuleValidationCacheCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; VULKAN_HPP_HASH_COMBINE( seed, shaderModuleValidationCacheCreateInfoEXT.sType ); @@ -11699,19 +16296,6 @@ namespace std }; # endif /*VK_USE_PLATFORM_GGP*/ - template <> - struct hash - { - std::size_t operator()( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const & stridedDeviceAddressRegionKHR ) const VULKAN_HPP_NOEXCEPT - { - std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, stridedDeviceAddressRegionKHR.deviceAddress ); - VULKAN_HPP_HASH_COMBINE( seed, stridedDeviceAddressRegionKHR.stride ); - VULKAN_HPP_HASH_COMBINE( seed, stridedDeviceAddressRegionKHR.size ); - return seed; - } - }; - template <> struct hash { @@ -11835,14 +16419,27 @@ namespace std }; template <> - struct hash + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySize const & subresourceHostMemcpySize ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subresourceHostMemcpySize.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceHostMemcpySize.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceHostMemcpySize.size ); + return seed; + } + }; + + template <> + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT const & subresourceLayout2EXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubresourceLayout2 const & subresourceLayout2 ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2EXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2EXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2EXT.subresourceLayout ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2.subresourceLayout ); return seed; } }; @@ -11917,1204 +16514,1778 @@ namespace std return seed; } }; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV const & surfaceCapabilitiesPresentBarrierNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesPresentBarrierNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesPresentBarrierNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceCapabilitiesPresentBarrierNV.presentBarrierSupported ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const & surfaceFormatKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceFormatKHR.format ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFormatKHR.colorSpace ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const & surfaceFormat2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceFormat2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFormat2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFormat2KHR.surfaceFormat ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const & surfaceFullScreenExclusiveInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveInfoEXT.fullScreenExclusive ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const & surfaceFullScreenExclusiveWin32InfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveWin32InfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveWin32InfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveWin32InfoEXT.hmonitor ); + return seed; + } + }; +# endif 
/*VK_USE_PLATFORM_WIN32_KHR*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfacePresentModeCompatibilityEXT const & surfacePresentModeCompatibilityEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeCompatibilityEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeCompatibilityEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeCompatibilityEXT.presentModeCount ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeCompatibilityEXT.pPresentModes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfacePresentModeEXT const & surfacePresentModeEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentModeEXT.presentMode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfacePresentScalingCapabilitiesEXT const & surfacePresentScalingCapabilitiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.supportedPresentScaling ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.supportedPresentGravityX ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.supportedPresentGravityY ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.minScaledImageExtent ); + VULKAN_HPP_HASH_COMBINE( seed, surfacePresentScalingCapabilitiesEXT.maxScaledImageExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR const & surfaceProtectedCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, surfaceProtectedCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceProtectedCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, surfaceProtectedCapabilitiesKHR.supportsProtected ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const & swapchainCounterCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainCounterCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCounterCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCounterCreateInfoEXT.surfaceCounters ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & swapchainCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.surface ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.minImageCount ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageFormat ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageColorSpace ); + VULKAN_HPP_HASH_COMBINE( seed, 
swapchainCreateInfoKHR.imageExtent ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageArrayLayers ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageUsage ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageSharingMode ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.queueFamilyIndexCount ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.pQueueFamilyIndices ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.preTransform ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.compositeAlpha ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.presentMode ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.clipped ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.oldSwapchain ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const & swapchainDisplayNativeHdrCreateInfoAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainDisplayNativeHdrCreateInfoAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainDisplayNativeHdrCreateInfoAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainDisplayNativeHdrCreateInfoAMD.localDimmingEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainLatencyCreateInfoNV const & swapchainLatencyCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainLatencyCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainLatencyCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainLatencyCreateInfoNV.latencyModeEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV const & swapchainPresentBarrierCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentBarrierCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentBarrierCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentBarrierCreateInfoNV.presentBarrierEnable ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainPresentFenceInfoEXT const & swapchainPresentFenceInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentFenceInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentFenceInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentFenceInfoEXT.swapchainCount ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentFenceInfoEXT.pFences ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainPresentModeInfoEXT const & swapchainPresentModeInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModeInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModeInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModeInfoEXT.swapchainCount ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModeInfoEXT.pPresentModes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainPresentModesCreateInfoEXT const & swapchainPresentModesCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, 
swapchainPresentModesCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModesCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModesCreateInfoEXT.presentModeCount ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentModesCreateInfoEXT.pPresentModes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainPresentScalingCreateInfoEXT const & swapchainPresentScalingCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentScalingCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentScalingCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentScalingCreateInfoEXT.scalingBehavior ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentScalingCreateInfoEXT.presentGravityX ); + VULKAN_HPP_HASH_COMBINE( seed, swapchainPresentScalingCreateInfoEXT.presentGravityY ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const & textureLODGatherFormatPropertiesAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, textureLODGatherFormatPropertiesAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, textureLODGatherFormatPropertiesAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, textureLODGatherFormatPropertiesAMD.supportsTextureGatherLODBiasAMD ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::TilePropertiesQCOM const & tilePropertiesQCOM ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.sType ); + VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.tileSize ); + VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.apronSize ); + VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.origin ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo const & timelineSemaphoreSubmitInfo ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.waitSemaphoreValueCount ); + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.pWaitSemaphoreValues ); + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.signalSemaphoreValueCount ); + VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.pSignalSemaphoreValues ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR const & traceRaysIndirectCommand2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.raygenShaderRecordAddress ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.raygenShaderRecordSize ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.missShaderBindingTableAddress ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.missShaderBindingTableSize ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.missShaderBindingTableStride ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.hitShaderBindingTableAddress ); + VULKAN_HPP_HASH_COMBINE( seed, 
traceRaysIndirectCommand2KHR.hitShaderBindingTableSize ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.hitShaderBindingTableStride ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.callableShaderBindingTableAddress ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.callableShaderBindingTableSize ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.callableShaderBindingTableStride ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.width ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.height ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.depth ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR const & traceRaysIndirectCommandKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommandKHR.width ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommandKHR.height ); + VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommandKHR.depth ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & validationCacheCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.initialDataSize ); + VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.pInitialData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const & validationFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.enabledValidationFeatureCount ); + VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.pEnabledValidationFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.disabledValidationFeatureCount ); + VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.pDisabledValidationFeatures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const & validationFlagsEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.disabledValidationCheckCount ); + VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.pDisabledValidationChecks ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT const & vertexInputAttributeDescription2EXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.location ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.binding ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.format ); + VULKAN_HPP_HASH_COMBINE( seed, 
vertexInputAttributeDescription2EXT.offset ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT const & vertexInputBindingDescription2EXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.binding ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.stride ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.inputRate ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.divisor ); + return seed; + } + }; + +# if defined( VK_USE_PLATFORM_VI_NN ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & viSurfaceCreateInfoNN ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.sType ); + VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.flags ); + VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.window ); + return seed; + } + }; +# endif /*VK_USE_PLATFORM_VI_NN*/ + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const & videoPictureResourceInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.codedOffset ); + VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.codedExtent ); + VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.baseArrayLayer ); + VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.imageViewBinding ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR const & videoReferenceSlotInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.slotIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.pPictureResource ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR const & videoBeginCodingInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.videoSession ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.videoSessionParameters ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.referenceSlotCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.pReferenceSlots ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR const & videoCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.pNext ); + 
VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.minBitstreamBufferOffsetAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.minBitstreamBufferSizeAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.pictureAccessGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.minCodedExtent ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxCodedExtent ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxDpbSlots ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxActiveReferencePictures ); + VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.stdHeaderVersion ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR const & videoCodingControlInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoCodingControlInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoCodingControlInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoCodingControlInfoKHR.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeAV1CapabilitiesKHR const & videoDecodeAV1CapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1CapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1CapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1CapabilitiesKHR.maxLevel ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeAV1DpbSlotInfoKHR const & videoDecodeAV1DpbSlotInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1DpbSlotInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1DpbSlotInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1DpbSlotInfoKHR.pStdReferenceInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeAV1InlineSessionParametersInfoKHR const & videoDecodeAV1InlineSessionParametersInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1InlineSessionParametersInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1InlineSessionParametersInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1InlineSessionParametersInfoKHR.pStdSequenceHeader ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeAV1PictureInfoKHR const & videoDecodeAV1PictureInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1PictureInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1PictureInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1PictureInfoKHR.pStdPictureInfo ); + for ( size_t i = 0; i < VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1PictureInfoKHR.referenceNameSlotIndices[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1PictureInfoKHR.frameHeaderOffset ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1PictureInfoKHR.tileCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1PictureInfoKHR.pTileOffsets ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1PictureInfoKHR.pTileSizes ); + return seed; + } + }; + + template <> + struct hash + { 
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeAV1ProfileInfoKHR const & videoDecodeAV1ProfileInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1ProfileInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1ProfileInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1ProfileInfoKHR.stdProfile ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1ProfileInfoKHR.filmGrainSupport ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeAV1SessionParametersCreateInfoKHR const & videoDecodeAV1SessionParametersCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1SessionParametersCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1SessionParametersCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeAV1SessionParametersCreateInfoKHR.pStdSequenceHeader ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR const & videoDecodeCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeCapabilitiesKHR.flags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesKHR const & videoDecodeH264CapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesKHR.maxLevelIdc ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesKHR.fieldOffsetGranularity ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR const & videoDecodeH264DpbSlotInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoKHR.pStdReferenceInfo ); + return seed; + } + }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const & surfaceFormatKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264InlineSessionParametersInfoKHR const & videoDecodeH264InlineSessionParametersInfoKHR ) const + VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, surfaceFormatKHR.format ); - VULKAN_HPP_HASH_COMBINE( seed, surfaceFormatKHR.colorSpace ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264InlineSessionParametersInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264InlineSessionParametersInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264InlineSessionParametersInfoKHR.pStdSPS ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264InlineSessionParametersInfoKHR.pStdPPS ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const & surfaceFormat2KHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoKHR const & videoDecodeH264PictureInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, surfaceFormat2KHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, surfaceFormat2KHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, surfaceFormat2KHR.surfaceFormat ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoKHR.pStdPictureInfo ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoKHR.sliceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoKHR.pSliceOffsets ); return seed; } }; -# if defined( VK_USE_PLATFORM_WIN32_KHR ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const & surfaceFullScreenExclusiveInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR const & videoDecodeH264ProfileInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveInfoEXT.fullScreenExclusive ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoKHR.stdProfileIdc ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoKHR.pictureLayout ); return seed; } }; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ -# if defined( VK_USE_PLATFORM_WIN32_KHR ) template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const & surfaceFullScreenExclusiveWin32InfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR const & videoDecodeH264SessionParametersAddInfoKHR ) const + VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveWin32InfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveWin32InfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, surfaceFullScreenExclusiveWin32InfoEXT.hmonitor ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.stdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.pStdSPSs ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.stdPPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoKHR.pStdPPSs ); return seed; } }; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR const & surfaceProtectedCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR const & videoDecodeH264SessionParametersCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, surfaceProtectedCapabilitiesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, surfaceProtectedCapabilitiesKHR.pNext ); 
- VULKAN_HPP_HASH_COMBINE( seed, surfaceProtectedCapabilitiesKHR.supportsProtected ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoKHR.maxStdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoKHR.maxStdPPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoKHR.pParametersAddInfo ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const & swapchainCounterCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR const & videoDecodeH265CapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, swapchainCounterCreateInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, swapchainCounterCreateInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, swapchainCounterCreateInfoEXT.surfaceCounters ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265CapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265CapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265CapabilitiesKHR.maxLevelIdc ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & swapchainCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR const & videoDecodeH265DpbSlotInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.flags ); - VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.surface ); - VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.minImageCount ); - VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageFormat ); - VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageColorSpace ); - VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageExtent ); - VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageArrayLayers ); - VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageUsage ); - VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.imageSharingMode ); - VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.queueFamilyIndexCount ); - VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.pQueueFamilyIndices ); - VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.preTransform ); - VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.compositeAlpha ); - VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.presentMode ); - VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.clipped ); - VULKAN_HPP_HASH_COMBINE( seed, swapchainCreateInfoKHR.oldSwapchain ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265DpbSlotInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265DpbSlotInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265DpbSlotInfoKHR.pStdReferenceInfo ); return seed; } }; template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const & swapchainDisplayNativeHdrCreateInfoAMD ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::VideoDecodeH265InlineSessionParametersInfoKHR const & videoDecodeH265InlineSessionParametersInfoKHR ) const + VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, swapchainDisplayNativeHdrCreateInfoAMD.sType ); - VULKAN_HPP_HASH_COMBINE( seed, swapchainDisplayNativeHdrCreateInfoAMD.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, swapchainDisplayNativeHdrCreateInfoAMD.localDimmingEnable ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265InlineSessionParametersInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265InlineSessionParametersInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265InlineSessionParametersInfoKHR.pStdVPS ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265InlineSessionParametersInfoKHR.pStdSPS ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265InlineSessionParametersInfoKHR.pStdPPS ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const & textureLODGatherFormatPropertiesAMD ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR const & videoDecodeH265PictureInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, textureLODGatherFormatPropertiesAMD.sType ); - VULKAN_HPP_HASH_COMBINE( seed, textureLODGatherFormatPropertiesAMD.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, textureLODGatherFormatPropertiesAMD.supportsTextureGatherLODBiasAMD ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoKHR.pStdPictureInfo ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoKHR.sliceSegmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoKHR.pSliceSegmentOffsets ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::TilePropertiesQCOM const & tilePropertiesQCOM ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR const & videoDecodeH265ProfileInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.sType ); - VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.tileSize ); - VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.apronSize ); - VULKAN_HPP_HASH_COMBINE( seed, tilePropertiesQCOM.origin ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileInfoKHR.stdProfileIdc ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo const & timelineSemaphoreSubmitInfo ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR const & videoDecodeH265SessionParametersAddInfoKHR ) const + VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.sType ); - VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.waitSemaphoreValueCount ); - VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.pWaitSemaphoreValues ); - VULKAN_HPP_HASH_COMBINE( seed, 
timelineSemaphoreSubmitInfo.signalSemaphoreValueCount ); - VULKAN_HPP_HASH_COMBINE( seed, timelineSemaphoreSubmitInfo.pSignalSemaphoreValues ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.stdVPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.pStdVPSs ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.stdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.pStdSPSs ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.stdPPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoKHR.pStdPPSs ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR const & traceRaysIndirectCommand2KHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR const & videoDecodeH265SessionParametersCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.raygenShaderRecordAddress ); - VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.raygenShaderRecordSize ); - VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.missShaderBindingTableAddress ); - VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.missShaderBindingTableSize ); - VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.missShaderBindingTableStride ); - VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.hitShaderBindingTableAddress ); - VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.hitShaderBindingTableSize ); - VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.hitShaderBindingTableStride ); - VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.callableShaderBindingTableAddress ); - VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.callableShaderBindingTableSize ); - VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.callableShaderBindingTableStride ); - VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.width ); - VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.height ); - VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommand2KHR.depth ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.maxStdVPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.maxStdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.maxStdPPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoKHR.pParametersAddInfo ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR const & traceRaysIndirectCommandKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR const & videoDecodeInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommandKHR.width ); - VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommandKHR.height ); 
- VULKAN_HPP_HASH_COMBINE( seed, traceRaysIndirectCommandKHR.depth ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.srcBuffer ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.srcBufferOffset ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.srcBufferRange ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.dstPictureResource ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.pSetupReferenceSlot ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.referenceSlotCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.pReferenceSlots ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & validationCacheCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR const & videoDecodeUsageInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.flags ); - VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.initialDataSize ); - VULKAN_HPP_HASH_COMBINE( seed, validationCacheCreateInfoEXT.pInitialData ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeUsageInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeUsageInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoDecodeUsageInfoKHR.videoUsageHints ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const & validationFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilitiesKHR const & videoEncodeAV1CapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.enabledValidationFeatureCount ); - VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.pEnabledValidationFeatures ); - VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.disabledValidationFeatureCount ); - VULKAN_HPP_HASH_COMBINE( seed, validationFeaturesEXT.pDisabledValidationFeatures ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxLevel ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.codedPictureAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxTiles ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.minTileSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxTileSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.superblockSizes ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxSingleReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.singleReferenceNameMask ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxUnidirectionalCompoundReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeAV1CapabilitiesKHR.maxUnidirectionalCompoundGroup1ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.unidirectionalCompoundReferenceNameMask ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxBidirectionalCompoundReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxBidirectionalCompoundGroup1ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxBidirectionalCompoundGroup2ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.bidirectionalCompoundReferenceNameMask ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxTemporalLayerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxSpatialLayerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxOperatingPoints ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.minQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.prefersGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.requiresGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.stdSyntaxFlags ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const & validationFlagsEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1DpbSlotInfoKHR const & videoEncodeAV1DpbSlotInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.disabledValidationCheckCount ); - VULKAN_HPP_HASH_COMBINE( seed, validationFlagsEXT.pDisabledValidationChecks ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1DpbSlotInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1DpbSlotInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1DpbSlotInfoKHR.pStdReferenceInfo ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT const & vertexInputAttributeDescription2EXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR const & videoEncodeAV1FrameSizeKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.location ); - VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.binding ); - VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.format ); - VULKAN_HPP_HASH_COMBINE( seed, vertexInputAttributeDescription2EXT.offset ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1FrameSizeKHR.intraFrameSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1FrameSizeKHR.predictiveFrameSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1FrameSizeKHR.bipredictiveFrameSize ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT const & vertexInputBindingDescription2EXT ) const VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1GopRemainingFrameInfoKHR 
const & videoEncodeAV1GopRemainingFrameInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.binding ); - VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.stride ); - VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.inputRate ); - VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDescription2EXT.divisor ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1GopRemainingFrameInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1GopRemainingFrameInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1GopRemainingFrameInfoKHR.useGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1GopRemainingFrameInfoKHR.gopRemainingIntra ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1GopRemainingFrameInfoKHR.gopRemainingPredictive ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1GopRemainingFrameInfoKHR.gopRemainingBipredictive ); return seed; } }; -# if defined( VK_USE_PLATFORM_VI_NN ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & viSurfaceCreateInfoNN ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1PictureInfoKHR const & videoEncodeAV1PictureInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.sType ); - VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.flags ); - VULKAN_HPP_HASH_COMBINE( seed, viSurfaceCreateInfoNN.window ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.predictionMode ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.rateControlGroup ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.constantQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.pStdPictureInfo ); + for ( size_t i = 0; i < VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.referenceNameSlotIndices[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.primaryReferenceCdfOnly ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.generateObuExtensionHeader ); return seed; } }; -# endif /*VK_USE_PLATFORM_VI_NN*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const & videoPictureResourceInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1ProfileInfoKHR const & videoEncodeAV1ProfileInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.codedOffset ); - VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.codedExtent ); - VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.baseArrayLayer ); - VULKAN_HPP_HASH_COMBINE( seed, videoPictureResourceInfoKHR.imageViewBinding ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1ProfileInfoKHR.sType ); + 
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1ProfileInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1ProfileInfoKHR.stdProfile ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR const & videoReferenceSlotInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR const & videoEncodeAV1QIndexKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.slotIndex ); - VULKAN_HPP_HASH_COMBINE( seed, videoReferenceSlotInfoKHR.pPictureResource ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QIndexKHR.intraQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QIndexKHR.predictiveQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QIndexKHR.bipredictiveQIndex ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR const & videoBeginCodingInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QualityLevelPropertiesKHR const & videoEncodeAV1QualityLevelPropertiesKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.flags ); - VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.videoSession ); - VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.videoSessionParameters ); - VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.referenceSlotCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoBeginCodingInfoKHR.pReferenceSlots ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredRateControlFlags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredGopFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredKeyFramePeriod ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredConsecutiveBipredictiveFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredTemporalLayerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredConstantQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredMaxSingleReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredSingleReferenceNameMask ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredMaxUnidirectionalCompoundReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredMaxUnidirectionalCompoundGroup1ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredUnidirectionalCompoundReferenceNameMask ); + VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeAV1QualityLevelPropertiesKHR.preferredMaxBidirectionalCompoundReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredMaxBidirectionalCompoundGroup1ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredMaxBidirectionalCompoundGroup2ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredBidirectionalCompoundReferenceNameMask ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR const & videoCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QuantizationMapCapabilitiesKHR const & videoEncodeAV1QuantizationMapCapabilitiesKHR ) const + VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.capabilityFlags ); - VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.minBitstreamBufferOffsetAlignment ); - VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.minBitstreamBufferSizeAlignment ); - VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.videoPictureExtentGranularity ); - VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.minExtent ); - VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxExtent ); - VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxReferencePicturesSlotsCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.maxReferencePicturesActiveCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoCapabilitiesKHR.stdHeaderVersion ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QuantizationMapCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QuantizationMapCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QuantizationMapCapabilitiesKHR.minQIndexDelta ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QuantizationMapCapabilitiesKHR.maxQIndexDelta ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR const & videoCodingControlInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlInfoKHR const & videoEncodeAV1RateControlInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoCodingControlInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoCodingControlInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoCodingControlInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlInfoKHR.gopFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlInfoKHR.keyFramePeriod ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlInfoKHR.consecutiveBipredictiveFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlInfoKHR.temporalLayerCount ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t 
operator()( VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR const & videoDecodeCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlLayerInfoKHR const & videoEncodeAV1RateControlLayerInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeCapabilitiesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeCapabilitiesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeCapabilitiesKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlLayerInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlLayerInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlLayerInfoKHR.useMinQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlLayerInfoKHR.minQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlLayerInfoKHR.useMaxQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlLayerInfoKHR.maxQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlLayerInfoKHR.useMaxFrameSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlLayerInfoKHR.maxFrameSize ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT const & videoDecodeH264CapabilitiesEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1SessionCreateInfoKHR const & videoEncodeAV1SessionCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesEXT.maxLevel ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264CapabilitiesEXT.fieldOffsetGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionCreateInfoKHR.useMaxLevel ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionCreateInfoKHR.maxLevel ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoEXT const & videoDecodeH264DpbSlotInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1SessionParametersCreateInfoKHR const & videoEncodeAV1SessionParametersCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264DpbSlotInfoEXT.pStdReferenceInfo ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionParametersCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionParametersCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionParametersCreateInfoKHR.pStdSequenceHeader ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionParametersCreateInfoKHR.pStdDecoderModelInfo ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionParametersCreateInfoKHR.stdOperatingPointCount ); + VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeAV1SessionParametersCreateInfoKHR.pStdOperatingPoints ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264MvcInfoEXT const & videoDecodeH264MvcInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR const & videoEncodeCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264MvcInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264MvcInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264MvcInfoEXT.pStdMvc ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.rateControlModes ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.maxRateControlLayers ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.maxBitrate ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.maxQualityLevels ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.encodeInputPictureGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.supportedEncodeFeedbackFlags ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoEXT const & videoDecodeH264PictureInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesKHR const & videoEncodeH264CapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoEXT.pStdPictureInfo ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoEXT.slicesCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264PictureInfoEXT.pSlicesDataOffsets ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.maxLevelIdc ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.maxSliceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.maxPPictureL0ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.maxBPictureL0ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.maxL1ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.maxTemporalLayerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.expectDyadicTemporalLayerPattern ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.minQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.maxQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.prefersGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.requiresGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesKHR.stdSyntaxFlags ); return seed; } }; -# 
endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoEXT const & videoDecodeH264ProfileInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoKHR const & videoEncodeH264DpbSlotInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoEXT.stdProfileIdc ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264ProfileInfoEXT.pictureLayout ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoKHR.pStdReferenceInfo ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT const & videoDecodeH264SessionParametersAddInfoEXT ) const - VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR const & videoEncodeH264FrameSizeKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoEXT.spsStdCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoEXT.pSpsStd ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoEXT.ppsStdCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersAddInfoEXT.pPpsStd ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264FrameSizeKHR.frameISize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264FrameSizeKHR.framePSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264FrameSizeKHR.frameBSize ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoEXT const & videoDecodeH264SessionParametersCreateInfoEXT ) const - VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264GopRemainingFrameInfoKHR const & videoEncodeH264GopRemainingFrameInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoEXT.maxSpsStdCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoEXT.maxPpsStdCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH264SessionParametersCreateInfoEXT.pParametersAddInfo ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264GopRemainingFrameInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264GopRemainingFrameInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264GopRemainingFrameInfoKHR.useGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264GopRemainingFrameInfoKHR.gopRemainingI ); + VULKAN_HPP_HASH_COMBINE( 
seed, videoEncodeH264GopRemainingFrameInfoKHR.gopRemainingP ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264GopRemainingFrameInfoKHR.gopRemainingB ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesEXT const & videoDecodeH265CapabilitiesEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR const & videoEncodeH264NaluSliceInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265CapabilitiesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265CapabilitiesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265CapabilitiesEXT.maxLevel ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoKHR.constantQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoKHR.pStdSliceHeader ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoEXT const & videoDecodeH265DpbSlotInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264PictureInfoKHR const & videoEncodeH264PictureInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265DpbSlotInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265DpbSlotInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265DpbSlotInfoEXT.pStdReferenceInfo ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264PictureInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264PictureInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264PictureInfoKHR.naluSliceEntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264PictureInfoKHR.pNaluSliceEntries ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264PictureInfoKHR.pStdPictureInfo ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264PictureInfoKHR.generatePrefixNalu ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoEXT const & videoDecodeH265PictureInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoKHR const & videoEncodeH264ProfileInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoEXT.pStdPictureInfo ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoEXT.slicesCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265PictureInfoEXT.pSlicesDataOffsets ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileInfoKHR.stdProfileIdc ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( 
VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoEXT const & videoDecodeH265ProfileInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR const & videoEncodeH264QpKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265ProfileInfoEXT.stdProfileIdc ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QpKHR.qpI ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QpKHR.qpP ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QpKHR.qpB ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT const & videoDecodeH265SessionParametersAddInfoEXT ) const - VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264QualityLevelPropertiesKHR const & videoEncodeH264QualityLevelPropertiesKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.vpsStdCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.pVpsStd ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.spsStdCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.pSpsStd ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.ppsStdCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersAddInfoEXT.pPpsStd ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredRateControlFlags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredGopFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredIdrPeriod ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredConsecutiveBFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredTemporalLayerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredConstantQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredMaxL0ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredMaxL1ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QualityLevelPropertiesKHR.preferredStdEntropyCodingModeFlag ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoEXT const & videoDecodeH265SessionParametersCreateInfoEXT ) const + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264QuantizationMapCapabilitiesKHR const & videoEncodeH264QuantizationMapCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, 
videoDecodeH265SessionParametersCreateInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoEXT.maxVpsStdCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoEXT.maxSpsStdCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoEXT.maxPpsStdCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeH265SessionParametersCreateInfoEXT.pParametersAddInfo ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QuantizationMapCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QuantizationMapCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QuantizationMapCapabilitiesKHR.minQpDelta ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QuantizationMapCapabilitiesKHR.maxQpDelta ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR const & videoDecodeInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoKHR const & videoEncodeH264RateControlInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.flags ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.srcBuffer ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.srcBufferOffset ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.srcBufferRange ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.dstPictureResource ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.pSetupReferenceSlot ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.referenceSlotCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeInfoKHR.pReferenceSlots ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoKHR.gopFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoKHR.idrPeriod ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoKHR.consecutiveBFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoKHR.temporalLayerCount ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR const & videoDecodeUsageInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoKHR const & videoEncodeH264RateControlLayerInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeUsageInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeUsageInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoDecodeUsageInfoKHR.videoUsageHints ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoKHR.useMinQp ); + VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeH264RateControlLayerInfoKHR.minQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoKHR.useMaxQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoKHR.maxQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoKHR.useMaxFrameSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoKHR.maxFrameSize ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR const & videoEncodeCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoKHR const & videoEncodeH264SessionCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.flags ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.rateControlModes ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.rateControlLayerCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.qualityLevelCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeCapabilitiesKHR.inputImageDataFillAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionCreateInfoKHR.useMaxLevelIdc ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionCreateInfoKHR.maxLevelIdc ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT const & videoEncodeH264CapabilitiesEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR const & videoEncodeH264SessionParametersAddInfoKHR ) const + VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.flags ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.inputModeFlags ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.outputModeFlags ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.maxPPictureL0ReferenceCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.maxBPictureL0ReferenceCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.maxL1ReferenceCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.motionVectorsOverPicBoundariesFlag ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.maxBytesPerPicDenom ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.maxBitsPerMbDenom ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.log2MaxMvLengthHorizontal ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264CapabilitiesEXT.log2MaxMvLengthVertical ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeH264SessionParametersAddInfoKHR.stdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoKHR.pStdSPSs ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoKHR.stdPPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoKHR.pStdPPSs ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT const & videoEncodeH264DpbSlotInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoKHR const & videoEncodeH264SessionParametersCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoEXT.slotIndex ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264DpbSlotInfoEXT.pStdReferenceInfo ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoKHR.maxStdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoKHR.maxStdPPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoKHR.pParametersAddInfo ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersInfoEXT const & videoEncodeH264EmitPictureParametersInfoEXT ) const - VULKAN_HPP_NOEXCEPT + std::size_t operator()( + VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersFeedbackInfoKHR const & videoEncodeH264SessionParametersFeedbackInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersInfoEXT.spsId ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersInfoEXT.emitSpsEnable ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersInfoEXT.ppsIdEntryCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264EmitPictureParametersInfoEXT.ppsIdEntries ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersFeedbackInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersFeedbackInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersFeedbackInfoKHR.hasStdSPSOverrides ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersFeedbackInfoKHR.hasStdPPSOverrides ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT const & videoEncodeH264FrameSizeEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersGetInfoKHR const & videoEncodeH264SessionParametersGetInfoKHR ) const + VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeH264FrameSizeEXT.frameISize ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264FrameSizeEXT.framePSize ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264FrameSizeEXT.frameBSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersGetInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersGetInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersGetInfoKHR.writeStdSPS ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersGetInfoKHR.writeStdPPS ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersGetInfoKHR.stdSPSId ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersGetInfoKHR.stdPPSId ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT const & videoEncodeH264ReferenceListsInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesKHR const & videoEncodeH265CapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ReferenceListsInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ReferenceListsInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ReferenceListsInfoEXT.referenceList0EntryCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ReferenceListsInfoEXT.pReferenceList0Entries ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ReferenceListsInfoEXT.referenceList1EntryCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ReferenceListsInfoEXT.pReferenceList1Entries ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ReferenceListsInfoEXT.pMemMgmtCtrlOperations ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxLevelIdc ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxSliceSegmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxTiles ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.ctbSizes ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.transformBlockSizes ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxPPictureL0ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxBPictureL0ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxL1ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxSubLayerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.expectDyadicTemporalSubLayerPattern ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.minQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.maxQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.prefersGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.requiresGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesKHR.stdSyntaxFlags ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( 
VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT const & videoEncodeH264NaluSliceInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoKHR const & videoEncodeH265DpbSlotInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoEXT.mbCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoEXT.pReferenceFinalLists ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264NaluSliceInfoEXT.pSliceHeaderStd ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265DpbSlotInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265DpbSlotInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265DpbSlotInfoKHR.pStdReferenceInfo ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoEXT const & videoEncodeH264ProfileInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR const & videoEncodeH265FrameSizeKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264ProfileInfoEXT.stdProfileIdc ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265FrameSizeKHR.frameISize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265FrameSizeKHR.framePSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265FrameSizeKHR.frameBSize ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const & videoEncodeH264QpEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265GopRemainingFrameInfoKHR const & videoEncodeH265GopRemainingFrameInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QpEXT.qpI ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QpEXT.qpP ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QpEXT.qpB ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265GopRemainingFrameInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265GopRemainingFrameInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265GopRemainingFrameInfoKHR.useGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265GopRemainingFrameInfoKHR.gopRemainingI ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265GopRemainingFrameInfoKHR.gopRemainingP ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265GopRemainingFrameInfoKHR.gopRemainingB ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT const & videoEncodeH264RateControlInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR const & videoEncodeH265NaluSliceSegmentInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoEXT.sType ); - 
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoEXT.gopFrameCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoEXT.idrPeriod ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoEXT.consecutiveBFrameCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoEXT.rateControlStructure ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlInfoEXT.temporalLayerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoKHR.constantQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoKHR.pStdSliceSegmentHeader ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT const & videoEncodeH264RateControlLayerInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265PictureInfoKHR const & videoEncodeH265PictureInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.temporalLayerId ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.useInitialRcQp ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.initialRcQp ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.useMinQp ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.minQp ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.useMaxQp ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.maxQp ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.useMaxFrameSize ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264RateControlLayerInfoEXT.maxFrameSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265PictureInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265PictureInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265PictureInfoKHR.naluSliceSegmentEntryCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265PictureInfoKHR.pNaluSliceSegmentEntries ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265PictureInfoKHR.pStdPictureInfo ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT const & videoEncodeH264SessionParametersAddInfoEXT ) const - VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoKHR const & videoEncodeH265ProfileInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.spsStdCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.pSpsStd ); - VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeH264SessionParametersAddInfoEXT.ppsStdCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersAddInfoEXT.pPpsStd ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ProfileInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ProfileInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ProfileInfoKHR.stdProfileIdc ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT const & videoEncodeH264SessionParametersCreateInfoEXT ) const - VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR const & videoEncodeH265QpKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoEXT.maxSpsStdCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoEXT.maxPpsStdCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264SessionParametersCreateInfoEXT.pParametersAddInfo ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QpKHR.qpI ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QpKHR.qpP ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QpKHR.qpB ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT const & videoEncodeH264VclFrameInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265QualityLevelPropertiesKHR const & videoEncodeH265QualityLevelPropertiesKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.pReferenceFinalLists ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.naluSliceEntryCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.pNaluSliceEntries ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264VclFrameInfoEXT.pCurrentPictureInfo ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredRateControlFlags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredGopFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredIdrPeriod ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredConsecutiveBFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredSubLayerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredConstantQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredMaxL0ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QualityLevelPropertiesKHR.preferredMaxL1ReferenceCount ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) 
template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT const & videoEncodeH265CapabilitiesEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265QuantizationMapCapabilitiesKHR const & videoEncodeH265QuantizationMapCapabilitiesKHR ) const + VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.flags ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.inputModeFlags ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.outputModeFlags ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.ctbSizes ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.transformBlockSizes ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxPPictureL0ReferenceCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxBPictureL0ReferenceCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxL1ReferenceCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxSubLayersCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.minLog2MinLumaCodingBlockSizeMinus3 ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxLog2MinLumaCodingBlockSizeMinus3 ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.minLog2MinLumaTransformBlockSizeMinus2 ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxLog2MinLumaTransformBlockSizeMinus2 ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.minMaxTransformHierarchyDepthInter ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxMaxTransformHierarchyDepthInter ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.minMaxTransformHierarchyDepthIntra ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxMaxTransformHierarchyDepthIntra ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxDiffCuQpDeltaDepth ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.minMaxNumMergeCand ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265CapabilitiesEXT.maxMaxNumMergeCand ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QuantizationMapCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QuantizationMapCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QuantizationMapCapabilitiesKHR.minQpDelta ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QuantizationMapCapabilitiesKHR.maxQpDelta ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT const & videoEncodeH265DpbSlotInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoKHR const & videoEncodeH265RateControlInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265DpbSlotInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265DpbSlotInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265DpbSlotInfoEXT.slotIndex ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265DpbSlotInfoEXT.pStdReferenceInfo ); + VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeH265RateControlInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoKHR.gopFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoKHR.idrPeriod ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoKHR.consecutiveBFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoKHR.subLayerCount ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersInfoEXT const & videoEncodeH265EmitPictureParametersInfoEXT ) const - VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoKHR const & videoEncodeH265RateControlLayerInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.vpsId ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.spsId ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.emitVpsEnable ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.emitSpsEnable ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.ppsIdEntryCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265EmitPictureParametersInfoEXT.ppsIdEntries ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.useMinQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.minQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.useMaxQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.maxQp ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.useMaxFrameSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoKHR.maxFrameSize ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT const & videoEncodeH265FrameSizeEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoKHR const & videoEncodeH265SessionCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265FrameSizeEXT.frameISize ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265FrameSizeEXT.framePSize ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265FrameSizeEXT.frameBSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionCreateInfoKHR.useMaxLevelIdc ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionCreateInfoKHR.maxLevelIdc ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template 
<> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT const & videoEncodeH265ReferenceListsInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR const & videoEncodeH265SessionParametersAddInfoKHR ) const + VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsInfoEXT.referenceList0EntryCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsInfoEXT.pReferenceList0Entries ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsInfoEXT.referenceList1EntryCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsInfoEXT.pReferenceList1Entries ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ReferenceListsInfoEXT.pReferenceModifications ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.stdVPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.pStdVPSs ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.stdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.pStdSPSs ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.stdPPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoKHR.pStdPPSs ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT const & videoEncodeH265NaluSliceSegmentInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoKHR const & videoEncodeH265SessionParametersCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoEXT.ctbCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoEXT.pReferenceFinalLists ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265NaluSliceSegmentInfoEXT.pSliceSegmentHeaderStd ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoKHR.maxStdVPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoKHR.maxStdSPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoKHR.maxStdPPSCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoKHR.pParametersAddInfo ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoEXT const & videoEncodeH265ProfileInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t 
operator()( + VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersFeedbackInfoKHR const & videoEncodeH265SessionParametersFeedbackInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ProfileInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ProfileInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265ProfileInfoEXT.stdProfileIdc ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersFeedbackInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersFeedbackInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersFeedbackInfoKHR.hasStdVPSOverrides ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersFeedbackInfoKHR.hasStdSPSOverrides ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersFeedbackInfoKHR.hasStdPPSOverrides ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const & videoEncodeH265QpEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersGetInfoKHR const & videoEncodeH265SessionParametersGetInfoKHR ) const + VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QpEXT.qpI ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QpEXT.qpP ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QpEXT.qpB ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.writeStdVPS ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.writeStdSPS ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.writeStdPPS ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.stdVPSId ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.stdSPSId ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersGetInfoKHR.stdPPSId ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT const & videoEncodeH265RateControlInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR const & videoEncodeInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoEXT.gopFrameCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoEXT.idrPeriod ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoEXT.consecutiveBFrameCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoEXT.rateControlStructure ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlInfoEXT.subLayerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.dstBuffer ); + 
VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.dstBufferOffset ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.dstBufferRange ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.srcPictureResource ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.pSetupReferenceSlot ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.referenceSlotCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.pReferenceSlots ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.precedingExternallyEncodedBytes ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT const & videoEncodeH265RateControlLayerInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelInfoKHR const & videoEncodeQualityLevelInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.temporalId ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.useInitialRcQp ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.initialRcQp ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.useMinQp ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.minQp ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.useMaxQp ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.maxQp ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.useMaxFrameSize ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265RateControlLayerInfoEXT.maxFrameSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQualityLevelInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQualityLevelInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQualityLevelInfoKHR.qualityLevel ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT const & videoEncodeH265SessionParametersAddInfoEXT ) const - VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR const & videoEncodeQualityLevelPropertiesKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.vpsStdCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.pVpsStd ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.spsStdCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.pSpsStd ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.ppsStdCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersAddInfoEXT.pPpsStd ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQualityLevelPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQualityLevelPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeQualityLevelPropertiesKHR.preferredRateControlMode ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQualityLevelPropertiesKHR.preferredRateControlLayerCount ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT const & videoEncodeH265SessionParametersCreateInfoEXT ) const - VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapCapabilitiesKHR const & videoEncodeQuantizationMapCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.maxVpsStdCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.maxSpsStdCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.maxPpsStdCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265SessionParametersCreateInfoEXT.pParametersAddInfo ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapCapabilitiesKHR.maxQuantizationMapExtent ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT const & videoEncodeH265VclFrameInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapInfoKHR const & videoEncodeQuantizationMapInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265VclFrameInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265VclFrameInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265VclFrameInfoEXT.pReferenceFinalLists ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265VclFrameInfoEXT.naluSliceSegmentEntryCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265VclFrameInfoEXT.pNaluSliceSegmentEntries ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265VclFrameInfoEXT.pCurrentPictureInfo ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapInfoKHR.quantizationMap ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapInfoKHR.quantizationMapExtent ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR const & videoEncodeInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & + videoEncodeQuantizationMapSessionParametersCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.flags ); - VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeInfoKHR.qualityLevel ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.dstBitstreamBuffer ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.dstBitstreamBufferOffset ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.dstBitstreamBufferMaxRange ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.srcPictureResource ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.pSetupReferenceSlot ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.referenceSlotCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.pReferenceSlots ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeInfoKHR.precedingExternallyEncodedBytes ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapSessionParametersCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapSessionParametersCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapSessionParametersCreateInfoKHR.quantizationMapTexelSize ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> struct hash { @@ -13127,14 +18298,10 @@ namespace std VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.maxBitrate ); VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.frameRateNumerator ); VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.frameRateDenominator ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.virtualBufferSizeInMs ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlLayerInfoKHR.initialVirtualBufferSizeInMs ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> struct hash { @@ -13146,13 +18313,41 @@ namespace std VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.flags ); VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.rateControlMode ); VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.layerCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.pLayerConfigs ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.pLayers ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.virtualBufferSizeInMs ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeRateControlInfoKHR.initialVirtualBufferSizeInMs ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR const & videoEncodeSessionParametersFeedbackInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeSessionParametersFeedbackInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeSessionParametersFeedbackInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeSessionParametersFeedbackInfoKHR.hasOverrides ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR const & videoEncodeSessionParametersGetInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeSessionParametersGetInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeSessionParametersGetInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeSessionParametersGetInfoKHR.videoSessionParameters ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> struct hash { @@ -13167,9 +18362,7 @@ namespace std return seed; } }; -# endif 
/*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> struct hash { @@ -13182,9 +18375,35 @@ namespace std return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoFormatAV1QuantizationMapPropertiesKHR const & videoFormatAV1QuantizationMapPropertiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoFormatAV1QuantizationMapPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatAV1QuantizationMapPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatAV1QuantizationMapPropertiesKHR.compatibleSuperblockSizes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoFormatH265QuantizationMapPropertiesKHR const & videoFormatH265QuantizationMapPropertiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoFormatH265QuantizationMapPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatH265QuantizationMapPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatH265QuantizationMapPropertiesKHR.compatibleCtbSizes ); + return seed; + } + }; + template <> struct hash { @@ -13202,27 +18421,36 @@ namespace std return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR const & videoProfileInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoFormatQuantizationMapPropertiesKHR const & videoFormatQuantizationMapPropertiesKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.videoCodecOperation ); - VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.chromaSubsampling ); - VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.lumaBitDepth ); - VULKAN_HPP_HASH_COMBINE( seed, videoProfileInfoKHR.chromaBitDepth ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatQuantizationMapPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatQuantizationMapPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatQuantizationMapPropertiesKHR.quantizationMapTexelSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoInlineQueryInfoKHR const & videoInlineQueryInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoInlineQueryInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoInlineQueryInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoInlineQueryInfoKHR.queryPool ); + VULKAN_HPP_HASH_COMBINE( seed, videoInlineQueryInfoKHR.firstQuery ); + VULKAN_HPP_HASH_COMBINE( seed, videoInlineQueryInfoKHR.queryCount ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> struct hash { @@ -13236,9 +18464,7 @@ namespace std return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> struct hash { @@ -13252,16 +18478,14 @@ namespace std VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.pVideoProfile ); VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.pictureFormat ); 
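These hunks give the renamed KHR video structures std::hash support, so they can be hashed like any other Vulkan-Hpp struct. A minimal usage sketch, not part of the patch, assuming the generated hash header is reachable under its usual <vulkan/vulkan_hash.hpp> include path:

#include <vulkan/vulkan_hash.hpp>  // include path assumed

// Hash a codec struct via the specialization generated above; it folds qpI, qpP and qpB
// with VULKAN_HPP_HASH_COMBINE, as the corresponding hunk shows.
std::size_t hashQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR const & qp )
{
  return std::hash<VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR>{}( qp );
}
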
VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.maxCodedExtent ); - VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.referencePicturesFormat ); - VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.maxReferencePicturesSlotsCount ); - VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.maxReferencePicturesActiveCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.referencePictureFormat ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.maxDpbSlots ); + VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.maxActiveReferencePictures ); VULKAN_HPP_HASH_COMBINE( seed, videoSessionCreateInfoKHR.pStdHeaderVersion ); return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> struct hash { @@ -13275,9 +18499,7 @@ namespace std return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> struct hash { @@ -13292,9 +18514,7 @@ namespace std return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> struct hash { @@ -13307,7 +18527,6 @@ namespace std return seed; } }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ # if defined( VK_USE_PLATFORM_WAYLAND_KHR ) template <> @@ -13385,26 +18604,6 @@ namespace std }; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template <> - struct hash - { - std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSet const & writeDescriptorSet ) const VULKAN_HPP_NOEXCEPT - { - std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.sType ); - VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.dstSet ); - VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.dstBinding ); - VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.dstArrayElement ); - VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.descriptorCount ); - VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.descriptorType ); - VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pImageInfo ); - VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pBufferInfo ); - VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSet.pTexelBufferView ); - return seed; - } - }; - template <> struct hash { @@ -13449,6 +18648,49 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSetPartitionedAccelerationStructureNV const & + writeDescriptorSetPartitionedAccelerationStructureNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetPartitionedAccelerationStructureNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetPartitionedAccelerationStructureNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetPartitionedAccelerationStructureNV.accelerationStructureCount ); + VULKAN_HPP_HASH_COMBINE( seed, writeDescriptorSetPartitionedAccelerationStructureNV.pAccelerationStructures ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT const & writeIndirectExecutionSetPipelineEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetPipelineEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetPipelineEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetPipelineEXT.index ); + VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetPipelineEXT.pipeline ); + return seed; + 
}
+ };
+
+ template <>
+ struct hash<VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetShaderEXT>
+ {
+ std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetShaderEXT const & writeIndirectExecutionSetShaderEXT ) const VULKAN_HPP_NOEXCEPT
+ {
+ std::size_t seed = 0;
+ VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetShaderEXT.sType );
+ VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetShaderEXT.pNext );
+ VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetShaderEXT.index );
+ VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetShaderEXT.shader );
+ return seed;
+ }
+ };
+
# if defined( VK_USE_PLATFORM_XCB_KHR )
template <>
struct hash<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR>
diff --git a/src/libraries/vulkanheaders/vulkan_hpp_macros.hpp b/src/libraries/vulkanheaders/vulkan_hpp_macros.hpp
new file mode 100644
index 000000000..a218f7256
--- /dev/null
+++ b/src/libraries/vulkanheaders/vulkan_hpp_macros.hpp
@@ -0,0 +1,326 @@
+// Copyright 2015-2025 The Khronos Group Inc.
+//
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_HPP_MACROS_HPP
+#define VULKAN_HPP_MACROS_HPP
+
+#if defined( _MSVC_LANG )
+# define VULKAN_HPP_CPLUSPLUS _MSVC_LANG
+#else
+# define VULKAN_HPP_CPLUSPLUS __cplusplus
+#endif
+
+#if 202002L < VULKAN_HPP_CPLUSPLUS
+# define VULKAN_HPP_CPP_VERSION 23
+#elif 201703L < VULKAN_HPP_CPLUSPLUS
+# define VULKAN_HPP_CPP_VERSION 20
+#elif 201402L < VULKAN_HPP_CPLUSPLUS
+# define VULKAN_HPP_CPP_VERSION 17
+#elif 201103L < VULKAN_HPP_CPLUSPLUS
+# define VULKAN_HPP_CPP_VERSION 14
+#elif 199711L < VULKAN_HPP_CPLUSPLUS
+# define VULKAN_HPP_CPP_VERSION 11
+#else
+# error "vulkan.hpp needs at least c++ standard version 11"
+#endif
+
+// include headers holding feature-test macros
+#if 20 <= VULKAN_HPP_CPP_VERSION
+# include <version>
+#else
+# include <ciso646>
+#endif
+
+#if defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+# if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+# define VULKAN_HPP_NO_SMART_HANDLE
+# endif
+#endif
+
+#if defined( VULKAN_HPP_NO_CONSTRUCTORS )
+# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+# define VULKAN_HPP_NO_STRUCT_CONSTRUCTORS
+# endif
+# if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
+# define VULKAN_HPP_NO_UNION_CONSTRUCTORS
+# endif
+#endif
+
+#if defined( VULKAN_HPP_NO_SETTERS )
+# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+# define VULKAN_HPP_NO_STRUCT_SETTERS
+# endif
+# if !defined( VULKAN_HPP_NO_UNION_SETTERS )
+# define VULKAN_HPP_NO_UNION_SETTERS
+# endif
+#endif
+
+#if !defined( VULKAN_HPP_ASSERT )
+# define VULKAN_HPP_ASSERT assert
+#endif
+
+#if !defined( VULKAN_HPP_ASSERT_ON_RESULT )
+# define VULKAN_HPP_ASSERT_ON_RESULT VULKAN_HPP_ASSERT
+#endif
+
+#if !defined( VULKAN_HPP_STATIC_ASSERT )
+# define VULKAN_HPP_STATIC_ASSERT static_assert
+#endif
+
+#if !defined( VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL )
+# define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1
+#endif
+
+#if !defined( __has_include )
+# define __has_include( x ) false
+#endif
+
+#if ( 201907 <= __cpp_lib_three_way_comparison ) && __has_include( <compare> ) && !defined( VULKAN_HPP_NO_SPACESHIP_OPERATOR )
+# define VULKAN_HPP_HAS_SPACESHIP_OPERATOR
+#endif
+
+#if ( 201803 <= __cpp_lib_span )
+# define VULKAN_HPP_SUPPORT_SPAN
+#endif
+
+#if defined( __cpp_lib_modules ) && !defined( VULKAN_HPP_STD_MODULE ) && defined( VULKAN_HPP_ENABLE_STD_MODULE )
+# define VULKAN_HPP_STD_MODULE std.compat
+#endif
+
+#ifndef VK_USE_64_BIT_PTR_DEFINES
+# if defined( __LP64__ ) || defined( _WIN64 ) || ( defined( __x86_64__ ) && !defined( __ILP32__ ) ) || defined( _M_X64 ) || defined( __ia64 ) || \
+ defined(
_M_IA64 ) || defined( __aarch64__ ) || defined( __powerpc64__ ) || ( defined( __riscv ) && __riscv_xlen == 64 ) +# define VK_USE_64_BIT_PTR_DEFINES 1 +# else +# define VK_USE_64_BIT_PTR_DEFINES 0 +# endif +#endif + +// 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default. +// To enable this feature on 32-bit platforms please #define VULKAN_HPP_TYPESAFE_CONVERSION 1 +// To disable this feature on 64-bit platforms please #define VULKAN_HPP_TYPESAFE_CONVERSION 0 +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) +# if !defined( VULKAN_HPP_TYPESAFE_CONVERSION ) +# define VULKAN_HPP_TYPESAFE_CONVERSION 1 +# endif +#endif + +#if defined( __GNUC__ ) +# define GCC_VERSION ( __GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__ ) +#endif + +#if !defined( VULKAN_HPP_HAS_UNRESTRICTED_UNIONS ) +# if defined( __clang__ ) +# if __has_feature( cxx_unrestricted_unions ) +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# elif defined( __GNUC__ ) +# if 40600 <= GCC_VERSION +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# elif defined( _MSC_VER ) +# if 1900 <= _MSC_VER +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# endif +#endif + +#if !defined( VULKAN_HPP_INLINE ) +# if defined( __clang__ ) +# if __has_attribute( always_inline ) +# define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__ +# else +# define VULKAN_HPP_INLINE inline +# endif +# elif defined( __GNUC__ ) +# define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__ +# elif defined( _MSC_VER ) +# define VULKAN_HPP_INLINE inline +# else +# define VULKAN_HPP_INLINE inline +# endif +#endif + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) +# define VULKAN_HPP_TYPESAFE_EXPLICIT +#else +# define VULKAN_HPP_TYPESAFE_EXPLICIT explicit +#endif + +#if defined( __cpp_constexpr ) +# define VULKAN_HPP_CONSTEXPR constexpr +# if 201304 <= __cpp_constexpr +# define VULKAN_HPP_CONSTEXPR_14 constexpr +# else +# define VULKAN_HPP_CONSTEXPR_14 +# endif +# if ( 201907 <= __cpp_constexpr ) && ( !defined( __GNUC__ ) || ( 110400 < GCC_VERSION ) ) +# define VULKAN_HPP_CONSTEXPR_20 constexpr +# else +# define VULKAN_HPP_CONSTEXPR_20 +# endif +# define VULKAN_HPP_CONST_OR_CONSTEXPR constexpr +#else +# define VULKAN_HPP_CONSTEXPR +# define VULKAN_HPP_CONSTEXPR_14 +# define VULKAN_HPP_CONST_OR_CONSTEXPR const +#endif + +#if !defined( VULKAN_HPP_CONSTEXPR_INLINE ) +# if 201606L <= __cpp_inline_variables +# define VULKAN_HPP_CONSTEXPR_INLINE VULKAN_HPP_CONSTEXPR inline +# else +# define VULKAN_HPP_CONSTEXPR_INLINE VULKAN_HPP_CONSTEXPR +# endif +#endif + +#if !defined( VULKAN_HPP_NOEXCEPT ) +# if defined( _MSC_VER ) && ( _MSC_VER <= 1800 ) +# define VULKAN_HPP_NOEXCEPT +# else +# define VULKAN_HPP_NOEXCEPT noexcept +# define VULKAN_HPP_HAS_NOEXCEPT 1 +# if defined( VULKAN_HPP_NO_EXCEPTIONS ) +# define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS noexcept +# else +# define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS +# endif +# endif +#endif + +#if 14 <= VULKAN_HPP_CPP_VERSION +# define VULKAN_HPP_DEPRECATED( msg ) [[deprecated( msg )]] +#else +# define VULKAN_HPP_DEPRECATED( msg ) +#endif + +#if 17 <= VULKAN_HPP_CPP_VERSION +# define VULKAN_HPP_DEPRECATED_17( msg ) [[deprecated( msg )]] +#else +# define VULKAN_HPP_DEPRECATED_17( msg ) +#endif + +#if ( 17 <= VULKAN_HPP_CPP_VERSION ) && !defined( VULKAN_HPP_NO_NODISCARD_WARNINGS ) +# define VULKAN_HPP_NODISCARD [[nodiscard]] +# if defined( VULKAN_HPP_NO_EXCEPTIONS ) +# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS [[nodiscard]] 
+# else
+# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+# endif
+#else
+# define VULKAN_HPP_NODISCARD
+# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+#endif
+
+#if !defined( VULKAN_HPP_NAMESPACE )
+# define VULKAN_HPP_NAMESPACE vk
+#endif
+
+#define VULKAN_HPP_STRINGIFY2( text ) #text
+#define VULKAN_HPP_STRINGIFY( text ) VULKAN_HPP_STRINGIFY2( text )
+#define VULKAN_HPP_NAMESPACE_STRING VULKAN_HPP_STRINGIFY( VULKAN_HPP_NAMESPACE )
+
+#if !defined( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC )
+# if defined( VK_NO_PROTOTYPES )
+# define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1
+# else
+# define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 0
+# endif
+#endif
+
+#if !defined( VULKAN_HPP_STORAGE_API )
+# if defined( VULKAN_HPP_STORAGE_SHARED )
+# if defined( _MSC_VER )
+# if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT )
+# define VULKAN_HPP_STORAGE_API __declspec( dllexport )
+# else
+# define VULKAN_HPP_STORAGE_API __declspec( dllimport )
+# endif
+# elif defined( __clang__ ) || defined( __GNUC__ )
+# if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT )
+# define VULKAN_HPP_STORAGE_API __attribute__( ( visibility( "default" ) ) )
+# else
+# define VULKAN_HPP_STORAGE_API
+# endif
+# else
+# define VULKAN_HPP_STORAGE_API
+# pragma warning Unknown import / export semantics
+# endif
+# else
+# define VULKAN_HPP_STORAGE_API
+# endif
+#endif
+
+namespace VULKAN_HPP_NAMESPACE
+{
+ namespace detail
+ {
+ class DispatchLoaderDynamic;
+
+#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER )
+# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1
+ extern VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic;
+# endif
+#endif
+ } // namespace detail
+} // namespace VULKAN_HPP_NAMESPACE
+
+#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER )
+# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1
+# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::detail::defaultDispatchLoaderDynamic
+# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE \
+ namespace VULKAN_HPP_NAMESPACE \
+ { \
+ namespace detail \
+ { \
+ VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic; \
+ } \
+ }
+# else
+# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::detail::getDispatchLoaderStatic()
+# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE
+# endif
+#endif
+
+#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER_TYPE )
+# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1
+# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::detail::DispatchLoaderDynamic
+# else
+# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::detail::DispatchLoaderStatic
+# endif
+#endif
+
+#if defined( VULKAN_HPP_NO_DEFAULT_DISPATCHER )
+# define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT
+# define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT
+# define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT
+#else
+# define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT = {}
+# define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT = nullptr
+# define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT = VULKAN_HPP_DEFAULT_DISPATCHER
+#endif
+
+#if !defined( VULKAN_HPP_EXPECTED ) && ( 23 <= VULKAN_HPP_CPP_VERSION ) && defined( __cpp_lib_expected )
+# if !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) )
+# include <expected>
+# endif
+# define VULKAN_HPP_EXPECTED std::expected
+# define VULKAN_HPP_UNEXPECTED std::unexpected
+#endif
+
+#if !defined( VULKAN_HPP_RAII_NAMESPACE )
+# define VULKAN_HPP_RAII_NAMESPACE raii
+#endif
+
+#if defined( VULKAN_HPP_NO_EXCEPTIONS ) && defined( VULKAN_HPP_EXPECTED )
+# define VULKAN_HPP_RAII_NO_EXCEPTIONS
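The dispatcher macros above are the part of vulkan_hpp_macros.hpp that applications interact with directly: when VULKAN_HPP_DISPATCH_LOADER_DYNAMIC is 1, exactly one translation unit must expand VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE and initialize the dispatcher before calling into Vulkan. A minimal sketch of that pattern (illustrative, not part of the patch):

#define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1
#include <vulkan/vulkan.hpp>

// One (and only one) .cpp in the program expands to the definition of
// vk::detail::defaultDispatchLoaderDynamic declared by the macros above.
VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE

int main()
{
  VULKAN_HPP_DEFAULT_DISPATCHER.init();  // load global entry points via the dynamic loader
  vk::Instance instance = vk::createInstance( vk::InstanceCreateInfo{} );
  VULKAN_HPP_DEFAULT_DISPATCHER.init( instance );  // load instance-level entry points
  return 0;
}
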
+# define VULKAN_HPP_RAII_CREATE_NOEXCEPT noexcept
+#else
+# define VULKAN_HPP_RAII_CREATE_NOEXCEPT
+#endif
+
+#endif
\ No newline at end of file
diff --git a/src/libraries/vulkanheaders/vulkan_ios.h b/src/libraries/vulkanheaders/vulkan_ios.h
index 579220543..a705dc600 100644
--- a/src/libraries/vulkanheaders/vulkan_ios.h
+++ b/src/libraries/vulkanheaders/vulkan_ios.h
@@ -2,7 +2,7 @@
#define VULKAN_IOS_H_ 1
/*
-** Copyright 2015-2022 The Khronos Group Inc.
+** Copyright 2015-2025 The Khronos Group Inc.
**
** SPDX-License-Identifier: Apache-2.0
*/
@@ -19,6 +19,7 @@ extern "C" {
+// VK_MVK_ios_surface is a preprocessor guard. Do not pass it to API calls.
#define VK_MVK_ios_surface 1
#define VK_MVK_IOS_SURFACE_SPEC_VERSION 3
#define VK_MVK_IOS_SURFACE_EXTENSION_NAME "VK_MVK_ios_surface"
diff --git a/src/libraries/vulkanheaders/vulkan_macos.h b/src/libraries/vulkanheaders/vulkan_macos.h
index 8e197c7cf..8d5dd05a7 100644
--- a/src/libraries/vulkanheaders/vulkan_macos.h
+++ b/src/libraries/vulkanheaders/vulkan_macos.h
@@ -2,7 +2,7 @@
#define VULKAN_MACOS_H_ 1
/*
-** Copyright 2015-2022 The Khronos Group Inc.
+** Copyright 2015-2025 The Khronos Group Inc.
**
** SPDX-License-Identifier: Apache-2.0
*/
@@ -19,6 +19,7 @@ extern "C" {
+// VK_MVK_macos_surface is a preprocessor guard. Do not pass it to API calls.
#define VK_MVK_macos_surface 1
#define VK_MVK_MACOS_SURFACE_SPEC_VERSION 3
#define VK_MVK_MACOS_SURFACE_EXTENSION_NAME "VK_MVK_macos_surface"
diff --git a/src/libraries/vulkanheaders/vulkan_metal.h b/src/libraries/vulkanheaders/vulkan_metal.h
index 11b964091..7e44f54cb 100644
--- a/src/libraries/vulkanheaders/vulkan_metal.h
+++ b/src/libraries/vulkanheaders/vulkan_metal.h
@@ -2,7 +2,7 @@
#define VULKAN_METAL_H_ 1
/*
-** Copyright 2015-2022 The Khronos Group Inc.
+** Copyright 2015-2025 The Khronos Group Inc.
**
** SPDX-License-Identifier: Apache-2.0
*/
@@ -19,6 +19,7 @@ extern "C" {
+// VK_EXT_metal_surface is a preprocessor guard. Do not pass it to API calls.
#define VK_EXT_metal_surface 1
#ifdef __OBJC__
@class CAMetalLayer;
@@ -47,31 +48,32 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateMetalSurfaceEXT(
#endif
+// VK_EXT_metal_objects is a preprocessor guard. Do not pass it to API calls.
#define VK_EXT_metal_objects 1
#ifdef __OBJC__
@protocol MTLDevice;
-typedef id<MTLDevice> MTLDevice_id;
+typedef __unsafe_unretained id<MTLDevice> MTLDevice_id;
#else
typedef void* MTLDevice_id;
#endif
#ifdef __OBJC__
@protocol MTLCommandQueue;
-typedef id<MTLCommandQueue> MTLCommandQueue_id;
+typedef __unsafe_unretained id<MTLCommandQueue> MTLCommandQueue_id;
#else
typedef void* MTLCommandQueue_id;
#endif
#ifdef __OBJC__
@protocol MTLBuffer;
-typedef id<MTLBuffer> MTLBuffer_id;
+typedef __unsafe_unretained id<MTLBuffer> MTLBuffer_id;
#else
typedef void* MTLBuffer_id;
#endif
#ifdef __OBJC__
@protocol MTLTexture;
-typedef id<MTLTexture> MTLTexture_id;
+typedef __unsafe_unretained id<MTLTexture> MTLTexture_id;
#else
typedef void* MTLTexture_id;
#endif
@@ -79,12 +81,12 @@ typedef void* MTLTexture_id;
typedef struct __IOSurface* IOSurfaceRef;
#ifdef __OBJC__
@protocol MTLSharedEvent;
-typedef id<MTLSharedEvent> MTLSharedEvent_id;
+typedef __unsafe_unretained id<MTLSharedEvent> MTLSharedEvent_id;
#else
typedef void* MTLSharedEvent_id;
#endif
-#define VK_EXT_METAL_OBJECTS_SPEC_VERSION 1
+#define VK_EXT_METAL_OBJECTS_SPEC_VERSION 2
#define VK_EXT_METAL_OBJECTS_EXTENSION_NAME "VK_EXT_metal_objects"
typedef enum VkExportMetalObjectTypeFlagBitsEXT {
@@ -186,6 +188,47 @@ VKAPI_ATTR void VKAPI_CALL vkExportMetalObjectsEXT(
VkExportMetalObjectsInfoEXT* pMetalObjectsInfo);
#endif
+
+// VK_EXT_external_memory_metal is a preprocessor guard.
Do not pass it to API calls. +#define VK_EXT_external_memory_metal 1 +#define VK_EXT_EXTERNAL_MEMORY_METAL_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_METAL_EXTENSION_NAME "VK_EXT_external_memory_metal" +typedef struct VkImportMemoryMetalHandleInfoEXT { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + void* handle; +} VkImportMemoryMetalHandleInfoEXT; + +typedef struct VkMemoryMetalHandlePropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryMetalHandlePropertiesEXT; + +typedef struct VkMemoryGetMetalHandleInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetMetalHandleInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryMetalHandleEXT)(VkDevice device, const VkMemoryGetMetalHandleInfoEXT* pGetMetalHandleInfo, void** pHandle); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryMetalHandlePropertiesEXT)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHandle, VkMemoryMetalHandlePropertiesEXT* pMemoryMetalHandleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryMetalHandleEXT( + VkDevice device, + const VkMemoryGetMetalHandleInfoEXT* pGetMetalHandleInfo, + void** pHandle); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryMetalHandlePropertiesEXT( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + const void* pHandle, + VkMemoryMetalHandlePropertiesEXT* pMemoryMetalHandleProperties); +#endif + #ifdef __cplusplus } #endif diff --git a/src/libraries/vulkanheaders/vulkan_raii.hpp b/src/libraries/vulkanheaders/vulkan_raii.hpp index e39a0438e..aa531490d 100644 --- a/src/libraries/vulkanheaders/vulkan_raii.hpp +++ b/src/libraries/vulkanheaders/vulkan_raii.hpp @@ -1,4 +1,4 @@ -// Copyright 2015-2022 The Khronos Group Inc. +// Copyright 2015-2025 The Khronos Group Inc. 
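// ---------------------------------------------------------------------------------
// Editor's illustrative sketch (not part of the header): exporting the Metal object
// that backs a VkDeviceMemory through the VK_EXT_external_memory_metal entry points
// declared in vulkan_metal.h above. VK_STRUCTURE_TYPE_MEMORY_GET_METAL_HANDLE_INFO_EXT
// is assumed to be the sType token this extension registers in vulkan_core.h; confirm
// it (and the handle-type bits you pass in) against the updated core header.

#define VK_USE_PLATFORM_METAL_EXT
#include <vulkan/vulkan.h>

static void * exportMetalHandle( VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagBits handleType )
{
  // Device-level extension: resolve the entry point through vkGetDeviceProcAddr.
  auto pfnGetMemoryMetalHandleEXT =
    reinterpret_cast<PFN_vkGetMemoryMetalHandleEXT>( vkGetDeviceProcAddr( device, "vkGetMemoryMetalHandleEXT" ) );
  if ( !pfnGetMemoryMetalHandleEXT )
  {
    return nullptr;  // VK_EXT_external_memory_metal was not enabled on this device
  }

  VkMemoryGetMetalHandleInfoEXT getInfo{};
  getInfo.sType      = VK_STRUCTURE_TYPE_MEMORY_GET_METAL_HANDLE_INFO_EXT;  // assumed token, see note above
  getInfo.pNext      = nullptr;
  getInfo.memory     = memory;
  getInfo.handleType = handleType;  // one of the Metal handle-type bits defined by this extension

  void * metalHandle = nullptr;
  if ( pfnGetMemoryMetalHandleEXT( device, &getInfo, &metalHandle ) != VK_SUCCESS )
  {
    return nullptr;
  }
  return metalHandle;  // opaque Metal object reference; lifetime rules are defined by the extension
}
// ---------------------------------------------------------------------------------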
// // SPDX-License-Identifier: Apache-2.0 OR MIT // @@ -8,2209 +8,2791 @@ #ifndef VULKAN_RAII_HPP #define VULKAN_RAII_HPP -#include -#include // std::exchange, std::forward #include - -#if !defined( VULKAN_HPP_RAII_NAMESPACE ) -# define VULKAN_HPP_RAII_NAMESPACE raii +#if !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) ) +# include // std::unique_ptr +# include // std::forward #endif +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) namespace VULKAN_HPP_NAMESPACE { namespace VULKAN_HPP_RAII_NAMESPACE { -#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) && !defined( VULKAN_HPP_NO_EXCEPTIONS ) - - template - VULKAN_HPP_CONSTEXPR_14 VULKAN_HPP_INLINE T exchange( T & obj, U && newValue ) + namespace detail { -# if ( 14 <= VULKAN_HPP_CPP_VERSION ) - return std::exchange( obj, std::forward( newValue ) ); + template + class CreateReturnType + { + public: +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + using Type = VULKAN_HPP_EXPECTED; # else - T oldValue = std::move( obj ); - obj = std::forward( newValue ); - return oldValue; + using Type = T; # endif - } + }; + + using PFN_dummy = void ( * )(); + + class ContextDispatcher : public ::VULKAN_HPP_NAMESPACE::detail::DispatchLoaderBase + { + public: + ContextDispatcher( PFN_vkGetInstanceProcAddr getProcAddr ) + : vkGetInstanceProcAddr( getProcAddr ) + //=== VK_VERSION_1_0 === + , vkCreateInstance( PFN_vkCreateInstance( getProcAddr( NULL, "vkCreateInstance" ) ) ) + , vkEnumerateInstanceExtensionProperties( + PFN_vkEnumerateInstanceExtensionProperties( getProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) ) ) + , vkEnumerateInstanceLayerProperties( PFN_vkEnumerateInstanceLayerProperties( getProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) ) ) + //=== VK_VERSION_1_1 === + , vkEnumerateInstanceVersion( PFN_vkEnumerateInstanceVersion( getProcAddr( NULL, "vkEnumerateInstanceVersion" ) ) ) + { + } + + public: + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; - class ContextDispatcher : public DispatchLoaderBase - { - public: - ContextDispatcher( PFN_vkGetInstanceProcAddr getProcAddr ) - : vkGetInstanceProcAddr( getProcAddr ) //=== VK_VERSION_1_0 === - , vkCreateInstance( PFN_vkCreateInstance( getProcAddr( NULL, "vkCreateInstance" ) ) ) - , vkEnumerateInstanceExtensionProperties( PFN_vkEnumerateInstanceExtensionProperties( getProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) ) ) - , vkEnumerateInstanceLayerProperties( PFN_vkEnumerateInstanceLayerProperties( getProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) ) ) + PFN_vkCreateInstance vkCreateInstance = 0; + PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0; + PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0; + //=== VK_VERSION_1_1 === - , vkEnumerateInstanceVersion( PFN_vkEnumerateInstanceVersion( getProcAddr( NULL, "vkEnumerateInstanceVersion" ) ) ) + PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; + }; + + class InstanceDispatcher : public ::VULKAN_HPP_NAMESPACE::detail::DispatchLoaderBase { - } + public: + InstanceDispatcher( PFN_vkGetInstanceProcAddr getProcAddr, VkInstance instance ) : vkGetInstanceProcAddr( getProcAddr ) + { + //=== VK_VERSION_1_0 === + vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) ); + vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) ); + vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( 
vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) ); + vkGetPhysicalDeviceFormatProperties = + PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) ); + vkGetPhysicalDeviceImageFormatProperties = + PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) ); + vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) ); + vkGetPhysicalDeviceQueueFamilyProperties = + PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) ); + vkGetPhysicalDeviceMemoryProperties = + PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) ); + vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) ); + vkEnumerateDeviceExtensionProperties = + PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) ); + vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); + + //=== VK_VERSION_1_1 === + vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); + vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); + vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); + vkGetPhysicalDeviceFormatProperties2 = + PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); + vkGetPhysicalDeviceImageFormatProperties2 = + PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); + vkGetPhysicalDeviceQueueFamilyProperties2 = + PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); + vkGetPhysicalDeviceMemoryProperties2 = + PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties2 = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); + vkGetPhysicalDeviceExternalBufferProperties = + PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); + vkGetPhysicalDeviceExternalFenceProperties = + PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); + vkGetPhysicalDeviceExternalSemaphoreProperties = + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); + + //=== VK_VERSION_1_3 === + vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) ); + + //=== VK_KHR_surface 
=== + vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) ); + vkGetPhysicalDeviceSurfaceSupportKHR = + PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); + vkGetPhysicalDeviceSurfaceCapabilitiesKHR = + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) ); + vkGetPhysicalDeviceSurfaceFormatsKHR = + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) ); + vkGetPhysicalDeviceSurfacePresentModesKHR = + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) ); + + //=== VK_KHR_swapchain === + vkGetPhysicalDevicePresentRectanglesKHR = + PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); + + //=== VK_KHR_display === + vkGetPhysicalDeviceDisplayPropertiesKHR = + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); + vkGetPhysicalDeviceDisplayPlanePropertiesKHR = + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); + vkGetDisplayPlaneSupportedDisplaysKHR = + PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); + vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); + vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); + vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); + vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) ); - public: - PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); + vkGetPhysicalDeviceXlibPresentationSupportKHR = + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ - //=== VK_VERSION_1_0 === - PFN_vkCreateInstance vkCreateInstance = 0; - PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0; - PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0; +# if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) ); + vkGetPhysicalDeviceXcbPresentationSupportKHR = + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_XCB_KHR*/ - //=== VK_VERSION_1_1 === - PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; - }; +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); + 
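// ---------------------------------------------------------------------------------
// Editor's illustration of the loading pattern this constructor applies to every
// instance-level command: fetch the pointer through vkGetInstanceProcAddr, and when a
// core alias is still null, back-fill it from the equivalent KHR entry point (see the
// vkGetPhysicalDeviceFeatures2 / vkGetPhysicalDeviceFeatures2KHR handling further
// down). The MiniInstanceLoader struct below is a hypothetical, reduced example, not
// part of the generated dispatcher.

#include <vulkan/vulkan.h>

struct MiniInstanceLoader
{
  PFN_vkGetPhysicalDeviceFeatures2    vkGetPhysicalDeviceFeatures2    = nullptr;
  PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = nullptr;

  void init( PFN_vkGetInstanceProcAddr getProcAddr, VkInstance instance )
  {
    vkGetPhysicalDeviceFeatures2 =
      reinterpret_cast<PFN_vkGetPhysicalDeviceFeatures2>( getProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) );
    vkGetPhysicalDeviceFeatures2KHR =
      reinterpret_cast<PFN_vkGetPhysicalDeviceFeatures2KHR>( getProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) );
    if ( !vkGetPhysicalDeviceFeatures2 )  // core pointer absent (pre-1.1 instance): fall back to the KHR alias
    {
      vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR;
    }
  }
};
// ---------------------------------------------------------------------------------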
vkGetPhysicalDeviceWaylandPresentationSupportKHR = + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ - class InstanceDispatcher : public DispatchLoaderBase - { - public: - InstanceDispatcher( PFN_vkGetInstanceProcAddr getProcAddr, VkInstance instance ) : vkGetInstanceProcAddr( getProcAddr ) - { - //=== VK_VERSION_1_0 === - vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) ); - vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) ); - vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) ); - vkGetPhysicalDeviceFormatProperties = - PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) ); - vkGetPhysicalDeviceImageFormatProperties = - PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) ); - vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) ); - vkGetPhysicalDeviceQueueFamilyProperties = - PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) ); - vkGetPhysicalDeviceMemoryProperties = - PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) ); - vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) ); - vkEnumerateDeviceExtensionProperties = - PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) ); - vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties = - PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) ); +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - //=== VK_VERSION_1_1 === - vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); - vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); - vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); - vkGetPhysicalDeviceFormatProperties2 = - PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); - vkGetPhysicalDeviceImageFormatProperties2 = - PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); - vkGetPhysicalDeviceQueueFamilyProperties2 = - PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); - vkGetPhysicalDeviceMemoryProperties2 = - PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( 
instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties2 = - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); - vkGetPhysicalDeviceExternalBufferProperties = - PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); - vkGetPhysicalDeviceExternalFenceProperties = - PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); - vkGetPhysicalDeviceExternalSemaphoreProperties = - PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); + vkGetPhysicalDeviceWin32PresentationSupportKHR = + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_VERSION_1_3 === - vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) ); + //=== VK_EXT_debug_report === + vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) ); + vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); + vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); - //=== VK_EXT_acquire_drm_display === - vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) ); - vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) ); + //=== VK_KHR_video_queue === + vkGetPhysicalDeviceVideoCapabilitiesKHR = + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) ); + vkGetPhysicalDeviceVideoFormatPropertiesKHR = + PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) ); + +# if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + vkCreateStreamDescriptorSurfaceGGP = + PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); +# endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + vkGetPhysicalDeviceExternalImageFormatPropertiesNV = + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); + + //=== VK_KHR_get_physical_device_properties2 === + vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); + if ( !vkGetPhysicalDeviceFeatures2 ) + vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR; + vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceProperties2 ) + vkGetPhysicalDeviceProperties2 = 
vkGetPhysicalDeviceProperties2KHR; + vkGetPhysicalDeviceFormatProperties2KHR = + PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceFormatProperties2 ) + vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR; + vkGetPhysicalDeviceImageFormatProperties2KHR = + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceImageFormatProperties2 ) + vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR; + vkGetPhysicalDeviceQueueFamilyProperties2KHR = + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) + vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR; + vkGetPhysicalDeviceMemoryProperties2KHR = + PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceMemoryProperties2 ) + vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR; + vkGetPhysicalDeviceSparseImageFormatProperties2KHR = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) + vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR; + +# if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) ); +# endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_device_group_creation === + vkEnumeratePhysicalDeviceGroupsKHR = + PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); + if ( !vkEnumeratePhysicalDeviceGroups ) + vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR; + + //=== VK_KHR_external_memory_capabilities === + vkGetPhysicalDeviceExternalBufferPropertiesKHR = + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalBufferProperties ) + vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR; + + //=== VK_KHR_external_semaphore_capabilities === + vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) + vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; + + //=== VK_EXT_direct_mode_display === + vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); # if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) - //=== VK_EXT_acquire_xlib_display === - vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); - vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); + //=== VK_EXT_acquire_xlib_display === + vkAcquireXlibDisplayEXT = 
PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); + vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); # endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - //=== VK_EXT_calibrated_timestamps === - vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); - - //=== VK_EXT_debug_report === - vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) ); - vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); - vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); + //=== VK_EXT_display_surface_counter === + vkGetPhysicalDeviceSurfaceCapabilities2EXT = + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); + + //=== VK_KHR_external_fence_capabilities === + vkGetPhysicalDeviceExternalFencePropertiesKHR = + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalFenceProperties ) + vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR; + + //=== VK_KHR_performance_query === + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); + vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); + + //=== VK_KHR_get_surface_capabilities2 === + vkGetPhysicalDeviceSurfaceCapabilities2KHR = + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); + vkGetPhysicalDeviceSurfaceFormats2KHR = + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); + + //=== VK_KHR_get_display_properties2 === + vkGetPhysicalDeviceDisplayProperties2KHR = + PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); + vkGetPhysicalDeviceDisplayPlaneProperties2KHR = + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); + vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); + vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); - //=== VK_EXT_debug_utils === - vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) ); - vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); - vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( 
instance, "vkSubmitDebugUtilsMessageEXT" ) ); +# if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) ); +# endif /*VK_USE_PLATFORM_IOS_MVK*/ - //=== VK_EXT_direct_mode_display === - vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ -# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) - //=== VK_EXT_directfb_surface === - vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) ); - vkGetPhysicalDeviceDirectFBPresentationSupportEXT = - PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) ); -# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + //=== VK_EXT_debug_utils === + vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) ); + vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); + vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); - //=== VK_EXT_display_surface_counter === - vkGetPhysicalDeviceSurfaceCapabilities2EXT = - PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); + //=== VK_EXT_sample_locations === + vkGetPhysicalDeviceMultisamplePropertiesEXT = + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_EXT_full_screen_exclusive === - vkGetPhysicalDeviceSurfacePresentModes2EXT = - PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + //=== VK_EXT_calibrated_timestamps === + vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); + if ( !vkGetPhysicalDeviceCalibrateableTimeDomainsKHR ) + vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = vkGetPhysicalDeviceCalibrateableTimeDomainsEXT; - //=== VK_EXT_headless_surface === - vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) ); +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ # if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_surface === - vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) ); + //=== VK_EXT_metal_surface === + vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) ); # endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_EXT_sample_locations === - 
vkGetPhysicalDeviceMultisamplePropertiesEXT = - PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); + //=== VK_KHR_fragment_shading_rate === + vkGetPhysicalDeviceFragmentShadingRatesKHR = + PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) ); - //=== VK_EXT_tooling_info === - vkGetPhysicalDeviceToolPropertiesEXT = - PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) ); - if ( !vkGetPhysicalDeviceToolProperties ) - vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT; + //=== VK_EXT_tooling_info === + vkGetPhysicalDeviceToolPropertiesEXT = + PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) ); + if ( !vkGetPhysicalDeviceToolProperties ) + vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT; -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_imagepipe_surface === - vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) ); -# endif /*VK_USE_PLATFORM_FUCHSIA*/ + //=== VK_NV_cooperative_matrix === + vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); -# if defined( VK_USE_PLATFORM_GGP ) - //=== VK_GGP_stream_descriptor_surface === - vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); -# endif /*VK_USE_PLATFORM_GGP*/ + //=== VK_NV_coverage_reduction_mode === + vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); -# if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_KHR_android_surface === - vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) ); -# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + vkGetPhysicalDeviceSurfacePresentModes2EXT = + PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_device_group === - vkGetPhysicalDevicePresentRectanglesKHR = - PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); + //=== VK_EXT_headless_surface === + vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) ); - //=== VK_KHR_device_group_creation === - vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); - if ( !vkEnumeratePhysicalDeviceGroups ) - vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR; + //=== VK_EXT_acquire_drm_display === + vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) ); + vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, 
"vkGetDrmDisplayEXT" ) ); - //=== VK_KHR_display === - vkGetPhysicalDeviceDisplayPropertiesKHR = - PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); - vkGetPhysicalDeviceDisplayPlanePropertiesKHR = - PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); - vkGetDisplayPlaneSupportedDisplaysKHR = - PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); - vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); - vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); - vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); - vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) ); + //=== VK_KHR_video_encode_queue === + vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR" ) ); - //=== VK_KHR_external_fence_capabilities === - vkGetPhysicalDeviceExternalFencePropertiesKHR = - PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); - if ( !vkGetPhysicalDeviceExternalFenceProperties ) - vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR; +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) ); + vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_external_memory_capabilities === - vkGetPhysicalDeviceExternalBufferPropertiesKHR = - PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); - if ( !vkGetPhysicalDeviceExternalBufferProperties ) - vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR; +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) ); + vkGetPhysicalDeviceDirectFBPresentationSupportEXT = + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) ); +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ - //=== VK_KHR_external_semaphore_capabilities === - vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = - PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); - if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) - vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, 
"vkCreateScreenSurfaceQNX" ) ); + vkGetPhysicalDeviceScreenPresentationSupportQNX = + PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) ); +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - //=== VK_KHR_fragment_shading_rate === - vkGetPhysicalDeviceFragmentShadingRatesKHR = - PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) ); + //=== VK_NV_optical_flow === + vkGetPhysicalDeviceOpticalFlowImageFormatsNV = + PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV" ) ); - //=== VK_KHR_get_display_properties2 === - vkGetPhysicalDeviceDisplayProperties2KHR = - PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); - vkGetPhysicalDeviceDisplayPlaneProperties2KHR = - PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); - vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); - vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); + //=== VK_NV_cooperative_vector === + vkGetPhysicalDeviceCooperativeVectorPropertiesNV = + PFN_vkGetPhysicalDeviceCooperativeVectorPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeVectorPropertiesNV" ) ); - //=== VK_KHR_get_physical_device_properties2 === - vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); - if ( !vkGetPhysicalDeviceFeatures2 ) - vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR; - vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceProperties2 ) - vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR; - vkGetPhysicalDeviceFormatProperties2KHR = - PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceFormatProperties2 ) - vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR; - vkGetPhysicalDeviceImageFormatProperties2KHR = - PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceImageFormatProperties2 ) - vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR; - vkGetPhysicalDeviceQueueFamilyProperties2KHR = - PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) - vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR; - vkGetPhysicalDeviceMemoryProperties2KHR = - PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceMemoryProperties2 ) - vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR; - vkGetPhysicalDeviceSparseImageFormatProperties2KHR = - 
PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) - vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR; + //=== VK_KHR_cooperative_matrix === + vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR" ) ); - //=== VK_KHR_get_surface_capabilities2 === - vkGetPhysicalDeviceSurfaceCapabilities2KHR = - PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); - vkGetPhysicalDeviceSurfaceFormats2KHR = - PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); + //=== VK_KHR_calibrated_timestamps === + vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsKHR" ) ); - //=== VK_KHR_performance_query === - vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( - vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); - vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( - vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); + //=== VK_NV_cooperative_matrix2 === + vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV" ) ); - //=== VK_KHR_surface === - vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) ); - vkGetPhysicalDeviceSurfaceSupportKHR = - PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); - vkGetPhysicalDeviceSurfaceCapabilitiesKHR = - PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) ); - vkGetPhysicalDeviceSurfaceFormatsKHR = - PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) ); - vkGetPhysicalDeviceSurfacePresentModesKHR = - PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) ); + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) ); + } -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_queue === - vkGetPhysicalDeviceVideoCapabilitiesKHR = - PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) ); - vkGetPhysicalDeviceVideoFormatPropertiesKHR = - PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) ); -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + public: + //=== VK_VERSION_1_0 === + PFN_vkDestroyInstance vkDestroyInstance = 0; + PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; + PFN_vkGetPhysicalDeviceFeatures 
vkGetPhysicalDeviceFeatures = 0; + PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; + PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; + PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0; + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; + PFN_vkCreateDevice vkCreateDevice = 0; + PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0; + PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; -# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) - //=== VK_KHR_wayland_surface === - vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); - vkGetPhysicalDeviceWaylandPresentationSupportKHR = - PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); -# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + //=== VK_VERSION_1_1 === + PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; + PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; + PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; + PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; + PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_win32_surface === - vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); - vkGetPhysicalDeviceWin32PresentationSupportKHR = - PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + //=== VK_VERSION_1_3 === + PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties = 0; -# if defined( VK_USE_PLATFORM_XCB_KHR ) - //=== VK_KHR_xcb_surface === - vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) ); - vkGetPhysicalDeviceXcbPresentationSupportKHR = - PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); -# endif /*VK_USE_PLATFORM_XCB_KHR*/ + //=== VK_KHR_surface === + PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR 
vkGetPhysicalDeviceSurfaceFormatsKHR = 0; + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; + + //=== VK_KHR_swapchain === + PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; + + //=== VK_KHR_display === + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; + PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; + PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; + PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; + PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; + PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; # if defined( VK_USE_PLATFORM_XLIB_KHR ) //=== VK_KHR_xlib_surface === - vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); - vkGetPhysicalDeviceXlibPresentationSupportKHR = - PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); + PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; +# else + PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0; # endif /*VK_USE_PLATFORM_XLIB_KHR*/ -# if defined( VK_USE_PLATFORM_IOS_MVK ) - //=== VK_MVK_ios_surface === - vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) ); -# endif /*VK_USE_PLATFORM_IOS_MVK*/ +# if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; +# else + PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0; +# endif /*VK_USE_PLATFORM_XCB_KHR*/ -# if defined( VK_USE_PLATFORM_MACOS_MVK ) - //=== VK_MVK_macos_surface === - vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); -# endif /*VK_USE_PLATFORM_MACOS_MVK*/ +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; +# else + PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0; +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ -# if defined( VK_USE_PLATFORM_VI_NN ) - //=== VK_NN_vi_surface === - vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) ); -# endif /*VK_USE_PLATFORM_VI_NN*/ +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; +# else + PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ # if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_acquire_winrt_display === - vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) ); - vkGetWinrtDisplayNV = 
PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) ); + //=== VK_KHR_win32_surface === + PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; +# else + PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_NV_cooperative_matrix === - vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); + //=== VK_EXT_debug_report === + PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; + PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; + PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; - //=== VK_NV_coverage_reduction_mode === - vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( - vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); + //=== VK_KHR_video_queue === + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0; + +# if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; +# else + PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0; +# endif /*VK_USE_PLATFORM_GGP*/ //=== VK_NV_external_memory_capabilities === - vkGetPhysicalDeviceExternalImageFormatPropertiesNV = - PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; -# if defined( VK_USE_PLATFORM_SCREEN_QNX ) - //=== VK_QNX_screen_surface === - vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) ); - vkGetPhysicalDeviceScreenPresentationSupportQNX = - PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) ); -# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + //=== VK_KHR_get_physical_device_properties2 === + PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; + PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; + PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; - vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) ); - } +# if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; +# else + PFN_dummy vkCreateViSurfaceNN_placeholder = 0; +# endif /*VK_USE_PLATFORM_VI_NN*/ - public: - 
//=== VK_VERSION_1_0 === - PFN_vkDestroyInstance vkDestroyInstance = 0; - PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; - PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0; - PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; - PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; - PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0; - PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; - PFN_vkCreateDevice vkCreateDevice = 0; - PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0; - PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; + //=== VK_KHR_device_group_creation === + PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; - //=== VK_VERSION_1_1 === - PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; - PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; - PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; - PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; - PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; - PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; - PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; + //=== VK_KHR_external_memory_capabilities === + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; - //=== VK_VERSION_1_3 === - PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties = 0; + //=== VK_KHR_external_semaphore_capabilities === + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; - //=== VK_EXT_acquire_drm_display === - PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0; - PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0; + //=== VK_EXT_direct_mode_display === + PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; # if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) - //=== VK_EXT_acquire_xlib_display === - PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; - PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; + //=== VK_EXT_acquire_xlib_display === + PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; + PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; # else - PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0; - PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0; + PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0; + PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0; # endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - //=== VK_EXT_calibrated_timestamps === - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT 
vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; - - //=== VK_EXT_debug_report === - PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; - PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; - PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; + //=== VK_EXT_display_surface_counter === + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; - //=== VK_EXT_debug_utils === - PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; - PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; - PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; + //=== VK_KHR_external_fence_capabilities === + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; - //=== VK_EXT_direct_mode_display === - PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; + //=== VK_KHR_performance_query === + PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; -# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) - //=== VK_EXT_directfb_surface === - PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0; - PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0; -# else - PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0; -# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + //=== VK_KHR_get_surface_capabilities2 === + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; - //=== VK_EXT_display_surface_counter === - PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; + //=== VK_KHR_get_display_properties2 === + PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; + PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; + PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_EXT_full_screen_exclusive === - PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0; +# if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; # else - PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_EXT_headless_surface === - PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; + PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0; +# endif /*VK_USE_PLATFORM_IOS_MVK*/ -# if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_surface === - PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; # else - PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0; -# endif /*VK_USE_PLATFORM_METAL_EXT*/ + PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0; +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ - //=== VK_EXT_sample_locations === - 
PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; + //=== VK_EXT_debug_utils === + PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; + PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; + PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; - //=== VK_EXT_tooling_info === - PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0; + //=== VK_EXT_sample_locations === + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; + + //=== VK_EXT_calibrated_timestamps === + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; # if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_imagepipe_surface === - PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; + //=== VK_FUCHSIA_imagepipe_surface === + PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; # else - PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0; + PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0; # endif /*VK_USE_PLATFORM_FUCHSIA*/ -# if defined( VK_USE_PLATFORM_GGP ) - //=== VK_GGP_stream_descriptor_surface === - PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; # else - PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0; -# endif /*VK_USE_PLATFORM_GGP*/ + PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ -# if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_KHR_android_surface === - PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; + //=== VK_KHR_fragment_shading_rate === + PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0; + + //=== VK_EXT_tooling_info === + PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0; + + //=== VK_NV_cooperative_matrix === + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0; + + //=== VK_NV_coverage_reduction_mode === + PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0; # else - PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0; -# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_device_group === - PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; + //=== VK_EXT_headless_surface === + PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; - //=== VK_KHR_device_group_creation === - PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; + //=== VK_EXT_acquire_drm_display === + PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0; + PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0; - //=== VK_KHR_display === - PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; - PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; - 
PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; - PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; - PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; - PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; - PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; + //=== VK_KHR_video_encode_queue === + PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = 0; - //=== VK_KHR_external_fence_capabilities === - PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0; + PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0; +# else + PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0; + PFN_dummy vkGetWinrtDisplayNV_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_external_memory_capabilities === - PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0; + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0; +# else + PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0; +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ - //=== VK_KHR_external_semaphore_capabilities === - PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0; + PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0; +# else + PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - //=== VK_KHR_fragment_shading_rate === - PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0; + //=== VK_NV_optical_flow === + PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0; - //=== VK_KHR_get_display_properties2 === - PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; - PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; - PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; - PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; + //=== VK_NV_cooperative_vector === + PFN_vkGetPhysicalDeviceCooperativeVectorPropertiesNV vkGetPhysicalDeviceCooperativeVectorPropertiesNV = 0; - //=== VK_KHR_get_physical_device_properties2 === - PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; - PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; - PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; - PFN_vkGetPhysicalDeviceMemoryProperties2KHR 
vkGetPhysicalDeviceMemoryProperties2KHR = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; + //=== VK_KHR_cooperative_matrix === + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = 0; - //=== VK_KHR_get_surface_capabilities2 === - PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; - PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; + //=== VK_KHR_calibrated_timestamps === + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = 0; - //=== VK_KHR_performance_query === - PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; - PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; + //=== VK_NV_cooperative_matrix2 === + PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = 0; - //=== VK_KHR_surface === - PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; - PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; + PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; + }; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_queue === - PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0; - PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0; -# else - PFN_dummy vkGetPhysicalDeviceVideoCapabilitiesKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceVideoFormatPropertiesKHR_placeholder = 0; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + class DeviceDispatcher : public ::VULKAN_HPP_NAMESPACE::detail::DispatchLoaderBase + { + public: + DeviceDispatcher( PFN_vkGetDeviceProcAddr getProcAddr, VkDevice device ) : vkGetDeviceProcAddr( getProcAddr ) + { + //=== VK_VERSION_1_0 === + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) ); + vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) ); + vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) ); + vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) ); + vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) ); + vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) ); + vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); + vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) ); + vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) ); + vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); + vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) ); + vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) ); + 
vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) );
+ vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) );
+ vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) );
+ vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) );
+ vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) );
+ vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) );
+ vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) );
+ vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) );
+ vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) );
+ vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) );
+ vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) );
+ vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) );
+ vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) );
+ vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) );
+ vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) );
+ vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) );
+ vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) );
+ vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) );
+ vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) );
+ vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) );
+ vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) );
+ vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) );
+ vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) );
+ vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) );
+ vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) );
+ vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) );
+ vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) );
+ vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) );
+ vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) );
+ vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) );
+ vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) );
+ vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) );
+ vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) );
+ vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) );
+ vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr(
device, "vkDestroyPipelineCache" ) ); + vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) ); + vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) ); + vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); + vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); + vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); + vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) ); + vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) ); + vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) ); + vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) ); + vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); + vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); + vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) ); + vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) ); + vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); + vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) ); + vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); + vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) ); + vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) ); + vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); + vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) ); + vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) ); + vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) ); + vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) ); + vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) ); + vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) ); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); + vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); + vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, 
"vkCmdSetViewport" ) ); + vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); + vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); + vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); + vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); + vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); + vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); + vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); + vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); + vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); + vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); + vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); + vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); + vkCmdResetQueryPool = 
PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); + vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); + vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); + + //=== VK_VERSION_1_1 === + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); + vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); + vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); + vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) ); + vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) ); + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); + vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) ); + vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) ); + vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) ); + vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); + vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); + vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); + vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); + + //=== VK_VERSION_1_2 === + vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); + vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); + vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); + vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( 
vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) ); + vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) ); + vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); + vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) ); + vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = + PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + + //=== VK_VERSION_1_3 === + vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) ); + vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) ); + vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) ); + vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) ); + vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) ); + vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) ); + vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) ); + vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) ); + vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) ); + vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) ); + vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) ); + vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) ); + vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) ); + vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) ); + vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) ); + vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) ); + vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) ); + vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) ); + vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) ); + vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) ); + vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) ); + vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) ); + vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) ); + vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) ); + vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, 
"vkCmdSetDepthTestEnable" ) ); + vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) ); + vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) ); + vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) ); + vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) ); + vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) ); + vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) ); + vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) ); + vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) ); + vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) ); + vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) ); + vkGetDeviceImageSparseMemoryRequirements = + PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) ); + + //=== VK_VERSION_1_4 === + vkCmdSetLineStipple = PFN_vkCmdSetLineStipple( vkGetDeviceProcAddr( device, "vkCmdSetLineStipple" ) ); + vkMapMemory2 = PFN_vkMapMemory2( vkGetDeviceProcAddr( device, "vkMapMemory2" ) ); + vkUnmapMemory2 = PFN_vkUnmapMemory2( vkGetDeviceProcAddr( device, "vkUnmapMemory2" ) ); + vkCmdBindIndexBuffer2 = PFN_vkCmdBindIndexBuffer2( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2" ) ); + vkGetRenderingAreaGranularity = PFN_vkGetRenderingAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularity" ) ); + vkGetDeviceImageSubresourceLayout = PFN_vkGetDeviceImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayout" ) ); + vkGetImageSubresourceLayout2 = PFN_vkGetImageSubresourceLayout2( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2" ) ); + vkCmdPushDescriptorSet = PFN_vkCmdPushDescriptorSet( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet" ) ); + vkCmdPushDescriptorSetWithTemplate = PFN_vkCmdPushDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate" ) ); + vkCmdSetRenderingAttachmentLocations = + PFN_vkCmdSetRenderingAttachmentLocations( vkGetDeviceProcAddr( device, "vkCmdSetRenderingAttachmentLocations" ) ); + vkCmdSetRenderingInputAttachmentIndices = + PFN_vkCmdSetRenderingInputAttachmentIndices( vkGetDeviceProcAddr( device, "vkCmdSetRenderingInputAttachmentIndices" ) ); + vkCmdBindDescriptorSets2 = PFN_vkCmdBindDescriptorSets2( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2" ) ); + vkCmdPushConstants2 = PFN_vkCmdPushConstants2( vkGetDeviceProcAddr( device, "vkCmdPushConstants2" ) ); + vkCmdPushDescriptorSet2 = PFN_vkCmdPushDescriptorSet2( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2" ) ); + vkCmdPushDescriptorSetWithTemplate2 = PFN_vkCmdPushDescriptorSetWithTemplate2( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2" ) ); + vkCopyMemoryToImage = PFN_vkCopyMemoryToImage( vkGetDeviceProcAddr( device, "vkCopyMemoryToImage" ) ); + vkCopyImageToMemory = 
PFN_vkCopyImageToMemory( vkGetDeviceProcAddr( device, "vkCopyImageToMemory" ) );
+ vkCopyImageToImage = PFN_vkCopyImageToImage( vkGetDeviceProcAddr( device, "vkCopyImageToImage" ) );
+ vkTransitionImageLayout = PFN_vkTransitionImageLayout( vkGetDeviceProcAddr( device, "vkTransitionImageLayout" ) );
+
+ //=== VK_KHR_swapchain ===
+ vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) );
+ vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) );
+ vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) );
+ vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) );
+ vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) );
+ vkGetDeviceGroupPresentCapabilitiesKHR =
+ PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
+ vkGetDeviceGroupSurfacePresentModesKHR =
+ PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
+ vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) );
+
+ //=== VK_KHR_display_swapchain ===
+ vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) );
+
+ //=== VK_EXT_debug_marker ===
+ vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) );
+ vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) );
+ vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) );
+ vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) );
+ vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) );
+
+ //=== VK_KHR_video_queue ===
+ vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) );
+ vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) );
+ vkGetVideoSessionMemoryRequirementsKHR =
+ PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) );
+ vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) );
+ vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) );
+ vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) );
+ vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) );
+ vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) );
+ vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) );
+ vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) );
+
+ //=== VK_KHR_video_decode_queue ===
+ vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) );
+
+ //=== VK_EXT_transform_feedback ===
+ vkCmdBindTransformFeedbackBuffersEXT =
+ PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) );
+ vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) );
+ vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) );
+ vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) );
+ vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) );
+ vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) );
+
+ //=== VK_NVX_binary_import ===
+ vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) );
+ vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) );
+ vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) );
+ vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) );
+ vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) );
+
+ //=== VK_NVX_image_view_handle ===
+ vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) );
+ vkGetImageViewHandle64NVX = PFN_vkGetImageViewHandle64NVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandle64NVX" ) );
+ vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) );
+
+ //=== VK_AMD_draw_indirect_count ===
+ vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) );
+ if ( !vkCmdDrawIndirectCount )
+ vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD;
+ vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) );
+ if ( !vkCmdDrawIndexedIndirectCount )
+ vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD;
+
+ //=== VK_AMD_shader_info ===
+ vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) );
+
+ //=== VK_KHR_dynamic_rendering ===
+ vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) );
+ if ( !vkCmdBeginRendering )
+ vkCmdBeginRendering = vkCmdBeginRenderingKHR;
+ vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) );
+ if ( !vkCmdEndRendering )
+ vkCmdEndRendering = vkCmdEndRenderingKHR;
-# if defined( VK_USE_PLATFORM_WAYLAND_KHR )
- //=== VK_KHR_wayland_surface ===
- PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0;
- PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0;
-# else
- PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0;
- PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0;
-# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+# if defined( VK_USE_PLATFORM_WIN32_KHR )
+ //=== VK_NV_external_memory_win32 ===
+ vkGetMemoryWin32HandleNV =
PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_device_group === + vkGetDeviceGroupPeerMemoryFeaturesKHR = + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + if ( !vkGetDeviceGroupPeerMemoryFeatures ) + vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); + if ( !vkCmdSetDeviceMask ) + vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); + if ( !vkCmdDispatchBase ) + vkCmdDispatchBase = vkCmdDispatchBaseKHR; + + //=== VK_KHR_maintenance1 === + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); + if ( !vkTrimCommandPool ) + vkTrimCommandPool = vkTrimCommandPoolKHR; # if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_win32_surface === - PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; - PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; -# else - PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0; + //=== VK_KHR_external_memory_win32 === + vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); + vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) ); # endif /*VK_USE_PLATFORM_WIN32_KHR*/ -# if defined( VK_USE_PLATFORM_XCB_KHR ) - //=== VK_KHR_xcb_surface === - PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; - PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; -# else - PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0; -# endif /*VK_USE_PLATFORM_XCB_KHR*/ + //=== VK_KHR_external_memory_fd === + vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) ); + vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) ); -# if defined( VK_USE_PLATFORM_XLIB_KHR ) - //=== VK_KHR_xlib_surface === - PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; - PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; -# else - PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0; -# endif /*VK_USE_PLATFORM_XLIB_KHR*/ +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); + vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ -# if defined( VK_USE_PLATFORM_IOS_MVK ) - //=== VK_MVK_ios_surface === - PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; -# else - PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0; -# endif /*VK_USE_PLATFORM_IOS_MVK*/ + //=== VK_KHR_external_semaphore_fd === + vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( 
vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) ); + vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); + + //=== VK_KHR_push_descriptor === + vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); + if ( !vkCmdPushDescriptorSet ) + vkCmdPushDescriptorSet = vkCmdPushDescriptorSetKHR; + vkCmdPushDescriptorSetWithTemplateKHR = + PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + if ( !vkCmdPushDescriptorSetWithTemplate ) + vkCmdPushDescriptorSetWithTemplate = vkCmdPushDescriptorSetWithTemplateKHR; + + //=== VK_EXT_conditional_rendering === + vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); + + //=== VK_KHR_descriptor_update_template === + vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); + if ( !vkCreateDescriptorUpdateTemplate ) + vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; + vkDestroyDescriptorUpdateTemplateKHR = + PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + if ( !vkDestroyDescriptorUpdateTemplate ) + vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; + vkUpdateDescriptorSetWithTemplateKHR = + PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + if ( !vkUpdateDescriptorSetWithTemplate ) + vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + + //=== VK_NV_clip_space_w_scaling === + vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); + + //=== VK_EXT_display_control === + vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); + vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); + vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); + + //=== VK_GOOGLE_display_timing === + vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); + vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); + + //=== VK_EXT_discard_rectangles === + vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); + vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEnableEXT" ) ); + vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleModeEXT" ) ); + + //=== VK_EXT_hdr_metadata === + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); + + //=== VK_KHR_create_renderpass2 === + 
vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); + if ( !vkCreateRenderPass2 ) + vkCreateRenderPass2 = vkCreateRenderPass2KHR; + vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); + if ( !vkCmdBeginRenderPass2 ) + vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); + if ( !vkCmdNextSubpass2 ) + vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); + if ( !vkCmdEndRenderPass2 ) + vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + + //=== VK_KHR_shared_presentable_image === + vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) ); -# if defined( VK_USE_PLATFORM_MACOS_MVK ) - //=== VK_MVK_macos_surface === - PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; -# else - PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0; -# endif /*VK_USE_PLATFORM_MACOS_MVK*/ +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); + vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ -# if defined( VK_USE_PLATFORM_VI_NN ) - //=== VK_NN_vi_surface === - PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; -# else - PFN_dummy vkCreateViSurfaceNN_placeholder = 0; -# endif /*VK_USE_PLATFORM_VI_NN*/ + //=== VK_KHR_external_fence_fd === + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) ); + vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); + + //=== VK_KHR_performance_query === + vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); + vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); + + //=== VK_EXT_debug_utils === + vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); + vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + vkGetAndroidHardwareBufferPropertiesANDROID = + 
PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); + vkGetMemoryAndroidHardwareBufferANDROID = + PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetDeviceProcAddr( device, "vkCreateExecutionGraphPipelinesAMDX" ) ); + vkGetExecutionGraphPipelineScratchSizeAMDX = + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); + vkGetExecutionGraphPipelineNodeIndexAMDX = + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); + vkCmdInitializeGraphScratchMemoryAMDX = + PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetDeviceProcAddr( device, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); + vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) ); + vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) ); + vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) ); +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); + + //=== VK_KHR_get_memory_requirements2 === + vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); + if ( !vkGetImageMemoryRequirements2 ) + vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; + vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); + if ( !vkGetBufferMemoryRequirements2 ) + vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; + vkGetImageSparseMemoryRequirements2KHR = + PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); + if ( !vkGetImageSparseMemoryRequirements2 ) + vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + + //=== VK_KHR_acceleration_structure === + vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); + vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); + vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) ); + vkCmdBuildAccelerationStructuresIndirectKHR = + PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); + vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) ); + vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); + vkCopyAccelerationStructureToMemoryKHR = + 
PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); + vkCopyMemoryToAccelerationStructureKHR = + PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); + vkWriteAccelerationStructuresPropertiesKHR = + PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) ); + vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureToMemoryKHR = + PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdCopyMemoryToAccelerationStructureKHR = + PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); + vkGetAccelerationStructureDeviceAddressKHR = + PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + vkGetDeviceAccelerationStructureCompatibilityKHR = + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); + vkGetAccelerationStructureBuildSizesKHR = + PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) ); + + //=== VK_KHR_ray_tracing_pipeline === + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) ); + vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); + vkGetRayTracingShaderGroupHandlesKHR = + PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); + vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); + vkGetRayTracingShaderGroupStackSizeKHR = + PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); + vkCmdSetRayTracingPipelineStackSizeKHR = + PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + + //=== VK_KHR_sampler_ycbcr_conversion === + vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); + if ( !vkCreateSamplerYcbcrConversion ) + vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; + vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); + if ( !vkDestroySamplerYcbcrConversion ) + vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + + //=== VK_KHR_bind_memory2 === + vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); + if ( !vkBindBufferMemory2 ) + vkBindBufferMemory2 = vkBindBufferMemory2KHR; + 
vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); + if ( !vkBindImageMemory2 ) + vkBindImageMemory2 = vkBindImageMemory2KHR; + + //=== VK_EXT_image_drm_format_modifier === + vkGetImageDrmFormatModifierPropertiesEXT = + PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + + //=== VK_EXT_validation_cache === + vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); + vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); + vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); + vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); + + //=== VK_NV_shading_rate_image === + vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); + vkCmdSetViewportShadingRatePaletteNV = + PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); + + //=== VK_NV_ray_tracing === + vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); + vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); + vkGetAccelerationStructureMemoryRequirementsNV = + PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); + vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); + vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); + vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); + vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); + if ( !vkGetRayTracingShaderGroupHandlesKHR ) + vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; + vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = + PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); + vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); + + //=== VK_KHR_maintenance3 === + vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); + if ( !vkGetDescriptorSetLayoutSupport ) + 
vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + + //=== VK_KHR_draw_indirect_count === + vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; + vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + + //=== VK_EXT_external_memory_host === + vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) ); + + //=== VK_AMD_buffer_marker === + vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); + + //=== VK_EXT_calibrated_timestamps === + vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); + if ( !vkGetCalibratedTimestampsKHR ) + vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT; + + //=== VK_NV_mesh_shader === + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); + vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + + //=== VK_NV_scissor_exclusive === + vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorEnableNV" ) ); + vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); + + //=== VK_NV_device_diagnostic_checkpoints === + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); + vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); + vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); + + //=== VK_KHR_timeline_semaphore === + vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); + if ( !vkGetSemaphoreCounterValue ) + vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; + vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) ); + if ( !vkWaitSemaphores ) + vkWaitSemaphores = vkWaitSemaphoresKHR; + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); + if ( !vkSignalSemaphore ) + vkSignalSemaphore = vkSignalSemaphoreKHR; + + //=== VK_INTEL_performance_query === + vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) ); + vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); + vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, 
"vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = + PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkAcquirePerformanceConfigurationINTEL = + PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkReleasePerformanceConfigurationINTEL = + PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); + vkQueueSetPerformanceConfigurationINTEL = + PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) ); + vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); + + //=== VK_AMD_display_native_hdr === + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); + + //=== VK_KHR_fragment_shading_rate === + vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) ); + + //=== VK_KHR_dynamic_rendering_local_read === + vkCmdSetRenderingAttachmentLocationsKHR = + PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingAttachmentLocationsKHR" ) ); + if ( !vkCmdSetRenderingAttachmentLocations ) + vkCmdSetRenderingAttachmentLocations = vkCmdSetRenderingAttachmentLocationsKHR; + vkCmdSetRenderingInputAttachmentIndicesKHR = + PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) ); + if ( !vkCmdSetRenderingInputAttachmentIndices ) + vkCmdSetRenderingInputAttachmentIndices = vkCmdSetRenderingInputAttachmentIndicesKHR; + + //=== VK_EXT_buffer_device_address === + vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + + //=== VK_KHR_present_wait === + vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) ); # if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_acquire_winrt_display === - PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0; - PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0; -# else - PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0; - PFN_dummy vkGetWinrtDisplayNV_placeholder = 0; + //=== VK_EXT_full_screen_exclusive === + vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); + vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); + vkGetDeviceGroupSurfacePresentModes2EXT = + PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); # endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_NV_cooperative_matrix === - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0; + //=== VK_KHR_buffer_device_address === + vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) ); + if ( 
!vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; + vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + if ( !vkGetBufferOpaqueCaptureAddress ) + vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; + vkGetDeviceMemoryOpaqueCaptureAddressKHR = + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) + vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + + //=== VK_EXT_line_rasterization === + vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); + if ( !vkCmdSetLineStipple ) + vkCmdSetLineStipple = vkCmdSetLineStippleEXT; + + //=== VK_EXT_host_query_reset === + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); + if ( !vkResetQueryPool ) + vkResetQueryPool = vkResetQueryPoolEXT; + + //=== VK_EXT_extended_dynamic_state === + vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) ); + if ( !vkCmdSetCullMode ) + vkCmdSetCullMode = vkCmdSetCullModeEXT; + vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) ); + if ( !vkCmdSetFrontFace ) + vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; + vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) ); + if ( !vkCmdSetPrimitiveTopology ) + vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; + vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) ); + if ( !vkCmdSetViewportWithCount ) + vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; + vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) ); + if ( !vkCmdSetScissorWithCount ) + vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; + vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) ); + if ( !vkCmdBindVertexBuffers2 ) + vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; + vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) ); + if ( !vkCmdSetDepthTestEnable ) + vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; + vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) ); + if ( !vkCmdSetDepthWriteEnable ) + vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; + vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) ); + if ( !vkCmdSetDepthCompareOp ) + vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; + vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) ); + if ( !vkCmdSetDepthBoundsTestEnable ) + vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; + vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) ); + if ( !vkCmdSetStencilTestEnable ) + vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; + vkCmdSetStencilOpEXT = 
PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) ); + if ( !vkCmdSetStencilOp ) + vkCmdSetStencilOp = vkCmdSetStencilOpEXT; + + //=== VK_KHR_deferred_host_operations === + vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) ); + vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) ); + vkGetDeferredOperationMaxConcurrencyKHR = + PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); + vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); + vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); + + //=== VK_KHR_pipeline_executable_properties === + vkGetPipelineExecutablePropertiesKHR = + PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); + vkGetPipelineExecutableStatisticsKHR = + PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); + vkGetPipelineExecutableInternalRepresentationsKHR = + PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + + //=== VK_EXT_host_image_copy === + vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) ); + if ( !vkCopyMemoryToImage ) + vkCopyMemoryToImage = vkCopyMemoryToImageEXT; + vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) ); + if ( !vkCopyImageToMemory ) + vkCopyImageToMemory = vkCopyImageToMemoryEXT; + vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) ); + if ( !vkCopyImageToImage ) + vkCopyImageToImage = vkCopyImageToImageEXT; + vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) ); + if ( !vkTransitionImageLayout ) + vkTransitionImageLayout = vkTransitionImageLayoutEXT; + vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); + if ( !vkGetImageSubresourceLayout2 ) + vkGetImageSubresourceLayout2 = vkGetImageSubresourceLayout2EXT; + + //=== VK_KHR_map_memory2 === + vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) ); + if ( !vkMapMemory2 ) + vkMapMemory2 = vkMapMemory2KHR; + vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetDeviceProcAddr( device, "vkUnmapMemory2KHR" ) ); + if ( !vkUnmapMemory2 ) + vkUnmapMemory2 = vkUnmapMemory2KHR; + + //=== VK_EXT_swapchain_maintenance1 === + vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetDeviceProcAddr( device, "vkReleaseSwapchainImagesEXT" ) ); + + //=== VK_NV_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsNV = + PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); + vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, 
"vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); + vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); + + //=== VK_EXT_depth_bias_control === + vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias2EXT" ) ); + + //=== VK_EXT_private_data === + vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) ); + if ( !vkCreatePrivateDataSlot ) + vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; + vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) ); + if ( !vkDestroyPrivateDataSlot ) + vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) ); + if ( !vkSetPrivateData ) + vkSetPrivateData = vkSetPrivateDataEXT; + vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) ); + if ( !vkGetPrivateData ) + vkGetPrivateData = vkGetPrivateDataEXT; + + //=== VK_KHR_video_encode_queue === + vkGetEncodedVideoSessionParametersKHR = + PFN_vkGetEncodedVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkGetEncodedVideoSessionParametersKHR" ) ); + vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) ); - //=== VK_NV_coverage_reduction_mode === - PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetDeviceProcAddr( device, "vkCreateCudaModuleNV" ) ); + vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetDeviceProcAddr( device, "vkGetCudaModuleCacheNV" ) ); + vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetDeviceProcAddr( device, "vkCreateCudaFunctionNV" ) ); + vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetDeviceProcAddr( device, "vkDestroyCudaModuleNV" ) ); + vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetDeviceProcAddr( device, "vkDestroyCudaFunctionNV" ) ); + vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetDeviceProcAddr( device, "vkCmdCudaLaunchKernelNV" ) ); +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - //=== VK_NV_external_memory_capabilities === - PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) ); +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) ); + if ( !vkCmdSetEvent2 ) + vkCmdSetEvent2 = vkCmdSetEvent2KHR; + vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) ); + if ( !vkCmdResetEvent2 ) + vkCmdResetEvent2 = vkCmdResetEvent2KHR; + vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( 
vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) ); + if ( !vkCmdWaitEvents2 ) + vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; + vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) ); + if ( !vkCmdPipelineBarrier2 ) + vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; + vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) ); + if ( !vkCmdWriteTimestamp2 ) + vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; + vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) ); + if ( !vkQueueSubmit2 ) + vkQueueSubmit2 = vkQueueSubmit2KHR; + + //=== VK_EXT_descriptor_buffer === + vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSizeEXT" ) ); + vkGetDescriptorSetLayoutBindingOffsetEXT = + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) ); + vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorEXT" ) ); + vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBuffersEXT" ) ); + vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplersEXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); + vkGetBufferOpaqueCaptureDescriptorDataEXT = + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageViewOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) ); + vkGetSamplerOpaqueCaptureDescriptorDataEXT = + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) ); + vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + vkGetDeviceProcAddr( device, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) ); + + //=== VK_NV_fragment_shading_rate_enums === + vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) ); + + //=== VK_EXT_mesh_shader === + vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) ); + vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) ); + vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); + + //=== VK_KHR_copy_commands2 === + vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) ); + if ( !vkCmdCopyBuffer2 ) + vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; + vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) ); + if ( !vkCmdCopyImage2 ) + vkCmdCopyImage2 = vkCmdCopyImage2KHR; + 
vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) ); + if ( !vkCmdCopyBufferToImage2 ) + vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; + vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) ); + if ( !vkCmdCopyImageToBuffer2 ) + vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; + vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) ); + if ( !vkCmdBlitImage2 ) + vkCmdBlitImage2 = vkCmdBlitImage2KHR; + vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) ); + if ( !vkCmdResolveImage2 ) + vkCmdResolveImage2 = vkCmdResolveImage2KHR; + + //=== VK_EXT_device_fault === + vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) ); + + //=== VK_EXT_vertex_input_dynamic_state === + vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) ); + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) ); + vkGetMemoryZirconHandlePropertiesFUCHSIA = + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + vkImportSemaphoreZirconHandleFUCHSIA = + PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); + vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) ); + vkSetBufferCollectionImageConstraintsFUCHSIA = + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); + vkSetBufferCollectionBufferConstraintsFUCHSIA = + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); + vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) ); + vkGetBufferCollectionPropertiesFUCHSIA = + PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); + vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) ); + + //=== VK_HUAWEI_invocation_mask === + vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) ); + + //=== VK_NV_external_memory_rdma === + vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, 
"vkGetMemoryRemoteAddressNV" ) ); + + //=== VK_EXT_pipeline_properties === + vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) ); + + //=== VK_EXT_extended_dynamic_state2 === + vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) ); + vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) ); + if ( !vkCmdSetRasterizerDiscardEnable ) + vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; + vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) ); + if ( !vkCmdSetDepthBiasEnable ) + vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; + vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) ); + vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) ); + if ( !vkCmdSetPrimitiveRestartEnable ) + vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; + + //=== VK_EXT_color_write_enable === + vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) ); + + //=== VK_KHR_ray_tracing_maintenance1 === + vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) ); + + //=== VK_EXT_multi_draw === + vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) ); + vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) ); + + //=== VK_EXT_opacity_micromap === + vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) ); + vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) ); + vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) ); + vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) ); + vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapEXT" ) ); + vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) ); + vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) ); + vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) ); + vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) ); + vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapToMemoryEXT" ) ); + vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) ); + vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) ); + vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) ); + vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) 
); + + //=== VK_HUAWEI_cluster_culling_shader === + vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterHUAWEI" ) ); + vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterIndirectHUAWEI" ) ); + + //=== VK_EXT_pageable_device_local_memory === + vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) ); + + //=== VK_KHR_maintenance4 === + vkGetDeviceBufferMemoryRequirementsKHR = + PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceBufferMemoryRequirements ) + vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR; + vkGetDeviceImageMemoryRequirementsKHR = + PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageMemoryRequirements ) + vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; + vkGetDeviceImageSparseMemoryRequirementsKHR = + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageSparseMemoryRequirements ) + vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; + + //=== VK_VALVE_descriptor_set_host_mapping === + vkGetDescriptorSetLayoutHostMappingInfoVALVE = + PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); + vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) ); + + //=== VK_NV_copy_memory_indirect === + vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryIndirectNV" ) ); + vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToImageIndirectNV" ) ); + + //=== VK_NV_memory_decompression === + vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) ); + vkCmdDecompressMemoryIndirectCountNV = + PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) ); + + //=== VK_NV_device_generated_commands_compute === + vkGetPipelineIndirectMemoryRequirementsNV = + PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); + vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBufferNV" ) ); + vkGetPipelineIndirectDeviceAddressNV = + PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) ); + + //=== VK_EXT_extended_dynamic_state3 === + vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) ); + vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) ); + vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) ); + vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleMaskEXT" ) ); + vkCmdSetAlphaToCoverageEnableEXT = 
PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) ); + vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) ); + vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) ); + vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEnableEXT" ) ); + vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) ); + vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) ); + vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) ); + vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) ); + vkCmdSetConservativeRasterizationModeEXT = + PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) ); + vkCmdSetExtraPrimitiveOverestimationSizeEXT = + PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) ); + vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) ); + vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) ); + vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) ); + vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) ); + vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) ); + vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEnableEXT" ) ); + vkCmdSetDepthClipNegativeOneToOneEXT = + PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) ); + vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) ); + vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportSwizzleNV" ) ); + vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) ); + vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) ); + vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) ); + vkCmdSetCoverageModulationTableEnableNV = + PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) ); + vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) ); + vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetShadingRateImageEnableNV" ) ); + 
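Unlike the promoted extensions earlier in this constructor, the VK_EXT_extended_dynamic_state3 entry points loaded here have no core alias to fall back on; on a device that does not expose the extension, vkGetDeviceProcAddr returns null and the corresponding member simply stays 0. A short sketch of the kind of guard an application might add before recording, assuming a valid VkDevice device and VkCommandBuffer commandBuffer (hypothetical usage, not taken from this header):

    // Only record the dynamic polygon-mode state if the EXT entry point was resolved.
    auto pfnSetPolygonMode =
        reinterpret_cast<PFN_vkCmdSetPolygonModeEXT>( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) );
    if ( pfnSetPolygonMode )
      pfnSetPolygonMode( commandBuffer, VK_POLYGON_MODE_FILL );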
vkCmdSetRepresentativeFragmentTestEnableNV = + PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) ); + vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) ); + + //=== VK_EXT_shader_module_identifier === + vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) ); + vkGetShaderModuleCreateInfoIdentifierEXT = + PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) ); + + //=== VK_NV_optical_flow === + vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) ); + vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) ); + vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) ); + vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) ); + + //=== VK_KHR_maintenance5 === + vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) ); + if ( !vkCmdBindIndexBuffer2 ) + vkCmdBindIndexBuffer2 = vkCmdBindIndexBuffer2KHR; + vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) ); + if ( !vkGetRenderingAreaGranularity ) + vkGetRenderingAreaGranularity = vkGetRenderingAreaGranularityKHR; + vkGetDeviceImageSubresourceLayoutKHR = + PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) ); + if ( !vkGetDeviceImageSubresourceLayout ) + vkGetDeviceImageSubresourceLayout = vkGetDeviceImageSubresourceLayoutKHR; + vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) ); + if ( !vkGetImageSubresourceLayout2 ) + vkGetImageSubresourceLayout2 = vkGetImageSubresourceLayout2KHR; + + //=== VK_AMD_anti_lag === + vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetDeviceProcAddr( device, "vkAntiLagUpdateAMD" ) ); + + //=== VK_EXT_shader_object === + vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) ); + vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) ); + vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) ); + vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) ); + vkCmdSetDepthClampRangeEXT = PFN_vkCmdSetDepthClampRangeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampRangeEXT" ) ); + + //=== VK_KHR_pipeline_binary === + vkCreatePipelineBinariesKHR = PFN_vkCreatePipelineBinariesKHR( vkGetDeviceProcAddr( device, "vkCreatePipelineBinariesKHR" ) ); + vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetDeviceProcAddr( device, "vkDestroyPipelineBinaryKHR" ) ); + vkGetPipelineKeyKHR = PFN_vkGetPipelineKeyKHR( vkGetDeviceProcAddr( device, "vkGetPipelineKeyKHR" ) ); + vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetDeviceProcAddr( device, "vkGetPipelineBinaryDataKHR" ) ); + vkReleaseCapturedPipelineDataKHR = 
PFN_vkReleaseCapturedPipelineDataKHR( vkGetDeviceProcAddr( device, "vkReleaseCapturedPipelineDataKHR" ) ); + + //=== VK_QCOM_tile_properties === + vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) ); + vkGetDynamicRenderingTilePropertiesQCOM = + PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetDynamicRenderingTilePropertiesQCOM" ) ); + + //=== VK_NV_cooperative_vector === + vkConvertCooperativeVectorMatrixNV = PFN_vkConvertCooperativeVectorMatrixNV( vkGetDeviceProcAddr( device, "vkConvertCooperativeVectorMatrixNV" ) ); + vkCmdConvertCooperativeVectorMatrixNV = + PFN_vkCmdConvertCooperativeVectorMatrixNV( vkGetDeviceProcAddr( device, "vkCmdConvertCooperativeVectorMatrixNV" ) ); + + //=== VK_NV_low_latency2 === + vkSetLatencySleepModeNV = PFN_vkSetLatencySleepModeNV( vkGetDeviceProcAddr( device, "vkSetLatencySleepModeNV" ) ); + vkLatencySleepNV = PFN_vkLatencySleepNV( vkGetDeviceProcAddr( device, "vkLatencySleepNV" ) ); + vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetDeviceProcAddr( device, "vkSetLatencyMarkerNV" ) ); + vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetDeviceProcAddr( device, "vkGetLatencyTimingsNV" ) ); + vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetDeviceProcAddr( device, "vkQueueNotifyOutOfBandNV" ) ); + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + vkCmdSetAttachmentFeedbackLoopEnableEXT = + PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) ); # if defined( VK_USE_PLATFORM_SCREEN_QNX ) - //=== VK_QNX_screen_surface === - PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0; - PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0; -# else - PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0; + //=== VK_QNX_external_memory_screen_buffer === + vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetDeviceProcAddr( device, "vkGetScreenBufferPropertiesQNX" ) ); # endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; - }; + //=== VK_KHR_line_rasterization === + vkCmdSetLineStippleKHR = PFN_vkCmdSetLineStippleKHR( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleKHR" ) ); + if ( !vkCmdSetLineStipple ) + vkCmdSetLineStipple = vkCmdSetLineStippleKHR; + + //=== VK_KHR_calibrated_timestamps === + vkGetCalibratedTimestampsKHR = PFN_vkGetCalibratedTimestampsKHR( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsKHR" ) ); + + //=== VK_KHR_maintenance6 === + vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2KHR" ) ); + if ( !vkCmdBindDescriptorSets2 ) + vkCmdBindDescriptorSets2 = vkCmdBindDescriptorSets2KHR; + vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetDeviceProcAddr( device, "vkCmdPushConstants2KHR" ) ); + if ( !vkCmdPushConstants2 ) + vkCmdPushConstants2 = vkCmdPushConstants2KHR; + vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2KHR" ) ); + if ( !vkCmdPushDescriptorSet2 ) + vkCmdPushDescriptorSet2 = vkCmdPushDescriptorSet2KHR; + vkCmdPushDescriptorSetWithTemplate2KHR = + PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2KHR" ) ); + if ( 
!vkCmdPushDescriptorSetWithTemplate2 ) + vkCmdPushDescriptorSetWithTemplate2 = vkCmdPushDescriptorSetWithTemplate2KHR; + vkCmdSetDescriptorBufferOffsets2EXT = PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsets2EXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) ); + + //=== VK_NV_cluster_acceleration_structure === + vkGetClusterAccelerationStructureBuildSizesNV = + PFN_vkGetClusterAccelerationStructureBuildSizesNV( vkGetDeviceProcAddr( device, "vkGetClusterAccelerationStructureBuildSizesNV" ) ); + vkCmdBuildClusterAccelerationStructureIndirectNV = + PFN_vkCmdBuildClusterAccelerationStructureIndirectNV( vkGetDeviceProcAddr( device, "vkCmdBuildClusterAccelerationStructureIndirectNV" ) ); + + //=== VK_NV_partitioned_acceleration_structure === + vkGetPartitionedAccelerationStructuresBuildSizesNV = + PFN_vkGetPartitionedAccelerationStructuresBuildSizesNV( vkGetDeviceProcAddr( device, "vkGetPartitionedAccelerationStructuresBuildSizesNV" ) ); + vkCmdBuildPartitionedAccelerationStructuresNV = + PFN_vkCmdBuildPartitionedAccelerationStructuresNV( vkGetDeviceProcAddr( device, "vkCmdBuildPartitionedAccelerationStructuresNV" ) ); + + //=== VK_EXT_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsEXT = + PFN_vkGetGeneratedCommandsMemoryRequirementsEXT( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsEXT" ) ); + vkCmdPreprocessGeneratedCommandsEXT = PFN_vkCmdPreprocessGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsEXT" ) ); + vkCmdExecuteGeneratedCommandsEXT = PFN_vkCmdExecuteGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsEXT" ) ); + vkCreateIndirectCommandsLayoutEXT = PFN_vkCreateIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutEXT" ) ); + vkDestroyIndirectCommandsLayoutEXT = PFN_vkDestroyIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutEXT" ) ); + vkCreateIndirectExecutionSetEXT = PFN_vkCreateIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectExecutionSetEXT" ) ); + vkDestroyIndirectExecutionSetEXT = PFN_vkDestroyIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectExecutionSetEXT" ) ); + vkUpdateIndirectExecutionSetPipelineEXT = + PFN_vkUpdateIndirectExecutionSetPipelineEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetPipelineEXT" ) ); + vkUpdateIndirectExecutionSetShaderEXT = + PFN_vkUpdateIndirectExecutionSetShaderEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetShaderEXT" ) ); - class DeviceDispatcher : public DispatchLoaderBase - { - public: - DeviceDispatcher( PFN_vkGetDeviceProcAddr getProcAddr, VkDevice device ) : vkGetDeviceProcAddr( getProcAddr ) - { +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + vkGetMemoryMetalHandleEXT = PFN_vkGetMemoryMetalHandleEXT( vkGetDeviceProcAddr( device, "vkGetMemoryMetalHandleEXT" ) ); + vkGetMemoryMetalHandlePropertiesEXT = PFN_vkGetMemoryMetalHandlePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryMetalHandlePropertiesEXT" ) ); +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + } + + public: //=== VK_VERSION_1_0 === - vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) ); - vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( 
device, "vkDestroyDevice" ) ); - vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) ); - vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) ); - vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) ); - vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) ); - vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); - vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) ); - vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) ); - vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); - vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) ); - vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) ); - vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) ); - vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); - vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) ); - vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) ); - vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) ); - vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) ); - vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) ); - vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) ); - vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) ); - vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) ); - vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) ); - vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) ); - vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) ); - vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) ); - vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) ); - vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) ); - vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) ); - vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) ); - vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) ); - vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) ); - vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) ); - vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) ); - vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); - vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) ); - vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); - 
vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) ); - vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) ); - vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) ); - vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) ); - vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) ); - vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) ); - vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) ); - vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) ); - vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) ); - vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); - vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) ); - vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) ); - vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); - vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); - vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); - vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) ); - vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) ); - vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) ); - vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) ); - vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); - vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); - vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) ); - vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) ); - vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) ); - vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); - vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) ); - vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); - vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) ); - vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) ); - vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); - vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) ); - vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) ); - vkCreateCommandPool 
= PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) ); - vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) ); - vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) ); - vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) ); - vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) ); - vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); - vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); - vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) ); - vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); - vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); - vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); - vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); - vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); - vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); - vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); - vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); - vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); - vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); - vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); - vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); - vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); - vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); - vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); - vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); - vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); - vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); - vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); - vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); - vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); - vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); - vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); - vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); - vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); - vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); - vkCmdClearColorImage = 
PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); - vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); - vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); - vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); - vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); - vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); - vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); - vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); - vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); - vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); - vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); - vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); - vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); - vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); - vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); - vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); - vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); - vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); + PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; + PFN_vkDestroyDevice vkDestroyDevice = 0; + PFN_vkGetDeviceQueue vkGetDeviceQueue = 0; + PFN_vkQueueSubmit vkQueueSubmit = 0; + PFN_vkQueueWaitIdle vkQueueWaitIdle = 0; + PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0; + PFN_vkAllocateMemory vkAllocateMemory = 0; + PFN_vkFreeMemory vkFreeMemory = 0; + PFN_vkMapMemory vkMapMemory = 0; + PFN_vkUnmapMemory vkUnmapMemory = 0; + PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0; + PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; + PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; + PFN_vkBindBufferMemory vkBindBufferMemory = 0; + PFN_vkBindImageMemory vkBindImageMemory = 0; + PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; + PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; + PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; + PFN_vkQueueBindSparse vkQueueBindSparse = 0; + PFN_vkCreateFence vkCreateFence = 0; + PFN_vkDestroyFence vkDestroyFence = 0; + PFN_vkResetFences vkResetFences = 0; + PFN_vkGetFenceStatus vkGetFenceStatus = 0; + PFN_vkWaitForFences vkWaitForFences = 0; + PFN_vkCreateSemaphore vkCreateSemaphore = 0; + PFN_vkDestroySemaphore vkDestroySemaphore = 0; + PFN_vkCreateEvent vkCreateEvent = 0; + PFN_vkDestroyEvent vkDestroyEvent = 0; + PFN_vkGetEventStatus vkGetEventStatus = 0; + PFN_vkSetEvent vkSetEvent = 0; + PFN_vkResetEvent vkResetEvent = 0; + PFN_vkCreateQueryPool vkCreateQueryPool = 0; + PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; + PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; + PFN_vkCreateBuffer vkCreateBuffer = 0; + PFN_vkDestroyBuffer vkDestroyBuffer = 0; 
+ PFN_vkCreateBufferView vkCreateBufferView = 0; + PFN_vkDestroyBufferView vkDestroyBufferView = 0; + PFN_vkCreateImage vkCreateImage = 0; + PFN_vkDestroyImage vkDestroyImage = 0; + PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; + PFN_vkCreateImageView vkCreateImageView = 0; + PFN_vkDestroyImageView vkDestroyImageView = 0; + PFN_vkCreateShaderModule vkCreateShaderModule = 0; + PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; + PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; + PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; + PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; + PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; + PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; + PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; + PFN_vkDestroyPipeline vkDestroyPipeline = 0; + PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; + PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; + PFN_vkCreateSampler vkCreateSampler = 0; + PFN_vkDestroySampler vkDestroySampler = 0; + PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; + PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; + PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; + PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; + PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; + PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; + PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; + PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; + PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; + PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; + PFN_vkCreateRenderPass vkCreateRenderPass = 0; + PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; + PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; + PFN_vkCreateCommandPool vkCreateCommandPool = 0; + PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; + PFN_vkResetCommandPool vkResetCommandPool = 0; + PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; + PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; + PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; + PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; + PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; + PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; + PFN_vkCmdSetViewport vkCmdSetViewport = 0; + PFN_vkCmdSetScissor vkCmdSetScissor = 0; + PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; + PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; + PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; + PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; + PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; + PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; + PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; + PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; + PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; + PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; + PFN_vkCmdDraw vkCmdDraw = 0; + PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; + PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; + PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; + PFN_vkCmdDispatch vkCmdDispatch = 0; + PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; + PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; + PFN_vkCmdCopyImage vkCmdCopyImage = 0; + PFN_vkCmdBlitImage vkCmdBlitImage = 0; + PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; + PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; + PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; + PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; + PFN_vkCmdClearColorImage 
vkCmdClearColorImage = 0; + PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; + PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; + PFN_vkCmdResolveImage vkCmdResolveImage = 0; + PFN_vkCmdSetEvent vkCmdSetEvent = 0; + PFN_vkCmdResetEvent vkCmdResetEvent = 0; + PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; + PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; + PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; + PFN_vkCmdEndQuery vkCmdEndQuery = 0; + PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; + PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; + PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; + PFN_vkCmdPushConstants vkCmdPushConstants = 0; + PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; + PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; + PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; + PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; //=== VK_VERSION_1_1 === - vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); - vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); - vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); - vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); - vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); - vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); - vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) ); - vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) ); - vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); - vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) ); - vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) ); - vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) ); - vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); - vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); - vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); - vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); + PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; + PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; + PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; + PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; + PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; + PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; + PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; + PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; + PFN_vkTrimCommandPool vkTrimCommandPool = 0; + PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; + 
PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; + PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; + PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; + PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; + PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; + PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; //=== VK_VERSION_1_2 === - vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); - vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); - vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); - vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); - vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); - vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); - vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) ); - vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) ); - vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) ); - vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); - vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) ); - vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); - vkGetDeviceMemoryOpaqueCaptureAddress = - PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; + PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; + PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; + PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0; + PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; + PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; + PFN_vkResetQueryPool vkResetQueryPool = 0; + PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; + PFN_vkWaitSemaphores vkWaitSemaphores = 0; + PFN_vkSignalSemaphore vkSignalSemaphore = 0; + PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; + PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; //=== VK_VERSION_1_3 === - vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) ); - vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) ); - vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) ); - vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) ); - vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) ); - vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) ); - vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) ); - vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( 
vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) ); - vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) ); - vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) ); - vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) ); - vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) ); - vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) ); - vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) ); - vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) ); - vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) ); - vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) ); - vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) ); - vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) ); - vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) ); - vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) ); - vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) ); - vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) ); - vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) ); - vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) ); - vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) ); - vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) ); - vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) ); - vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) ); - vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) ); - vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) ); - vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) ); - vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) ); - vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) ); - vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) ); - vkGetDeviceImageSparseMemoryRequirements = - PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) ); - - //=== VK_AMD_buffer_marker === - vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); - - //=== VK_AMD_display_native_hdr === - vkSetLocalDimmingAMD 
= PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); + PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0; + PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0; + PFN_vkSetPrivateData vkSetPrivateData = 0; + PFN_vkGetPrivateData vkGetPrivateData = 0; + PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0; + PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0; + PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0; + PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0; + PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 0; + PFN_vkQueueSubmit2 vkQueueSubmit2 = 0; + PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0; + PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0; + PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0; + PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0; + PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0; + PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0; + PFN_vkCmdBeginRendering vkCmdBeginRendering = 0; + PFN_vkCmdEndRendering vkCmdEndRendering = 0; + PFN_vkCmdSetCullMode vkCmdSetCullMode = 0; + PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0; + PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0; + PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0; + PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0; + PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2 = 0; + PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0; + PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0; + PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0; + PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0; + PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0; + PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0; + PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0; + PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0; + PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0; + PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0; + PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0; + PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0; + + //=== VK_VERSION_1_4 === + PFN_vkCmdSetLineStipple vkCmdSetLineStipple = 0; + PFN_vkMapMemory2 vkMapMemory2 = 0; + PFN_vkUnmapMemory2 vkUnmapMemory2 = 0; + PFN_vkCmdBindIndexBuffer2 vkCmdBindIndexBuffer2 = 0; + PFN_vkGetRenderingAreaGranularity vkGetRenderingAreaGranularity = 0; + PFN_vkGetDeviceImageSubresourceLayout vkGetDeviceImageSubresourceLayout = 0; + PFN_vkGetImageSubresourceLayout2 vkGetImageSubresourceLayout2 = 0; + PFN_vkCmdPushDescriptorSet vkCmdPushDescriptorSet = 0; + PFN_vkCmdPushDescriptorSetWithTemplate vkCmdPushDescriptorSetWithTemplate = 0; + PFN_vkCmdSetRenderingAttachmentLocations vkCmdSetRenderingAttachmentLocations = 0; + PFN_vkCmdSetRenderingInputAttachmentIndices vkCmdSetRenderingInputAttachmentIndices = 0; + PFN_vkCmdBindDescriptorSets2 vkCmdBindDescriptorSets2 = 0; + PFN_vkCmdPushConstants2 vkCmdPushConstants2 = 0; + PFN_vkCmdPushDescriptorSet2 vkCmdPushDescriptorSet2 = 0; + PFN_vkCmdPushDescriptorSetWithTemplate2 vkCmdPushDescriptorSetWithTemplate2 = 0; + PFN_vkCopyMemoryToImage vkCopyMemoryToImage = 0; + PFN_vkCopyImageToMemory vkCopyImageToMemory = 0; + PFN_vkCopyImageToImage vkCopyImageToImage = 0; + PFN_vkTransitionImageLayout vkTransitionImageLayout = 0; - //=== VK_AMD_draw_indirect_count === - vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); - if ( !vkCmdDrawIndirectCount ) - 
vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; - vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) ); - if ( !vkCmdDrawIndexedIndirectCount ) - vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + //=== VK_KHR_swapchain === + PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; + PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; + PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; + PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; + PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; + PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; + PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; + PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; - //=== VK_AMD_shader_info === - vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) ); + //=== VK_KHR_display_swapchain === + PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; -# if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_ANDROID_external_memory_android_hardware_buffer === - vkGetAndroidHardwareBufferPropertiesANDROID = - PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); - vkGetMemoryAndroidHardwareBufferANDROID = - PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); -# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + //=== VK_EXT_debug_marker === + PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; + PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; + PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; + PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; + PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; - //=== VK_EXT_buffer_device_address === - vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); - if ( !vkGetBufferDeviceAddress ) - vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + //=== VK_KHR_video_queue === + PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0; + PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0; + PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0; + PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0; + PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0; + PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0; + PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0; + PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0; + PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0; + PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0; - //=== VK_EXT_calibrated_timestamps === - vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); + //=== VK_KHR_video_decode_queue === + PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0; - //=== VK_EXT_color_write_enable === - vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) ); + //=== VK_EXT_transform_feedback === + PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; + PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; + 
PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; + PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; + PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; + PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; - //=== VK_EXT_conditional_rendering === - vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); - vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); + //=== VK_NVX_binary_import === + PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0; + PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0; + PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0; + PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0; + PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; - //=== VK_EXT_debug_marker === - vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); - vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); - vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); - vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); - vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); + //=== VK_NVX_image_view_handle === + PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; + PFN_vkGetImageViewHandle64NVX vkGetImageViewHandle64NVX = 0; + PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; - //=== VK_EXT_debug_utils === - vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); - vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); - vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) ); - vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) ); - vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) ); - vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); - vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); - vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); + //=== VK_AMD_draw_indirect_count === + PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; + PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; - //=== VK_EXT_discard_rectangles === - vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); + //=== VK_AMD_shader_info === + PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; - //=== VK_EXT_display_control === - vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); - vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, 
"vkRegisterDeviceEventEXT" ) ); - vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); - vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); + //=== VK_KHR_dynamic_rendering === + PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0; + PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0; - //=== VK_EXT_extended_dynamic_state === - vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) ); - if ( !vkCmdSetCullMode ) - vkCmdSetCullMode = vkCmdSetCullModeEXT; - vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) ); - if ( !vkCmdSetFrontFace ) - vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; - vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) ); - if ( !vkCmdSetPrimitiveTopology ) - vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; - vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) ); - if ( !vkCmdSetViewportWithCount ) - vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; - vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) ); - if ( !vkCmdSetScissorWithCount ) - vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; - vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) ); - if ( !vkCmdBindVertexBuffers2 ) - vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; - vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) ); - if ( !vkCmdSetDepthTestEnable ) - vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; - vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) ); - if ( !vkCmdSetDepthWriteEnable ) - vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; - vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) ); - if ( !vkCmdSetDepthCompareOp ) - vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; - vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) ); - if ( !vkCmdSetDepthBoundsTestEnable ) - vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; - vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) ); - if ( !vkCmdSetStencilTestEnable ) - vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; - vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) ); - if ( !vkCmdSetStencilOp ) - vkCmdSetStencilOp = vkCmdSetStencilOpEXT; +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; +# else + PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_EXT_extended_dynamic_state2 === - vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) ); - vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, 
"vkCmdSetRasterizerDiscardEnableEXT" ) ); - if ( !vkCmdSetRasterizerDiscardEnable ) - vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; - vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) ); - if ( !vkCmdSetDepthBiasEnable ) - vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; - vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) ); - vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) ); - if ( !vkCmdSetPrimitiveRestartEnable ) - vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; + //=== VK_KHR_device_group === + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; + PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; + PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; - //=== VK_EXT_external_memory_host === - vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) ); + //=== VK_KHR_maintenance1 === + PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; # if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_EXT_full_screen_exclusive === - vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); - vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); - vkGetDeviceGroupSurfacePresentModes2EXT = - PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); + //=== VK_KHR_external_memory_win32 === + PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; + PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; +# else + PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_EXT_hdr_metadata === - vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); + //=== VK_KHR_external_memory_fd === + PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; + PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; - //=== VK_EXT_host_query_reset === - vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); - if ( !vkResetQueryPool ) - vkResetQueryPool = vkResetQueryPoolEXT; +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; + PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; +# else + PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_EXT_image_compression_control === - vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); + //=== VK_KHR_external_semaphore_fd === + PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; + PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; - //=== VK_EXT_image_drm_format_modifier === - vkGetImageDrmFormatModifierPropertiesEXT = - PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, 
"vkGetImageDrmFormatModifierPropertiesEXT" ) ); + //=== VK_KHR_push_descriptor === + PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; - //=== VK_EXT_line_rasterization === - vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); + //=== VK_EXT_conditional_rendering === + PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; + PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; - //=== VK_EXT_mesh_shader === - vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) ); - vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) ); - vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); + //=== VK_KHR_descriptor_update_template === + PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; + PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; + PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; -# if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_objects === - vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) ); -# endif /*VK_USE_PLATFORM_METAL_EXT*/ + //=== VK_NV_clip_space_w_scaling === + PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; - //=== VK_EXT_multi_draw === - vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) ); - vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) ); + //=== VK_EXT_display_control === + PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; + PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; + PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; + PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; - //=== VK_EXT_pageable_device_local_memory === - vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) ); + //=== VK_GOOGLE_display_timing === + PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; + PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; - //=== VK_EXT_pipeline_properties === - vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) ); + //=== VK_EXT_discard_rectangles === + PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; + PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT = 0; + PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT = 0; - //=== VK_EXT_private_data === - vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) ); - if ( !vkCreatePrivateDataSlot ) - vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; - vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) ); - if ( !vkDestroyPrivateDataSlot ) - vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; - vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) ); - if ( 
!vkSetPrivateData ) - vkSetPrivateData = vkSetPrivateDataEXT; - vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) ); - if ( !vkGetPrivateData ) - vkGetPrivateData = vkGetPrivateDataEXT; + //=== VK_EXT_hdr_metadata === + PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; - //=== VK_EXT_sample_locations === - vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); + //=== VK_KHR_create_renderpass2 === + PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; + PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; + PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; + PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; - //=== VK_EXT_shader_module_identifier === - vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) ); - vkGetShaderModuleCreateInfoIdentifierEXT = - PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) ); + //=== VK_KHR_shared_presentable_image === + PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; - //=== VK_EXT_transform_feedback === - vkCmdBindTransformFeedbackBuffersEXT = - PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); - vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); - vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); - vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) ); - vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) ); - vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; + PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; +# else + PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_EXT_validation_cache === - vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); - vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); - vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); - vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); + //=== VK_KHR_external_fence_fd === + PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; + PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; - //=== VK_EXT_vertex_input_dynamic_state === - vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) ); + //=== VK_KHR_performance_query === + PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; + PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_buffer_collection === - vkCreateBufferCollectionFUCHSIA = 
PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) ); - vkSetBufferCollectionImageConstraintsFUCHSIA = - PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); - vkSetBufferCollectionBufferConstraintsFUCHSIA = - PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); - vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) ); - vkGetBufferCollectionPropertiesFUCHSIA = - PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); -# endif /*VK_USE_PLATFORM_FUCHSIA*/ + //=== VK_EXT_debug_utils === + PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; + PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; + PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; + PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; + PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; + PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; + PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; + PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_memory === - vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) ); - vkGetMemoryZirconHandlePropertiesFUCHSIA = - PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); -# endif /*VK_USE_PLATFORM_FUCHSIA*/ +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; + PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; +# else + PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0; + PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0; +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_semaphore === - vkImportSemaphoreZirconHandleFUCHSIA = - PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); - vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); -# endif /*VK_USE_PLATFORM_FUCHSIA*/ +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX = 0; + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX = 0; + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX = 0; + PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX = 0; + PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0; + PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX = 0; + PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX = 0; +# else + PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0; + PFN_dummy vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0; + PFN_dummy 
vkGetExecutionGraphPipelineNodeIndexAMDX_placeholder = 0; + PFN_dummy vkCmdInitializeGraphScratchMemoryAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - //=== VK_GOOGLE_display_timing === - vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); - vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); + //=== VK_EXT_sample_locations === + PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; - //=== VK_HUAWEI_invocation_mask === - vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) ); + //=== VK_KHR_get_memory_requirements2 === + PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; + PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; + PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; - //=== VK_HUAWEI_subpass_shading === - vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = - PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); - vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) ); + //=== VK_KHR_acceleration_structure === + PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; + PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; + PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0; + PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0; + PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0; + PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; + PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; + PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; + PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0; - //=== VK_INTEL_performance_query === - vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) ); - vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); - vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); - 
vkCmdSetPerformanceStreamMarkerINTEL = - PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); - vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); - vkAcquirePerformanceConfigurationINTEL = - PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); - vkReleasePerformanceConfigurationINTEL = - PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); - vkQueueSetPerformanceConfigurationINTEL = - PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) ); - vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); + //=== VK_KHR_ray_tracing_pipeline === + PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; + PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; + PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0; + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; + PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0; + PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0; + PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0; - //=== VK_KHR_acceleration_structure === - vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); - vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); - vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) ); - vkCmdBuildAccelerationStructuresIndirectKHR = - PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); - vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) ); - vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); - vkCopyAccelerationStructureToMemoryKHR = - PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); - vkCopyMemoryToAccelerationStructureKHR = - PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); - vkWriteAccelerationStructuresPropertiesKHR = - PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) ); - vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); - vkCmdCopyAccelerationStructureToMemoryKHR = - PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); - vkCmdCopyMemoryToAccelerationStructureKHR = - PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); - vkGetAccelerationStructureDeviceAddressKHR = - 
PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); - vkCmdWriteAccelerationStructuresPropertiesKHR = - PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); - vkGetDeviceAccelerationStructureCompatibilityKHR = - PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); - vkGetAccelerationStructureBuildSizesKHR = - PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) ); + //=== VK_KHR_sampler_ycbcr_conversion === + PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; + PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; //=== VK_KHR_bind_memory2 === - vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); - if ( !vkBindBufferMemory2 ) - vkBindBufferMemory2 = vkBindBufferMemory2KHR; - vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); - if ( !vkBindImageMemory2 ) - vkBindImageMemory2 = vkBindImageMemory2KHR; + PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; + PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; - //=== VK_KHR_buffer_device_address === - vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) ); - if ( !vkGetBufferDeviceAddress ) - vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; - vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); - if ( !vkGetBufferOpaqueCaptureAddress ) - vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; - vkGetDeviceMemoryOpaqueCaptureAddressKHR = - PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); - if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) - vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + //=== VK_EXT_image_drm_format_modifier === + PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; - //=== VK_KHR_copy_commands2 === - vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) ); - if ( !vkCmdCopyBuffer2 ) - vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; - vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) ); - if ( !vkCmdCopyImage2 ) - vkCmdCopyImage2 = vkCmdCopyImage2KHR; - vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) ); - if ( !vkCmdCopyBufferToImage2 ) - vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; - vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) ); - if ( !vkCmdCopyImageToBuffer2 ) - vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; - vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) ); - if ( !vkCmdBlitImage2 ) - vkCmdBlitImage2 = vkCmdBlitImage2KHR; - vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) ); - if ( !vkCmdResolveImage2 ) - vkCmdResolveImage2 = vkCmdResolveImage2KHR; + //=== 
VK_EXT_validation_cache === + PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; + PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; + PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; + PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; - //=== VK_KHR_create_renderpass2 === - vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); - if ( !vkCreateRenderPass2 ) - vkCreateRenderPass2 = vkCreateRenderPass2KHR; - vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); - if ( !vkCmdBeginRenderPass2 ) - vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; - vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); - if ( !vkCmdNextSubpass2 ) - vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; - vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); - if ( !vkCmdEndRenderPass2 ) - vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + //=== VK_NV_shading_rate_image === + PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; + PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; + PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; - //=== VK_KHR_deferred_host_operations === - vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) ); - vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) ); - vkGetDeferredOperationMaxConcurrencyKHR = - PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); - vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); - vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); + //=== VK_NV_ray_tracing === + PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; + PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; + PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; + PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; + PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; + PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0; + PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; + PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; + PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; + PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; + PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; - //=== VK_KHR_descriptor_update_template === - vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); - if ( !vkCreateDescriptorUpdateTemplate ) - vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; - vkDestroyDescriptorUpdateTemplateKHR = - PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) ); - if ( 
!vkDestroyDescriptorUpdateTemplate ) - vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; - vkUpdateDescriptorSetWithTemplateKHR = - PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); - if ( !vkUpdateDescriptorSetWithTemplate ) - vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; - vkCmdPushDescriptorSetWithTemplateKHR = - PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + //=== VK_KHR_maintenance3 === + PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; - //=== VK_KHR_device_group === - vkGetDeviceGroupPeerMemoryFeaturesKHR = - PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); - if ( !vkGetDeviceGroupPeerMemoryFeatures ) - vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; - vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); - if ( !vkCmdSetDeviceMask ) - vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; - vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); - if ( !vkCmdDispatchBase ) - vkCmdDispatchBase = vkCmdDispatchBaseKHR; - vkGetDeviceGroupPresentCapabilitiesKHR = - PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); - vkGetDeviceGroupSurfacePresentModesKHR = - PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); - vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) ); + //=== VK_KHR_draw_indirect_count === + PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; + PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; - //=== VK_KHR_display_swapchain === - vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) ); + //=== VK_EXT_external_memory_host === + PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; - //=== VK_KHR_draw_indirect_count === - vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); - if ( !vkCmdDrawIndirectCount ) - vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; - vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); - if ( !vkCmdDrawIndexedIndirectCount ) - vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + //=== VK_AMD_buffer_marker === + PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; + PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; - //=== VK_KHR_dynamic_rendering === - vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) ); - if ( !vkCmdBeginRendering ) - vkCmdBeginRendering = vkCmdBeginRenderingKHR; - vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) ); - if ( !vkCmdEndRendering ) - vkCmdEndRendering = vkCmdEndRenderingKHR; + //=== VK_EXT_calibrated_timestamps === + PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; - //=== VK_KHR_external_fence_fd === - vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, 
"vkImportFenceFdKHR" ) ); - vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); + //=== VK_NV_mesh_shader === + PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; + PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; + PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_fence_win32 === - vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); - vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + //=== VK_NV_scissor_exclusive === + PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV = 0; + PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; - //=== VK_KHR_external_memory_fd === - vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) ); - vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) ); + //=== VK_NV_device_diagnostic_checkpoints === + PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; + PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; + PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_memory_win32 === - vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); - vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) ); -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + //=== VK_KHR_timeline_semaphore === + PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; + PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; + PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; - //=== VK_KHR_external_semaphore_fd === - vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) ); - vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); + //=== VK_INTEL_performance_query === + PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; + PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; + PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0; + PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; + PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; + PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; + PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0; + PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0; + PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_semaphore_win32 === - vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); - vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + //=== VK_AMD_display_native_hdr === + PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; //=== 
VK_KHR_fragment_shading_rate === - vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) ); + PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0; - //=== VK_KHR_get_memory_requirements2 === - vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); - if ( !vkGetImageMemoryRequirements2 ) - vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; - vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); - if ( !vkGetBufferMemoryRequirements2 ) - vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; - vkGetImageSparseMemoryRequirements2KHR = - PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); - if ( !vkGetImageSparseMemoryRequirements2 ) - vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + //=== VK_KHR_dynamic_rendering_local_read === + PFN_vkCmdSetRenderingAttachmentLocationsKHR vkCmdSetRenderingAttachmentLocationsKHR = 0; + PFN_vkCmdSetRenderingInputAttachmentIndicesKHR vkCmdSetRenderingInputAttachmentIndicesKHR = 0; - //=== VK_KHR_maintenance1 === - vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); - if ( !vkTrimCommandPool ) - vkTrimCommandPool = vkTrimCommandPoolKHR; + //=== VK_EXT_buffer_device_address === + PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0; - //=== VK_KHR_maintenance3 === - vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); - if ( !vkGetDescriptorSetLayoutSupport ) - vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + //=== VK_KHR_present_wait === + PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0; - //=== VK_KHR_maintenance4 === - vkGetDeviceBufferMemoryRequirementsKHR = - PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceBufferMemoryRequirements ) - vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR; - vkGetDeviceImageMemoryRequirementsKHR = - PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceImageMemoryRequirements ) - vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; - vkGetDeviceImageSparseMemoryRequirementsKHR = - PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceImageSparseMemoryRequirements ) - vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; + PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; + PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0; +# else + PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0; + PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0; + PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== 
VK_KHR_performance_query === - vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); - vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); + //=== VK_KHR_buffer_device_address === + PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; + PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0; - //=== VK_KHR_pipeline_executable_properties === - vkGetPipelineExecutablePropertiesKHR = - PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); - vkGetPipelineExecutableStatisticsKHR = - PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); - vkGetPipelineExecutableInternalRepresentationsKHR = - PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + //=== VK_EXT_line_rasterization === + PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; - //=== VK_KHR_present_wait === - vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) ); + //=== VK_EXT_host_query_reset === + PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; - //=== VK_KHR_push_descriptor === - vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); + //=== VK_EXT_extended_dynamic_state === + PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0; + PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0; + PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0; + PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0; + PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0; + PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0; + PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0; + PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0; + PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0; + PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0; + PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0; + PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0; - //=== VK_KHR_ray_tracing_maintenance1 === - vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) ); + //=== VK_KHR_deferred_host_operations === + PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; + PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; + PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; + PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; + PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; - //=== VK_KHR_ray_tracing_pipeline === - vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) ); - vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); - vkGetRayTracingShaderGroupHandlesKHR = - PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); - vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = - PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( 
vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); - vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); - vkGetRayTracingShaderGroupStackSizeKHR = - PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); - vkCmdSetRayTracingPipelineStackSizeKHR = - PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + //=== VK_KHR_pipeline_executable_properties === + PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; + PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; + PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; - //=== VK_KHR_sampler_ycbcr_conversion === - vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); - if ( !vkCreateSamplerYcbcrConversion ) - vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; - vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); - if ( !vkDestroySamplerYcbcrConversion ) - vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + //=== VK_EXT_host_image_copy === + PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT = 0; + PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT = 0; + PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT = 0; + PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT = 0; + PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0; - //=== VK_KHR_shared_presentable_image === - vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) ); + //=== VK_KHR_map_memory2 === + PFN_vkMapMemory2KHR vkMapMemory2KHR = 0; + PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR = 0; - //=== VK_KHR_swapchain === - vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) ); - vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); - vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) ); - vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); - vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) ); + //=== VK_EXT_swapchain_maintenance1 === + PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT = 0; - //=== VK_KHR_synchronization2 === - vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) ); - if ( !vkCmdSetEvent2 ) - vkCmdSetEvent2 = vkCmdSetEvent2KHR; - vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) ); - if ( !vkCmdResetEvent2 ) - vkCmdResetEvent2 = vkCmdResetEvent2KHR; - vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) ); - if ( !vkCmdWaitEvents2 ) - vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; - vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) ); - if ( !vkCmdPipelineBarrier2 ) - vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; - vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( 
vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) ); - if ( !vkCmdWriteTimestamp2 ) - vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; - vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) ); - if ( !vkQueueSubmit2 ) - vkQueueSubmit2 = vkQueueSubmit2KHR; - vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); - vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); + //=== VK_NV_device_generated_commands === + PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0; + PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; + PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0; + PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0; + PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; + PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; - //=== VK_KHR_timeline_semaphore === - vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); - if ( !vkGetSemaphoreCounterValue ) - vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; - vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) ); - if ( !vkWaitSemaphores ) - vkWaitSemaphores = vkWaitSemaphoresKHR; - vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); - if ( !vkSignalSemaphore ) - vkSignalSemaphore = vkSignalSemaphoreKHR; + //=== VK_EXT_depth_bias_control === + PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT = 0; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_decode_queue === - vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) ); -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_private_data === + PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0; + PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0; + PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0; + PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_encode_queue === - vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) ); -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR = 0; + PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0; # if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_queue === - vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) ); - vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) ); - vkGetVideoSessionMemoryRequirementsKHR = - PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) ); - vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) ); - vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) ); - vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) ); - 
vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) ); - vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) ); - vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) ); - vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) ); + //=== VK_NV_cuda_kernel_launch === + PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV = 0; + PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV = 0; + PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV = 0; + PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV = 0; + PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV = 0; + PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV = 0; +# else + PFN_dummy vkCreateCudaModuleNV_placeholder = 0; + PFN_dummy vkGetCudaModuleCacheNV_placeholder = 0; + PFN_dummy vkCreateCudaFunctionNV_placeholder = 0; + PFN_dummy vkDestroyCudaModuleNV_placeholder = 0; + PFN_dummy vkDestroyCudaFunctionNV_placeholder = 0; + PFN_dummy vkCmdCudaLaunchKernelNV_placeholder = 0; # endif /*VK_ENABLE_BETA_EXTENSIONS*/ - //=== VK_NVX_binary_import === - vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) ); - vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) ); - vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) ); - vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) ); - vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) ); - - //=== VK_NVX_image_view_handle === - vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); - vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); - - //=== VK_NV_clip_space_w_scaling === - vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); - - //=== VK_NV_device_diagnostic_checkpoints === - vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); - vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); - - //=== VK_NV_device_generated_commands === - vkGetGeneratedCommandsMemoryRequirementsNV = - PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); - vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); - vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); - vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); - vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); - vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); - - //=== VK_NV_external_memory_rdma === - 
vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) ); - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_external_memory_win32 === - vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) ); -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_NV_fragment_shading_rate_enums === - vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) ); - - //=== VK_NV_mesh_shader === - vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); - vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); - vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); - - //=== VK_NV_ray_tracing === - vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); - vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); - vkGetAccelerationStructureMemoryRequirementsNV = - PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); - vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); - vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); - vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); - vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); - vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); - vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); - if ( !vkGetRayTracingShaderGroupHandlesKHR ) - vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; - vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); - vkCmdWriteAccelerationStructuresPropertiesNV = - PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); - vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); - - //=== VK_NV_scissor_exclusive === - vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); - - //=== VK_NV_shading_rate_image === - vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); - vkCmdSetViewportShadingRatePaletteNV = - PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); - vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); - - //=== VK_QCOM_tile_properties === - 
vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) ); - vkGetDynamicRenderingTilePropertiesQCOM = - PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetDynamicRenderingTilePropertiesQCOM" ) ); - - //=== VK_VALVE_descriptor_set_host_mapping === - vkGetDescriptorSetLayoutHostMappingInfoVALVE = - PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); - vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) ); - } - - public: - //=== VK_VERSION_1_0 === - PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; - PFN_vkDestroyDevice vkDestroyDevice = 0; - PFN_vkGetDeviceQueue vkGetDeviceQueue = 0; - PFN_vkQueueSubmit vkQueueSubmit = 0; - PFN_vkQueueWaitIdle vkQueueWaitIdle = 0; - PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0; - PFN_vkAllocateMemory vkAllocateMemory = 0; - PFN_vkFreeMemory vkFreeMemory = 0; - PFN_vkMapMemory vkMapMemory = 0; - PFN_vkUnmapMemory vkUnmapMemory = 0; - PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0; - PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; - PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; - PFN_vkBindBufferMemory vkBindBufferMemory = 0; - PFN_vkBindImageMemory vkBindImageMemory = 0; - PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; - PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; - PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; - PFN_vkQueueBindSparse vkQueueBindSparse = 0; - PFN_vkCreateFence vkCreateFence = 0; - PFN_vkDestroyFence vkDestroyFence = 0; - PFN_vkResetFences vkResetFences = 0; - PFN_vkGetFenceStatus vkGetFenceStatus = 0; - PFN_vkWaitForFences vkWaitForFences = 0; - PFN_vkCreateSemaphore vkCreateSemaphore = 0; - PFN_vkDestroySemaphore vkDestroySemaphore = 0; - PFN_vkCreateEvent vkCreateEvent = 0; - PFN_vkDestroyEvent vkDestroyEvent = 0; - PFN_vkGetEventStatus vkGetEventStatus = 0; - PFN_vkSetEvent vkSetEvent = 0; - PFN_vkResetEvent vkResetEvent = 0; - PFN_vkCreateQueryPool vkCreateQueryPool = 0; - PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; - PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; - PFN_vkCreateBuffer vkCreateBuffer = 0; - PFN_vkDestroyBuffer vkDestroyBuffer = 0; - PFN_vkCreateBufferView vkCreateBufferView = 0; - PFN_vkDestroyBufferView vkDestroyBufferView = 0; - PFN_vkCreateImage vkCreateImage = 0; - PFN_vkDestroyImage vkDestroyImage = 0; - PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; - PFN_vkCreateImageView vkCreateImageView = 0; - PFN_vkDestroyImageView vkDestroyImageView = 0; - PFN_vkCreateShaderModule vkCreateShaderModule = 0; - PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; - PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; - PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; - PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; - PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; - PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; - PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; - PFN_vkDestroyPipeline vkDestroyPipeline = 0; - PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; - PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; - PFN_vkCreateSampler vkCreateSampler = 0; - PFN_vkDestroySampler vkDestroySampler = 0; - PFN_vkCreateDescriptorSetLayout 
vkCreateDescriptorSetLayout = 0; - PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; - PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; - PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; - PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; - PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; - PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; - PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; - PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; - PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; - PFN_vkCreateRenderPass vkCreateRenderPass = 0; - PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; - PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; - PFN_vkCreateCommandPool vkCreateCommandPool = 0; - PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; - PFN_vkResetCommandPool vkResetCommandPool = 0; - PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; - PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; - PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; - PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; - PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; - PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; - PFN_vkCmdSetViewport vkCmdSetViewport = 0; - PFN_vkCmdSetScissor vkCmdSetScissor = 0; - PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; - PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; - PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; - PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; - PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; - PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; - PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; - PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; - PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; - PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; - PFN_vkCmdDraw vkCmdDraw = 0; - PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; - PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; - PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; - PFN_vkCmdDispatch vkCmdDispatch = 0; - PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; - PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; - PFN_vkCmdCopyImage vkCmdCopyImage = 0; - PFN_vkCmdBlitImage vkCmdBlitImage = 0; - PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; - PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; - PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; - PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; - PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; - PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; - PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; - PFN_vkCmdResolveImage vkCmdResolveImage = 0; - PFN_vkCmdSetEvent vkCmdSetEvent = 0; - PFN_vkCmdResetEvent vkCmdResetEvent = 0; - PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; - PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; - PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; - PFN_vkCmdEndQuery vkCmdEndQuery = 0; - PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; - PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; - PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; - PFN_vkCmdPushConstants vkCmdPushConstants = 0; - PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; - PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; - PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; - PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; - - //=== VK_VERSION_1_1 === - PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; - PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; - PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; - 
PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; - PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; - PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; - PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; - PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; - PFN_vkTrimCommandPool vkTrimCommandPool = 0; - PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; - PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; - PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; - PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; - PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; - PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; - PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; - - //=== VK_VERSION_1_2 === - PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; - PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; - PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; - PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0; - PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; - PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; - PFN_vkResetQueryPool vkResetQueryPool = 0; - PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; - PFN_vkWaitSemaphores vkWaitSemaphores = 0; - PFN_vkSignalSemaphore vkSignalSemaphore = 0; - PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; - PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; - PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; - - //=== VK_VERSION_1_3 === - PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0; - PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0; - PFN_vkSetPrivateData vkSetPrivateData = 0; - PFN_vkGetPrivateData vkGetPrivateData = 0; - PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0; - PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0; - PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0; - PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0; - PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 0; - PFN_vkQueueSubmit2 vkQueueSubmit2 = 0; - PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0; - PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0; - PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0; - PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0; - PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0; - PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0; - PFN_vkCmdBeginRendering vkCmdBeginRendering = 0; - PFN_vkCmdEndRendering vkCmdEndRendering = 0; - PFN_vkCmdSetCullMode vkCmdSetCullMode = 0; - PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0; - PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0; - PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0; - PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0; - PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2 = 0; - PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0; - PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0; - PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0; - PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0; - PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0; - PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0; - PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0; - PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0; - PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0; - PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements 
= 0; - PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0; - PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0; - - //=== VK_AMD_buffer_marker === - PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; - - //=== VK_AMD_display_native_hdr === - PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; - - //=== VK_AMD_draw_indirect_count === - PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; - PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; - - //=== VK_AMD_shader_info === - PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; - -# if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_ANDROID_external_memory_android_hardware_buffer === - PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; - PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; -# else - PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0; - PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0; -# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - - //=== VK_EXT_buffer_device_address === - PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0; - - //=== VK_EXT_calibrated_timestamps === - PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; - - //=== VK_EXT_color_write_enable === - PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0; - - //=== VK_EXT_conditional_rendering === - PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; - PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; - - //=== VK_EXT_debug_marker === - PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; - PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; - PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; - PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; - PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; - - //=== VK_EXT_debug_utils === - PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; - PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; - PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; - PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; - PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; - PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; - PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; - PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; - - //=== VK_EXT_discard_rectangles === - PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; - - //=== VK_EXT_display_control === - PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; - PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; - PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; - PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; - - //=== VK_EXT_extended_dynamic_state === - PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0; - PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0; - PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0; - PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0; - PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0; - PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0; - PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0; - PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0; - 
PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0; - PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0; - PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0; - PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0; - - //=== VK_EXT_extended_dynamic_state2 === - PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0; - PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0; - PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0; - PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0; - PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0; - - //=== VK_EXT_external_memory_host === - PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_EXT_full_screen_exclusive === - PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; - PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; - PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0; -# else - PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0; - PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0; - PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_EXT_hdr_metadata === - PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; - - //=== VK_EXT_host_query_reset === - PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; - - //=== VK_EXT_image_compression_control === - PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0; - - //=== VK_EXT_image_drm_format_modifier === - PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; - - //=== VK_EXT_line_rasterization === - PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; - - //=== VK_EXT_mesh_shader === - PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0; - PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0; - PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0; - # if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_objects === - PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0; + //=== VK_EXT_metal_objects === + PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0; # else - PFN_dummy vkExportMetalObjectsEXT_placeholder = 0; + PFN_dummy vkExportMetalObjectsEXT_placeholder = 0; # endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_EXT_multi_draw === - PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0; - PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0; - - //=== VK_EXT_pageable_device_local_memory === - PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0; - - //=== VK_EXT_pipeline_properties === - PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0; - - //=== VK_EXT_private_data === - PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0; - PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0; - PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0; - PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0; + //=== VK_KHR_synchronization2 === + PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; + PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; + PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; + PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; + PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; + PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; + + //=== VK_EXT_descriptor_buffer === + 
PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT = 0; + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT = 0; + PFN_vkGetDescriptorEXT vkGetDescriptorEXT = 0; + PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT = 0; + PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT = 0; + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT = 0; + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = 0; - //=== VK_EXT_sample_locations === - PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; + //=== VK_NV_fragment_shading_rate_enums === + PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0; - //=== VK_EXT_shader_module_identifier === - PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0; - PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0; + //=== VK_EXT_mesh_shader === + PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0; + PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0; + PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0; - //=== VK_EXT_transform_feedback === - PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; - PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; - PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; - PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; - PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; - PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; + //=== VK_KHR_copy_commands2 === + PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0; + PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0; + PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0; + PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0; + PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0; + PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0; - //=== VK_EXT_validation_cache === - PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; - PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; - PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; - PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; + //=== VK_EXT_device_fault === + PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0; - //=== VK_EXT_vertex_input_dynamic_state === - PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0; + //=== VK_EXT_vertex_input_dynamic_state === + PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0; # if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_buffer_collection === - PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA = 0; - PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA = 0; - PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0; - PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0; - 
PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0; + //=== VK_FUCHSIA_external_memory === + PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0; + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0; # else - PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0; - PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0; - PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0; - PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0; - PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0; + PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0; + PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0; # endif /*VK_USE_PLATFORM_FUCHSIA*/ # if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_memory === - PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0; - PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0; + //=== VK_FUCHSIA_external_semaphore === + PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0; + PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0; # else - PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0; - PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0; + PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0; + PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0; # endif /*VK_USE_PLATFORM_FUCHSIA*/ # if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_semaphore === - PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0; - PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0; + //=== VK_FUCHSIA_buffer_collection === + PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA = 0; + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA = 0; + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0; + PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0; + PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0; # else - PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0; - PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0; + PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0; + PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0; + PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0; + PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0; + PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0; # endif /*VK_USE_PLATFORM_FUCHSIA*/ - //=== VK_GOOGLE_display_timing === - PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; - PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; - - //=== VK_HUAWEI_invocation_mask === - PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0; - - //=== VK_HUAWEI_subpass_shading === - PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0; - PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0; - - //=== VK_INTEL_performance_query === - PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; - PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; - PFN_vkCmdSetPerformanceMarkerINTEL 
vkCmdSetPerformanceMarkerINTEL = 0; - PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; - PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; - PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; - PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0; - PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0; - PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; - - //=== VK_KHR_acceleration_structure === - PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; - PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; - PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0; - PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0; - PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0; - PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; - PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0; - PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; - PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; - PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; - PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; - PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; - PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; - PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; - PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; - PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0; - - //=== VK_KHR_bind_memory2 === - PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; - PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; - - //=== VK_KHR_buffer_device_address === - PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; - PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; - PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0; - - //=== VK_KHR_copy_commands2 === - PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0; - PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0; - PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0; - PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0; - PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0; - PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0; - - //=== VK_KHR_create_renderpass2 === - PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; - PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; - PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; - PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; - - //=== VK_KHR_deferred_host_operations === - PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; - PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; - PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; - PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; - PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; - - //=== VK_KHR_descriptor_update_template === - 
PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; - PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; - PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; - PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; - - //=== VK_KHR_device_group === - PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; - PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; - PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; - PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; - PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; - PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; - - //=== VK_KHR_display_swapchain === - PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; - - //=== VK_KHR_draw_indirect_count === - PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; - PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; - - //=== VK_KHR_dynamic_rendering === - PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0; - PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0; - - //=== VK_KHR_external_fence_fd === - PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; - PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_fence_win32 === - PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; - PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; -# else - PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0; - PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_memory_fd === - PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; - PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_memory_win32 === - PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; - PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; -# else - PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0; - PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_semaphore_fd === - PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; - PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_semaphore_win32 === - PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; - PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; -# else - PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0; - PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_fragment_shading_rate === - PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0; - - //=== VK_KHR_get_memory_requirements2 === - PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; - PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; - PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; - - //=== VK_KHR_maintenance1 === - PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; + //=== VK_HUAWEI_subpass_shading === + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0; + PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0; - //=== 
VK_KHR_maintenance3 === - PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; + //=== VK_HUAWEI_invocation_mask === + PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0; - //=== VK_KHR_maintenance4 === - PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0; - PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0; - PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0; + //=== VK_NV_external_memory_rdma === + PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0; - //=== VK_KHR_performance_query === - PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; - PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; + //=== VK_EXT_pipeline_properties === + PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0; - //=== VK_KHR_pipeline_executable_properties === - PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; - PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; - PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; + //=== VK_EXT_extended_dynamic_state2 === + PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0; + PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0; + PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0; + PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0; + PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0; - //=== VK_KHR_present_wait === - PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0; + //=== VK_EXT_color_write_enable === + PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0; - //=== VK_KHR_push_descriptor === - PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; + //=== VK_KHR_ray_tracing_maintenance1 === + PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0; - //=== VK_KHR_ray_tracing_maintenance1 === - PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0; + //=== VK_EXT_multi_draw === + PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0; + PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0; + + //=== VK_EXT_opacity_micromap === + PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0; + PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0; + PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0; + PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0; + PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0; + PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0; + PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0; + PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0; + PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0; + PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0; + PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0; + PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0; + PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0; + PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0; + + //=== VK_HUAWEI_cluster_culling_shader === + PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI = 0; + PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI = 0; - //=== VK_KHR_ray_tracing_pipeline === - PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; - PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; - PFN_vkGetRayTracingShaderGroupHandlesKHR 
vkGetRayTracingShaderGroupHandlesKHR = 0; - PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; - PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0; - PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0; - PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0; + //=== VK_EXT_pageable_device_local_memory === + PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0; - //=== VK_KHR_sampler_ycbcr_conversion === - PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; - PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; + //=== VK_KHR_maintenance4 === + PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0; + PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0; + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0; - //=== VK_KHR_shared_presentable_image === - PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; + //=== VK_VALVE_descriptor_set_host_mapping === + PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0; + PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0; + + //=== VK_NV_copy_memory_indirect === + PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV = 0; + PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV = 0; + + //=== VK_NV_memory_decompression === + PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0; + PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0; + + //=== VK_NV_device_generated_commands_compute === + PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0; + PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV = 0; + PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0; + + //=== VK_EXT_extended_dynamic_state3 === + PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0; + PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0; + PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0; + PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0; + PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0; + PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0; + PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT = 0; + PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0; + PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0; + PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0; + PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0; + PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0; + PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0; + PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0; + PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0; + PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0; + PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0; + PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0; + PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0; + PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0; + 
PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0; + PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0; + PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0; + PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0; + PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0; + PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV = 0; + PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0; + PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0; + PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0; + PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0; + PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0; - //=== VK_KHR_swapchain === - PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; - PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; - PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; - PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; - PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; + //=== VK_EXT_shader_module_identifier === + PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0; + PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0; + + //=== VK_NV_optical_flow === + PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0; + PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV = 0; + PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0; + PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0; + + //=== VK_KHR_maintenance5 === + PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR = 0; + PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR = 0; + PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0; + PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0; + + //=== VK_AMD_anti_lag === + PFN_vkAntiLagUpdateAMD vkAntiLagUpdateAMD = 0; + + //=== VK_EXT_shader_object === + PFN_vkCreateShadersEXT vkCreateShadersEXT = 0; + PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0; + PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0; + PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0; + PFN_vkCmdSetDepthClampRangeEXT vkCmdSetDepthClampRangeEXT = 0; + + //=== VK_KHR_pipeline_binary === + PFN_vkCreatePipelineBinariesKHR vkCreatePipelineBinariesKHR = 0; + PFN_vkDestroyPipelineBinaryKHR vkDestroyPipelineBinaryKHR = 0; + PFN_vkGetPipelineKeyKHR vkGetPipelineKeyKHR = 0; + PFN_vkGetPipelineBinaryDataKHR vkGetPipelineBinaryDataKHR = 0; + PFN_vkReleaseCapturedPipelineDataKHR vkReleaseCapturedPipelineDataKHR = 0; - //=== VK_KHR_synchronization2 === - PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; - PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; - PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; - PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; - PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; - PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; - PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; - PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; + //=== VK_QCOM_tile_properties === + PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0; + PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0; - //=== VK_KHR_timeline_semaphore === - PFN_vkGetSemaphoreCounterValueKHR 
vkGetSemaphoreCounterValueKHR = 0; - PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; - PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; + //=== VK_NV_cooperative_vector === + PFN_vkConvertCooperativeVectorMatrixNV vkConvertCooperativeVectorMatrixNV = 0; + PFN_vkCmdConvertCooperativeVectorMatrixNV vkCmdConvertCooperativeVectorMatrixNV = 0; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_decode_queue === - PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0; -# else - PFN_dummy vkCmdDecodeVideoKHR_placeholder = 0; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_NV_low_latency2 === + PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV = 0; + PFN_vkLatencySleepNV vkLatencySleepNV = 0; + PFN_vkSetLatencyMarkerNV vkSetLatencyMarkerNV = 0; + PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV = 0; + PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV = 0; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_encode_queue === - PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0; -# else - PFN_dummy vkCmdEncodeVideoKHR_placeholder = 0; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT = 0; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_queue === - PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0; - PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0; - PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0; - PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0; - PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0; - PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0; - PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0; - PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0; - PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0; - PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0; +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX = 0; # else - PFN_dummy vkCreateVideoSessionKHR_placeholder = 0; - PFN_dummy vkDestroyVideoSessionKHR_placeholder = 0; - PFN_dummy vkGetVideoSessionMemoryRequirementsKHR_placeholder = 0; - PFN_dummy vkBindVideoSessionMemoryKHR_placeholder = 0; - PFN_dummy vkCreateVideoSessionParametersKHR_placeholder = 0; - PFN_dummy vkUpdateVideoSessionParametersKHR_placeholder = 0; - PFN_dummy vkDestroyVideoSessionParametersKHR_placeholder = 0; - PFN_dummy vkCmdBeginVideoCodingKHR_placeholder = 0; - PFN_dummy vkCmdEndVideoCodingKHR_placeholder = 0; - PFN_dummy vkCmdControlVideoCodingKHR_placeholder = 0; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - //=== VK_NVX_binary_import === - PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0; - PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0; - PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0; - PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0; - PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; - - //=== VK_NVX_image_view_handle === - PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; - PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; - - //=== VK_NV_clip_space_w_scaling === - PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; - - //=== VK_NV_device_diagnostic_checkpoints === - PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; - PFN_vkGetQueueCheckpointDataNV 
vkGetQueueCheckpointDataNV = 0; - - //=== VK_NV_device_generated_commands === - PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0; - PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; - PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0; - PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0; - PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; - PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; + PFN_dummy vkGetScreenBufferPropertiesQNX_placeholder = 0; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - //=== VK_NV_external_memory_rdma === - PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0; + //=== VK_KHR_line_rasterization === + PFN_vkCmdSetLineStippleKHR vkCmdSetLineStippleKHR = 0; + + //=== VK_KHR_calibrated_timestamps === + PFN_vkGetCalibratedTimestampsKHR vkGetCalibratedTimestampsKHR = 0; + + //=== VK_KHR_maintenance6 === + PFN_vkCmdBindDescriptorSets2KHR vkCmdBindDescriptorSets2KHR = 0; + PFN_vkCmdPushConstants2KHR vkCmdPushConstants2KHR = 0; + PFN_vkCmdPushDescriptorSet2KHR vkCmdPushDescriptorSet2KHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplate2KHR vkCmdPushDescriptorSetWithTemplate2KHR = 0; + PFN_vkCmdSetDescriptorBufferOffsets2EXT vkCmdSetDescriptorBufferOffsets2EXT = 0; + PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = 0; + + //=== VK_NV_cluster_acceleration_structure === + PFN_vkGetClusterAccelerationStructureBuildSizesNV vkGetClusterAccelerationStructureBuildSizesNV = 0; + PFN_vkCmdBuildClusterAccelerationStructureIndirectNV vkCmdBuildClusterAccelerationStructureIndirectNV = 0; + + //=== VK_NV_partitioned_acceleration_structure === + PFN_vkGetPartitionedAccelerationStructuresBuildSizesNV vkGetPartitionedAccelerationStructuresBuildSizesNV = 0; + PFN_vkCmdBuildPartitionedAccelerationStructuresNV vkCmdBuildPartitionedAccelerationStructuresNV = 0; + + //=== VK_EXT_device_generated_commands === + PFN_vkGetGeneratedCommandsMemoryRequirementsEXT vkGetGeneratedCommandsMemoryRequirementsEXT = 0; + PFN_vkCmdPreprocessGeneratedCommandsEXT vkCmdPreprocessGeneratedCommandsEXT = 0; + PFN_vkCmdExecuteGeneratedCommandsEXT vkCmdExecuteGeneratedCommandsEXT = 0; + PFN_vkCreateIndirectCommandsLayoutEXT vkCreateIndirectCommandsLayoutEXT = 0; + PFN_vkDestroyIndirectCommandsLayoutEXT vkDestroyIndirectCommandsLayoutEXT = 0; + PFN_vkCreateIndirectExecutionSetEXT vkCreateIndirectExecutionSetEXT = 0; + PFN_vkDestroyIndirectExecutionSetEXT vkDestroyIndirectExecutionSetEXT = 0; + PFN_vkUpdateIndirectExecutionSetPipelineEXT vkUpdateIndirectExecutionSetPipelineEXT = 0; + PFN_vkUpdateIndirectExecutionSetShaderEXT vkUpdateIndirectExecutionSetShaderEXT = 0; -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_external_memory_win32 === - PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + PFN_vkGetMemoryMetalHandleEXT vkGetMemoryMetalHandleEXT = 0; + PFN_vkGetMemoryMetalHandlePropertiesEXT vkGetMemoryMetalHandlePropertiesEXT = 0; # else - PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_NV_fragment_shading_rate_enums === - PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0; - - //=== VK_NV_mesh_shader === - PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; - PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; 
- PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; - - //=== VK_NV_ray_tracing === - PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; - PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; - PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; - PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; - PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; - PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0; - PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; - PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; - PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; - PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; - PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; - PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; - - //=== VK_NV_scissor_exclusive === - PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; - - //=== VK_NV_shading_rate_image === - PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; - PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; - PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; - - //=== VK_QCOM_tile_properties === - PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0; - PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0; + PFN_dummy vkGetMemoryMetalHandleEXT_placeholder = 0; + PFN_dummy vkGetMemoryMetalHandlePropertiesEXT_placeholder = 0; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + }; - //=== VK_VALVE_descriptor_set_host_mapping === - PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0; - PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0; - }; + } // namespace detail //======================================== //=== RAII HANDLE forward declarations === @@ -2263,11 +2845,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_debug_report === class DebugReportCallbackEXT; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_queue === class VideoSessionKHR; class VideoSessionParametersKHR; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ //=== VK_NVX_binary_import === class CuModuleNVX; @@ -2294,25 +2874,53 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_device_generated_commands === class IndirectCommandsLayoutNV; +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + class CudaModuleNV; + class CudaFunctionNV; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_buffer_collection === class BufferCollectionFUCHSIA; # endif /*VK_USE_PLATFORM_FUCHSIA*/ + //=== VK_EXT_opacity_micromap === + class MicromapEXT; + + //=== VK_NV_optical_flow === + class OpticalFlowSessionNV; + + //=== VK_EXT_shader_object === + class ShaderEXT; + + //=== VK_KHR_pipeline_binary === + class PipelineBinaryKHR; + + //=== VK_EXT_device_generated_commands === + class IndirectCommandsLayoutEXT; + class IndirectExecutionSetEXT; + //==================== //=== RAII HANDLES === //==================== + template + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = false; + }; + class Context { public: # if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL Context() - : m_dispatcher( new 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher( + : m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::ContextDispatcher( m_dynamicLoader.getProcAddress( "vkGetInstanceProcAddr" ) ) ) # else Context( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) - : m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher( getInstanceProcAddr ) ) + : m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::ContextDispatcher( getInstanceProcAddr ) ) # endif { } @@ -2330,7 +2938,8 @@ namespace VULKAN_HPP_NAMESPACE { } Context & operator=( Context const & ) = delete; - Context & operator =( Context && rhs ) VULKAN_HPP_NOEXCEPT + + Context & operator=( Context && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { @@ -2342,7 +2951,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::ContextDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return &*m_dispatcher; @@ -2358,9 +2967,11 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_VERSION_1_0 === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Instance - createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD std::vector enumerateInstanceExtensionProperties( Optional layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const; @@ -2373,45 +2984,38 @@ namespace VULKAN_HPP_NAMESPACE private: # if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL - VULKAN_HPP_NAMESPACE::DynamicLoader m_dynamicLoader; + VULKAN_HPP_NAMESPACE::detail::DynamicLoader m_dynamicLoader; # endif - std::unique_ptr m_dispatcher; + std::unique_ptr m_dispatcher; }; class Instance { public: - using CType = VkInstance; + using CType = VkInstance; + using CppType = VULKAN_HPP_NAMESPACE::Instance; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eInstance; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance; public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) Instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context const & context, VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_allocator( static_cast( allocator ) ) { - VULKAN_HPP_NAMESPACE::Result result = - static_cast( context.getDispatcher()->vkCreateInstance( reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_instance ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateInstance" ); - } - m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher( context.getDispatcher()->vkGetInstanceProcAddr, - static_cast( m_instance ) ) ); + *this = context.createInstance( createInfo, allocator ); } +# endif Instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context const & 
context, VkInstance instance, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) : m_instance( instance ), m_allocator( static_cast( allocator ) ) { - m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher( context.getDispatcher()->vkGetInstanceProcAddr, - static_cast( m_instance ) ) ); + m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher( context.getDispatcher()->vkGetInstanceProcAddr, + static_cast( m_instance ) ) ); } Instance( std::nullptr_t ) {} @@ -2423,21 +3027,23 @@ namespace VULKAN_HPP_NAMESPACE Instance() = delete; Instance( Instance const & ) = delete; + Instance( Instance && rhs ) VULKAN_HPP_NOEXCEPT - : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + : m_instance( VULKAN_HPP_NAMESPACE::exchange( rhs.m_instance, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) , m_dispatcher( rhs.m_dispatcher.release() ) { } + Instance & operator=( Instance const & ) = delete; - Instance & operator =( Instance && rhs ) VULKAN_HPP_NOEXCEPT + + Instance & operator=( Instance && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_instance = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher.reset( rhs.m_dispatcher.release() ); + std::swap( m_instance, rhs.m_instance ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -2447,6 +3053,11 @@ namespace VULKAN_HPP_NAMESPACE return m_instance; } + operator VULKAN_HPP_NAMESPACE::Instance() const VULKAN_HPP_NOEXCEPT + { + return m_instance; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_instance ) @@ -2458,7 +3069,14 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::Instance release() + { + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_instance, nullptr ); + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return &*m_dispatcher; @@ -2473,9 +3091,11 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_VERSION_1_0 === - VULKAN_HPP_NODISCARD std::vector enumeratePhysicalDevices() const; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type + enumeratePhysicalDevices() const; - VULKAN_HPP_NODISCARD PFN_vkVoidFunction getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PFN_VoidFunction getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT; //=== VK_VERSION_1_1 === @@ -2483,58 +3103,72 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_display === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - createDisplayPlaneSurfaceKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createDisplayPlaneSurfaceKHR( 
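// Illustrative usage sketch for the Context / Instance wrappers above, assuming the
// default configuration (exceptions enabled, VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL on):
// detail::CreateReturnType<Instance>::Type is then simply vk::raii::Instance and the
// factory throws vk::SystemError on failure. Application and engine names are placeholders.

#include <vulkan/vulkan_raii.hpp>

int main()
{
  vk::raii::Context context;  // resolves vkGetInstanceProcAddr through the dynamic loader

  vk::ApplicationInfo    appInfo( "raii-demo", 1, "no-engine", 1, VK_API_VERSION_1_3 );
  vk::InstanceCreateInfo instanceCreateInfo( {}, &appInfo );

  // Owns the resulting VkInstance; vkDestroyInstance runs when `instance` leaves scope.
  vk::raii::Instance instance = context.createInstance( instanceCreateInfo );

  return 0;
}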
VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; # if defined( VK_USE_PLATFORM_XLIB_KHR ) //=== VK_KHR_xlib_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_XLIB_KHR*/ # if defined( VK_USE_PLATFORM_XCB_KHR ) //=== VK_KHR_xcb_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - createXcbSurfaceKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createXcbSurfaceKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_XCB_KHR*/ # if defined( VK_USE_PLATFORM_WAYLAND_KHR ) //=== VK_KHR_wayland_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ # if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_KHR_android_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_win32_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_EXT_debug_report === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT - createDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + 
VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, @@ -2544,17 +3178,21 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_stream_descriptor_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_GGP*/ # if defined( VK_USE_PLATFORM_VI_NN ) //=== VK_NN_vi_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_VI_NN*/ //=== VK_KHR_device_group_creation === @@ -2564,24 +3202,30 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_IOS_MVK ) //=== VK_MVK_ios_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_IOS_MVK*/ # if defined( VK_USE_PLATFORM_MACOS_MVK ) //=== VK_MVK_macos_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_MACOS_MVK*/ //=== VK_EXT_debug_utils === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT - createDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, @@ -2590,51 +3234,68 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_imagepipe_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_FUCHSIA*/ # if defined( VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - createMetalSurfaceEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createMetalSurfaceEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_METAL_EXT*/ //=== VK_EXT_headless_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; # if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) //=== VK_EXT_directfb_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ # if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_screen_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + 
VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_SCREEN_QNX*/ private: - VULKAN_HPP_NAMESPACE::Instance m_instance = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - std::unique_ptr m_dispatcher; + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + std::unique_ptr m_dispatcher; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; class PhysicalDevice { public: - using CType = VkPhysicalDevice; + using CType = VkPhysicalDevice; + using CppType = VULKAN_HPP_NAMESPACE::PhysicalDevice; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = @@ -2654,24 +3315,28 @@ namespace VULKAN_HPP_NAMESPACE } PhysicalDevice() = delete; + PhysicalDevice( PhysicalDevice const & rhs ) : m_physicalDevice( rhs.m_physicalDevice ), m_dispatcher( rhs.m_dispatcher ) {} + PhysicalDevice( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT - : m_physicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_physicalDevice( VULKAN_HPP_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + PhysicalDevice & operator=( PhysicalDevice const & rhs ) { m_physicalDevice = rhs.m_physicalDevice; m_dispatcher = rhs.m_dispatcher; return *this; } + PhysicalDevice & operator=( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - m_physicalDevice = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_physicalDevice, rhs.m_physicalDevice ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -2681,13 +3346,24 @@ namespace VULKAN_HPP_NAMESPACE return m_physicalDevice; } + operator VULKAN_HPP_NAMESPACE::PhysicalDevice() const VULKAN_HPP_NOEXCEPT + { + return m_physicalDevice; + } + void clear() VULKAN_HPP_NOEXCEPT { m_physicalDevice = nullptr; m_dispatcher = nullptr; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::PhysicalDevice release() + { + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_physicalDevice, nullptr ); + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -2718,9 +3394,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties() const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Device - createDevice( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createDevice( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD 
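// Sketch of the exception-free path guarded above: assuming VULKAN_HPP_RAII_NO_EXCEPTIONS
// is defined, VULKAN_HPP_RAII_CREATE_NOEXCEPT expands to noexcept and
// detail::CreateReturnType<T>::Type becomes an expected-like type holding either the RAII
// handle or the failing vk::Result (the exact type is configuration-dependent).

#include <vulkan/vulkan_raii.hpp>
#include <optional>

// Hypothetical helper: create a device without exceptions, returning nullopt on failure.
std::optional<vk::raii::Device> tryCreateDevice( vk::raii::PhysicalDevice const & physicalDevice,
                                                 vk::DeviceCreateInfo const &     createInfo )
{
  auto expected = physicalDevice.createDevice( createInfo );  // noexcept in this configuration
  if ( !expected )
  {
    // expected.error() is the vk::Result that vkCreateDevice reported
    return std::nullopt;
  }
  return std::move( *expected );
}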
std::vector enumerateDeviceExtensionProperties( Optional layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const; @@ -2739,23 +3417,24 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2() const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain getFeatures2() const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getFeatures2() const VULKAN_HPP_NOEXCEPT; VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2() const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain getProperties2() const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getProperties2() const VULKAN_HPP_NOEXCEPT; VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT; VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageFormatProperties2 getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const; template - VULKAN_HPP_NODISCARD StructureChain + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const; VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2() const; @@ -2766,7 +3445,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2() const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain getMemoryProperties2() const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getMemoryProperties2() const VULKAN_HPP_NOEXCEPT; VULKAN_HPP_NODISCARD std::vector getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const; @@ -2806,7 +3485,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getDisplayPlanePropertiesKHR() const; - VULKAN_HPP_NODISCARD std::vector getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type + getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const; # if defined( VK_USE_PLATFORM_XLIB_KHR ) //=== VK_KHR_xlib_surface === @@ -2835,18 +3516,21 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_queue === VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const; template - VULKAN_HPP_NODISCARD StructureChain getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const; VULKAN_HPP_NODISCARD std::vector getVideoFormatPropertiesKHR( const 
VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + template + VULKAN_HPP_NODISCARD std::vector + getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const; //=== VK_NV_external_memory_capabilities === @@ -2863,23 +3547,24 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2KHR() const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain getFeatures2KHR() const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getFeatures2KHR() const VULKAN_HPP_NOEXCEPT; VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2KHR() const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain getProperties2KHR() const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getProperties2KHR() const VULKAN_HPP_NOEXCEPT; VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT; VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageFormatProperties2 getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const; template - VULKAN_HPP_NODISCARD StructureChain + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const; VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2KHR() const; @@ -2890,7 +3575,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT; VULKAN_HPP_NODISCARD std::vector getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const; @@ -2910,7 +3595,9 @@ namespace VULKAN_HPP_NAMESPACE void acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayKHR getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ //=== VK_EXT_display_surface_counter === @@ -2937,7 +3624,7 @@ namespace VULKAN_HPP_NAMESPACE getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const; template - VULKAN_HPP_NODISCARD StructureChain + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const; VULKAN_HPP_NODISCARD std::vector @@ -2962,7 +3649,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_calibrated_timestamps === - 
VULKAN_HPP_NODISCARD std::vector getCalibrateableTimeDomainsEXT() const; + VULKAN_HPP_NODISCARD std::vector getCalibrateableTimeDomainsEXT() const; //=== VK_KHR_fragment_shading_rate === @@ -2991,12 +3678,25 @@ namespace VULKAN_HPP_NAMESPACE void acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayKHR getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + //=== VK_KHR_video_encode_queue === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR + getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo ) const; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo ) const; # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_acquire_winrt_display === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayKHR getWinrtDisplayNV( uint32_t deviceRelativeId ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + getWinrtDisplayNV( uint32_t deviceRelativeId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ # if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) @@ -3013,90 +3713,91 @@ namespace VULKAN_HPP_NAMESPACE struct _screen_window & window ) const VULKAN_HPP_NOEXCEPT; # endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + //=== VK_NV_optical_flow === + + VULKAN_HPP_NODISCARD std::vector + getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo ) const; + + //=== VK_NV_cooperative_vector === + + VULKAN_HPP_NODISCARD std::vector getCooperativeVectorPropertiesNV() const; + + //=== VK_KHR_cooperative_matrix === + + VULKAN_HPP_NODISCARD std::vector getCooperativeMatrixPropertiesKHR() const; + + //=== VK_KHR_calibrated_timestamps === + + VULKAN_HPP_NODISCARD std::vector getCalibrateableTimeDomainsKHR() const; + + //=== VK_NV_cooperative_matrix2 === + + VULKAN_HPP_NODISCARD std::vector + getCooperativeMatrixFlexibleDimensionsPropertiesNV() const; + private: - VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; class PhysicalDevices : public std::vector { public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) PhysicalDevices( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance ) { - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * dispatcher = instance.getDispatcher(); - std::vector physicalDevices; - uint32_t physicalDeviceCount; - VULKAN_HPP_NAMESPACE::Result result; - do - { - result = static_cast( - dispatcher->vkEnumeratePhysicalDevices( static_cast( *instance ), &physicalDeviceCount, nullptr ) ); - if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && 
physicalDeviceCount ) - { - physicalDevices.resize( physicalDeviceCount ); - result = static_cast( - dispatcher->vkEnumeratePhysicalDevices( static_cast( *instance ), &physicalDeviceCount, physicalDevices.data() ) ); - } - } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); - this->reserve( physicalDeviceCount ); - for ( auto const & physicalDevice : physicalDevices ) - { - this->emplace_back( instance, physicalDevice ); - } - } - else - { - throwResultException( result, "vkEnumeratePhysicalDevices" ); - } + *this = instance.enumeratePhysicalDevices(); } +# endif PhysicalDevices( std::nullptr_t ) {} - PhysicalDevices() = delete; - PhysicalDevices( PhysicalDevices const & ) = delete; - PhysicalDevices( PhysicalDevices && rhs ) = default; + PhysicalDevices() = delete; + PhysicalDevices( PhysicalDevices const & ) = delete; + PhysicalDevices( PhysicalDevices && rhs ) = default; PhysicalDevices & operator=( PhysicalDevices const & ) = delete; - PhysicalDevices & operator=( PhysicalDevices && rhs ) = default; + PhysicalDevices & operator=( PhysicalDevices && rhs ) = default; + + private: + PhysicalDevices( std::vector && rhs ) + { + std::swap( *this, rhs ); + } }; class Device { public: - using CType = VkDevice; + using CType = VkDevice; + using CppType = VULKAN_HPP_NAMESPACE::Device; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDevice; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice; public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) Device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_allocator( static_cast( allocator ) ) - { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - physicalDevice.getDispatcher()->vkCreateDevice( static_cast( *physicalDevice ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_device ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateDevice" ); - } - m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher( physicalDevice.getDispatcher()->vkGetDeviceProcAddr, - static_cast( m_device ) ) ); + { + *this = physicalDevice.createDevice( createInfo, allocator ); } +# endif Device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, VkDevice device, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) : m_device( device ), m_allocator( static_cast( allocator ) ) { - m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher( physicalDevice.getDispatcher()->vkGetDeviceProcAddr, - static_cast( m_device ) ) ); + m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher( physicalDevice.getDispatcher()->vkGetDeviceProcAddr, + static_cast( m_device ) ) ); } Device( std::nullptr_t ) {} @@ -3108,21 +3809,23 @@ namespace VULKAN_HPP_NAMESPACE Device() = delete; Device( Device const & ) = delete; + Device( Device && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_allocator( 
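// Sketch tying the classes above together under the default (exception-enabled)
// configuration: PhysicalDevices now forwards to instance.enumeratePhysicalDevices(),
// and the Device constructor shown here delegates to physicalDevice.createDevice().
// The queue family index 0 is a placeholder; real code should query the queue families.

#include <vulkan/vulkan_raii.hpp>
#include <utility>

vk::raii::Device makeFirstDevice( vk::raii::Instance const & instance )
{
  vk::raii::PhysicalDevices physicalDevices( instance );  // wraps vkEnumeratePhysicalDevices
  vk::raii::PhysicalDevice  physicalDevice = std::move( physicalDevices.front() );

  float                     queuePriority = 1.0f;
  vk::DeviceQueueCreateInfo queueInfo( {}, 0 /* queueFamilyIndex */, 1, &queuePriority );
  vk::DeviceCreateInfo      deviceInfo( {}, queueInfo );

  // Equivalent to physicalDevice.createDevice( deviceInfo )
  return vk::raii::Device( physicalDevice, deviceInfo );
}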
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) , m_dispatcher( rhs.m_dispatcher.release() ) { } + Device & operator=( Device const & ) = delete; - Device & operator =( Device && rhs ) VULKAN_HPP_NOEXCEPT + + Device & operator=( Device && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher.reset( rhs.m_dispatcher.release() ); + std::swap( m_device, rhs.m_device ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -3132,6 +3835,11 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } + operator VULKAN_HPP_NAMESPACE::Device() const VULKAN_HPP_NOEXCEPT + { + return m_device; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_device ) @@ -3143,7 +3851,14 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::Device release() + { + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_device, nullptr ); + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return &*m_dispatcher; @@ -3158,23 +3873,29 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_VERSION_1_0 === - VULKAN_HPP_NODISCARD PFN_vkVoidFunction getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PFN_VoidFunction getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; void waitIdle() const; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DeviceMemory - allocateMemory( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + allocateMemory( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; void flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges ) const; void invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Fence - createFence( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createFence( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; void resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy const & fences ) const; @@ -3182,99 +3903,139 @@ 
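// Sketch of the conversion operator and release() shown above. After release() the
// RAII wrapper no longer owns the handle, so destruction becomes the caller's job.

#include <vulkan/vulkan_raii.hpp>

void handOff( vk::raii::Device & device )
{
  vk::Device view = device;  // operator vk::Device(): non-owning handle, `device` still destroys it
  static_cast<void>( view );

  vk::Device unmanaged = device.release();  // `device` gives up ownership and will not call vkDestroyDevice
  unmanaged.destroy();                      // destruction is now the caller's responsibility (default dispatcher)
}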
namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Semaphore - createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Event - createEvent( VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createEvent( VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::QueryPool - createQueryPool( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createQueryPool( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Buffer - createBuffer( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createBuffer( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::BufferView - createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Image - createImage( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createImage( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ImageView - createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ShaderModule - createShaderModule( 
VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createShaderModule( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PipelineCache - createPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD std::vector + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type createGraphicsPipelines( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createGraphicsPipeline( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD std::vector + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type createComputePipelines( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createComputePipeline( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PipelineLayout - createPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Sampler - createSampler( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createSampler( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD 
VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout - createDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorPool - createDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD std::vector - allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type + allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const; void updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites, VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorCopies ) const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Framebuffer - createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::RenderPass - createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CommandPool - createCommandPool( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createCommandPool( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD std::vector - allocateCommandBuffers( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) const; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type + allocateCommandBuffers( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) const; //=== VK_VERSION_1_1 === @@ -3289,41 +4050,49 @@ namespace VULKAN_HPP_NAMESPACE getImageMemoryRequirements2( const 
VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; VULKAN_HPP_NODISCARD std::vector getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Queue getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion - createSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate - createDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate>::Type + createDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; //=== VK_VERSION_1_2 === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::RenderPass - createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const; @@ -3339,16 +4108,18 @@ 
namespace VULKAN_HPP_NAMESPACE //=== VK_VERSION_1_3 === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot - createPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - void setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + void setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data ) const; - VULKAN_HPP_NODISCARD uint64_t getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + VULKAN_HPP_NODISCARD uint64_t getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT; @@ -3356,24 +4127,50 @@ namespace VULKAN_HPP_NAMESPACE getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; VULKAN_HPP_NODISCARD std::vector getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const; + //=== VK_VERSION_1_4 === + + VULKAN_HPP_NODISCARD void * mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo ) const; + + void unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D + getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT; + + void copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo ) const; + + void copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo ) const; + + void copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo ) const; + + void transitionImageLayout( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions ) const; + //=== VK_KHR_swapchain === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR - createSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR getGroupPresentCapabilitiesKHR() const; @@ -3385,13 +4182,16 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_display_swapchain === - VULKAN_HPP_NODISCARD std::vector - createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type + createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR - createSharedSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createSharedSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; //=== VK_EXT_debug_marker === @@ -3399,32 +4199,40 @@ namespace VULKAN_HPP_NAMESPACE void debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo ) const; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_queue === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR - createVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR - createVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR>::Type + createVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; //=== VK_NVX_binary_import === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX - createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX - createCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const 
& createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; //=== VK_NVX_image_view_handle === VULKAN_HPP_NODISCARD uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD uint64_t getImageViewHandle64NVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT; + //=== VK_KHR_device_group === VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags @@ -3462,9 +4270,11 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_descriptor_update_template === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate - createDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate>::Type + createDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator @@ -3474,14 +4284,18 @@ namespace VULKAN_HPP_NAMESPACE void displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Fence - registerEventEXT( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + registerEventEXT( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Fence - registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, - VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, + VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; //=== VK_EXT_hdr_metadata === @@ -3490,9 +4304,11 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_create_renderpass2 === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::RenderPass - createRenderPass2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createRenderPass2KHR( 
VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_fence_win32 === @@ -3527,26 +4343,45 @@ namespace VULKAN_HPP_NAMESPACE getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const; template - VULKAN_HPP_NODISCARD StructureChain getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const; VULKAN_HPP_NODISCARD struct AHardwareBuffer * getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info ) const; # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type + createExecutionGraphPipelinesAMDX( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createExecutionGraphPipelineAMDX( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_KHR_get_memory_requirements2 === VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT; VULKAN_HPP_NODISCARD std::vector @@ -3554,9 +4389,11 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_acceleration_structure === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR - createAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR>::Type + createAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, @@ -3599,11 +4436,31 @@ namespace VULKAN_HPP_NAMESPACE const 
VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, VULKAN_HPP_NAMESPACE::ArrayProxy const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const; + //=== VK_KHR_ray_tracing_pipeline === + + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type + createRayTracingPipelinesKHR( + VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createRayTracingPipelineKHR( + VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; + //=== VK_KHR_sampler_ycbcr_conversion === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion - createSamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createSamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator @@ -3617,35 +4474,42 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_validation_cache === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT - createValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; //=== VK_NV_ray_tracing === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV - createAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV>::Type + createAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain getAccelerationStructureMemoryRequirementsNV( + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getAccelerationStructureMemoryRequirementsNV( const 
VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT; void bindAccelerationStructureMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const; - VULKAN_HPP_NODISCARD std::vector + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; //=== VK_KHR_maintenance3 === @@ -3653,7 +4517,7 @@ namespace VULKAN_HPP_NAMESPACE getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; //=== VK_EXT_external_memory_host === @@ -3664,10 +4528,10 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_calibrated_timestamps === VULKAN_HPP_NODISCARD std::pair, uint64_t> - getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos ) const; + getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos ) const; VULKAN_HPP_NODISCARD std::pair - getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo ) const; + getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo ) const; //=== VK_KHR_timeline_semaphore === @@ -3681,8 +4545,10 @@ namespace VULKAN_HPP_NAMESPACE void uninitializePerformanceApiINTEL() const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL - acquirePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL>::Type + acquirePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PerformanceValueINTEL getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter ) const; @@ -3711,8 +4577,10 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_deferred_host_operations === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR - createDeferredOperationKHR( VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createDeferredOperationKHR( VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; //=== VK_KHR_pipeline_executable_properties === @@ -3725,61 +4593,125 @@ namespace 
VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const; + //=== VK_EXT_host_image_copy === + + void copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo ) const; + + void copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo ) const; + + void copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo ) const; + + void transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions ) const; + + //=== VK_KHR_map_memory2 === + + VULKAN_HPP_NODISCARD void * mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo ) const; + + void unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo ) const; + + //=== VK_EXT_swapchain_maintenance1 === + + void releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo ) const; + //=== VK_NV_device_generated_commands === VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV - createIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV>::Type + createIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; //=== VK_EXT_private_data === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot - createPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - void setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + void setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data ) const; - VULKAN_HPP_NODISCARD uint64_t getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + VULKAN_HPP_NODISCARD uint64_t getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT; + //=== VK_KHR_video_encode_queue 
=== + + VULKAN_HPP_NODISCARD std::pair> + getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo ) const; + + template + VULKAN_HPP_NODISCARD std::pair, std::vector> + getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo ) const; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + # if defined( VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_objects === VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT; # endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_KHR_ray_tracing_pipeline === + //=== VK_EXT_descriptor_buffer === - VULKAN_HPP_NODISCARD std::vector createRayTracingPipelinesKHR( - VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, - VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + void getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, size_t dataSize, void * pDescriptor ) const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline createRayTracingPipelineKHR( - VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, - VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, - VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + template + VULKAN_HPP_NODISCARD DescriptorType getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD DataType getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info ) const; + + template + VULKAN_HPP_NODISCARD DataType getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info ) const; + + template + VULKAN_HPP_NODISCARD DataType + getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info ) const; + + template + VULKAN_HPP_NODISCARD DataType getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info ) const; + template + VULKAN_HPP_NODISCARD DataType + getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info ) const; + + //=== VK_EXT_device_fault === + template + VULKAN_HPP_NODISCARD Result getFaultInfoEXT( 
VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts, + VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_memory === @@ -3801,9 +4733,11 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_buffer_collection === - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA - createBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA>::Type + createBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_FUCHSIA*/ //=== VK_NV_external_memory_rdma === @@ -3815,20 +4749,60 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::BaseOutStructure getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo ) const; + //=== VK_EXT_opacity_micromap === + + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const; + + template + VULKAN_HPP_NODISCARD std::vector + writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride ) const; + + template + VULKAN_HPP_NODISCARD DataType writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT + getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo ) const VULKAN_HPP_NOEXCEPT; + //=== VK_KHR_maintenance4 === VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info 
) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT; VULKAN_HPP_NODISCARD std::vector @@ -3839,54 +4813,181 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference ) const VULKAN_HPP_NOEXCEPT; + //=== VK_NV_device_generated_commands_compute === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress + getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info ) const VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_shader_module_identifier === VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; - //=== VK_QCOM_tile_properties === + //=== VK_NV_optical_flow === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::TilePropertiesQCOM - getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; - private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - std::unique_ptr m_dispatcher; - }; + //=== VK_KHR_maintenance5 === - class AccelerationStructureKHR - { - public: - using CType = VkAccelerationStructureKHR; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D + getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT; - public: - AccelerationStructureKHR( 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_AMD_anti_lag === + + void antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD & data ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_shader_object === + + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type + createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + //=== VK_KHR_pipeline_binary === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type + createPipelineBinariesKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR getPipelineKeyKHR( + Optional pipelineCreateInfo VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const; + + VULKAN_HPP_NODISCARD std::pair> + getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info ) const; + + void releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR & info, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_QCOM_tile_properties === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::TilePropertiesQCOM + getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_cooperative_vector === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result + convertCooperativeVectorMatrixNV( const VULKAN_HPP_NAMESPACE::ConvertCooperativeVectorMatrixInfoNV & info ) const; + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer ) const; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer ) const; +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_calibrated_timestamps === + + VULKAN_HPP_NODISCARD std::pair, uint64_t> + getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos ) const; + + VULKAN_HPP_NODISCARD std::pair + getCalibratedTimestampKHR( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo ) const; + + //=== VK_NV_cluster_acceleration_structure === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR + getClusterAccelerationStructureBuildSizesNV( const VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV & info ) const VULKAN_HPP_NOEXCEPT; + + //=== 
VK_NV_partitioned_acceleration_structure === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR getPartitionedAccelerationStructuresBuildSizesNV( + const VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV & info ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_device_generated_commands === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutEXT>::Type + createIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectExecutionSetEXT>::Type + createIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === + + VULKAN_HPP_NODISCARD void * getMemoryMetalHandleEXT( const VULKAN_HPP_NAMESPACE::MemoryGetMetalHandleInfoEXT & getMetalHandleInfo ) const; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryMetalHandlePropertiesEXT + getMemoryMetalHandlePropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HandleType const & handle ) const; +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + std::unique_ptr m_dispatcher; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class AccelerationStructureKHR + { + public: + using CType = VkAccelerationStructureKHR; + using CppType = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + AccelerationStructureKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkCreateAccelerationStructureKHR( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_accelerationStructure ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateAccelerationStructureKHR" ); - } + *this = 
device.createAccelerationStructureKHR( createInfo, allocator ); } +# endif AccelerationStructureKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkAccelerationStructureKHR accelerationStructure, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) + : m_device( device ) , m_accelerationStructure( accelerationStructure ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) @@ -3902,23 +5003,25 @@ namespace VULKAN_HPP_NAMESPACE AccelerationStructureKHR() = delete; AccelerationStructureKHR( AccelerationStructureKHR const & ) = delete; + AccelerationStructureKHR( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_accelerationStructure( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_accelerationStructure( VULKAN_HPP_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + AccelerationStructureKHR & operator=( AccelerationStructureKHR const & ) = delete; - AccelerationStructureKHR & operator =( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT + + AccelerationStructureKHR & operator=( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_accelerationStructure = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_accelerationStructure, rhs.m_accelerationStructure ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -3928,6 +5031,11 @@ namespace VULKAN_HPP_NAMESPACE return m_accelerationStructure; } + operator VULKAN_HPP_NAMESPACE::AccelerationStructureKHR() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructure; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_accelerationStructure ) @@ -3942,12 +5050,20 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_accelerationStructure, nullptr ); + } + VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -3962,44 +5078,42 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR m_accelerationStructure = {}; - const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR m_accelerationStructure = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; class AccelerationStructureNV { public: - using CType = VkAccelerationStructureNV; + using CType = VkAccelerationStructureNV; + using CppType = VULKAN_HPP_NAMESPACE::AccelerationStructureNV; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV; public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) AccelerationStructureNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkCreateAccelerationStructureNV( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_accelerationStructure ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateAccelerationStructureNV" ); - } + *this = device.createAccelerationStructureNV( createInfo, allocator ); } +# endif AccelerationStructureNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkAccelerationStructureNV accelerationStructure, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) + : m_device( device ) , m_accelerationStructure( accelerationStructure ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) @@ -4015,23 +5129,25 @@ namespace VULKAN_HPP_NAMESPACE AccelerationStructureNV() = delete; AccelerationStructureNV( AccelerationStructureNV const & ) = delete; + AccelerationStructureNV( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_accelerationStructure( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_accelerationStructure( VULKAN_HPP_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + AccelerationStructureNV & operator=( AccelerationStructureNV const & ) = delete; - AccelerationStructureNV & operator =( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT + + AccelerationStructureNV & operator=( 
AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_accelerationStructure = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_accelerationStructure, rhs.m_accelerationStructure ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -4041,6 +5157,11 @@ namespace VULKAN_HPP_NAMESPACE return m_accelerationStructure; } + operator VULKAN_HPP_NAMESPACE::AccelerationStructureNV() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructure; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_accelerationStructure ) @@ -4055,12 +5176,20 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } + VULKAN_HPP_NAMESPACE::AccelerationStructureNV release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_accelerationStructure, nullptr ); + } + VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -4083,44 +5212,42 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD DataType getHandle() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureNV m_accelerationStructure = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureNV m_accelerationStructure = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; class Buffer { public: - using CType = VkBuffer; + using CType = VkBuffer; + using CppType = VULKAN_HPP_NAMESPACE::Buffer; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBuffer; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer; public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) Buffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = - static_cast( device.getDispatcher()->vkCreateBuffer( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_buffer ) ) ); - if ( result != 
VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateBuffer" ); - } + *this = device.createBuffer( createInfo, allocator ); } +# endif Buffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkBuffer buffer, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) + : m_device( device ) , m_buffer( buffer ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) @@ -4136,23 +5263,25 @@ namespace VULKAN_HPP_NAMESPACE Buffer() = delete; Buffer( Buffer const & ) = delete; + Buffer( Buffer && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_buffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_buffer, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_buffer( VULKAN_HPP_NAMESPACE::exchange( rhs.m_buffer, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + Buffer & operator=( Buffer const & ) = delete; - Buffer & operator =( Buffer && rhs ) VULKAN_HPP_NOEXCEPT + + Buffer & operator=( Buffer && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_buffer = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_buffer, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_buffer, rhs.m_buffer ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -4162,6 +5291,11 @@ namespace VULKAN_HPP_NAMESPACE return m_buffer; } + operator VULKAN_HPP_NAMESPACE::Buffer() const VULKAN_HPP_NOEXCEPT + { + return m_buffer; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_buffer ) @@ -4175,12 +5309,20 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } + VULKAN_HPP_NAMESPACE::Buffer release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_buffer, nullptr ); + } + VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -4201,45 +5343,43 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getMemoryRequirements() const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::Buffer m_buffer = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Buffer m_buffer = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; # if defined( VK_USE_PLATFORM_FUCHSIA ) class BufferCollectionFUCHSIA { public: - using CType = VkBufferCollectionFUCHSIA; + using CType = VkBufferCollectionFUCHSIA; + using CppType = VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA; public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) BufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkCreateBufferCollectionFUCHSIA( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_collection ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateBufferCollectionFUCHSIA" ); - } + *this = device.createBufferCollectionFUCHSIA( createInfo, allocator ); } +# endif BufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkBufferCollectionFUCHSIA collection, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) + : m_device( device ) , m_collection( collection ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) @@ -4255,23 +5395,25 @@ namespace VULKAN_HPP_NAMESPACE BufferCollectionFUCHSIA() = delete; BufferCollectionFUCHSIA( BufferCollectionFUCHSIA const & ) = delete; + BufferCollectionFUCHSIA( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_collection( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_collection, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_collection( VULKAN_HPP_NAMESPACE::exchange( rhs.m_collection, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA const & ) = delete; - BufferCollectionFUCHSIA & operator =( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT + + BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_collection = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_collection, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_collection, rhs.m_collection ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -4281,6 +5423,11 @@ namespace VULKAN_HPP_NAMESPACE return m_collection; } + operator VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA() const VULKAN_HPP_NOEXCEPT + { + return m_collection; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_collection ) @@ -4295,12 +5442,20 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_collection, nullptr ); + } + VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -4323,45 +5478,44 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA getProperties() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA m_collection = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA m_collection = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; + # endif /*VK_USE_PLATFORM_FUCHSIA*/ class BufferView { public: - using CType = VkBufferView; + using CType = VkBufferView; + using CppType = VULKAN_HPP_NAMESPACE::BufferView; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferView; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView; public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) BufferView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkCreateBufferView( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_bufferView ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateBufferView" ); - } + *this = device.createBufferView( createInfo, allocator ); } +# endif BufferView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkBufferView bufferView, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) + : m_device( device ) , m_bufferView( 
bufferView ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) @@ -4377,23 +5531,25 @@ namespace VULKAN_HPP_NAMESPACE BufferView() = delete; BufferView( BufferView const & ) = delete; + BufferView( BufferView && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_bufferView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_bufferView, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_bufferView( VULKAN_HPP_NAMESPACE::exchange( rhs.m_bufferView, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + BufferView & operator=( BufferView const & ) = delete; - BufferView & operator =( BufferView && rhs ) VULKAN_HPP_NOEXCEPT + + BufferView & operator=( BufferView && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_bufferView = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_bufferView, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_bufferView, rhs.m_bufferView ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -4403,6 +5559,11 @@ namespace VULKAN_HPP_NAMESPACE return m_bufferView; } + operator VULKAN_HPP_NAMESPACE::BufferView() const VULKAN_HPP_NOEXCEPT + { + return m_bufferView; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_bufferView ) @@ -4416,12 +5577,20 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } + VULKAN_HPP_NAMESPACE::BufferView release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_bufferView, nullptr ); + } + VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -4436,44 +5605,42 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::BufferView m_bufferView = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::BufferView m_bufferView = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; class CommandPool { public: - using CType = VkCommandPool; + using CType = VkCommandPool; + using CppType = VULKAN_HPP_NAMESPACE::CommandPool; 
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool; public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) CommandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkCreateCommandPool( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_commandPool ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateCommandPool" ); - } + *this = device.createCommandPool( createInfo, allocator ); } +# endif CommandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkCommandPool commandPool, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) + : m_device( device ) , m_commandPool( commandPool ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) @@ -4489,23 +5656,25 @@ namespace VULKAN_HPP_NAMESPACE CommandPool() = delete; CommandPool( CommandPool const & ) = delete; + CommandPool( CommandPool && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_commandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_commandPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandPool, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + CommandPool & operator=( CommandPool const & ) = delete; - CommandPool & operator =( CommandPool && rhs ) VULKAN_HPP_NOEXCEPT + + CommandPool & operator=( CommandPool && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_commandPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_commandPool, rhs.m_commandPool ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -4515,6 +5684,11 @@ namespace VULKAN_HPP_NAMESPACE return m_commandPool; } + operator VULKAN_HPP_NAMESPACE::CommandPool() const VULKAN_HPP_NOEXCEPT + { + return m_commandPool; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_commandPool ) @@ -4528,12 +5702,20 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } + VULKAN_HPP_NAMESPACE::CommandPool release() + { + m_device = nullptr; + m_allocator = 
nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_commandPool, nullptr ); + } + VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -4560,16 +5742,23 @@ namespace VULKAN_HPP_NAMESPACE void trimKHR( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; class CommandBuffer { public: - using CType = VkCommandBuffer; + using CType = VkCommandBuffer; + using CppType = VULKAN_HPP_NAMESPACE::CommandBuffer; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = @@ -4577,7 +5766,7 @@ namespace VULKAN_HPP_NAMESPACE public: CommandBuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkCommandBuffer commandBuffer, VkCommandPool commandPool ) - : m_device( *device ), m_commandPool( commandPool ), m_commandBuffer( commandBuffer ), m_dispatcher( device.getDispatcher() ) + : m_device( device ), m_commandPool( commandPool ), m_commandBuffer( commandBuffer ), m_dispatcher( device.getDispatcher() ) { } @@ -4590,23 +5779,25 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer() = delete; CommandBuffer( CommandBuffer const & ) = delete; + CommandBuffer( CommandBuffer && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_commandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} ) ) - , m_commandBuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandBuffer, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_commandPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandPool, {} ) ) + , m_commandBuffer( VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandBuffer, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + CommandBuffer & operator=( CommandBuffer const & ) = delete; - CommandBuffer & operator =( CommandBuffer && rhs ) VULKAN_HPP_NOEXCEPT + + CommandBuffer & operator=( CommandBuffer && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_commandPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} ); 
- m_commandBuffer = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandBuffer, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_commandPool, rhs.m_commandPool ); + std::swap( m_commandBuffer, rhs.m_commandBuffer ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -4616,6 +5807,11 @@ namespace VULKAN_HPP_NAMESPACE return m_commandBuffer; } + operator VULKAN_HPP_NAMESPACE::CommandBuffer() const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_commandBuffer ) @@ -4629,12 +5825,20 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } + VULKAN_HPP_NAMESPACE::CommandBuffer release() + { + m_device = nullptr; + m_commandPool = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_commandBuffer, nullptr ); + } + VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -4924,6 +6128,41 @@ namespace VULKAN_HPP_NAMESPACE void setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT; + //=== VK_VERSION_1_4 === + + void setLineStipple( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT; + + void bindIndexBuffer2( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT; + + void pushDescriptorSet( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites ) const + VULKAN_HPP_NOEXCEPT; + + template + void pushDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data ) const VULKAN_HPP_NOEXCEPT; + + void setRenderingAttachmentLocations( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo ) const VULKAN_HPP_NOEXCEPT; + + void setRenderingInputAttachmentIndices( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo ) const + VULKAN_HPP_NOEXCEPT; + + void bindDescriptorSets2( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT; + + void pushConstants2( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo ) const VULKAN_HPP_NOEXCEPT; + + void pushDescriptorSet2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT; + + void pushDescriptorSetWithTemplate2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo ) const + VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_debug_marker === void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT; @@ -4932,7 +6171,6 @@ namespace VULKAN_HPP_NAMESPACE void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT; -# if defined( 
VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_queue === void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo ) const VULKAN_HPP_NOEXCEPT; @@ -4940,13 +6178,10 @@ namespace VULKAN_HPP_NAMESPACE void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo ) const VULKAN_HPP_NOEXCEPT; void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo ) const VULKAN_HPP_NOEXCEPT; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_decode_queue === - void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & frameInfo ) const VULKAN_HPP_NOEXCEPT; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo ) const VULKAN_HPP_NOEXCEPT; //=== VK_EXT_transform_feedback === @@ -5044,6 +6279,10 @@ namespace VULKAN_HPP_NAMESPACE void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, VULKAN_HPP_NAMESPACE::ArrayProxy const & discardRectangles ) const VULKAN_HPP_NOEXCEPT; + void setDiscardRectangleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable ) const VULKAN_HPP_NOEXCEPT; + + void setDiscardRectangleModeEXT( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode ) const VULKAN_HPP_NOEXCEPT; + //=== VK_KHR_create_renderpass2 === void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, @@ -5062,6 +6301,26 @@ namespace VULKAN_HPP_NAMESPACE void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT; +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + void initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize ) const VULKAN_HPP_NOEXCEPT; + + void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT; + + void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT; + + void dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + VULKAN_HPP_NAMESPACE::DeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_sample_locations === void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT; @@ -5090,6 +6349,24 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT; + //=== VK_KHR_ray_tracing_pipeline === + + void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT; + + void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & 
raygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT; + + void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT; + //=== VK_NV_shading_rate_image === void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT; @@ -5161,6 +6438,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT; + void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT; + //=== VK_NV_mesh_shader === void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT; @@ -5179,6 +6461,10 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_scissor_exclusive === + void setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor, + VULKAN_HPP_NAMESPACE::ArrayProxy const & exclusiveScissorEnables ) const + VULKAN_HPP_NOEXCEPT; + void setExclusiveScissorNV( uint32_t firstExclusiveScissor, VULKAN_HPP_NAMESPACE::ArrayProxy const & exclusiveScissors ) const VULKAN_HPP_NOEXCEPT; @@ -5200,6 +6486,13 @@ namespace VULKAN_HPP_NAMESPACE void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT; + //=== VK_KHR_dynamic_rendering_local_read === + + void setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo ) const VULKAN_HPP_NOEXCEPT; + + void setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo ) const + VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_line_rasterization === void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT; @@ -5250,10 +6543,18 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_EXT_depth_bias_control === + + void setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo ) const VULKAN_HPP_NOEXCEPT; + //=== VK_KHR_video_encode_queue === void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo ) const VULKAN_HPP_NOEXCEPT; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + + void cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo ) const VULKAN_HPP_NOEXCEPT; # endif /*VK_ENABLE_BETA_EXTENSIONS*/ //=== VK_KHR_synchronization2 === @@ -5272,10 +6573,20 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT; - void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, - uint32_t marker ) const VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_descriptor_buffer === + + void bindDescriptorBuffersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindingInfos ) const + 
VULKAN_HPP_NOEXCEPT; + + void setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferIndices, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets ) const; + + void bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set ) const VULKAN_HPP_NOEXCEPT; //=== VK_NV_fragment_shading_rate_enums === @@ -5312,24 +6623,6 @@ namespace VULKAN_HPP_NAMESPACE void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT; - //=== VK_KHR_ray_tracing_pipeline === - - void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, - uint32_t width, - uint32_t height, - uint32_t depth ) const VULKAN_HPP_NOEXCEPT; - - void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, - VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT; - - void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT; - //=== VK_EXT_vertex_input_dynamic_state === void setVertexInputEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexBindingDescriptions, @@ -5375,78 +6668,260 @@ namespace VULKAN_HPP_NAMESPACE uint32_t firstInstance, Optional vertexOffset VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_opacity_micromap === + + void buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & infos ) const VULKAN_HPP_NOEXCEPT; + + void copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT; + + void copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const VULKAN_HPP_NOEXCEPT; + + void copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT; + + void writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_HUAWEI_cluster_culling_shader === + + void drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT; + + void drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_copy_memory_indirect === + + void copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + void copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t stride, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + 
VULKAN_HPP_NAMESPACE::ArrayProxy const & imageSubresources ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_memory_decompression === + + void decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & decompressMemoryRegions ) const + VULKAN_HPP_NOEXCEPT; + + void decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_device_generated_commands_compute === + + void updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_extended_dynamic_state3 === + + void setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT; + + void setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT; + + void setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT; + + void setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sampleMask ) const; + + void setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT; + + void setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT; + + void setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT; + + void setColorBlendEnableEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendEnables ) const VULKAN_HPP_NOEXCEPT; + + void setColorBlendEquationEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendEquations ) const + VULKAN_HPP_NOEXCEPT; + + void setColorWriteMaskEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorWriteMasks ) const + VULKAN_HPP_NOEXCEPT; + + void setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT; + + void setRasterizationStreamEXT( uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT; + + void + setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT; + + void setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT; + + void setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT; + + void setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT; + + void setColorBlendAdvancedEXT( uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendAdvanced ) const + VULKAN_HPP_NOEXCEPT; + + void setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT; + + void setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT; + + void setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT; + + void setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT; + + void setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT; + + void setViewportSwizzleNV( uint32_t firstViewport, + 
VULKAN_HPP_NAMESPACE::ArrayProxy const & viewportSwizzles ) const + VULKAN_HPP_NOEXCEPT; + + void setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT; + + void setCoverageToColorLocationNV( uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT; + + void setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT; + + void setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT; + + void setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & coverageModulationTable ) const VULKAN_HPP_NOEXCEPT; + + void setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT; + + void setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT; + + void setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_optical_flow === + + void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_maintenance5 === + + void bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_shader_object === + + void bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & stages, + VULKAN_HPP_NAMESPACE::ArrayProxy const & shaders ) const; + + void setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode, + Optional depthClampRange + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_cooperative_vector === + + void convertCooperativeVectorMatrixNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + + void setAttachmentFeedbackLoopEnableEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_line_rasterization === + + void setLineStippleKHR( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_maintenance6 === + + void bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT; + + void pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo ) const VULKAN_HPP_NOEXCEPT; + + void pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT; + + void pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo ) const + VULKAN_HPP_NOEXCEPT; + + void setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo ) const + VULKAN_HPP_NOEXCEPT; + + void bindDescriptorBufferEmbeddedSamplers2EXT( + const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_cluster_acceleration_structure === + + void buildClusterAccelerationStructureIndirectNV( const 
VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureCommandsInfoNV & commandInfos ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_partitioned_acceleration_structure === + + void buildPartitionedAccelerationStructuresNV( const VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureInfoNV & buildInfo ) const + VULKAN_HPP_NOEXCEPT; + + //=== VK_EXT_device_generated_commands === + + void preprocessGeneratedCommandsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer ) const VULKAN_HPP_NOEXCEPT; + + void executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT; + private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {}; - VULKAN_HPP_NAMESPACE::CommandBuffer m_commandBuffer = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {}; + VULKAN_HPP_NAMESPACE::CommandBuffer m_commandBuffer = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; class CommandBuffers : public std::vector { public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) CommandBuffers( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) { - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher(); - std::vector commandBuffers( allocateInfo.commandBufferCount ); - VULKAN_HPP_NAMESPACE::Result result = static_cast( dispatcher->vkAllocateCommandBuffers( - static_cast( *device ), reinterpret_cast( &allocateInfo ), commandBuffers.data() ) ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - this->reserve( allocateInfo.commandBufferCount ); - for ( auto const & commandBuffer : commandBuffers ) - { - this->emplace_back( device, commandBuffer, static_cast( allocateInfo.commandPool ) ); - } - } - else - { - throwResultException( result, "vkAllocateCommandBuffers" ); - } + *this = device.allocateCommandBuffers( allocateInfo ); } +# endif CommandBuffers( std::nullptr_t ) {} - CommandBuffers() = delete; - CommandBuffers( CommandBuffers const & ) = delete; - CommandBuffers( CommandBuffers && rhs ) = default; + CommandBuffers() = delete; + CommandBuffers( CommandBuffers const & ) = delete; + CommandBuffers( CommandBuffers && rhs ) = default; CommandBuffers & operator=( CommandBuffers const & ) = delete; - CommandBuffers & operator=( CommandBuffers && rhs ) = default; + CommandBuffers & operator=( CommandBuffers && rhs ) = default; + + private: + CommandBuffers( std::vector && rhs ) + { + std::swap( *this, rhs ); + } }; class CuFunctionNVX { public: - using CType = VkCuFunctionNVX; + using CType = VkCuFunctionNVX; + using CppType = VULKAN_HPP_NAMESPACE::CuFunctionNVX; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX; public: +# if !defined( 
VULKAN_HPP_RAII_NO_EXCEPTIONS ) CuFunctionNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkCreateCuFunctionNVX( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_function ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateCuFunctionNVX" ); - } + *this = device.createCuFunctionNVX( createInfo, allocator ); } +# endif CuFunctionNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkCuFunctionNVX function, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) + : m_device( device ) , m_function( function ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) @@ -5462,23 +6937,25 @@ namespace VULKAN_HPP_NAMESPACE CuFunctionNVX() = delete; CuFunctionNVX( CuFunctionNVX const & ) = delete; + CuFunctionNVX( CuFunctionNVX && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_function( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_function, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_function( VULKAN_HPP_NAMESPACE::exchange( rhs.m_function, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + CuFunctionNVX & operator=( CuFunctionNVX const & ) = delete; - CuFunctionNVX & operator =( CuFunctionNVX && rhs ) VULKAN_HPP_NOEXCEPT + + CuFunctionNVX & operator=( CuFunctionNVX && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_function = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_function, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_function, rhs.m_function ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -5488,6 +6965,11 @@ namespace VULKAN_HPP_NAMESPACE return m_function; } + operator VULKAN_HPP_NAMESPACE::CuFunctionNVX() const VULKAN_HPP_NOEXCEPT + { + return m_function; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_function ) @@ -5501,12 +6983,20 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } + VULKAN_HPP_NAMESPACE::CuFunctionNVX release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_function, nullptr ); + } + VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -5521,44 +7011,42 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::CuFunctionNVX m_function = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CuFunctionNVX m_function = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; class CuModuleNVX { public: - using CType = VkCuModuleNVX; + using CType = VkCuModuleNVX; + using CppType = VULKAN_HPP_NAMESPACE::CuModuleNVX; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX; public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) CuModuleNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkCreateCuModuleNVX( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_module ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateCuModuleNVX" ); - } + *this = device.createCuModuleNVX( createInfo, allocator ); } +# endif CuModuleNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkCuModuleNVX module, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) + : m_device( device ) , m_module( module ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) @@ -5574,23 +7062,25 @@ namespace VULKAN_HPP_NAMESPACE CuModuleNVX() = delete; CuModuleNVX( CuModuleNVX const & ) = delete; + CuModuleNVX( CuModuleNVX && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_module( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_module, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_module( VULKAN_HPP_NAMESPACE::exchange( rhs.m_module, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + CuModuleNVX & operator=( CuModuleNVX const & ) = delete; - CuModuleNVX & operator =( CuModuleNVX && rhs ) VULKAN_HPP_NOEXCEPT + + CuModuleNVX & operator=( CuModuleNVX && rhs ) VULKAN_HPP_NOEXCEPT 
{ if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_module = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_module, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_module, rhs.m_module ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -5600,6 +7090,11 @@ namespace VULKAN_HPP_NAMESPACE return m_module; } + operator VULKAN_HPP_NAMESPACE::CuModuleNVX() const VULKAN_HPP_NOEXCEPT + { + return m_module; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_module ) @@ -5613,12 +7108,20 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } + VULKAN_HPP_NAMESPACE::CuModuleNVX release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_module, nullptr ); + } + VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -5633,157 +7136,428 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::CuModuleNVX m_module = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CuModuleNVX m_module = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; - class DebugReportCallbackEXT + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + class CudaFunctionNV { public: - using CType = VkDebugReportCallbackEXT; + using CType = VkCudaFunctionNV; + using CppType = VULKAN_HPP_NAMESPACE::CudaFunctionNV; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCudaFunctionNV; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCudaFunctionNV; public: - DebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, - VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_instance( *instance ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( instance.getDispatcher() ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + CudaFunctionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo, + 
VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - instance.getDispatcher()->vkCreateDebugReportCallbackEXT( static_cast( *instance ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_callback ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateDebugReportCallbackEXT" ); - } + *this = device.createCudaFunctionNV( createInfo, allocator ); } +# endif - DebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, - VkDebugReportCallbackEXT callback, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_instance( *instance ) - , m_callback( callback ) + CudaFunctionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkCudaFunctionNV function, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_function( function ) , m_allocator( static_cast( allocator ) ) - , m_dispatcher( instance.getDispatcher() ) + , m_dispatcher( device.getDispatcher() ) { } - DebugReportCallbackEXT( std::nullptr_t ) {} + CudaFunctionNV( std::nullptr_t ) {} - ~DebugReportCallbackEXT() + ~CudaFunctionNV() { clear(); } - DebugReportCallbackEXT() = delete; - DebugReportCallbackEXT( DebugReportCallbackEXT const & ) = delete; - DebugReportCallbackEXT( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT - : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) ) - , m_callback( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_callback, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + CudaFunctionNV() = delete; + CudaFunctionNV( CudaFunctionNV const & ) = delete; + + CudaFunctionNV( CudaFunctionNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_function( VULKAN_HPP_NAMESPACE::exchange( rhs.m_function, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } - DebugReportCallbackEXT & operator=( DebugReportCallbackEXT const & ) = delete; - DebugReportCallbackEXT & operator =( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT + + CudaFunctionNV & operator=( CudaFunctionNV const & ) = delete; + + CudaFunctionNV & operator=( CudaFunctionNV && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_instance = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ); - m_callback = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_callback, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_function, rhs.m_function ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } - VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT const & operator*() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::CudaFunctionNV const & operator*() const VULKAN_HPP_NOEXCEPT { - return m_callback; + return m_function; + } + + operator VULKAN_HPP_NAMESPACE::CudaFunctionNV() const VULKAN_HPP_NOEXCEPT + { + return 
m_function; } void clear() VULKAN_HPP_NOEXCEPT { - if ( m_callback ) + if ( m_function ) { - getDispatcher()->vkDestroyDebugReportCallbackEXT( static_cast( m_instance ), - static_cast( m_callback ), - reinterpret_cast( m_allocator ) ); + getDispatcher()->vkDestroyCudaFunctionNV( + static_cast( m_device ), static_cast( m_function ), reinterpret_cast( m_allocator ) ); } - m_instance = nullptr; - m_callback = nullptr; + m_device = nullptr; + m_function = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; } - VULKAN_HPP_NAMESPACE::Instance getInstance() const + VULKAN_HPP_NAMESPACE::CudaFunctionNV release() { - return m_instance; + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_function, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; } - void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT & rhs ) VULKAN_HPP_NOEXCEPT + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV & rhs ) VULKAN_HPP_NOEXCEPT { - std::swap( m_instance, rhs.m_instance ); - std::swap( m_callback, rhs.m_callback ); + std::swap( m_device, rhs.m_device ); + std::swap( m_function, rhs.m_function ); std::swap( m_allocator, rhs.m_allocator ); std::swap( m_dispatcher, rhs.m_dispatcher ); } private: - VULKAN_HPP_NAMESPACE::Instance m_instance = {}; - VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT m_callback = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CudaFunctionNV m_function = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; - class DebugUtilsMessengerEXT + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + class CudaModuleNV { public: - using CType = VkDebugUtilsMessengerEXT; + using CType = VkCudaModuleNV; + using CppType = VULKAN_HPP_NAMESPACE::CudaModuleNV; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCudaModuleNV; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCudaModuleNV; public: - DebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, - VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_instance( *instance ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + CudaModuleNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + 
VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createCudaModuleNV( createInfo, allocator ); + } +# endif + + CudaModuleNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkCudaModuleNV module, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_module( module ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + CudaModuleNV( std::nullptr_t ) {} + + ~CudaModuleNV() + { + clear(); + } + + CudaModuleNV() = delete; + CudaModuleNV( CudaModuleNV const & ) = delete; + + CudaModuleNV( CudaModuleNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_module( VULKAN_HPP_NAMESPACE::exchange( rhs.m_module, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + CudaModuleNV & operator=( CudaModuleNV const & ) = delete; + + CudaModuleNV & operator=( CudaModuleNV && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_module, rhs.m_module ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::CudaModuleNV const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_module; + } + + operator VULKAN_HPP_NAMESPACE::CudaModuleNV() const VULKAN_HPP_NOEXCEPT + { + return m_module; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_module ) + { + getDispatcher()->vkDestroyCudaModuleNV( + static_cast( m_device ), static_cast( m_module ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_module = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::CudaModuleNV release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_module, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_module, rhs.m_module ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_NV_cuda_kernel_launch === + + VULKAN_HPP_NODISCARD std::vector getCache() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CudaModuleNV m_module = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + class DebugReportCallbackEXT + { + public: + using CType = VkDebugReportCallbackEXT; + using CppType = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + DebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createDebugReportCallbackEXT( createInfo, allocator ); + } +# endif + + DebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VkDebugReportCallbackEXT callback, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( instance ) + , m_callback( callback ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( instance.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - instance.getDispatcher()->vkCreateDebugUtilsMessengerEXT( static_cast( *instance ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_messenger ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + } + + DebugReportCallbackEXT( std::nullptr_t ) {} + + ~DebugReportCallbackEXT() + { + clear(); + } + + DebugReportCallbackEXT() = delete; + DebugReportCallbackEXT( DebugReportCallbackEXT const & ) = delete; + + DebugReportCallbackEXT( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_instance( VULKAN_HPP_NAMESPACE::exchange( rhs.m_instance, {} ) ) + , m_callback( VULKAN_HPP_NAMESPACE::exchange( rhs.m_callback, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + DebugReportCallbackEXT & operator=( DebugReportCallbackEXT const & ) = delete; + + DebugReportCallbackEXT & operator=( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_instance, rhs.m_instance ); + std::swap( m_callback, rhs.m_callback ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_callback; + } + + operator VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT() const VULKAN_HPP_NOEXCEPT + { + return m_callback; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_callback ) { - throwResultException( result, "vkCreateDebugUtilsMessengerEXT" ); + getDispatcher()->vkDestroyDebugReportCallbackEXT( static_cast( m_instance ), + static_cast( m_callback ), + reinterpret_cast( m_allocator ) ); } + m_instance = nullptr; + m_callback = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT release() + { + m_instance = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_callback, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Instance getInstance() const + { + return m_instance; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; } + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_instance, rhs.m_instance ); + std::swap( m_callback, rhs.m_callback ); + std::swap( m_allocator, rhs.m_allocator ); + 
std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT m_callback = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class DebugUtilsMessengerEXT + { + public: + using CType = VkDebugUtilsMessengerEXT; + using CppType = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + DebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createDebugUtilsMessengerEXT( createInfo, allocator ); + } +# endif + DebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VkDebugUtilsMessengerEXT messenger, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_instance( *instance ) + : m_instance( instance ) , m_messenger( messenger ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( instance.getDispatcher() ) @@ -5799,23 +7573,25 @@ namespace VULKAN_HPP_NAMESPACE DebugUtilsMessengerEXT() = delete; DebugUtilsMessengerEXT( DebugUtilsMessengerEXT const & ) = delete; + DebugUtilsMessengerEXT( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT - : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) ) - , m_messenger( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_messenger, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_instance( VULKAN_HPP_NAMESPACE::exchange( rhs.m_instance, {} ) ) + , m_messenger( VULKAN_HPP_NAMESPACE::exchange( rhs.m_messenger, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT const & ) = delete; - DebugUtilsMessengerEXT & operator =( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT + + DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_instance = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ); - m_messenger = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_messenger, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_instance, rhs.m_instance ); + std::swap( m_messenger, rhs.m_messenger ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -5825,6 +7601,11 @@ 
namespace VULKAN_HPP_NAMESPACE return m_messenger; } + operator VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT + { + return m_messenger; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_messenger ) @@ -5839,12 +7620,20 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT release() + { + m_instance = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_messenger, nullptr ); + } + VULKAN_HPP_NAMESPACE::Instance getInstance() const { return m_instance; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -5859,42 +7648,41 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Instance m_instance = {}; - VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT m_messenger = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT m_messenger = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; class DeferredOperationKHR { public: - using CType = VkDeferredOperationKHR; + using CType = VkDeferredOperationKHR; + using CppType = VULKAN_HPP_NAMESPACE::DeferredOperationKHR; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) DeferredOperationKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkCreateDeferredOperationKHR( static_cast( *device ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_operation ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateDeferredOperationKHR" ); - } + *this = device.createDeferredOperationKHR( allocator ); } +# endif DeferredOperationKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkDeferredOperationKHR operation, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) + : m_device( device ) , m_operation( operation ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) @@ -5910,23 +7698,25 @@ namespace VULKAN_HPP_NAMESPACE DeferredOperationKHR() = delete; DeferredOperationKHR( DeferredOperationKHR const & ) = delete; + DeferredOperationKHR( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_operation( 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_operation, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_operation( VULKAN_HPP_NAMESPACE::exchange( rhs.m_operation, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + DeferredOperationKHR & operator=( DeferredOperationKHR const & ) = delete; - DeferredOperationKHR & operator =( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT + + DeferredOperationKHR & operator=( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_operation = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_operation, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_operation, rhs.m_operation ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -5936,6 +7726,11 @@ namespace VULKAN_HPP_NAMESPACE return m_operation; } + operator VULKAN_HPP_NAMESPACE::DeferredOperationKHR() const VULKAN_HPP_NOEXCEPT + { + return m_operation; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_operation ) @@ -5950,12 +7745,20 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } + VULKAN_HPP_NAMESPACE::DeferredOperationKHR release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_operation, nullptr ); + } + VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -5978,44 +7781,42 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result join() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::DeferredOperationKHR m_operation = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DeferredOperationKHR m_operation = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; class DescriptorPool { public: - using CType = VkDescriptorPool; + using CType = VkDescriptorPool; + using CppType = VULKAN_HPP_NAMESPACE::DescriptorPool; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT 
debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool; public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) DescriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkCreateDescriptorPool( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_descriptorPool ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateDescriptorPool" ); - } + *this = device.createDescriptorPool( createInfo, allocator ); } +# endif DescriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkDescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) + : m_device( device ) , m_descriptorPool( descriptorPool ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) @@ -6031,23 +7832,25 @@ namespace VULKAN_HPP_NAMESPACE DescriptorPool() = delete; DescriptorPool( DescriptorPool const & ) = delete; + DescriptorPool( DescriptorPool && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_descriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_descriptorPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + DescriptorPool & operator=( DescriptorPool const & ) = delete; - DescriptorPool & operator =( DescriptorPool && rhs ) VULKAN_HPP_NOEXCEPT + + DescriptorPool & operator=( DescriptorPool && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_descriptorPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_descriptorPool, rhs.m_descriptorPool ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -6057,6 +7860,11 @@ namespace VULKAN_HPP_NAMESPACE return m_descriptorPool; } + operator VULKAN_HPP_NAMESPACE::DescriptorPool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorPool; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_descriptorPool ) @@ -6071,12 +7879,20 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } + VULKAN_HPP_NAMESPACE::DescriptorPool release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_descriptorPool, nullptr ); + } 
+ VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -6095,16 +7911,23 @@ namespace VULKAN_HPP_NAMESPACE void reset( VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; class DescriptorSet { public: - using CType = VkDescriptorSet; + using CType = VkDescriptorSet; + using CppType = VULKAN_HPP_NAMESPACE::DescriptorSet; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = @@ -6112,7 +7935,7 @@ namespace VULKAN_HPP_NAMESPACE public: DescriptorSet( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkDescriptorSet descriptorSet, VkDescriptorPool descriptorPool ) - : m_device( *device ), m_descriptorPool( descriptorPool ), m_descriptorSet( descriptorSet ), m_dispatcher( device.getDispatcher() ) + : m_device( device ), m_descriptorPool( descriptorPool ), m_descriptorSet( descriptorSet ), m_dispatcher( device.getDispatcher() ) { } @@ -6125,23 +7948,25 @@ namespace VULKAN_HPP_NAMESPACE DescriptorSet() = delete; DescriptorSet( DescriptorSet const & ) = delete; + DescriptorSet( DescriptorSet && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_descriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) ) - , m_descriptorSet( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSet, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_descriptorPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) ) + , m_descriptorSet( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorSet, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + DescriptorSet & operator=( DescriptorSet const & ) = delete; - DescriptorSet & operator =( DescriptorSet && rhs ) VULKAN_HPP_NOEXCEPT + + DescriptorSet & operator=( DescriptorSet && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_descriptorPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} ); - m_descriptorSet = 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSet, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_descriptorPool, rhs.m_descriptorPool ); + std::swap( m_descriptorSet, rhs.m_descriptorSet ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -6151,6 +7976,11 @@ namespace VULKAN_HPP_NAMESPACE return m_descriptorSet; } + operator VULKAN_HPP_NAMESPACE::DescriptorSet() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_descriptorSet ) @@ -6166,12 +7996,20 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } - VULKAN_HPP_NAMESPACE::Device getDevice() const + VULKAN_HPP_NAMESPACE::DescriptorSet release() + { + m_device = nullptr; + m_descriptorPool = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_descriptorSet, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -6200,77 +8038,68 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD void * getHostMappingVALVE() const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {}; - VULKAN_HPP_NAMESPACE::DescriptorSet m_descriptorSet = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet m_descriptorSet = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; class DescriptorSets : public std::vector { public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) DescriptorSets( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) { - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher(); - std::vector descriptorSets( allocateInfo.descriptorSetCount ); - VULKAN_HPP_NAMESPACE::Result result = static_cast( dispatcher->vkAllocateDescriptorSets( - static_cast( *device ), reinterpret_cast( &allocateInfo ), descriptorSets.data() ) ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - this->reserve( allocateInfo.descriptorSetCount ); - for ( auto const & descriptorSet : descriptorSets ) - { - this->emplace_back( device, descriptorSet, static_cast( allocateInfo.descriptorPool ) ); - } - } - else - { - throwResultException( result, "vkAllocateDescriptorSets" ); - } + *this = device.allocateDescriptorSets( allocateInfo ); } +# endif DescriptorSets( std::nullptr_t ) {} - DescriptorSets() = delete; - DescriptorSets( DescriptorSets const & ) = delete; - DescriptorSets( DescriptorSets && rhs ) = default; + DescriptorSets() = delete; + DescriptorSets( DescriptorSets const & ) = delete; + DescriptorSets( DescriptorSets && rhs ) = default; DescriptorSets & operator=( DescriptorSets const 
& ) = delete; - DescriptorSets & operator=( DescriptorSets && rhs ) = default; + DescriptorSets & operator=( DescriptorSets && rhs ) = default; + + private: + DescriptorSets( std::vector && rhs ) + { + std::swap( *this, rhs ); + } }; class DescriptorSetLayout { public: - using CType = VkDescriptorSetLayout; + using CType = VkDescriptorSetLayout; + using CppType = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout; public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) DescriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkCreateDescriptorSetLayout( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_descriptorSetLayout ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateDescriptorSetLayout" ); - } + *this = device.createDescriptorSetLayout( createInfo, allocator ); } +# endif DescriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkDescriptorSetLayout descriptorSetLayout, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) + : m_device( device ) , m_descriptorSetLayout( descriptorSetLayout ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) @@ -6286,23 +8115,25 @@ namespace VULKAN_HPP_NAMESPACE DescriptorSetLayout() = delete; DescriptorSetLayout( DescriptorSetLayout const & ) = delete; + DescriptorSetLayout( DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_descriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSetLayout, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_descriptorSetLayout( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorSetLayout, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + DescriptorSetLayout & operator=( DescriptorSetLayout const & ) = delete; - DescriptorSetLayout & operator =( DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT + + DescriptorSetLayout & operator=( DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_descriptorSetLayout = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSetLayout, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_descriptorSetLayout, rhs.m_descriptorSetLayout ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -6312,6 +8143,11 @@ namespace VULKAN_HPP_NAMESPACE return m_descriptorSetLayout; } + operator VULKAN_HPP_NAMESPACE::DescriptorSetLayout() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSetLayout; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_descriptorSetLayout ) @@ -6326,12 +8162,20 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } + VULKAN_HPP_NAMESPACE::DescriptorSetLayout release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_descriptorSetLayout, nullptr ); + } + VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -6345,45 +8189,49 @@ namespace VULKAN_HPP_NAMESPACE std::swap( m_dispatcher, rhs.m_dispatcher ); } + //=== VK_EXT_descriptor_buffer === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getSizeEXT() const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getBindingOffsetEXT( uint32_t binding ) const VULKAN_HPP_NOEXCEPT; + private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::DescriptorSetLayout m_descriptorSetLayout = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DescriptorSetLayout m_descriptorSetLayout = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; class DescriptorUpdateTemplate { public: - using CType = VkDescriptorUpdateTemplate; + using CType = VkDescriptorUpdateTemplate; + using CppType = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate; public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) DescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkCreateDescriptorUpdateTemplate( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_descriptorUpdateTemplate ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) 
- { - throwResultException( result, "vkCreateDescriptorUpdateTemplate" ); - } + *this = device.createDescriptorUpdateTemplate( createInfo, allocator ); } +# endif DescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) + : m_device( device ) , m_descriptorUpdateTemplate( descriptorUpdateTemplate ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) @@ -6399,23 +8247,25 @@ namespace VULKAN_HPP_NAMESPACE DescriptorUpdateTemplate() = delete; DescriptorUpdateTemplate( DescriptorUpdateTemplate const & ) = delete; + DescriptorUpdateTemplate( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_descriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_descriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate const & ) = delete; - DescriptorUpdateTemplate & operator =( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT + + DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_descriptorUpdateTemplate = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_descriptorUpdateTemplate, rhs.m_descriptorUpdateTemplate ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -6425,6 +8275,11 @@ namespace VULKAN_HPP_NAMESPACE return m_descriptorUpdateTemplate; } + operator VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_descriptorUpdateTemplate ) @@ -6439,12 +8294,20 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_descriptorUpdateTemplate, nullptr ); + } + VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -6459,44 +8322,42 @@ namespace VULKAN_HPP_NAMESPACE } private: - 
VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate m_descriptorUpdateTemplate = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate m_descriptorUpdateTemplate = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; class DeviceMemory { public: - using CType = VkDeviceMemory; + using CType = VkDeviceMemory; + using CppType = VULKAN_HPP_NAMESPACE::DeviceMemory; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory; public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) DeviceMemory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = - static_cast( device.getDispatcher()->vkAllocateMemory( static_cast( *device ), - reinterpret_cast( &allocateInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_memory ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkAllocateMemory" ); - } + *this = device.allocateMemory( allocateInfo, allocator ); } +# endif DeviceMemory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkDeviceMemory memory, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) + : m_device( device ) , m_memory( memory ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) @@ -6512,23 +8373,25 @@ namespace VULKAN_HPP_NAMESPACE DeviceMemory() = delete; DeviceMemory( DeviceMemory const & ) = delete; + DeviceMemory( DeviceMemory && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_memory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_memory, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_memory( VULKAN_HPP_NAMESPACE::exchange( rhs.m_memory, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + DeviceMemory & operator=( DeviceMemory const & ) = delete; - DeviceMemory & operator =( DeviceMemory && rhs ) VULKAN_HPP_NOEXCEPT + + DeviceMemory & operator=( DeviceMemory && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_memory = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( 
rhs.m_memory, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_memory, rhs.m_memory ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -6538,6 +8401,11 @@ namespace VULKAN_HPP_NAMESPACE return m_memory; } + operator VULKAN_HPP_NAMESPACE::DeviceMemory() const VULKAN_HPP_NOEXCEPT + { + return m_memory; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_memory ) @@ -6551,12 +8419,20 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } + VULKAN_HPP_NAMESPACE::DeviceMemory release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_memory, nullptr ); + } + VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -6591,61 +8467,56 @@ namespace VULKAN_HPP_NAMESPACE void setPriorityEXT( float priority ) const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory m_memory = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory m_memory = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; class DisplayKHR { public: - using CType = VkDisplayKHR; + using CType = VkDisplayKHR; + using CppType = VULKAN_HPP_NAMESPACE::DisplayKHR; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR; public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, int32_t drmFd, uint32_t connectorId ) - : m_physicalDevice( *physicalDevice ), m_dispatcher( physicalDevice.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( physicalDevice.getDispatcher()->vkGetDrmDisplayEXT( - static_cast( *physicalDevice ), drmFd, connectorId, reinterpret_cast( &m_display ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkGetDrmDisplayEXT" ); - } + *this = physicalDevice.getDrmDisplayEXT( drmFd, connectorId ); } +# endif -# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, Display & dpy, RROutput rrOutput ) - : m_physicalDevice( *physicalDevice ), m_dispatcher( 
physicalDevice.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( physicalDevice.getDispatcher()->vkGetRandROutputDisplayEXT( - static_cast( *physicalDevice ), &dpy, rrOutput, reinterpret_cast( &m_display ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkGetRandROutputDisplayEXT" ); - } + *this = physicalDevice.getRandROutputDisplayEXT( dpy, rrOutput ); } -# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ +# endif -# if defined( VK_USE_PLATFORM_WIN32_KHR ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_WIN32_KHR ) DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, uint32_t deviceRelativeId ) - : m_physicalDevice( *physicalDevice ), m_dispatcher( physicalDevice.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( physicalDevice.getDispatcher()->vkGetWinrtDisplayNV( - static_cast( *physicalDevice ), deviceRelativeId, reinterpret_cast( &m_display ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkGetWinrtDisplayNV" ); - } + *this = physicalDevice.getWinrtDisplayNV( deviceRelativeId ); } -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, VkDisplayKHR display ) - : m_physicalDevice( *physicalDevice ), m_display( display ), m_dispatcher( physicalDevice.getDispatcher() ) + : m_physicalDevice( physicalDevice ), m_display( display ), m_dispatcher( physicalDevice.getDispatcher() ) { } @@ -6658,21 +8529,23 @@ namespace VULKAN_HPP_NAMESPACE DisplayKHR() = delete; DisplayKHR( DisplayKHR const & ) = delete; + DisplayKHR( DisplayKHR && rhs ) VULKAN_HPP_NOEXCEPT - : m_physicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ) - , m_display( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_display, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_physicalDevice( VULKAN_HPP_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ) + , m_display( VULKAN_HPP_NAMESPACE::exchange( rhs.m_display, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + DisplayKHR & operator=( DisplayKHR const & ) = delete; - DisplayKHR & operator =( DisplayKHR && rhs ) VULKAN_HPP_NOEXCEPT + + DisplayKHR & operator=( DisplayKHR && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_physicalDevice = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ); - m_display = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_display, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_physicalDevice, rhs.m_physicalDevice ); + std::swap( m_display, rhs.m_display ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -6682,6 +8555,11 @@ namespace VULKAN_HPP_NAMESPACE return m_display; } + operator VULKAN_HPP_NAMESPACE::DisplayKHR() const VULKAN_HPP_NOEXCEPT + { + return m_display; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_display ) @@ -6693,12 +8571,19 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } + VULKAN_HPP_NAMESPACE::DisplayKHR release() + { + m_physicalDevice = nullptr; + m_dispatcher = nullptr; + return 
VULKAN_HPP_NAMESPACE::exchange( m_display, nullptr ); + } + VULKAN_HPP_NAMESPACE::PhysicalDevice getPhysicalDevice() const { return m_physicalDevice; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -6715,14 +8600,18 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getModeProperties() const; - VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR - createMode( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createMode( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; //=== VK_KHR_get_display_properties2 === VULKAN_HPP_NODISCARD std::vector getModeProperties2() const; + template + VULKAN_HPP_NODISCARD std::vector getModeProperties2() const; + # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_acquire_winrt_display === @@ -6730,81 +8619,61 @@ namespace VULKAN_HPP_NAMESPACE # endif /*VK_USE_PLATFORM_WIN32_KHR*/ private: - VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; - VULKAN_HPP_NAMESPACE::DisplayKHR m_display = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; + VULKAN_HPP_NAMESPACE::DisplayKHR m_display = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; class DisplayKHRs : public std::vector { public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) DisplayKHRs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, uint32_t planeIndex ) { - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * dispatcher = physicalDevice.getDispatcher(); - std::vector displays; - uint32_t displayCount; - VULKAN_HPP_NAMESPACE::Result result; - do - { - result = static_cast( - dispatcher->vkGetDisplayPlaneSupportedDisplaysKHR( static_cast( *physicalDevice ), planeIndex, &displayCount, nullptr ) ); - if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount ) - { - displays.resize( displayCount ); - result = static_cast( dispatcher->vkGetDisplayPlaneSupportedDisplaysKHR( - static_cast( *physicalDevice ), planeIndex, &displayCount, displays.data() ) ); - } - } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - VULKAN_HPP_ASSERT( displayCount <= displays.size() ); - this->reserve( displayCount ); - for ( auto const & displayKHR : displays ) - { - this->emplace_back( physicalDevice, displayKHR ); - } - } - else - { - throwResultException( result, "vkGetDisplayPlaneSupportedDisplaysKHR" ); - } + *this = physicalDevice.getDisplayPlaneSupportedDisplaysKHR( planeIndex ); } +# endif DisplayKHRs( std::nullptr_t ) {} - DisplayKHRs() = delete; - DisplayKHRs( DisplayKHRs const & ) = delete; - DisplayKHRs( DisplayKHRs && rhs ) = default; + DisplayKHRs() = delete; + DisplayKHRs( 
DisplayKHRs const & ) = delete; + DisplayKHRs( DisplayKHRs && rhs ) = default; DisplayKHRs & operator=( DisplayKHRs const & ) = delete; - DisplayKHRs & operator=( DisplayKHRs && rhs ) = default; + DisplayKHRs & operator=( DisplayKHRs && rhs ) = default; + + private: + DisplayKHRs( std::vector && rhs ) + { + std::swap( *this, rhs ); + } }; class DisplayModeKHR { public: - using CType = VkDisplayModeKHR; + using CType = VkDisplayModeKHR; + using CppType = VULKAN_HPP_NAMESPACE::DisplayModeKHR; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR; public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) DisplayModeKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_physicalDevice( display.getPhysicalDevice() ), m_dispatcher( display.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( display.getDispatcher()->vkCreateDisplayModeKHR( - static_cast( display.getPhysicalDevice() ), - static_cast( *display ), - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &m_displayModeKHR ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateDisplayModeKHR" ); - } + *this = display.createMode( createInfo, allocator ); } +# endif DisplayModeKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, VkDisplayModeKHR displayModeKHR ) : m_physicalDevice( display.getPhysicalDevice() ), m_displayModeKHR( displayModeKHR ), m_dispatcher( display.getDispatcher() ) @@ -6819,26 +8688,30 @@ namespace VULKAN_HPP_NAMESPACE } DisplayModeKHR() = delete; + DisplayModeKHR( DisplayModeKHR const & rhs ) : m_displayModeKHR( rhs.m_displayModeKHR ), m_dispatcher( rhs.m_dispatcher ) {} + DisplayModeKHR( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT - : m_physicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ) - , m_displayModeKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_displayModeKHR, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_physicalDevice( VULKAN_HPP_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ) + , m_displayModeKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_displayModeKHR, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + DisplayModeKHR & operator=( DisplayModeKHR const & rhs ) { m_displayModeKHR = rhs.m_displayModeKHR; m_dispatcher = rhs.m_dispatcher; return *this; } + DisplayModeKHR & operator=( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - m_physicalDevice = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ); - m_displayModeKHR = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_displayModeKHR, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_physicalDevice, rhs.m_physicalDevice ); + std::swap( m_displayModeKHR, rhs.m_displayModeKHR ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -6848,6 +8721,11 @@ namespace 
VULKAN_HPP_NAMESPACE return m_displayModeKHR; } + operator VULKAN_HPP_NAMESPACE::DisplayModeKHR() const VULKAN_HPP_NOEXCEPT + { + return m_displayModeKHR; + } + void clear() VULKAN_HPP_NOEXCEPT { m_physicalDevice = nullptr; @@ -6855,7 +8733,14 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::DisplayModeKHR release() + { + m_physicalDevice = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_displayModeKHR, nullptr ); + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -6873,43 +8758,41 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR getDisplayPlaneCapabilities( uint32_t planeIndex ) const; private: - VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; - VULKAN_HPP_NAMESPACE::DisplayModeKHR m_displayModeKHR = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR m_displayModeKHR = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; class Event { public: - using CType = VkEvent; + using CType = VkEvent; + using CppType = VULKAN_HPP_NAMESPACE::Event; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eEvent; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent; public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) Event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = - static_cast( device.getDispatcher()->vkCreateEvent( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_event ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateEvent" ); - } + *this = device.createEvent( createInfo, allocator ); } +# endif Event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkEvent event, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) + : m_device( device ) , m_event( event ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) @@ -6925,23 +8808,25 @@ namespace VULKAN_HPP_NAMESPACE Event() = delete; Event( Event const & ) = delete; + Event( Event && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_event, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( 
rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_event( VULKAN_HPP_NAMESPACE::exchange( rhs.m_event, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + Event & operator=( Event const & ) = delete; - Event & operator =( Event && rhs ) VULKAN_HPP_NOEXCEPT + + Event & operator=( Event && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_event = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_event, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_event, rhs.m_event ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -6951,6 +8836,11 @@ namespace VULKAN_HPP_NAMESPACE return m_event; } + operator VULKAN_HPP_NAMESPACE::Event() const VULKAN_HPP_NOEXCEPT + { + return m_event; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_event ) @@ -6964,12 +8854,20 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } + VULKAN_HPP_NAMESPACE::Event release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_event, nullptr ); + } + VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -6992,82 +8890,61 @@ namespace VULKAN_HPP_NAMESPACE void reset() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::Event m_event = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Event m_event = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; class Fence { public: - using CType = VkFence; + using CType = VkFence; + using CppType = VULKAN_HPP_NAMESPACE::Fence; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFence; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence; public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = - static_cast( device.getDispatcher()->vkCreateFence( static_cast( *device 
), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_fence ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateFence" ); - } + *this = device.createFence( createInfo, allocator ); } +# endif +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkRegisterDeviceEventEXT( static_cast( *device ), - reinterpret_cast( &deviceEventInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_fence ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkRegisterDeviceEventEXT" ); - } + *this = device.registerEventEXT( deviceEventInfo, allocator ); } +# endif +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkRegisterDisplayEventEXT( static_cast( *device ), - static_cast( *display ), - reinterpret_cast( &displayEventInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_fence ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkRegisterDisplayEventEXT" ); - } + *this = device.registerDisplayEventEXT( display, displayEventInfo, allocator ); } +# endif Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkFence fence, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) + : m_device( device ) , m_fence( fence ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) @@ -7083,23 +8960,25 @@ namespace VULKAN_HPP_NAMESPACE Fence() = delete; Fence( Fence const & ) = delete; + Fence( Fence && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_fence, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_fence( VULKAN_HPP_NAMESPACE::exchange( rhs.m_fence, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + Fence & operator=( Fence const & ) = delete; - Fence & operator =( Fence && rhs ) VULKAN_HPP_NOEXCEPT + + Fence & operator=( Fence && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_fence = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_fence, {} ); - m_allocator = 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_fence, rhs.m_fence ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -7109,6 +8988,11 @@ namespace VULKAN_HPP_NAMESPACE return m_fence; } + operator VULKAN_HPP_NAMESPACE::Fence() const VULKAN_HPP_NOEXCEPT + { + return m_fence; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_fence ) @@ -7122,12 +9006,20 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } + VULKAN_HPP_NAMESPACE::Fence release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_fence, nullptr ); + } + VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -7146,44 +9038,42 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getStatus() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::Fence m_fence = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Fence m_fence = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; class Framebuffer { public: - using CType = VkFramebuffer; + using CType = VkFramebuffer; + using CppType = VULKAN_HPP_NAMESPACE::Framebuffer; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer; public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) Framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkCreateFramebuffer( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_framebuffer ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateFramebuffer" ); - } + *this = device.createFramebuffer( createInfo, allocator ); } +# endif Framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkFramebuffer framebuffer, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) + : m_device( device ) , m_framebuffer( framebuffer ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( 
device.getDispatcher() ) @@ -7199,23 +9089,25 @@ namespace VULKAN_HPP_NAMESPACE Framebuffer() = delete; Framebuffer( Framebuffer const & ) = delete; + Framebuffer( Framebuffer && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_framebuffer, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_framebuffer( VULKAN_HPP_NAMESPACE::exchange( rhs.m_framebuffer, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + Framebuffer & operator=( Framebuffer const & ) = delete; - Framebuffer & operator =( Framebuffer && rhs ) VULKAN_HPP_NOEXCEPT + + Framebuffer & operator=( Framebuffer && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_framebuffer = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_framebuffer, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_framebuffer, rhs.m_framebuffer ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -7225,6 +9117,11 @@ namespace VULKAN_HPP_NAMESPACE return m_framebuffer; } + operator VULKAN_HPP_NAMESPACE::Framebuffer() const VULKAN_HPP_NOEXCEPT + { + return m_framebuffer; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_framebuffer ) @@ -7238,12 +9135,20 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } + VULKAN_HPP_NAMESPACE::Framebuffer release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_framebuffer, nullptr ); + } + VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -7262,44 +9167,42 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getTilePropertiesQCOM() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::Framebuffer m_framebuffer = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Framebuffer m_framebuffer = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; class Image { public: - using CType = VkImage; + using CType = VkImage; + using CppType = VULKAN_HPP_NAMESPACE::Image; static 
VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImage; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage; public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) Image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = - static_cast( device.getDispatcher()->vkCreateImage( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_image ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateImage" ); - } + *this = device.createImage( createInfo, allocator ); } +# endif Image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkImage image, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) + : m_device( device ) , m_image( image ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) @@ -7315,23 +9218,25 @@ namespace VULKAN_HPP_NAMESPACE Image() = delete; Image( Image const & ) = delete; + Image( Image && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_image, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_image( VULKAN_HPP_NAMESPACE::exchange( rhs.m_image, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + Image & operator=( Image const & ) = delete; - Image & operator =( Image && rhs ) VULKAN_HPP_NOEXCEPT + + Image & operator=( Image && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_image = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_image, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_image, rhs.m_image ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -7341,6 +9246,11 @@ namespace VULKAN_HPP_NAMESPACE return m_image; } + operator VULKAN_HPP_NAMESPACE::Image() const VULKAN_HPP_NOEXCEPT + { + return m_image; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_image ) @@ -7354,12 +9264,20 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } + VULKAN_HPP_NAMESPACE::Image release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_image, nullptr ); + } + VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; } - 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -7384,58 +9302,74 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout getSubresourceLayout( const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource ) const VULKAN_HPP_NOEXCEPT; + //=== VK_VERSION_1_4 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getSubresourceLayout2( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getSubresourceLayout2( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_image_drm_format_modifier === VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT getDrmFormatModifierPropertiesEXT() const; - //=== VK_EXT_image_compression_control === + //=== VK_EXT_host_image_copy === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_maintenance5 === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT - getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT; template - VULKAN_HPP_NODISCARD StructureChain - getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::Image m_image = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Image m_image = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; - class ImageView + template <> + struct isVulkanRAIIHandleType { - public: - using CType = VkImageView; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class ImageView + { + public: + using CType = VkImageView; + using CppType = VULKAN_HPP_NAMESPACE::ImageView; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImageView; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView; public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) ImageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, 
VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = - static_cast( device.getDispatcher()->vkCreateImageView( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_imageView ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateImageView" ); - } + *this = device.createImageView( createInfo, allocator ); } +# endif ImageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkImageView imageView, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) + : m_device( device ) , m_imageView( imageView ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) @@ -7451,23 +9385,25 @@ namespace VULKAN_HPP_NAMESPACE ImageView() = delete; ImageView( ImageView const & ) = delete; + ImageView( ImageView && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_imageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_imageView, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_imageView( VULKAN_HPP_NAMESPACE::exchange( rhs.m_imageView, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } + ImageView & operator=( ImageView const & ) = delete; - ImageView & operator =( ImageView && rhs ) VULKAN_HPP_NOEXCEPT + + ImageView & operator=( ImageView && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_imageView = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_imageView, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_imageView, rhs.m_imageView ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -7477,6 +9413,11 @@ namespace VULKAN_HPP_NAMESPACE return m_imageView; } + operator VULKAN_HPP_NAMESPACE::ImageView() const VULKAN_HPP_NOEXCEPT + { + return m_imageView; + } + void clear() VULKAN_HPP_NOEXCEPT { if ( m_imageView ) @@ -7490,12 +9431,20 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } + VULKAN_HPP_NAMESPACE::ImageView release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_imageView, nullptr ); + } + VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -7514,81 +9463,86 @@ 
namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX getAddressNVX() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::ImageView m_imageView = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ImageView m_imageView = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; - class IndirectCommandsLayoutNV + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class IndirectCommandsLayoutEXT { public: - using CType = VkIndirectCommandsLayoutNV; + using CType = VkIndirectCommandsLayoutEXT; + using CppType = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutEXT; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; public: - IndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + IndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkCreateIndirectCommandsLayoutNV( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_indirectCommandsLayout ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateIndirectCommandsLayoutNV" ); - } + *this = device.createIndirectCommandsLayoutEXT( createInfo, allocator ); } +# endif - IndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VkIndirectCommandsLayoutNV indirectCommandsLayout, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) + IndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkIndirectCommandsLayoutEXT indirectCommandsLayout, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) , m_indirectCommandsLayout( indirectCommandsLayout ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) { } - IndirectCommandsLayoutNV( std::nullptr_t ) {} + IndirectCommandsLayoutEXT( std::nullptr_t ) {} - ~IndirectCommandsLayoutNV() + ~IndirectCommandsLayoutEXT() { clear(); } - IndirectCommandsLayoutNV() = delete; - IndirectCommandsLayoutNV( IndirectCommandsLayoutNV const & ) = delete; - IndirectCommandsLayoutNV( IndirectCommandsLayoutNV && 
rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_indirectCommandsLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_indirectCommandsLayout, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + IndirectCommandsLayoutEXT() = delete; + IndirectCommandsLayoutEXT( IndirectCommandsLayoutEXT const & ) = delete; + + IndirectCommandsLayoutEXT( IndirectCommandsLayoutEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_indirectCommandsLayout( VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectCommandsLayout, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } - IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV const & ) = delete; - IndirectCommandsLayoutNV & operator =( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT + + IndirectCommandsLayoutEXT & operator=( IndirectCommandsLayoutEXT const & ) = delete; + + IndirectCommandsLayoutEXT & operator=( IndirectCommandsLayoutEXT && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_indirectCommandsLayout = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_indirectCommandsLayout, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_indirectCommandsLayout, rhs.m_indirectCommandsLayout ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const & operator*() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayout; + } + + operator VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT() const VULKAN_HPP_NOEXCEPT { return m_indirectCommandsLayout; } @@ -7597,9 +9551,9 @@ namespace VULKAN_HPP_NAMESPACE { if ( m_indirectCommandsLayout ) { - getDispatcher()->vkDestroyIndirectCommandsLayoutNV( static_cast( m_device ), - static_cast( m_indirectCommandsLayout ), - reinterpret_cast( m_allocator ) ); + getDispatcher()->vkDestroyIndirectCommandsLayoutEXT( static_cast( m_device ), + static_cast( m_indirectCommandsLayout ), + reinterpret_cast( m_allocator ) ); } m_device = nullptr; m_indirectCommandsLayout = nullptr; @@ -7607,18 +9561,26 @@ namespace VULKAN_HPP_NAMESPACE m_dispatcher = nullptr; } + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_indirectCommandsLayout, nullptr ); + } + VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; } - void swap( 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV & rhs ) VULKAN_HPP_NOEXCEPT + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutEXT & rhs ) VULKAN_HPP_NOEXCEPT { std::swap( m_device, rhs.m_device ); std::swap( m_indirectCommandsLayout, rhs.m_indirectCommandsLayout ); @@ -7627,84 +9589,110 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV m_indirectCommandsLayout = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT m_indirectCommandsLayout = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; - class PerformanceConfigurationINTEL + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class IndirectCommandsLayoutNV { public: - using CType = VkPerformanceConfigurationINTEL; + using CType = VkIndirectCommandsLayoutNV; + using CppType = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; public: - PerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) - : m_device( *device ), m_dispatcher( device.getDispatcher() ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + IndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkAcquirePerformanceConfigurationINTEL( static_cast( *device ), - reinterpret_cast( &acquireInfo ), - reinterpret_cast( &m_configuration ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkAcquirePerformanceConfigurationINTEL" ); - } + *this = device.createIndirectCommandsLayoutNV( createInfo, allocator ); } +# endif - PerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkPerformanceConfigurationINTEL configuration ) - : m_device( *device ), m_configuration( configuration ), m_dispatcher( device.getDispatcher() ) + IndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkIndirectCommandsLayoutNV indirectCommandsLayout, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_indirectCommandsLayout( indirectCommandsLayout ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) { } - PerformanceConfigurationINTEL( std::nullptr_t ) {} + IndirectCommandsLayoutNV( std::nullptr_t ) {} - 
~PerformanceConfigurationINTEL() + ~IndirectCommandsLayoutNV() { clear(); } - PerformanceConfigurationINTEL() = delete; - PerformanceConfigurationINTEL( PerformanceConfigurationINTEL const & ) = delete; - PerformanceConfigurationINTEL( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_configuration( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_configuration, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + IndirectCommandsLayoutNV() = delete; + IndirectCommandsLayoutNV( IndirectCommandsLayoutNV const & ) = delete; + + IndirectCommandsLayoutNV( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_indirectCommandsLayout( VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectCommandsLayout, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } - PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL const & ) = delete; - PerformanceConfigurationINTEL & operator =( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT + + IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV const & ) = delete; + + IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_configuration = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_configuration, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_indirectCommandsLayout, rhs.m_indirectCommandsLayout ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } - VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL const & operator*() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const & operator*() const VULKAN_HPP_NOEXCEPT { - return m_configuration; + return m_indirectCommandsLayout; + } + + operator VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayout; } void clear() VULKAN_HPP_NOEXCEPT { - if ( m_configuration ) + if ( m_indirectCommandsLayout ) { - getDispatcher()->vkReleasePerformanceConfigurationINTEL( static_cast( m_device ), - static_cast( m_configuration ) ); + getDispatcher()->vkDestroyIndirectCommandsLayoutNV( static_cast( m_device ), + static_cast( m_indirectCommandsLayout ), + reinterpret_cast( m_allocator ) ); } - m_device = nullptr; - m_configuration = nullptr; - m_dispatcher = nullptr; + m_device = nullptr; + m_indirectCommandsLayout = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_indirectCommandsLayout, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -7712,110 +9700,125 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * 
getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; } - void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL & rhs ) VULKAN_HPP_NOEXCEPT + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV & rhs ) VULKAN_HPP_NOEXCEPT { std::swap( m_device, rhs.m_device ); - std::swap( m_configuration, rhs.m_configuration ); + std::swap( m_indirectCommandsLayout, rhs.m_indirectCommandsLayout ); + std::swap( m_allocator, rhs.m_allocator ); std::swap( m_dispatcher, rhs.m_dispatcher ); } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL m_configuration = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV m_indirectCommandsLayout = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; - class PipelineCache + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class IndirectExecutionSetEXT { public: - using CType = VkPipelineCache; + using CType = VkIndirectExecutionSetEXT; + using CppType = VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectExecutionSetEXT; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; public: - PipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + IndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkCreatePipelineCache( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_pipelineCache ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreatePipelineCache" ); - } + *this = device.createIndirectExecutionSetEXT( createInfo, allocator ); } +# endif - PipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VkPipelineCache pipelineCache, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_pipelineCache( pipelineCache ) + IndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkIndirectExecutionSetEXT indirectExecutionSet, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_indirectExecutionSet( 
indirectExecutionSet ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) { } - PipelineCache( std::nullptr_t ) {} + IndirectExecutionSetEXT( std::nullptr_t ) {} - ~PipelineCache() + ~IndirectExecutionSetEXT() { clear(); } - PipelineCache() = delete; - PipelineCache( PipelineCache const & ) = delete; - PipelineCache( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_pipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineCache, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + IndirectExecutionSetEXT() = delete; + IndirectExecutionSetEXT( IndirectExecutionSetEXT const & ) = delete; + + IndirectExecutionSetEXT( IndirectExecutionSetEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_indirectExecutionSet( VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectExecutionSet, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } - PipelineCache & operator=( PipelineCache const & ) = delete; - PipelineCache & operator =( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT + + IndirectExecutionSetEXT & operator=( IndirectExecutionSetEXT const & ) = delete; + + IndirectExecutionSetEXT & operator=( IndirectExecutionSetEXT && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_pipelineCache = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineCache, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_indirectExecutionSet, rhs.m_indirectExecutionSet ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } - VULKAN_HPP_NAMESPACE::PipelineCache const & operator*() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT const & operator*() const VULKAN_HPP_NOEXCEPT { - return m_pipelineCache; + return m_indirectExecutionSet; + } + + operator VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT() const VULKAN_HPP_NOEXCEPT + { + return m_indirectExecutionSet; } void clear() VULKAN_HPP_NOEXCEPT { - if ( m_pipelineCache ) + if ( m_indirectExecutionSet ) { - getDispatcher()->vkDestroyPipelineCache( static_cast( m_device ), - static_cast( m_pipelineCache ), - reinterpret_cast( m_allocator ) ); + getDispatcher()->vkDestroyIndirectExecutionSetEXT( static_cast( m_device ), + static_cast( m_indirectExecutionSet ), + reinterpret_cast( m_allocator ) ); } - m_device = nullptr; - m_pipelineCache = nullptr; - m_allocator = nullptr; - m_dispatcher = nullptr; + m_device = nullptr; + m_indirectExecutionSet = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_indirectExecutionSet, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -7823,201 +9826,132 @@ namespace 
VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; } - void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache & rhs ) VULKAN_HPP_NOEXCEPT + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectExecutionSetEXT & rhs ) VULKAN_HPP_NOEXCEPT { std::swap( m_device, rhs.m_device ); - std::swap( m_pipelineCache, rhs.m_pipelineCache ); + std::swap( m_indirectExecutionSet, rhs.m_indirectExecutionSet ); std::swap( m_allocator, rhs.m_allocator ); std::swap( m_dispatcher, rhs.m_dispatcher ); } - //=== VK_VERSION_1_0 === + //=== VK_EXT_device_generated_commands === - VULKAN_HPP_NODISCARD std::vector getData() const; + void updatePipeline( VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites ) const + VULKAN_HPP_NOEXCEPT; - void merge( VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches ) const; + void updateShader( VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites ) const + VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::PipelineCache m_pipelineCache = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT m_indirectExecutionSet = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; - class Pipeline + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class MicromapEXT { public: - using CType = VkPipeline; + using CType = VkMicromapEXT; + using CppType = VULKAN_HPP_NAMESPACE::MicromapEXT; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipeline; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eMicromapEXT; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; public: - Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, - VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + MicromapEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { - m_constructorSuccessCode = static_cast( - getDispatcher()->vkCreateComputePipelines( static_cast( *device ), - pipelineCache ? 
static_cast( **pipelineCache ) : 0, - 1, - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_pipeline ) ) ); - if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && - ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) - { - throwResultException( m_constructorSuccessCode, "vkCreateComputePipelines" ); - } + *this = device.createMicromapEXT( createInfo, allocator ); } +# endif - Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, - VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) + MicromapEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkMicromapEXT micromap, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_micromap( micromap ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) { - m_constructorSuccessCode = static_cast( - getDispatcher()->vkCreateGraphicsPipelines( static_cast( *device ), - pipelineCache ? static_cast( **pipelineCache ) : 0, - 1, - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_pipeline ) ) ); - if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && - ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) - { - throwResultException( m_constructorSuccessCode, "vkCreateGraphicsPipelines" ); - } } - Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, - VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, - VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) + MicromapEXT( std::nullptr_t ) {} + + ~MicromapEXT() { - m_constructorSuccessCode = static_cast( - getDispatcher()->vkCreateRayTracingPipelinesKHR( static_cast( *device ), - deferredOperation ? static_cast( **deferredOperation ) : 0, - pipelineCache ? 
static_cast( **pipelineCache ) : 0, - 1, - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_pipeline ) ) ); - if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && - ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) && - ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) && - ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) - { - throwResultException( m_constructorSuccessCode, "vkCreateRayTracingPipelinesKHR" ); - } + clear(); } - Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, - VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) + MicromapEXT() = delete; + MicromapEXT( MicromapEXT const & ) = delete; + + MicromapEXT( MicromapEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_micromap( VULKAN_HPP_NAMESPACE::exchange( rhs.m_micromap, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + MicromapEXT & operator=( MicromapEXT const & ) = delete; + + MicromapEXT & operator=( MicromapEXT && rhs ) VULKAN_HPP_NOEXCEPT { - m_constructorSuccessCode = static_cast( - getDispatcher()->vkCreateRayTracingPipelinesNV( static_cast( *device ), - pipelineCache ? static_cast( **pipelineCache ) : 0, - 1, - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_pipeline ) ) ); - if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && - ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + if ( this != &rhs ) { - throwResultException( m_constructorSuccessCode, "vkCreateRayTracingPipelinesNV" ); + std::swap( m_device, rhs.m_device ); + std::swap( m_micromap, rhs.m_micromap ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } + return *this; } - Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VkPipeline pipeline, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr, - VULKAN_HPP_NAMESPACE::Result successCode = VULKAN_HPP_NAMESPACE::Result::eSuccess ) - : m_device( *device ) - , m_pipeline( pipeline ) - , m_allocator( static_cast( allocator ) ) - , m_constructorSuccessCode( successCode ) - , m_dispatcher( device.getDispatcher() ) + VULKAN_HPP_NAMESPACE::MicromapEXT const & operator*() const VULKAN_HPP_NOEXCEPT { + return m_micromap; } - Pipeline( std::nullptr_t ) {} - - ~Pipeline() + operator VULKAN_HPP_NAMESPACE::MicromapEXT() const VULKAN_HPP_NOEXCEPT { - clear(); - } - - Pipeline() = delete; - Pipeline( Pipeline const & ) = delete; - Pipeline( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipeline, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_constructorSuccessCode( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ) ) - , m_dispatcher( 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) - { - } - Pipeline & operator=( Pipeline const & ) = delete; - Pipeline & operator =( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT - { - if ( this != &rhs ) - { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_pipeline = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipeline, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); - } - return *this; - } - - VULKAN_HPP_NAMESPACE::Pipeline const & operator*() const VULKAN_HPP_NOEXCEPT - { - return m_pipeline; + return m_micromap; } void clear() VULKAN_HPP_NOEXCEPT { - if ( m_pipeline ) + if ( m_micromap ) { - getDispatcher()->vkDestroyPipeline( - static_cast( m_device ), static_cast( m_pipeline ), reinterpret_cast( m_allocator ) ); + getDispatcher()->vkDestroyMicromapEXT( + static_cast( m_device ), static_cast( m_micromap ), reinterpret_cast( m_allocator ) ); } - m_device = nullptr; - m_pipeline = nullptr; - m_allocator = nullptr; - m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; - m_dispatcher = nullptr; + m_device = nullptr; + m_micromap = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; } - VULKAN_HPP_NAMESPACE::Result getConstructorSuccessCode() const + VULKAN_HPP_NAMESPACE::MicromapEXT release() { - return m_constructorSuccessCode; + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_micromap, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -8025,274 +9959,125 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; } - void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline & rhs ) VULKAN_HPP_NOEXCEPT + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::MicromapEXT & rhs ) VULKAN_HPP_NOEXCEPT { std::swap( m_device, rhs.m_device ); - std::swap( m_pipeline, rhs.m_pipeline ); + std::swap( m_micromap, rhs.m_micromap ); std::swap( m_allocator, rhs.m_allocator ); - std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode ); std::swap( m_dispatcher, rhs.m_dispatcher ); } - //=== VK_AMD_shader_info === - - VULKAN_HPP_NODISCARD std::vector getShaderInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, - VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType ) const; - - //=== VK_NV_ray_tracing === - - template - VULKAN_HPP_NODISCARD std::vector getRayTracingShaderGroupHandlesNV( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const; - - template - VULKAN_HPP_NODISCARD DataType getRayTracingShaderGroupHandleNV( uint32_t firstGroup, uint32_t groupCount ) const; - - void compileDeferredNV( uint32_t shader ) const; - - //=== VK_KHR_ray_tracing_pipeline === - - template - VULKAN_HPP_NODISCARD std::vector getRayTracingShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const; - - template - VULKAN_HPP_NODISCARD DataType 
getRayTracingShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const; - - template - VULKAN_HPP_NODISCARD std::vector - getRayTracingCaptureReplayShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const; - - template - VULKAN_HPP_NODISCARD DataType getRayTracingCaptureReplayShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const; - - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize - getRayTracingShaderGroupStackSizeKHR( uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT; - private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::Pipeline m_pipeline = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::MicromapEXT m_micromap = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; - class Pipelines : public std::vector + template <> + struct isVulkanRAIIHandleType { - public: - Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - { - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher(); - std::vector pipelines( createInfos.size() ); - VULKAN_HPP_NAMESPACE::Result result = static_cast( dispatcher->vkCreateComputePipelines( - static_cast( *device ), - pipelineCache ? static_cast( **pipelineCache ) : 0, - createInfos.size(), - reinterpret_cast( createInfos.data() ), - reinterpret_cast( static_cast( allocator ) ), - pipelines.data() ) ); - if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) - { - this->reserve( createInfos.size() ); - for ( auto const & pipeline : pipelines ) - { - this->emplace_back( device, pipeline, allocator, result ); - } - } - else - { - throwResultException( result, "vkCreateComputePipelines" ); - } - } - - Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - { - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher(); - std::vector pipelines( createInfos.size() ); - VULKAN_HPP_NAMESPACE::Result result = static_cast( dispatcher->vkCreateGraphicsPipelines( - static_cast( *device ), - pipelineCache ? 
static_cast( **pipelineCache ) : 0, - createInfos.size(), - reinterpret_cast( createInfos.data() ), - reinterpret_cast( static_cast( allocator ) ), - pipelines.data() ) ); - if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) - { - this->reserve( createInfos.size() ); - for ( auto const & pipeline : pipelines ) - { - this->emplace_back( device, pipeline, allocator, result ); - } - } - else - { - throwResultException( result, "vkCreateGraphicsPipelines" ); - } - } - - Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, - VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - { - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher(); - std::vector pipelines( createInfos.size() ); - VULKAN_HPP_NAMESPACE::Result result = static_cast( dispatcher->vkCreateRayTracingPipelinesKHR( - static_cast( *device ), - deferredOperation ? static_cast( **deferredOperation ) : 0, - pipelineCache ? static_cast( **pipelineCache ) : 0, - createInfos.size(), - reinterpret_cast( createInfos.data() ), - reinterpret_cast( static_cast( allocator ) ), - pipelines.data() ) ); - if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) || - ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) - { - this->reserve( createInfos.size() ); - for ( auto const & pipeline : pipelines ) - { - this->emplace_back( device, pipeline, allocator, result ); - } - } - else - { - throwResultException( result, "vkCreateRayTracingPipelinesKHR" ); - } - } - - Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - { - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher(); - std::vector pipelines( createInfos.size() ); - VULKAN_HPP_NAMESPACE::Result result = static_cast( dispatcher->vkCreateRayTracingPipelinesNV( - static_cast( *device ), - pipelineCache ? 
static_cast( **pipelineCache ) : 0, - createInfos.size(), - reinterpret_cast( createInfos.data() ), - reinterpret_cast( static_cast( allocator ) ), - pipelines.data() ) ); - if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) - { - this->reserve( createInfos.size() ); - for ( auto const & pipeline : pipelines ) - { - this->emplace_back( device, pipeline, allocator, result ); - } - } - else - { - throwResultException( result, "vkCreateRayTracingPipelinesNV" ); - } - } - - Pipelines( std::nullptr_t ) {} - - Pipelines() = delete; - Pipelines( Pipelines const & ) = delete; - Pipelines( Pipelines && rhs ) = default; - Pipelines & operator=( Pipelines const & ) = delete; - Pipelines & operator=( Pipelines && rhs ) = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - class PipelineLayout + class OpticalFlowSessionNV { public: - using CType = VkPipelineLayout; + using CType = VkOpticalFlowSessionNV; + using CppType = VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eOpticalFlowSessionNV; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; public: - PipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + OpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkCreatePipelineLayout( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_pipelineLayout ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreatePipelineLayout" ); - } + *this = device.createOpticalFlowSessionNV( createInfo, allocator ); } +# endif - PipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VkPipelineLayout pipelineLayout, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_pipelineLayout( pipelineLayout ) + OpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkOpticalFlowSessionNV session, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_session( session ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) { } - PipelineLayout( std::nullptr_t ) {} + OpticalFlowSessionNV( std::nullptr_t ) {} - ~PipelineLayout() + ~OpticalFlowSessionNV() { clear(); } - PipelineLayout() = delete; - PipelineLayout( PipelineLayout const & ) = delete; - PipelineLayout( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_pipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineLayout, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + OpticalFlowSessionNV() = delete; + OpticalFlowSessionNV( OpticalFlowSessionNV const & ) = delete; + + OpticalFlowSessionNV( OpticalFlowSessionNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_session( VULKAN_HPP_NAMESPACE::exchange( rhs.m_session, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } - PipelineLayout & operator=( PipelineLayout const & ) = delete; - PipelineLayout & operator =( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT + + OpticalFlowSessionNV & operator=( OpticalFlowSessionNV const & ) = delete; + + OpticalFlowSessionNV & operator=( OpticalFlowSessionNV && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_pipelineLayout = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineLayout, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_session, rhs.m_session ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } - VULKAN_HPP_NAMESPACE::PipelineLayout const & operator*() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV const & operator*() const VULKAN_HPP_NOEXCEPT { - return m_pipelineLayout; + return m_session; + } + + operator VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV() const VULKAN_HPP_NOEXCEPT + { + return m_session; } void clear() VULKAN_HPP_NOEXCEPT { - if ( m_pipelineLayout ) + if ( m_session ) { - getDispatcher()->vkDestroyPipelineLayout( static_cast( m_device ), - static_cast( m_pipelineLayout ), - reinterpret_cast( m_allocator ) ); + getDispatcher()->vkDestroyOpticalFlowSessionNV( static_cast( m_device ), + static_cast( m_session ), + reinterpret_cast( m_allocator ) ); } - m_device = nullptr; - m_pipelineLayout = nullptr; - m_allocator = nullptr; - m_dispatcher = nullptr; + m_device = nullptr; + m_session = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_session, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -8300,112 +10085,120 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; } - void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineLayout & rhs ) VULKAN_HPP_NOEXCEPT + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV & rhs ) VULKAN_HPP_NOEXCEPT 
{ std::swap( m_device, rhs.m_device ); - std::swap( m_pipelineLayout, rhs.m_pipelineLayout ); + std::swap( m_session, rhs.m_session ); std::swap( m_allocator, rhs.m_allocator ); std::swap( m_dispatcher, rhs.m_dispatcher ); } + //=== VK_NV_optical_flow === + + void bindImage( VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, + VULKAN_HPP_NAMESPACE::ImageView view, + VULKAN_HPP_NAMESPACE::ImageLayout layout ) const; + private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::PipelineLayout m_pipelineLayout = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV m_session = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; - class PrivateDataSlot + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class PerformanceConfigurationINTEL { public: - using CType = VkPrivateDataSlot; + using CType = VkPerformanceConfigurationINTEL; + using CppType = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; public: - PrivateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + PerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkCreatePrivateDataSlot( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_privateDataSlot ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreatePrivateDataSlot" ); - } + *this = device.acquirePerformanceConfigurationINTEL( acquireInfo ); } +# endif - PrivateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VkPrivateDataSlot privateDataSlot, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_privateDataSlot( privateDataSlot ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) + PerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkPerformanceConfigurationINTEL configuration ) + : m_device( device ), m_configuration( configuration ), m_dispatcher( device.getDispatcher() ) { } - PrivateDataSlot( std::nullptr_t ) {} + PerformanceConfigurationINTEL( std::nullptr_t ) {} - ~PrivateDataSlot() + 
~PerformanceConfigurationINTEL() { clear(); } - PrivateDataSlot() = delete; - PrivateDataSlot( PrivateDataSlot const & ) = delete; - PrivateDataSlot( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_privateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_privateDataSlot, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + PerformanceConfigurationINTEL() = delete; + PerformanceConfigurationINTEL( PerformanceConfigurationINTEL const & ) = delete; + + PerformanceConfigurationINTEL( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_configuration( VULKAN_HPP_NAMESPACE::exchange( rhs.m_configuration, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } - PrivateDataSlot & operator=( PrivateDataSlot const & ) = delete; - PrivateDataSlot & operator =( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT + + PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL const & ) = delete; + + PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_privateDataSlot = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_privateDataSlot, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_configuration, rhs.m_configuration ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } - VULKAN_HPP_NAMESPACE::PrivateDataSlot const & operator*() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL const & operator*() const VULKAN_HPP_NOEXCEPT { - return m_privateDataSlot; + return m_configuration; + } + + operator VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT + { + return m_configuration; } void clear() VULKAN_HPP_NOEXCEPT { - if ( m_privateDataSlot ) + if ( m_configuration ) { - getDispatcher()->vkDestroyPrivateDataSlot( static_cast( m_device ), - static_cast( m_privateDataSlot ), - reinterpret_cast( m_allocator ) ); + getDispatcher()->vkReleasePerformanceConfigurationINTEL( static_cast( m_device ), + static_cast( m_configuration ) ); } - m_device = nullptr; - m_privateDataSlot = nullptr; - m_allocator = nullptr; - m_dispatcher = nullptr; + m_device = nullptr; + m_configuration = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL release() + { + m_device = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_configuration, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -8413,111 +10206,123 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; } - void swap( 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot & rhs ) VULKAN_HPP_NOEXCEPT + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL & rhs ) VULKAN_HPP_NOEXCEPT { std::swap( m_device, rhs.m_device ); - std::swap( m_privateDataSlot, rhs.m_privateDataSlot ); - std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_configuration, rhs.m_configuration ); std::swap( m_dispatcher, rhs.m_dispatcher ); } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::PrivateDataSlot m_privateDataSlot = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL m_configuration = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; - class QueryPool + template <> + struct isVulkanRAIIHandleType { - public: - using CType = VkQueryPool; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool; + class PipelineCache + { + public: + using CType = VkPipelineCache; + using CppType = VULKAN_HPP_NAMESPACE::PipelineCache; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache; public: - QueryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + PipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { - VULKAN_HPP_NAMESPACE::Result result = - static_cast( device.getDispatcher()->vkCreateQueryPool( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_queryPool ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateQueryPool" ); - } + *this = device.createPipelineCache( createInfo, allocator ); } +# endif - QueryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VkQueryPool queryPool, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_queryPool( queryPool ) + PipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkPipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_pipelineCache( pipelineCache ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) { } - QueryPool( std::nullptr_t ) {} + PipelineCache( std::nullptr_t ) {} - ~QueryPool() + ~PipelineCache() { clear(); } - QueryPool() = delete; - QueryPool( QueryPool const & ) = delete; - QueryPool( QueryPool && rhs ) 
VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_queryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_queryPool, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + PipelineCache() = delete; + PipelineCache( PipelineCache const & ) = delete; + + PipelineCache( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_pipelineCache( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineCache, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } - QueryPool & operator=( QueryPool const & ) = delete; - QueryPool & operator =( QueryPool && rhs ) VULKAN_HPP_NOEXCEPT + + PipelineCache & operator=( PipelineCache const & ) = delete; + + PipelineCache & operator=( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_queryPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_queryPool, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_pipelineCache, rhs.m_pipelineCache ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } - VULKAN_HPP_NAMESPACE::QueryPool const & operator*() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::PipelineCache const & operator*() const VULKAN_HPP_NOEXCEPT { - return m_queryPool; + return m_pipelineCache; + } + + operator VULKAN_HPP_NAMESPACE::PipelineCache() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineCache; } void clear() VULKAN_HPP_NOEXCEPT { - if ( m_queryPool ) + if ( m_pipelineCache ) { - getDispatcher()->vkDestroyQueryPool( - static_cast( m_device ), static_cast( m_queryPool ), reinterpret_cast( m_allocator ) ); + getDispatcher()->vkDestroyPipelineCache( static_cast( m_device ), + static_cast( m_pipelineCache ), + reinterpret_cast( m_allocator ) ); } + m_device = nullptr; + m_pipelineCache = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::PipelineCache release() + { m_device = nullptr; - m_queryPool = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_pipelineCache, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -8525,281 +10330,427 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; } - void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::QueryPool & rhs ) VULKAN_HPP_NOEXCEPT + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache & rhs ) VULKAN_HPP_NOEXCEPT { std::swap( m_device, rhs.m_device ); - std::swap( m_queryPool, rhs.m_queryPool ); + std::swap( m_pipelineCache, rhs.m_pipelineCache ); std::swap( m_allocator, 
rhs.m_allocator ); std::swap( m_dispatcher, rhs.m_dispatcher ); } //=== VK_VERSION_1_0 === - template - VULKAN_HPP_NODISCARD std::pair> - getResults( uint32_t firstQuery, - uint32_t queryCount, - size_t dataSize, - VULKAN_HPP_NAMESPACE::DeviceSize stride, - VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; - - template - VULKAN_HPP_NODISCARD std::pair - getResult( uint32_t firstQuery, - uint32_t queryCount, - VULKAN_HPP_NAMESPACE::DeviceSize stride, - VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; - - //=== VK_VERSION_1_2 === - - void reset( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT; - - //=== VK_EXT_host_query_reset === + VULKAN_HPP_NODISCARD std::vector getData() const; - void resetEXT( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT; + void merge( VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches ) const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::QueryPool m_queryPool = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PipelineCache m_pipelineCache = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; - class Queue + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Pipeline { public: - using CType = VkQueue; + using CType = VkPipeline; + using CppType = VULKAN_HPP_NAMESPACE::Pipeline; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueue; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipeline; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline; public: - Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, uint32_t queueFamilyIndex, uint32_t queueIndex ) - : m_dispatcher( device.getDispatcher() ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { - getDispatcher()->vkGetDeviceQueue( static_cast( *device ), queueFamilyIndex, queueIndex, reinterpret_cast( &m_queue ) ); + *this = device.createComputePipeline( pipelineCache, createInfo, allocator ); } +# endif - Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) - : m_dispatcher( device.getDispatcher() ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { - 
getDispatcher()->vkGetDeviceQueue2( - static_cast( *device ), reinterpret_cast( &queueInfo ), reinterpret_cast( &m_queue ) ); + *this = device.createExecutionGraphPipelineAMDX( pipelineCache, createInfo, allocator ); } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif - Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkQueue queue ) : m_queue( queue ), m_dispatcher( device.getDispatcher() ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { + *this = device.createGraphicsPipeline( pipelineCache, createInfo, allocator ); } +# endif - Queue( std::nullptr_t ) {} +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createRayTracingPipelineKHR( deferredOperation, pipelineCache, createInfo, allocator ); + } +# endif - ~Queue() +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { - clear(); + *this = device.createRayTracingPipelineNV( pipelineCache, createInfo, allocator ); } +# endif - Queue() = delete; - Queue( Queue const & rhs ) : m_queue( rhs.m_queue ), m_dispatcher( rhs.m_dispatcher ) {} - Queue( Queue && rhs ) VULKAN_HPP_NOEXCEPT - : m_queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_queue, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkPipeline pipeline, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr, + VULKAN_HPP_NAMESPACE::Result successCode = VULKAN_HPP_NAMESPACE::Result::eSuccess ) + : m_device( device ) + , m_pipeline( pipeline ) + , m_allocator( static_cast( allocator ) ) + , m_constructorSuccessCode( successCode ) + , m_dispatcher( device.getDispatcher() ) { } - Queue & operator=( Queue const & rhs ) + + Pipeline( std::nullptr_t ) {} + + ~Pipeline() { - m_queue = rhs.m_queue; - m_dispatcher = rhs.m_dispatcher; - return *this; + clear(); } - Queue & operator=( Queue && rhs ) VULKAN_HPP_NOEXCEPT + + Pipeline() = delete; + Pipeline( Pipeline const & ) = delete; + + Pipeline( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_pipeline( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipeline, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_constructorSuccessCode( VULKAN_HPP_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + Pipeline & operator=( Pipeline const & ) = delete; + + Pipeline & operator=( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - m_queue = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( 
rhs.m_queue, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_pipeline, rhs.m_pipeline ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } - VULKAN_HPP_NAMESPACE::Queue const & operator*() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::Pipeline const & operator*() const VULKAN_HPP_NOEXCEPT { - return m_queue; + return m_pipeline; + } + + operator VULKAN_HPP_NAMESPACE::Pipeline() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline; } void clear() VULKAN_HPP_NOEXCEPT { - m_queue = nullptr; - m_dispatcher = nullptr; + if ( m_pipeline ) + { + getDispatcher()->vkDestroyPipeline( + static_cast( m_device ), static_cast( m_pipeline ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_pipeline = nullptr; + m_allocator = nullptr; + m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::Pipeline release() + { + m_device = nullptr; + m_allocator = nullptr; + m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_pipeline, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Result getConstructorSuccessCode() const + { + return m_constructorSuccessCode; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; } - void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Queue & rhs ) VULKAN_HPP_NOEXCEPT + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline & rhs ) VULKAN_HPP_NOEXCEPT { - std::swap( m_queue, rhs.m_queue ); + std::swap( m_device, rhs.m_device ); + std::swap( m_pipeline, rhs.m_pipeline ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode ); std::swap( m_dispatcher, rhs.m_dispatcher ); } - //=== VK_VERSION_1_0 === - - void submit( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, - VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + //=== VK_AMD_shader_info === - void waitIdle() const; + VULKAN_HPP_NODISCARD std::vector getShaderInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType ) const; - void bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfo, - VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === - //=== VK_VERSION_1_3 === + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX getExecutionGraphScratchSizeAMDX() const; - void submit2( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, - VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + VULKAN_HPP_NODISCARD uint32_t getExecutionGraphNodeIndexAMDX( const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo ) const; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - //=== VK_KHR_swapchain === + //=== VK_KHR_ray_tracing_pipeline === - VULKAN_HPP_NODISCARD 
VULKAN_HPP_NAMESPACE::Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo ) const; + template + VULKAN_HPP_NODISCARD std::vector getRayTracingShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const; - //=== VK_EXT_debug_utils === + template + VULKAN_HPP_NODISCARD DataType getRayTracingShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const; - void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD std::vector + getRayTracingCaptureReplayShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const; - void endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD DataType getRayTracingCaptureReplayShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const; - void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize + getRayTracingShaderGroupStackSizeKHR( uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT; - //=== VK_NV_device_diagnostic_checkpoints === + //=== VK_NV_ray_tracing === - VULKAN_HPP_NODISCARD std::vector getCheckpointDataNV() const; + template + VULKAN_HPP_NODISCARD std::vector getRayTracingShaderGroupHandlesNV( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const; - //=== VK_INTEL_performance_query === + template + VULKAN_HPP_NODISCARD DataType getRayTracingShaderGroupHandleNV( uint32_t firstGroup, uint32_t groupCount ) const; - void setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration ) const; + void compileDeferredNV( uint32_t shader ) const; - //=== VK_KHR_synchronization2 === + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Pipeline m_pipeline = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; - void submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, - VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; - - VULKAN_HPP_NODISCARD std::vector getCheckpointData2NV() const; - - private: - VULKAN_HPP_NAMESPACE::Queue m_queue = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - class RenderPass + class Pipelines : public std::vector { public: - using CType = VkRenderPass; +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createComputePipelines( pipelineCache, createInfos, allocator ); + } +# endif - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - 
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass; +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createExecutionGraphPipelinesAMDX( pipelineCache, createInfos, allocator ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ +# endif - public: - RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkCreateRenderPass( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_renderPass ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateRenderPass" ); - } + *this = device.createGraphicsPipelines( pipelineCache, createInfos, allocator ); } +# endif - RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkCreateRenderPass2( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_renderPass ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateRenderPass2" ); - } + *this = device.createRayTracingPipelinesKHR( deferredOperation, pipelineCache, createInfos, allocator ); } +# endif - RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VkRenderPass renderPass, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_renderPass( renderPass ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createRayTracingPipelinesNV( pipelineCache, createInfos, allocator ); + } +# endif + + Pipelines( std::nullptr_t ) {} + + Pipelines() = delete; + Pipelines( Pipelines const & ) = delete; + Pipelines( Pipelines && rhs ) = 
default; + Pipelines & operator=( Pipelines const & ) = delete; + Pipelines & operator=( Pipelines && rhs ) = default; + + private: + Pipelines( std::vector && rhs ) + { + std::swap( *this, rhs ); + } + }; + + class PipelineBinaryKHR + { + public: + using CType = VkPipelineBinaryKHR; + using CppType = VULKAN_HPP_NAMESPACE::PipelineBinaryKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineBinaryKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + PipelineBinaryKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkPipelineBinaryKHR pipelineBinary, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr, + VULKAN_HPP_NAMESPACE::Result successCode = VULKAN_HPP_NAMESPACE::Result::eSuccess ) + : m_device( device ) + , m_pipelineBinary( pipelineBinary ) , m_allocator( static_cast( allocator ) ) + , m_constructorSuccessCode( successCode ) , m_dispatcher( device.getDispatcher() ) { } - RenderPass( std::nullptr_t ) {} + PipelineBinaryKHR( std::nullptr_t ) {} - ~RenderPass() + ~PipelineBinaryKHR() { clear(); } - RenderPass() = delete; - RenderPass( RenderPass const & ) = delete; - RenderPass( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_renderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_renderPass, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + PipelineBinaryKHR() = delete; + PipelineBinaryKHR( PipelineBinaryKHR const & ) = delete; + + PipelineBinaryKHR( PipelineBinaryKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_pipelineBinary( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineBinary, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_constructorSuccessCode( VULKAN_HPP_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } - RenderPass & operator=( RenderPass const & ) = delete; - RenderPass & operator =( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT + + PipelineBinaryKHR & operator=( PipelineBinaryKHR const & ) = delete; + + PipelineBinaryKHR & operator=( PipelineBinaryKHR && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_renderPass = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_renderPass, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_pipelineBinary, rhs.m_pipelineBinary ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } - VULKAN_HPP_NAMESPACE::RenderPass const & operator*() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::PipelineBinaryKHR const & operator*() const VULKAN_HPP_NOEXCEPT { - return m_renderPass; + return 
m_pipelineBinary; + } + + operator VULKAN_HPP_NAMESPACE::PipelineBinaryKHR() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineBinary; } void clear() VULKAN_HPP_NOEXCEPT { - if ( m_renderPass ) + if ( m_pipelineBinary ) { - getDispatcher()->vkDestroyRenderPass( - static_cast( m_device ), static_cast( m_renderPass ), reinterpret_cast( m_allocator ) ); + getDispatcher()->vkDestroyPipelineBinaryKHR( static_cast( m_device ), + static_cast( m_pipelineBinary ), + reinterpret_cast( m_allocator ) ); } - m_device = nullptr; - m_renderPass = nullptr; - m_allocator = nullptr; - m_dispatcher = nullptr; + m_device = nullptr; + m_pipelineBinary = nullptr; + m_allocator = nullptr; + m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::PipelineBinaryKHR release() + { + m_device = nullptr; + m_allocator = nullptr; + m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_pipelineBinary, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Result getConstructorSuccessCode() const + { + return m_constructorSuccessCode; } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -8807,119 +10758,154 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; } - void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass & rhs ) VULKAN_HPP_NOEXCEPT + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineBinaryKHR & rhs ) VULKAN_HPP_NOEXCEPT { std::swap( m_device, rhs.m_device ); - std::swap( m_renderPass, rhs.m_renderPass ); + std::swap( m_pipelineBinary, rhs.m_pipelineBinary ); std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode ); std::swap( m_dispatcher, rhs.m_dispatcher ); } - //=== VK_VERSION_1_0 === + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PipelineBinaryKHR m_pipelineBinary = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D getRenderAreaGranularity() const VULKAN_HPP_NOEXCEPT; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; - //=== VK_HUAWEI_subpass_shading === + class PipelineBinaryKHRs : public std::vector + { + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + PipelineBinaryKHRs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createPipelineBinariesKHR( createInfo, allocator ); + } +# endif + + PipelineBinaryKHRs( std::nullptr_t ) {} - VULKAN_HPP_NODISCARD std::pair getSubpassShadingMaxWorkgroupSizeHUAWEI() const; + PipelineBinaryKHRs() = delete; + PipelineBinaryKHRs( PipelineBinaryKHRs const & ) = delete; + PipelineBinaryKHRs( PipelineBinaryKHRs && rhs ) = default; + PipelineBinaryKHRs & operator=( PipelineBinaryKHRs 
const & ) = delete; + PipelineBinaryKHRs & operator=( PipelineBinaryKHRs && rhs ) = default; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::RenderPass m_renderPass = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + PipelineBinaryKHRs( std::vector && rhs ) + { + std::swap( *this, rhs ); + } }; - class Sampler + class PipelineLayout { public: - using CType = VkSampler; + using CType = VkPipelineLayout; + using CppType = VULKAN_HPP_NAMESPACE::PipelineLayout; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSampler; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout; public: - Sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + PipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { - VULKAN_HPP_NAMESPACE::Result result = - static_cast( device.getDispatcher()->vkCreateSampler( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_sampler ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateSampler" ); - } + *this = device.createPipelineLayout( createInfo, allocator ); } +# endif - Sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VkSampler sampler, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_sampler( sampler ) + PipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkPipelineLayout pipelineLayout, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_pipelineLayout( pipelineLayout ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) { } - Sampler( std::nullptr_t ) {} + PipelineLayout( std::nullptr_t ) {} - ~Sampler() + ~PipelineLayout() { clear(); } - Sampler() = delete; - Sampler( Sampler const & ) = delete; - Sampler( Sampler && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_sampler, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + PipelineLayout() = delete; + PipelineLayout( PipelineLayout const & ) = delete; + + PipelineLayout( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_pipelineLayout( VULKAN_HPP_NAMESPACE::exchange( 
rhs.m_pipelineLayout, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } - Sampler & operator=( Sampler const & ) = delete; - Sampler & operator =( Sampler && rhs ) VULKAN_HPP_NOEXCEPT + + PipelineLayout & operator=( PipelineLayout const & ) = delete; + + PipelineLayout & operator=( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_sampler = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_sampler, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_pipelineLayout, rhs.m_pipelineLayout ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } - VULKAN_HPP_NAMESPACE::Sampler const & operator*() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::PipelineLayout const & operator*() const VULKAN_HPP_NOEXCEPT { - return m_sampler; + return m_pipelineLayout; + } + + operator VULKAN_HPP_NAMESPACE::PipelineLayout() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout; } void clear() VULKAN_HPP_NOEXCEPT { - if ( m_sampler ) + if ( m_pipelineLayout ) { - getDispatcher()->vkDestroySampler( - static_cast( m_device ), static_cast( m_sampler ), reinterpret_cast( m_allocator ) ); + getDispatcher()->vkDestroyPipelineLayout( static_cast( m_device ), + static_cast( m_pipelineLayout ), + reinterpret_cast( m_allocator ) ); } + m_device = nullptr; + m_pipelineLayout = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::PipelineLayout release() + { m_device = nullptr; - m_sampler = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_pipelineLayout, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -8927,224 +10913,250 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; } - void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Sampler & rhs ) VULKAN_HPP_NOEXCEPT + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineLayout & rhs ) VULKAN_HPP_NOEXCEPT { std::swap( m_device, rhs.m_device ); - std::swap( m_sampler, rhs.m_sampler ); + std::swap( m_pipelineLayout, rhs.m_pipelineLayout ); std::swap( m_allocator, rhs.m_allocator ); std::swap( m_dispatcher, rhs.m_dispatcher ); } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::Sampler m_sampler = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout m_pipelineLayout = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; - class SamplerYcbcrConversion + template <> + struct 
isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class PrivateDataSlot { public: - using CType = VkSamplerYcbcrConversion; + using CType = VkPrivateDataSlot; + using CppType = VULKAN_HPP_NAMESPACE::PrivateDataSlot; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; public: - SamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + PrivateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkCreateSamplerYcbcrConversion( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_ycbcrConversion ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateSamplerYcbcrConversion" ); - } + *this = device.createPrivateDataSlot( createInfo, allocator ); } +# endif - SamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VkSamplerYcbcrConversion ycbcrConversion, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_ycbcrConversion( ycbcrConversion ) + PrivateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkPrivateDataSlot privateDataSlot, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_privateDataSlot( privateDataSlot ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) { } - SamplerYcbcrConversion( std::nullptr_t ) {} + PrivateDataSlot( std::nullptr_t ) {} - ~SamplerYcbcrConversion() + ~PrivateDataSlot() { clear(); } - SamplerYcbcrConversion() = delete; - SamplerYcbcrConversion( SamplerYcbcrConversion const & ) = delete; - SamplerYcbcrConversion( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_ycbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_ycbcrConversion, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + PrivateDataSlot() = delete; + PrivateDataSlot( PrivateDataSlot const & ) = delete; + + PrivateDataSlot( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_privateDataSlot( VULKAN_HPP_NAMESPACE::exchange( rhs.m_privateDataSlot, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( 
rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } - SamplerYcbcrConversion & operator=( SamplerYcbcrConversion const & ) = delete; - SamplerYcbcrConversion & operator =( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT + + PrivateDataSlot & operator=( PrivateDataSlot const & ) = delete; + + PrivateDataSlot & operator=( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_ycbcrConversion = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_ycbcrConversion, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_privateDataSlot, rhs.m_privateDataSlot ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } - VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const & operator*() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::PrivateDataSlot const & operator*() const VULKAN_HPP_NOEXCEPT { - return m_ycbcrConversion; + return m_privateDataSlot; + } + + operator VULKAN_HPP_NAMESPACE::PrivateDataSlot() const VULKAN_HPP_NOEXCEPT + { + return m_privateDataSlot; } void clear() VULKAN_HPP_NOEXCEPT { - if ( m_ycbcrConversion ) + if ( m_privateDataSlot ) { - getDispatcher()->vkDestroySamplerYcbcrConversion( static_cast( m_device ), - static_cast( m_ycbcrConversion ), - reinterpret_cast( m_allocator ) ); + getDispatcher()->vkDestroyPrivateDataSlot( static_cast( m_device ), + static_cast( m_privateDataSlot ), + reinterpret_cast( m_allocator ) ); } m_device = nullptr; - m_ycbcrConversion = nullptr; + m_privateDataSlot = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; } + VULKAN_HPP_NAMESPACE::PrivateDataSlot release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_privateDataSlot, nullptr ); + } + VULKAN_HPP_NAMESPACE::Device getDevice() const { return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; } - void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion & rhs ) VULKAN_HPP_NOEXCEPT + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot & rhs ) VULKAN_HPP_NOEXCEPT { std::swap( m_device, rhs.m_device ); - std::swap( m_ycbcrConversion, rhs.m_ycbcrConversion ); + std::swap( m_privateDataSlot, rhs.m_privateDataSlot ); std::swap( m_allocator, rhs.m_allocator ); std::swap( m_dispatcher, rhs.m_dispatcher ); } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion m_ycbcrConversion = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PrivateDataSlot m_privateDataSlot = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * 
m_dispatcher = nullptr; }; - class Semaphore + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class QueryPool { public: - using CType = VkSemaphore; + using CType = VkQueryPool; + using CppType = VULKAN_HPP_NAMESPACE::QueryPool; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool; public: - Semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + QueryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = - static_cast( device.getDispatcher()->vkCreateSemaphore( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_semaphore ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateSemaphore" ); - } + *this = device.createQueryPool( createInfo, allocator ); } +# endif - Semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VkSemaphore semaphore, + QueryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkQueryPool queryPool, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_semaphore( semaphore ) + : m_device( device ) + , m_queryPool( queryPool ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) { } - Semaphore( std::nullptr_t ) {} + QueryPool( std::nullptr_t ) {} - ~Semaphore() + ~QueryPool() { clear(); } - Semaphore() = delete; - Semaphore( Semaphore const & ) = delete; - Semaphore( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_semaphore, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + QueryPool() = delete; + QueryPool( QueryPool const & ) = delete; + + QueryPool( QueryPool && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_queryPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_queryPool, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } - Semaphore & operator=( Semaphore const & ) = delete; - Semaphore & operator =( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT + + QueryPool & operator=( QueryPool const & ) = delete; + + QueryPool & operator=( QueryPool && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} );
-          m_semaphore  = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_semaphore, {} );
-          m_allocator  = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} );
-          m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr );
+          std::swap( m_device, rhs.m_device );
+          std::swap( m_queryPool, rhs.m_queryPool );
+          std::swap( m_allocator, rhs.m_allocator );
+          std::swap( m_dispatcher, rhs.m_dispatcher );
        }
        return *this;
      }

-      VULKAN_HPP_NAMESPACE::Semaphore const & operator*() const VULKAN_HPP_NOEXCEPT
+      VULKAN_HPP_NAMESPACE::QueryPool const & operator*() const VULKAN_HPP_NOEXCEPT
      {
-        return m_semaphore;
+        return m_queryPool;
+      }
+
+      operator VULKAN_HPP_NAMESPACE::QueryPool() const VULKAN_HPP_NOEXCEPT
+      {
+        return m_queryPool;
      }

      void clear() VULKAN_HPP_NOEXCEPT
      {
-        if ( m_semaphore )
+        if ( m_queryPool )
        {
-          getDispatcher()->vkDestroySemaphore(
-            static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( m_semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
+          getDispatcher()->vkDestroyQueryPool(
+            static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( m_queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
        }
        m_device = nullptr;
-        m_semaphore = nullptr;
+        m_queryPool = nullptr;
+        m_allocator = nullptr;
+        m_dispatcher = nullptr;
+      }
+
+      VULKAN_HPP_NAMESPACE::QueryPool release()
+      {
+        m_device = nullptr;
        m_allocator = nullptr;
        m_dispatcher = nullptr;
+        return VULKAN_HPP_NAMESPACE::exchange( m_queryPool, nullptr );
      }

      VULKAN_HPP_NAMESPACE::Device getDevice() const
@@ -9152,625 +11164,571 @@ namespace VULKAN_HPP_NAMESPACE
        return m_device;
      }

-      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
      {
        VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
        return m_dispatcher;
      }

-      void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Semaphore & rhs ) VULKAN_HPP_NOEXCEPT
+      void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::QueryPool & rhs ) VULKAN_HPP_NOEXCEPT
      {
        std::swap( m_device, rhs.m_device );
-        std::swap( m_semaphore, rhs.m_semaphore );
+        std::swap( m_queryPool, rhs.m_queryPool );
        std::swap( m_allocator, rhs.m_allocator );
        std::swap( m_dispatcher, rhs.m_dispatcher );
      }

+      //=== VK_VERSION_1_0 ===
+
+      template <typename DataType, typename DataTypeAllocator = std::allocator<DataType>>
+      VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::Result, std::vector<DataType, DataTypeAllocator>>
+        getResults( uint32_t firstQuery,
+                    uint32_t queryCount,
+                    size_t dataSize,
+                    VULKAN_HPP_NAMESPACE::DeviceSize stride,
+                    VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+
+      template <typename DataType>
+      VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::Result, DataType>
+        getResult( uint32_t firstQuery,
+                   uint32_t queryCount,
+                   VULKAN_HPP_NAMESPACE::DeviceSize stride,
+                   VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
+
      //=== VK_VERSION_1_2 ===

-      VULKAN_HPP_NODISCARD uint64_t getCounterValue() const;
+      void reset( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT;

-      //=== VK_KHR_timeline_semaphore ===
+      //=== VK_EXT_host_query_reset ===

-      VULKAN_HPP_NODISCARD uint64_t getCounterValueKHR() const;
+      void resetEXT( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT;

    private:
-      VULKAN_HPP_NAMESPACE::Device m_device = {};
-      VULKAN_HPP_NAMESPACE::Semaphore m_semaphore = {};
-      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
-      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
+      VULKAN_HPP_NAMESPACE::Device m_device = {};
+      VULKAN_HPP_NAMESPACE::QueryPool m_queryPool = {};
+      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
    };

-    class ShaderModule
+    template <>
+    struct isVulkanRAIIHandleType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::QueryPool>
+    {
+      static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
+    };
+
+    class Queue
    {
    public:
-      using CType = VkShaderModule;
+      using CType = VkQueue;
+      using CppType = VULKAN_HPP_NAMESPACE::Queue;

-      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule;
+      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueue;
      static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
-        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule;
+        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue;

    public:
-      ShaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
-                    VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo,
-                    VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-        : m_device( *device )
-        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
-        , m_dispatcher( device.getDispatcher() )
+# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
+      Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, uint32_t queueFamilyIndex, uint32_t queueIndex )
      {
-        VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
-          device.getDispatcher()->vkCreateShaderModule( static_cast<VkDevice>( *device ),
-                                                        reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
-                                                        reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
-                                                        reinterpret_cast<VkShaderModule *>( &m_shaderModule ) ) );
-        if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
-        {
-          throwResultException( result, "vkCreateShaderModule" );
-        }
+        *this = device.getQueue( queueFamilyIndex, queueIndex );
      }
+# endif

-      ShaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
-                    VkShaderModule shaderModule,
-                    VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
-        : m_device( *device )
-        , m_shaderModule( shaderModule )
-        , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
-        , m_dispatcher( device.getDispatcher() )
+# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
+      Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo )
      {
+        *this = device.getQueue2( queueInfo );
      }
+# endif

-      ShaderModule( std::nullptr_t ) {}
+      Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkQueue queue ) : m_queue( queue ), m_dispatcher( device.getDispatcher() )
+      {
+      }

-      ~ShaderModule()
+      Queue( std::nullptr_t ) {}
+
+      ~Queue()
      {
        clear();
      }

-      ShaderModule() = delete;
-      ShaderModule( ShaderModule const & ) = delete;
-      ShaderModule( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT
-        : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
-        , m_shaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shaderModule, {} ) )
-        , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
-        , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
+      Queue() = delete;
+
+      Queue( Queue const & rhs ) : m_queue( rhs.m_queue ), m_dispatcher( rhs.m_dispatcher ) {}
+
+      Queue( Queue && rhs ) VULKAN_HPP_NOEXCEPT
+        : m_queue( 
VULKAN_HPP_NAMESPACE::exchange( rhs.m_queue, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } - ShaderModule & operator=( ShaderModule const & ) = delete; - ShaderModule & operator =( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT + + Queue & operator=( Queue const & rhs ) + { + m_queue = rhs.m_queue; + m_dispatcher = rhs.m_dispatcher; + return *this; + } + + Queue & operator=( Queue && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_shaderModule = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shaderModule, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_queue, rhs.m_queue ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } - VULKAN_HPP_NAMESPACE::ShaderModule const & operator*() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::Queue const & operator*() const VULKAN_HPP_NOEXCEPT { - return m_shaderModule; + return m_queue; + } + + operator VULKAN_HPP_NAMESPACE::Queue() const VULKAN_HPP_NOEXCEPT + { + return m_queue; } void clear() VULKAN_HPP_NOEXCEPT { - if ( m_shaderModule ) - { - getDispatcher()->vkDestroyShaderModule( - static_cast( m_device ), static_cast( m_shaderModule ), reinterpret_cast( m_allocator ) ); - } - m_device = nullptr; - m_shaderModule = nullptr; - m_allocator = nullptr; - m_dispatcher = nullptr; + m_queue = nullptr; + m_dispatcher = nullptr; } - VULKAN_HPP_NAMESPACE::Device getDevice() const + VULKAN_HPP_NAMESPACE::Queue release() { - return m_device; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_queue, nullptr ); } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; } - void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderModule & rhs ) VULKAN_HPP_NOEXCEPT + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Queue & rhs ) VULKAN_HPP_NOEXCEPT { - std::swap( m_device, rhs.m_device ); - std::swap( m_shaderModule, rhs.m_shaderModule ); - std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_queue, rhs.m_queue ); std::swap( m_dispatcher, rhs.m_dispatcher ); } - //=== VK_EXT_shader_module_identifier === + //=== VK_VERSION_1_0 === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT getIdentifierEXT() const VULKAN_HPP_NOEXCEPT; + void submit( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + void waitIdle() const; + + void bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfo, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + //=== VK_VERSION_1_3 === + + void submit2( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + //=== VK_KHR_swapchain === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo ) const; + + //=== VK_EXT_debug_utils === + + void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & 
labelInfo ) const VULKAN_HPP_NOEXCEPT; + + void endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT; + + void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_NV_device_diagnostic_checkpoints === + + VULKAN_HPP_NODISCARD std::vector getCheckpointDataNV() const; + + VULKAN_HPP_NODISCARD std::vector getCheckpointData2NV() const; + + //=== VK_INTEL_performance_query === + + void setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration ) const; + + //=== VK_KHR_synchronization2 === + + void submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; + + //=== VK_NV_low_latency2 === + + void notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo ) const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::ShaderModule m_shaderModule = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Queue m_queue = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; - class SurfaceKHR + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class RenderPass { public: - using CType = VkSurfaceKHR; + using CType = VkRenderPass; + using CppType = VULKAN_HPP_NAMESPACE::RenderPass; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass; public: -# if defined( VK_USE_PLATFORM_ANDROID_KHR ) - SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, - VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_instance( *instance ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( instance.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - instance.getDispatcher()->vkCreateAndroidSurfaceKHR( static_cast( *instance ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_surface ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateAndroidSurfaceKHR" ); - } + *this = device.createRenderPass( createInfo, allocator ); } -# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +# endif -# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) - SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, - VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + 
VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_instance( *instance ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( instance.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - instance.getDispatcher()->vkCreateDirectFBSurfaceEXT( static_cast( *instance ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_surface ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateDirectFBSurfaceEXT" ); - } + *this = device.createRenderPass2( createInfo, allocator ); } -# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ +# endif - SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, - VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, + RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkRenderPass renderPass, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_instance( *instance ) + : m_device( device ) + , m_renderPass( renderPass ) , m_allocator( static_cast( allocator ) ) - , m_dispatcher( instance.getDispatcher() ) + , m_dispatcher( device.getDispatcher() ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - instance.getDispatcher()->vkCreateDisplayPlaneSurfaceKHR( static_cast( *instance ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_surface ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateDisplayPlaneSurfaceKHR" ); - } } - SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, - VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_instance( *instance ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( instance.getDispatcher() ) - { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - instance.getDispatcher()->vkCreateHeadlessSurfaceEXT( static_cast( *instance ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_surface ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateHeadlessSurfaceEXT" ); - } - } + RenderPass( std::nullptr_t ) {} -# if defined( VK_USE_PLATFORM_IOS_MVK ) - SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, - VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_instance( *instance ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( instance.getDispatcher() ) + ~RenderPass() { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - instance.getDispatcher()->vkCreateIOSSurfaceMVK( static_cast( *instance ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_surface ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateIOSSurfaceMVK" ); - } + clear(); } -# endif /*VK_USE_PLATFORM_IOS_MVK*/ -# if defined( VK_USE_PLATFORM_FUCHSIA ) - SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, - VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_instance( *instance ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( 
instance.getDispatcher() ) + RenderPass() = delete; + RenderPass( RenderPass const & ) = delete; + + RenderPass( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_renderPass( VULKAN_HPP_NAMESPACE::exchange( rhs.m_renderPass, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - instance.getDispatcher()->vkCreateImagePipeSurfaceFUCHSIA( static_cast( *instance ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_surface ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateImagePipeSurfaceFUCHSIA" ); - } } -# endif /*VK_USE_PLATFORM_FUCHSIA*/ -# if defined( VK_USE_PLATFORM_MACOS_MVK ) - SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, - VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_instance( *instance ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( instance.getDispatcher() ) + RenderPass & operator=( RenderPass const & ) = delete; + + RenderPass & operator=( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - instance.getDispatcher()->vkCreateMacOSSurfaceMVK( static_cast( *instance ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_surface ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + if ( this != &rhs ) { - throwResultException( result, "vkCreateMacOSSurfaceMVK" ); + std::swap( m_device, rhs.m_device ); + std::swap( m_renderPass, rhs.m_renderPass ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } + return *this; } -# endif /*VK_USE_PLATFORM_MACOS_MVK*/ -# if defined( VK_USE_PLATFORM_METAL_EXT ) - SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, - VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_instance( *instance ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( instance.getDispatcher() ) + VULKAN_HPP_NAMESPACE::RenderPass const & operator*() const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - instance.getDispatcher()->vkCreateMetalSurfaceEXT( static_cast( *instance ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_surface ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateMetalSurfaceEXT" ); - } + return m_renderPass; } -# endif /*VK_USE_PLATFORM_METAL_EXT*/ -# if defined( VK_USE_PLATFORM_SCREEN_QNX ) - SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, - VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_instance( *instance ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( instance.getDispatcher() ) + operator VULKAN_HPP_NAMESPACE::RenderPass() const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - instance.getDispatcher()->vkCreateScreenSurfaceQNX( static_cast( *instance ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - 
reinterpret_cast( &m_surface ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateScreenSurfaceQNX" ); - } + return m_renderPass; } -# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ -# if defined( VK_USE_PLATFORM_GGP ) - SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, - VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_instance( *instance ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( instance.getDispatcher() ) + void clear() VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - instance.getDispatcher()->vkCreateStreamDescriptorSurfaceGGP( static_cast( *instance ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_surface ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + if ( m_renderPass ) { - throwResultException( result, "vkCreateStreamDescriptorSurfaceGGP" ); + getDispatcher()->vkDestroyRenderPass( + static_cast( m_device ), static_cast( m_renderPass ), reinterpret_cast( m_allocator ) ); } + m_device = nullptr; + m_renderPass = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; } -# endif /*VK_USE_PLATFORM_GGP*/ -# if defined( VK_USE_PLATFORM_VI_NN ) - SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, - VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_instance( *instance ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( instance.getDispatcher() ) + VULKAN_HPP_NAMESPACE::RenderPass release() { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - instance.getDispatcher()->vkCreateViSurfaceNN( static_cast( *instance ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_surface ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateViSurfaceNN" ); - } + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_renderPass, nullptr ); } -# endif /*VK_USE_PLATFORM_VI_NN*/ -# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) - SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, - VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_instance( *instance ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( instance.getDispatcher() ) + VULKAN_HPP_NAMESPACE::Device getDevice() const { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - instance.getDispatcher()->vkCreateWaylandSurfaceKHR( static_cast( *instance ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_surface ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateWaylandSurfaceKHR" ); - } + return m_device; } -# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, - VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_instance( *instance ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( instance.getDispatcher() ) + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - instance.getDispatcher()->vkCreateWin32SurfaceKHR( static_cast( *instance ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_surface ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateWin32SurfaceKHR" ); - } + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; } -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ -# if defined( VK_USE_PLATFORM_XCB_KHR ) - SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, - VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_instance( *instance ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( instance.getDispatcher() ) + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass & rhs ) VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - instance.getDispatcher()->vkCreateXcbSurfaceKHR( static_cast( *instance ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_surface ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateXcbSurfaceKHR" ); - } + std::swap( m_device, rhs.m_device ); + std::swap( m_renderPass, rhs.m_renderPass ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } -# endif /*VK_USE_PLATFORM_XCB_KHR*/ -# if defined( VK_USE_PLATFORM_XLIB_KHR ) - SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, - VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_instance( *instance ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( instance.getDispatcher() ) + //=== VK_VERSION_1_0 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D getRenderAreaGranularity() const VULKAN_HPP_NOEXCEPT; + + //=== VK_HUAWEI_subpass_shading === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D getSubpassShadingMaxWorkgroupSizeHUAWEI() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::RenderPass m_renderPass = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class Sampler + { + public: + using CType = VkSampler; + using CppType = VULKAN_HPP_NAMESPACE::Sampler; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSampler; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - instance.getDispatcher()->vkCreateXlibSurfaceKHR( static_cast( *instance ), - 
reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_surface ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateXlibSurfaceKHR" ); - } + *this = device.createSampler( createInfo, allocator ); } -# endif /*VK_USE_PLATFORM_XLIB_KHR*/ +# endif - SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, - VkSurfaceKHR surface, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_instance( *instance ) - , m_surface( surface ) + Sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkSampler sampler, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_sampler( sampler ) , m_allocator( static_cast( allocator ) ) - , m_dispatcher( instance.getDispatcher() ) + , m_dispatcher( device.getDispatcher() ) { } - SurfaceKHR( std::nullptr_t ) {} + Sampler( std::nullptr_t ) {} - ~SurfaceKHR() + ~Sampler() { clear(); } - SurfaceKHR() = delete; - SurfaceKHR( SurfaceKHR const & ) = delete; - SurfaceKHR( SurfaceKHR && rhs ) VULKAN_HPP_NOEXCEPT - : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) ) - , m_surface( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_surface, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + Sampler() = delete; + Sampler( Sampler const & ) = delete; + + Sampler( Sampler && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_sampler( VULKAN_HPP_NAMESPACE::exchange( rhs.m_sampler, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } - SurfaceKHR & operator=( SurfaceKHR const & ) = delete; - SurfaceKHR & operator =( SurfaceKHR && rhs ) VULKAN_HPP_NOEXCEPT + + Sampler & operator=( Sampler const & ) = delete; + + Sampler & operator=( Sampler && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_instance = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ); - m_surface = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_surface, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_sampler, rhs.m_sampler ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } - VULKAN_HPP_NAMESPACE::SurfaceKHR const & operator*() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::Sampler const & operator*() const VULKAN_HPP_NOEXCEPT { - return m_surface; + return m_sampler; + } + + operator VULKAN_HPP_NAMESPACE::Sampler() const VULKAN_HPP_NOEXCEPT + { + return m_sampler; } void clear() VULKAN_HPP_NOEXCEPT { - if ( m_surface ) + if ( m_sampler ) { - getDispatcher()->vkDestroySurfaceKHR( - static_cast( m_instance ), static_cast( m_surface ), reinterpret_cast( m_allocator ) ); + getDispatcher()->vkDestroySampler( + static_cast( m_device ), static_cast( m_sampler ), reinterpret_cast( m_allocator ) ); } - m_instance = nullptr; - m_surface = nullptr; + m_device = nullptr; + m_sampler = nullptr; m_allocator = nullptr; 
m_dispatcher = nullptr; } - VULKAN_HPP_NAMESPACE::Instance getInstance() const + VULKAN_HPP_NAMESPACE::Sampler release() { - return m_instance; + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_sampler, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; } - void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR & rhs ) VULKAN_HPP_NOEXCEPT + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Sampler & rhs ) VULKAN_HPP_NOEXCEPT { - std::swap( m_instance, rhs.m_instance ); - std::swap( m_surface, rhs.m_surface ); + std::swap( m_device, rhs.m_device ); + std::swap( m_sampler, rhs.m_sampler ); std::swap( m_allocator, rhs.m_allocator ); std::swap( m_dispatcher, rhs.m_dispatcher ); } private: - VULKAN_HPP_NAMESPACE::Instance m_instance = {}; - VULKAN_HPP_NAMESPACE::SurfaceKHR m_surface = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Sampler m_sampler = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; - class SwapchainKHR + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class SamplerYcbcrConversion { public: - using CType = VkSwapchainKHR; + using CType = VkSamplerYcbcrConversion; + using CppType = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion; public: - SwapchainKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + SamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkCreateSwapchainKHR( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_swapchain ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateSwapchainKHR" ); - } + *this = device.createSamplerYcbcrConversion( createInfo, 
allocator ); } +# endif - SwapchainKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VkSwapchainKHR swapchain, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_swapchain( swapchain ) + SamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkSamplerYcbcrConversion ycbcrConversion, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_ycbcrConversion( ycbcrConversion ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) { } - SwapchainKHR( std::nullptr_t ) {} + SamplerYcbcrConversion( std::nullptr_t ) {} - ~SwapchainKHR() + ~SamplerYcbcrConversion() { clear(); } - SwapchainKHR() = delete; - SwapchainKHR( SwapchainKHR const & ) = delete; - SwapchainKHR( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_swapchain( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_swapchain, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + SamplerYcbcrConversion() = delete; + SamplerYcbcrConversion( SamplerYcbcrConversion const & ) = delete; + + SamplerYcbcrConversion( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_ycbcrConversion( VULKAN_HPP_NAMESPACE::exchange( rhs.m_ycbcrConversion, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } - SwapchainKHR & operator=( SwapchainKHR const & ) = delete; - SwapchainKHR & operator =( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT + + SamplerYcbcrConversion & operator=( SamplerYcbcrConversion const & ) = delete; + + SamplerYcbcrConversion & operator=( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_swapchain = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_swapchain, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_ycbcrConversion, rhs.m_ycbcrConversion ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } - VULKAN_HPP_NAMESPACE::SwapchainKHR const & operator*() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const & operator*() const VULKAN_HPP_NOEXCEPT { - return m_swapchain; + return m_ycbcrConversion; + } + + operator VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT + { + return m_ycbcrConversion; } void clear() VULKAN_HPP_NOEXCEPT { - if ( m_swapchain ) + if ( m_ycbcrConversion ) { - getDispatcher()->vkDestroySwapchainKHR( - static_cast( m_device ), static_cast( m_swapchain ), reinterpret_cast( m_allocator ) ); + getDispatcher()->vkDestroySamplerYcbcrConversion( static_cast( m_device ), + static_cast( m_ycbcrConversion ), + reinterpret_cast( m_allocator ) ); } + m_device = nullptr; + m_ycbcrConversion = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + 
VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion release() + { m_device = nullptr; - m_swapchain = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_ycbcrConversion, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -9778,189 +11736,124 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; } - void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR & rhs ) VULKAN_HPP_NOEXCEPT + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion & rhs ) VULKAN_HPP_NOEXCEPT { std::swap( m_device, rhs.m_device ); - std::swap( m_swapchain, rhs.m_swapchain ); + std::swap( m_ycbcrConversion, rhs.m_ycbcrConversion ); std::swap( m_allocator, rhs.m_allocator ); std::swap( m_dispatcher, rhs.m_dispatcher ); } - //=== VK_KHR_swapchain === - - VULKAN_HPP_NODISCARD std::vector getImages() const; - - VULKAN_HPP_NODISCARD std::pair - acquireNextImage( uint64_t timeout, - VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, - VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; - - //=== VK_EXT_display_control === - - VULKAN_HPP_NODISCARD uint64_t getCounterEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter ) const; - - //=== VK_GOOGLE_display_timing === - - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE getRefreshCycleDurationGOOGLE() const; - - VULKAN_HPP_NODISCARD std::vector getPastPresentationTimingGOOGLE() const; - - //=== VK_KHR_shared_presentable_image === - - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getStatus() const; - - //=== VK_AMD_display_native_hdr === - - void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT; - - //=== VK_KHR_present_wait === - - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForPresent( uint64_t presentId, uint64_t timeout ) const; - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_EXT_full_screen_exclusive === - - void acquireFullScreenExclusiveModeEXT() const; - - void releaseFullScreenExclusiveModeEXT() const; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::SwapchainKHR m_swapchain = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion m_ycbcrConversion = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; - class SwapchainKHRs : public std::vector + template <> + struct isVulkanRAIIHandleType { - public: - SwapchainKHRs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - { - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher(); - std::vector swapchains( createInfos.size() ); - VULKAN_HPP_NAMESPACE::Result result = static_cast( 
dispatcher->vkCreateSharedSwapchainsKHR( - static_cast( *device ), - createInfos.size(), - reinterpret_cast( createInfos.data() ), - reinterpret_cast( static_cast( allocator ) ), - swapchains.data() ) ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - this->reserve( createInfos.size() ); - for ( auto const & swapchainKHR : swapchains ) - { - this->emplace_back( device, swapchainKHR, allocator ); - } - } - else - { - throwResultException( result, "vkCreateSharedSwapchainsKHR" ); - } - } - - SwapchainKHRs( std::nullptr_t ) {} - - SwapchainKHRs() = delete; - SwapchainKHRs( SwapchainKHRs const & ) = delete; - SwapchainKHRs( SwapchainKHRs && rhs ) = default; - SwapchainKHRs & operator=( SwapchainKHRs const & ) = delete; - SwapchainKHRs & operator=( SwapchainKHRs && rhs ) = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; - class ValidationCacheEXT + class Semaphore { public: - using CType = VkValidationCacheEXT; + using CType = VkSemaphore; + using CppType = VULKAN_HPP_NAMESPACE::Semaphore; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore; public: - ValidationCacheEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + Semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkCreateValidationCacheEXT( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_validationCache ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateValidationCacheEXT" ); - } + *this = device.createSemaphore( createInfo, allocator ); } +# endif - ValidationCacheEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VkValidationCacheEXT validationCache, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_validationCache( validationCache ) + Semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkSemaphore semaphore, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_semaphore( semaphore ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) { } - ValidationCacheEXT( std::nullptr_t ) {} + Semaphore( std::nullptr_t ) {} - ~ValidationCacheEXT() + ~Semaphore() { clear(); } - ValidationCacheEXT() = delete; - ValidationCacheEXT( ValidationCacheEXT const & ) = delete; - ValidationCacheEXT( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , 
m_validationCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_validationCache, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + Semaphore() = delete; + Semaphore( Semaphore const & ) = delete; + + Semaphore( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_semaphore( VULKAN_HPP_NAMESPACE::exchange( rhs.m_semaphore, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } - ValidationCacheEXT & operator=( ValidationCacheEXT const & ) = delete; - ValidationCacheEXT & operator =( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT + + Semaphore & operator=( Semaphore const & ) = delete; + + Semaphore & operator=( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_validationCache = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_validationCache, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_semaphore, rhs.m_semaphore ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } - VULKAN_HPP_NAMESPACE::ValidationCacheEXT const & operator*() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::Semaphore const & operator*() const VULKAN_HPP_NOEXCEPT { - return m_validationCache; + return m_semaphore; + } + + operator VULKAN_HPP_NAMESPACE::Semaphore() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore; } void clear() VULKAN_HPP_NOEXCEPT { - if ( m_validationCache ) + if ( m_semaphore ) { - getDispatcher()->vkDestroyValidationCacheEXT( static_cast( m_device ), - static_cast( m_validationCache ), - reinterpret_cast( m_allocator ) ); + getDispatcher()->vkDestroySemaphore( + static_cast( m_device ), static_cast( m_semaphore ), reinterpret_cast( m_allocator ) ); } - m_device = nullptr; - m_validationCache = nullptr; - m_allocator = nullptr; - m_dispatcher = nullptr; + m_device = nullptr; + m_semaphore = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::Semaphore release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_semaphore, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -9968,119 +11861,143 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; } - void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT & rhs ) VULKAN_HPP_NOEXCEPT + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Semaphore & rhs ) VULKAN_HPP_NOEXCEPT { std::swap( m_device, rhs.m_device ); - std::swap( m_validationCache, rhs.m_validationCache ); + std::swap( m_semaphore, rhs.m_semaphore ); std::swap( m_allocator, rhs.m_allocator ); 
std::swap( m_dispatcher, rhs.m_dispatcher ); } - //=== VK_EXT_validation_cache === + //=== VK_VERSION_1_2 === - void merge( VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches ) const; + VULKAN_HPP_NODISCARD uint64_t getCounterValue() const; - VULKAN_HPP_NODISCARD std::vector getData() const; + //=== VK_KHR_timeline_semaphore === + + VULKAN_HPP_NODISCARD uint64_t getCounterValueKHR() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::ValidationCacheEXT m_validationCache = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Semaphore m_semaphore = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - class VideoSessionKHR + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class ShaderEXT { public: - using CType = VkVideoSessionKHR; + using CType = VkShaderEXT; + using CppType = VULKAN_HPP_NAMESPACE::ShaderEXT; - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderEXT; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; public: - VideoSessionKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + ShaderEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkCreateVideoSessionKHR( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_videoSession ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateVideoSessionKHR" ); - } + *this = device.createShaderEXT( createInfo, allocator ); } +# endif - VideoSessionKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VkVideoSessionKHR videoSession, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_videoSession( videoSession ) + ShaderEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkShaderEXT shader, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr, + VULKAN_HPP_NAMESPACE::Result successCode = VULKAN_HPP_NAMESPACE::Result::eSuccess ) + : m_device( device ) + , m_shader( shader ) , m_allocator( static_cast( allocator ) ) + , m_constructorSuccessCode( successCode ) , m_dispatcher( device.getDispatcher() ) { } - VideoSessionKHR( std::nullptr_t ) {} + ShaderEXT( std::nullptr_t ) {} - ~VideoSessionKHR() + ~ShaderEXT() { clear(); } - VideoSessionKHR() 
= delete; - VideoSessionKHR( VideoSessionKHR const & ) = delete; - VideoSessionKHR( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_videoSession( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSession, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + ShaderEXT() = delete; + ShaderEXT( ShaderEXT const & ) = delete; + + ShaderEXT( ShaderEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_shader( VULKAN_HPP_NAMESPACE::exchange( rhs.m_shader, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_constructorSuccessCode( VULKAN_HPP_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } - VideoSessionKHR & operator=( VideoSessionKHR const & ) = delete; - VideoSessionKHR & operator =( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT + + ShaderEXT & operator=( ShaderEXT const & ) = delete; + + ShaderEXT & operator=( ShaderEXT && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_videoSession = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSession, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_shader, rhs.m_shader ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } - VULKAN_HPP_NAMESPACE::VideoSessionKHR const & operator*() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::ShaderEXT const & operator*() const VULKAN_HPP_NOEXCEPT { - return m_videoSession; + return m_shader; + } + + operator VULKAN_HPP_NAMESPACE::ShaderEXT() const VULKAN_HPP_NOEXCEPT + { + return m_shader; } void clear() VULKAN_HPP_NOEXCEPT { - if ( m_videoSession ) + if ( m_shader ) { - getDispatcher()->vkDestroyVideoSessionKHR( static_cast( m_device ), - static_cast( m_videoSession ), - reinterpret_cast( m_allocator ) ); + getDispatcher()->vkDestroyShaderEXT( + static_cast( m_device ), static_cast( m_shader ), reinterpret_cast( m_allocator ) ); } - m_device = nullptr; - m_videoSession = nullptr; - m_allocator = nullptr; - m_dispatcher = nullptr; + m_device = nullptr; + m_shader = nullptr; + m_allocator = nullptr; + m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::ShaderEXT release() + { + m_device = nullptr; + m_allocator = nullptr; + m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_shader, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Result getConstructorSuccessCode() const + { + return m_constructorSuccessCode; } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -10088,120 +12005,157 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; } - void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR & rhs ) VULKAN_HPP_NOEXCEPT + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT & rhs ) VULKAN_HPP_NOEXCEPT { std::swap( m_device, rhs.m_device ); - std::swap( m_videoSession, rhs.m_videoSession ); + std::swap( m_shader, rhs.m_shader ); std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode ); std::swap( m_dispatcher, rhs.m_dispatcher ); } - //=== VK_KHR_video_queue === - - VULKAN_HPP_NODISCARD std::vector getMemoryRequirements() const; + //=== VK_EXT_shader_object === - void bindMemory( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindSessionMemoryInfos ) const; + VULKAN_HPP_NODISCARD std::vector getBinaryData() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::VideoSessionKHR m_videoSession = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ShaderEXT m_shader = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - class VideoSessionParametersKHR + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class ShaderEXTs : public std::vector { public: - using CType = VkVideoSessionParametersKHR; +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + ShaderEXTs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createShadersEXT( createInfos, allocator ); + } +# endif - static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR; + ShaderEXTs( std::nullptr_t ) {} + + ShaderEXTs() = delete; + ShaderEXTs( ShaderEXTs const & ) = delete; + ShaderEXTs( ShaderEXTs && rhs ) = default; + ShaderEXTs & operator=( ShaderEXTs const & ) = delete; + ShaderEXTs & operator=( ShaderEXTs && rhs ) = default; + + private: + ShaderEXTs( std::vector && rhs ) + { + std::swap( *this, rhs ); + } + }; + + class ShaderModule + { + public: + using CType = VkShaderModule; + using CppType = VULKAN_HPP_NAMESPACE::ShaderModule; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule; static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule; public: - VideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( 
*device ) - , m_allocator( static_cast( allocator ) ) - , m_dispatcher( device.getDispatcher() ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + ShaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { - VULKAN_HPP_NAMESPACE::Result result = static_cast( - device.getDispatcher()->vkCreateVideoSessionParametersKHR( static_cast( *device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( m_allocator ), - reinterpret_cast( &m_videoSessionParameters ) ) ); - if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - throwResultException( result, "vkCreateVideoSessionParametersKHR" ); - } + *this = device.createShaderModule( createInfo, allocator ); } +# endif - VideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, - VkVideoSessionParametersKHR videoSessionParameters, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) - : m_device( *device ) - , m_videoSessionParameters( videoSessionParameters ) + ShaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkShaderModule shaderModule, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_shaderModule( shaderModule ) , m_allocator( static_cast( allocator ) ) , m_dispatcher( device.getDispatcher() ) { } - VideoSessionParametersKHR( std::nullptr_t ) {} + ShaderModule( std::nullptr_t ) {} - ~VideoSessionParametersKHR() + ~ShaderModule() { clear(); } - VideoSessionParametersKHR() = delete; - VideoSessionParametersKHR( VideoSessionParametersKHR const & ) = delete; - VideoSessionParametersKHR( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_videoSessionParameters( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSessionParameters, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + ShaderModule() = delete; + ShaderModule( ShaderModule const & ) = delete; + + ShaderModule( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_shaderModule( VULKAN_HPP_NAMESPACE::exchange( rhs.m_shaderModule, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } - VideoSessionParametersKHR & operator=( VideoSessionParametersKHR const & ) = delete; - VideoSessionParametersKHR & operator =( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT + + ShaderModule & operator=( ShaderModule const & ) = delete; + + ShaderModule & operator=( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_videoSessionParameters = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSessionParameters, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_shaderModule, rhs.m_shaderModule ); + std::swap( m_allocator, rhs.m_allocator 
); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } - VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const & operator*() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::ShaderModule const & operator*() const VULKAN_HPP_NOEXCEPT { - return m_videoSessionParameters; + return m_shaderModule; + } + + operator VULKAN_HPP_NAMESPACE::ShaderModule() const VULKAN_HPP_NOEXCEPT + { + return m_shaderModule; } void clear() VULKAN_HPP_NOEXCEPT { - if ( m_videoSessionParameters ) + if ( m_shaderModule ) { - getDispatcher()->vkDestroyVideoSessionParametersKHR( static_cast( m_device ), - static_cast( m_videoSessionParameters ), - reinterpret_cast( m_allocator ) ); + getDispatcher()->vkDestroyShaderModule( + static_cast( m_device ), static_cast( m_shaderModule ), reinterpret_cast( m_allocator ) ); } - m_device = nullptr; - m_videoSessionParameters = nullptr; - m_allocator = nullptr; - m_dispatcher = nullptr; + m_device = nullptr; + m_shaderModule = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::ShaderModule release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_shaderModule, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -10209,1283 +12163,6004 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; } - void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR & rhs ) VULKAN_HPP_NOEXCEPT + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderModule & rhs ) VULKAN_HPP_NOEXCEPT { std::swap( m_device, rhs.m_device ); - std::swap( m_videoSessionParameters, rhs.m_videoSessionParameters ); + std::swap( m_shaderModule, rhs.m_shaderModule ); std::swap( m_allocator, rhs.m_allocator ); std::swap( m_dispatcher, rhs.m_dispatcher ); } - //=== VK_KHR_video_queue === + //=== VK_EXT_shader_module_identifier === - void update( const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo ) const; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT getIdentifierEXT() const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR m_videoSessionParameters = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ShaderModule m_shaderModule = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - //=========================== - //=== COMMAND Definitions === - //=========================== - - //=== VK_VERSION_1_0 === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Instance - Context::createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + template <> + struct isVulkanRAIIHandleType { - return VULKAN_HPP_RAII_NAMESPACE::Instance( *this, createInfo, allocator ); - } - - VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE std::vector Instance::enumeratePhysicalDevices() const - { - return VULKAN_HPP_RAII_NAMESPACE::PhysicalDevices( *this ); - } + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures PhysicalDevice::getFeatures() const VULKAN_HPP_NOEXCEPT + class SurfaceKHR { - VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features; - getDispatcher()->vkGetPhysicalDeviceFeatures( static_cast( m_physicalDevice ), - reinterpret_cast( &features ) ); + public: + using CType = VkSurfaceKHR; + using CppType = VULKAN_HPP_NAMESPACE::SurfaceKHR; - return features; - } + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR; - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties - PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_NAMESPACE::FormatProperties formatProperties; - getDispatcher()->vkGetPhysicalDeviceFormatProperties( - static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( &formatProperties ) ); + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createAndroidSurfaceKHR( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +# endif - return formatProperties; - } +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createDirectFBSurfaceEXT( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ +# endif - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties - PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, - VULKAN_HPP_NAMESPACE::ImageType type, - VULKAN_HPP_NAMESPACE::ImageTiling tiling, - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, - VULKAN_HPP_NAMESPACE::ImageCreateFlags flags ) const - { - VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties; - VkResult result = getDispatcher()->vkGetPhysicalDeviceImageFormatProperties( static_cast( m_physicalDevice ), - static_cast( format ), - static_cast( type ), - static_cast( tiling ), - static_cast( usage ), - static_cast( flags ), - reinterpret_cast( &imageFormatProperties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createDisplayPlaneSurfaceKHR( createInfo, allocator ); + } +# endif - return imageFormatProperties; - } +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + 
SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createHeadlessSurfaceEXT( createInfo, allocator ); + } +# endif - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties PhysicalDevice::getProperties() const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties; - getDispatcher()->vkGetPhysicalDeviceProperties( static_cast( m_physicalDevice ), - reinterpret_cast( &properties ) ); +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_IOS_MVK ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createIOSSurfaceMVK( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_IOS_MVK*/ +# endif - return properties; - } +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_FUCHSIA ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createImagePipeSurfaceFUCHSIA( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ +# endif - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties() const - { - std::vector queueFamilyProperties; - uint32_t queueFamilyPropertyCount; - getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties( static_cast( m_physicalDevice ), &queueFamilyPropertyCount, nullptr ); - queueFamilyProperties.resize( queueFamilyPropertyCount ); - getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties( static_cast( m_physicalDevice ), - &queueFamilyPropertyCount, - reinterpret_cast( queueFamilyProperties.data() ) ); +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createMacOSSurfaceMVK( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ +# endif - VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); - if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_METAL_EXT ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { - queueFamilyProperties.resize( queueFamilyPropertyCount ); + *this = instance.createMetalSurfaceEXT( createInfo, allocator ); } - return queueFamilyProperties; - } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ +# endif - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties() const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties; - getDispatcher()->vkGetPhysicalDeviceMemoryProperties( static_cast( m_physicalDevice ), - reinterpret_cast( &memoryProperties ) ); +# if !defined( 
VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createScreenSurfaceQNX( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ +# endif - return memoryProperties; - } +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_GGP ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createStreamDescriptorSurfaceGGP( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_GGP*/ +# endif - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT - { - PFN_vkVoidFunction result = getDispatcher()->vkGetInstanceProcAddr( static_cast( m_instance ), name.c_str() ); +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_VI_NN ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createViSurfaceNN( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_VI_NN*/ +# endif - return result; - } +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createWaylandSurfaceKHR( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +# endif - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT - { - PFN_vkVoidFunction result = getDispatcher()->vkGetDeviceProcAddr( static_cast( m_device ), name.c_str() ); +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createWin32SurfaceKHR( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# endif - return result; - } +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_XCB_KHR ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = instance.createXcbSurfaceKHR( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_XCB_KHR*/ +# endif - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Device - PhysicalDevice::createDevice( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::Device( *this, createInfo, allocator ); - } - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Context::enumerateInstanceExtensionProperties( Optional layerName ) 
const - { - std::vector properties; - uint32_t propertyCount; - VkResult result; - do +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { - result = getDispatcher()->vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) - { - properties.resize( propertyCount ); - result = getDispatcher()->vkEnumerateInstanceExtensionProperties( - layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceExtensionProperties" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) + *this = instance.createXlibSurfaceKHR( createInfo, allocator ); + } +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ +# endif + + SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, + VkSurfaceKHR surface, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_instance( instance ) + , m_surface( surface ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( instance.getDispatcher() ) { - properties.resize( propertyCount ); } - return properties; - } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName ) const - { - std::vector properties; - uint32_t propertyCount; - VkResult result; - do + SurfaceKHR( std::nullptr_t ) {} + + ~SurfaceKHR() { - result = getDispatcher()->vkEnumerateDeviceExtensionProperties( - static_cast( m_physicalDevice ), layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) - { - properties.resize( propertyCount ); - result = getDispatcher()->vkEnumerateDeviceExtensionProperties( static_cast( m_physicalDevice ), - layerName ? 
layerName->c_str() : nullptr, - &propertyCount, - reinterpret_cast( properties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) + clear(); + } + + SurfaceKHR() = delete; + SurfaceKHR( SurfaceKHR const & ) = delete; + + SurfaceKHR( SurfaceKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_instance( VULKAN_HPP_NAMESPACE::exchange( rhs.m_instance, {} ) ) + , m_surface( VULKAN_HPP_NAMESPACE::exchange( rhs.m_surface, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { - properties.resize( propertyCount ); } - return properties; - } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Context::enumerateInstanceLayerProperties() const - { - std::vector properties; - uint32_t propertyCount; - VkResult result; - do + SurfaceKHR & operator=( SurfaceKHR const & ) = delete; + + SurfaceKHR & operator=( SurfaceKHR && rhs ) VULKAN_HPP_NOEXCEPT { - result = getDispatcher()->vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) + if ( this != &rhs ) { - properties.resize( propertyCount ); - result = getDispatcher()->vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast( properties.data() ) ); + std::swap( m_instance, rhs.m_instance ); + std::swap( m_surface, rhs.m_surface ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceLayerProperties" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) + return *this; + } + + VULKAN_HPP_NAMESPACE::SurfaceKHR const & operator*() const VULKAN_HPP_NOEXCEPT { - properties.resize( propertyCount ); + return m_surface; } - return properties; - } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::enumerateDeviceLayerProperties() const - { - std::vector properties; - uint32_t propertyCount; - VkResult result; - do + operator VULKAN_HPP_NAMESPACE::SurfaceKHR() const VULKAN_HPP_NOEXCEPT + { + return m_surface; + } + + void clear() VULKAN_HPP_NOEXCEPT { - result = getDispatcher()->vkEnumerateDeviceLayerProperties( static_cast( m_physicalDevice ), &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) + if ( m_surface ) { - properties.resize( propertyCount ); - result = getDispatcher()->vkEnumerateDeviceLayerProperties( - static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ); + getDispatcher()->vkDestroySurfaceKHR( + static_cast( m_instance ), static_cast( m_surface ), reinterpret_cast( m_allocator ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) + m_instance = nullptr; + m_surface = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::SurfaceKHR release() { - properties.resize( propertyCount ); + m_instance = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_surface, nullptr ); 
} - return properties; - } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const - { - return VULKAN_HPP_RAII_NAMESPACE::Queue( *this, queueFamilyIndex, queueIndex ); - } + VULKAN_HPP_NAMESPACE::Instance getInstance() const + { + return m_instance; + } - VULKAN_HPP_INLINE void Queue::submit( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, - VULKAN_HPP_NAMESPACE::Fence fence ) const - { - VkResult result = getDispatcher()->vkQueueSubmit( - static_cast( m_queue ), submits.size(), reinterpret_cast( submits.data() ), static_cast( fence ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" ); - } + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } - VULKAN_HPP_INLINE void Queue::waitIdle() const - { - VkResult result = getDispatcher()->vkQueueWaitIdle( static_cast( m_queue ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); - } + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_instance, rhs.m_instance ); + std::swap( m_surface, rhs.m_surface ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } - VULKAN_HPP_INLINE void Device::waitIdle() const - { - VkResult result = getDispatcher()->vkDeviceWaitIdle( static_cast( m_device ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); - } + private: + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + VULKAN_HPP_NAMESPACE::SurfaceKHR m_surface = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; + }; - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DeviceMemory - Device::allocateMemory( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + template <> + struct isVulkanRAIIHandleType { - return VULKAN_HPP_RAII_NAMESPACE::DeviceMemory( *this, allocateInfo, allocator ); - } + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * DeviceMemory::mapMemory( VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::DeviceSize size, - VULKAN_HPP_NAMESPACE::MemoryMapFlags flags ) const + class SwapchainKHR { - void * pData; - VkResult result = getDispatcher()->vkMapMemory( static_cast( m_device ), - static_cast( m_memory ), - static_cast( offset ), - static_cast( size ), - static_cast( flags ), - &pData ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::mapMemory" ); + public: + using CType = VkSwapchainKHR; + using CppType = VULKAN_HPP_NAMESPACE::SwapchainKHR; - return pData; - } + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR; - VULKAN_HPP_INLINE void DeviceMemory::unmapMemory() const VULKAN_HPP_NOEXCEPT - { - getDispatcher()->vkUnmapMemory( static_cast( m_device ), static_cast( m_memory ) ); - } + public: +# if 
!defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + SwapchainKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createSwapchainKHR( createInfo, allocator ); + } +# endif - VULKAN_HPP_INLINE void - Device::flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges ) const - { - VkResult result = getDispatcher()->vkFlushMappedMemoryRanges( - static_cast( m_device ), memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" ); - } + SwapchainKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkSwapchainKHR swapchain, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_swapchain( swapchain ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } - VULKAN_HPP_INLINE void - Device::invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges ) const - { - VkResult result = getDispatcher()->vkInvalidateMappedMemoryRanges( - static_cast( m_device ), memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); - } + SwapchainKHR( std::nullptr_t ) {} - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize DeviceMemory::getCommitment() const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes; - getDispatcher()->vkGetDeviceMemoryCommitment( - static_cast( m_device ), static_cast( m_memory ), reinterpret_cast( &committedMemoryInBytes ) ); + ~SwapchainKHR() + { + clear(); + } - return committedMemoryInBytes; - } + SwapchainKHR() = delete; + SwapchainKHR( SwapchainKHR const & ) = delete; - VULKAN_HPP_INLINE void Buffer::bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const - { - VkResult result = getDispatcher()->vkBindBufferMemory( static_cast( m_device ), - static_cast( m_buffer ), - static_cast( memory ), - static_cast( memoryOffset ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Buffer::bindMemory" ); - } + SwapchainKHR( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_swapchain( VULKAN_HPP_NAMESPACE::exchange( rhs.m_swapchain, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } - VULKAN_HPP_INLINE void Image::bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const - { - VkResult result = getDispatcher()->vkBindImageMemory( - static_cast( m_device ), static_cast( m_image ), static_cast( memory ), static_cast( memoryOffset ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Image::bindMemory" ); - } + SwapchainKHR & operator=( SwapchainKHR const & ) = delete; - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Buffer::getMemoryRequirements() const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; - getDispatcher()->vkGetBufferMemoryRequirements( - static_cast( m_device ), static_cast( m_buffer ), reinterpret_cast( &memoryRequirements ) ); + 
SwapchainKHR & operator=( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_swapchain, rhs.m_swapchain ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } - return memoryRequirements; - } + VULKAN_HPP_NAMESPACE::SwapchainKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_swapchain; + } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Image::getMemoryRequirements() const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; - getDispatcher()->vkGetImageMemoryRequirements( - static_cast( m_device ), static_cast( m_image ), reinterpret_cast( &memoryRequirements ) ); + operator VULKAN_HPP_NAMESPACE::SwapchainKHR() const VULKAN_HPP_NOEXCEPT + { + return m_swapchain; + } - return memoryRequirements; - } + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_swapchain ) + { + getDispatcher()->vkDestroySwapchainKHR( + static_cast( m_device ), static_cast( m_swapchain ), reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_swapchain = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Image::getSparseMemoryRequirements() const - { - std::vector sparseMemoryRequirements; - uint32_t sparseMemoryRequirementCount; - getDispatcher()->vkGetImageSparseMemoryRequirements( - static_cast( m_device ), static_cast( m_image ), &sparseMemoryRequirementCount, nullptr ); - sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); - getDispatcher()->vkGetImageSparseMemoryRequirements( static_cast( m_device ), - static_cast( m_image ), - &sparseMemoryRequirementCount, - reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_NAMESPACE::SwapchainKHR release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_swapchain, nullptr ); + } - VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); - if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + VULKAN_HPP_NAMESPACE::Device getDevice() const { - sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + return m_device; } - return sparseMemoryRequirements; - } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, - VULKAN_HPP_NAMESPACE::ImageType type, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, - VULKAN_HPP_NAMESPACE::ImageTiling tiling ) const - { - std::vector properties; - uint32_t propertyCount; - getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties( static_cast( m_physicalDevice ), - static_cast( format ), - static_cast( type ), - static_cast( samples ), - static_cast( usage ), - static_cast( tiling ), - &propertyCount, - nullptr ); - properties.resize( propertyCount ); - getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties( static_cast( m_physicalDevice ), - static_cast( format ), - static_cast( type ), - static_cast( samples ), - static_cast( usage ), - static_cast( tiling ), - &propertyCount, - reinterpret_cast( properties.data() ) ); - - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { - properties.resize( 
propertyCount ); + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; } - return properties; - } - VULKAN_HPP_INLINE void Queue::bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfo, - VULKAN_HPP_NAMESPACE::Fence fence ) const - { - VkResult result = getDispatcher()->vkQueueBindSparse( - static_cast( m_queue ), bindInfo.size(), reinterpret_cast( bindInfo.data() ), static_cast( fence ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); - } + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_swapchain, rhs.m_swapchain ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Fence - Device::createFence( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::Fence( *this, createInfo, allocator ); - } + //=== VK_KHR_swapchain === - VULKAN_HPP_INLINE void Device::resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy const & fences ) const - { - VkResult result = getDispatcher()->vkResetFences( static_cast( m_device ), fences.size(), reinterpret_cast( fences.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); - } + VULKAN_HPP_NODISCARD std::vector getImages() const; - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Fence::getStatus() const - { - VkResult result = getDispatcher()->vkGetFenceStatus( static_cast( m_device ), static_cast( m_fence ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Fence::getStatus", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + VULKAN_HPP_NODISCARD std::pair + acquireNextImage( uint64_t timeout, + VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; - return static_cast( result ); - } + //=== VK_EXT_display_control === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitForFences( - VULKAN_HPP_NAMESPACE::ArrayProxy const & fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout ) const - { - VkResult result = getDispatcher()->vkWaitForFences( - static_cast( m_device ), fences.size(), reinterpret_cast( fences.data() ), static_cast( waitAll ), timeout ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); + VULKAN_HPP_NODISCARD uint64_t getCounterEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter ) const; - return static_cast( result ); - } + //=== VK_GOOGLE_display_timing === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Semaphore - Device::createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::Semaphore( *this, createInfo, allocator ); - } + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE getRefreshCycleDurationGOOGLE() const; - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Event - Device::createEvent( VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo, - 
VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::Event( *this, createInfo, allocator ); - } + VULKAN_HPP_NODISCARD std::vector getPastPresentationTimingGOOGLE() const; - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Event::getStatus() const - { - VkResult result = getDispatcher()->vkGetEventStatus( static_cast( m_device ), static_cast( m_event ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Event::getStatus", - { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } ); + //=== VK_KHR_shared_presentable_image === - return static_cast( result ); - } + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getStatus() const; - VULKAN_HPP_INLINE void Event::set() const - { - VkResult result = getDispatcher()->vkSetEvent( static_cast( m_device ), static_cast( m_event ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Event::set" ); - } + //=== VK_AMD_display_native_hdr === - VULKAN_HPP_INLINE void Event::reset() const - { - VkResult result = getDispatcher()->vkResetEvent( static_cast( m_device ), static_cast( m_event ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Event::reset" ); - } + void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::QueryPool - Device::createQueryPool( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::QueryPool( *this, createInfo, allocator ); - } + //=== VK_KHR_present_wait === - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair> QueryPool::getResults( - uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const - { - VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); - std::vector data( dataSize / sizeof( DataType ) ); - VkResult result = getDispatcher()->vkGetQueryPoolResults( static_cast( m_device ), - static_cast( m_queryPool ), - firstQuery, - queryCount, - data.size() * sizeof( DataType ), - reinterpret_cast( data.data() ), - static_cast( stride ), - static_cast( flags ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::QueryPool::getResults", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); - - return std::make_pair( static_cast( result ), data ); - } + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForPresent( uint64_t presentId, uint64_t timeout ) const; - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair QueryPool::getResult( - uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const - { - DataType data; - VkResult result = getDispatcher()->vkGetQueryPoolResults( static_cast( m_device ), - static_cast( m_queryPool ), - firstQuery, - queryCount, - sizeof( DataType ), - reinterpret_cast( &data ), - static_cast( stride ), - static_cast( flags ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::QueryPool::getResult", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === - return std::make_pair( static_cast( result ), data ); - } + void acquireFullScreenExclusiveModeEXT() const; - 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Buffer - Device::createBuffer( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::Buffer( *this, createInfo, allocator ); - } + void releaseFullScreenExclusiveModeEXT() const; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::BufferView - Device::createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::BufferView( *this, createInfo, allocator ); - } + //=== VK_NV_low_latency2 === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Image - Device::createImage( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::Image( *this, createInfo, allocator ); - } + void setLatencySleepModeNV( const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout - Image::getSubresourceLayout( const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_NAMESPACE::SubresourceLayout layout; - getDispatcher()->vkGetImageSubresourceLayout( static_cast( m_device ), - static_cast( m_image ), - reinterpret_cast( &subresource ), - reinterpret_cast( &layout ) ); + void latencySleepNV( const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo ) const VULKAN_HPP_NOEXCEPT; - return layout; - } + void setLatencyMarkerNV( const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ImageView - Device::createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::ImageView( *this, createInfo, allocator ); - } + VULKAN_HPP_NODISCARD std::vector getLatencyTimingsNV() const; - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ShaderModule - Device::createShaderModule( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::ShaderModule( *this, createInfo, allocator ); - } + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR m_swapchain = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PipelineCache - Device::createPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + template <> + struct isVulkanRAIIHandleType { - return VULKAN_HPP_RAII_NAMESPACE::PipelineCache( *this, createInfo, allocator ); - } + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PipelineCache::getData() const + class SwapchainKHRs : public std::vector { - std::vector data; - size_t dataSize; - VkResult result; - do - { - result = - getDispatcher()->vkGetPipelineCacheData( static_cast( m_device ), static_cast( m_pipelineCache ), &dataSize, nullptr ); - if ( ( result == 
VK_SUCCESS ) && dataSize ) - { - data.resize( dataSize ); - result = getDispatcher()->vkGetPipelineCacheData( - static_cast( m_device ), static_cast( m_pipelineCache ), &dataSize, reinterpret_cast( data.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PipelineCache::getData" ); - VULKAN_HPP_ASSERT( dataSize <= data.size() ); - if ( dataSize < data.size() ) + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + SwapchainKHRs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) { - data.resize( dataSize ); + *this = device.createSharedSwapchainsKHR( createInfos, allocator ); } - return data; - } - - VULKAN_HPP_INLINE void PipelineCache::merge( VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches ) const - { - VkResult result = getDispatcher()->vkMergePipelineCaches( static_cast( m_device ), - static_cast( m_pipelineCache ), - srcCaches.size(), - reinterpret_cast( srcCaches.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PipelineCache::merge" ); - } +# endif - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::createGraphicsPipelines( - VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, pipelineCache, createInfos, allocator ); - } + SwapchainKHRs( std::nullptr_t ) {} - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createGraphicsPipeline( - VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, - VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, pipelineCache, createInfo, allocator ); - } + SwapchainKHRs() = delete; + SwapchainKHRs( SwapchainKHRs const & ) = delete; + SwapchainKHRs( SwapchainKHRs && rhs ) = default; + SwapchainKHRs & operator=( SwapchainKHRs const & ) = delete; + SwapchainKHRs & operator=( SwapchainKHRs && rhs ) = default; - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::createComputePipelines( - VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, pipelineCache, createInfos, allocator ); - } + private: + SwapchainKHRs( std::vector && rhs ) + { + std::swap( *this, rhs ); + } + }; - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline - Device::createComputePipeline( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, - VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + class ValidationCacheEXT { - return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, pipelineCache, createInfo, allocator ); - } + public: + using CType = VkValidationCacheEXT; + using CppType = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PipelineLayout - Device::createPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::PipelineLayout( *this, createInfo, allocator ); - } + static 
VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT; - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Sampler - Device::createSampler( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::Sampler( *this, createInfo, allocator ); - } + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + ValidationCacheEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createValidationCacheEXT( createInfo, allocator ); + } +# endif - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout - Device::createDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout( *this, createInfo, allocator ); - } + ValidationCacheEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkValidationCacheEXT validationCache, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_validationCache( validationCache ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorPool - Device::createDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::DescriptorPool( *this, createInfo, allocator ); - } + ValidationCacheEXT( std::nullptr_t ) {} - VULKAN_HPP_INLINE void DescriptorPool::reset( VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT - { - getDispatcher()->vkResetDescriptorPool( - static_cast( m_device ), static_cast( m_descriptorPool ), static_cast( flags ) ); - } + ~ValidationCacheEXT() + { + clear(); + } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Device::allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const - { - return VULKAN_HPP_RAII_NAMESPACE::DescriptorSets( *this, allocateInfo ); - } + ValidationCacheEXT() = delete; + ValidationCacheEXT( ValidationCacheEXT const & ) = delete; - VULKAN_HPP_INLINE void Device::updateDescriptorSets( - VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites, - VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorCopies ) const VULKAN_HPP_NOEXCEPT - { - getDispatcher()->vkUpdateDescriptorSets( static_cast( m_device ), - descriptorWrites.size(), - reinterpret_cast( descriptorWrites.data() ), - descriptorCopies.size(), - reinterpret_cast( descriptorCopies.data() ) ); - } + ValidationCacheEXT( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_validationCache( VULKAN_HPP_NAMESPACE::exchange( rhs.m_validationCache, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
VULKAN_HPP_RAII_NAMESPACE::Framebuffer - Device::createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::Framebuffer( *this, createInfo, allocator ); - } + ValidationCacheEXT & operator=( ValidationCacheEXT const & ) = delete; - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::RenderPass - Device::createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, createInfo, allocator ); - } + ValidationCacheEXT & operator=( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_validationCache, rhs.m_validationCache ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D RenderPass::getRenderAreaGranularity() const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_NAMESPACE::Extent2D granularity; - getDispatcher()->vkGetRenderAreaGranularity( - static_cast( m_device ), static_cast( m_renderPass ), reinterpret_cast( &granularity ) ); + VULKAN_HPP_NAMESPACE::ValidationCacheEXT const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_validationCache; + } - return granularity; - } + operator VULKAN_HPP_NAMESPACE::ValidationCacheEXT() const VULKAN_HPP_NOEXCEPT + { + return m_validationCache; + } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CommandPool - Device::createCommandPool( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::CommandPool( *this, createInfo, allocator ); - } + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_validationCache ) + { + getDispatcher()->vkDestroyValidationCacheEXT( static_cast( m_device ), + static_cast( m_validationCache ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_validationCache = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::ValidationCacheEXT release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_validationCache, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_validationCache, rhs.m_validationCache ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_EXT_validation_cache === + + void merge( VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches ) const; + + VULKAN_HPP_NODISCARD std::vector getData() const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ValidationCacheEXT m_validationCache = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + 
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class VideoSessionKHR + { + public: + using CType = VkVideoSessionKHR; + using CppType = VULKAN_HPP_NAMESPACE::VideoSessionKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + VideoSessionKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createVideoSessionKHR( createInfo, allocator ); + } +# endif + + VideoSessionKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkVideoSessionKHR videoSession, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_videoSession( videoSession ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + VideoSessionKHR( std::nullptr_t ) {} + + ~VideoSessionKHR() + { + clear(); + } + + VideoSessionKHR() = delete; + VideoSessionKHR( VideoSessionKHR const & ) = delete; + + VideoSessionKHR( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_videoSession( VULKAN_HPP_NAMESPACE::exchange( rhs.m_videoSession, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + VideoSessionKHR & operator=( VideoSessionKHR const & ) = delete; + + VideoSessionKHR & operator=( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_videoSession, rhs.m_videoSession ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::VideoSessionKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_videoSession; + } + + operator VULKAN_HPP_NAMESPACE::VideoSessionKHR() const VULKAN_HPP_NOEXCEPT + { + return m_videoSession; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_videoSession ) + { + getDispatcher()->vkDestroyVideoSessionKHR( static_cast( m_device ), + static_cast( m_videoSession ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_videoSession = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::VideoSessionKHR release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_videoSession, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_videoSession, rhs.m_videoSession ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_KHR_video_queue === + + VULKAN_HPP_NODISCARD std::vector 
getMemoryRequirements() const; + + void bindMemory( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindSessionMemoryInfos ) const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::VideoSessionKHR m_videoSession = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + class VideoSessionParametersKHR + { + public: + using CType = VkVideoSessionParametersKHR; + using CppType = VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + VideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createVideoSessionParametersKHR( createInfo, allocator ); + } +# endif + + VideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkVideoSessionParametersKHR videoSessionParameters, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_videoSessionParameters( videoSessionParameters ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + VideoSessionParametersKHR( std::nullptr_t ) {} + + ~VideoSessionParametersKHR() + { + clear(); + } + + VideoSessionParametersKHR() = delete; + VideoSessionParametersKHR( VideoSessionParametersKHR const & ) = delete; + + VideoSessionParametersKHR( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_videoSessionParameters( VULKAN_HPP_NAMESPACE::exchange( rhs.m_videoSessionParameters, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + VideoSessionParametersKHR & operator=( VideoSessionParametersKHR const & ) = delete; + + VideoSessionParametersKHR & operator=( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_videoSessionParameters, rhs.m_videoSessionParameters ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParameters; + } + + operator VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR() const VULKAN_HPP_NOEXCEPT + { + return m_videoSessionParameters; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_videoSessionParameters ) + { + getDispatcher()->vkDestroyVideoSessionParametersKHR( static_cast( m_device ), + static_cast( m_videoSessionParameters ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_videoSessionParameters = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + 
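  // --- Editorial usage sketch (not part of the generated header) -----------------------------
  // The RAII VideoSessionKHR declared above owns its VkVideoSessionKHR and destroys it in
  // clear()/~VideoSessionKHR(). A minimal sketch of creating and backing a session follows; it
  // assumes a RAII Device created with VK_KHR_video_queue enabled, exceptions enabled
  // (VULKAN_HPP_RAII_NO_EXCEPTIONS not defined), and caller-provided create/bind infos.
  // The function name is purely illustrative.
  inline void exampleBindVideoSessionMemory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const &          device,
                                             VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const &                  sessionCreateInfo,
                                             std::vector<VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindInfos )
  {
    // creation goes through Device::createVideoSessionKHR, as in the constructor above
    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR videoSession = device.createVideoSessionKHR( sessionCreateInfo );

    // one VideoSessionMemoryRequirementsKHR per memory binding required by the implementation
    std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR> requirements = videoSession.getMemoryRequirements();

    // the caller is assumed to have allocated suitable DeviceMemory and filled one bind info per requirement
    VULKAN_HPP_ASSERT( bindInfos.size() == requirements.size() );
    (void)requirements;  // only used by the assertion in this sketch

    videoSession.bindMemory( bindInfos );
  }
  // --------------------------------------------------------------------------------------------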
VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_videoSessionParameters, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_videoSessionParameters, rhs.m_videoSessionParameters ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_KHR_video_queue === + + void update( const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo ) const; + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR m_videoSessionParameters = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // operators to compare vk::raii-handles +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + template ::value, bool>::type = 0> + auto operator<=>( T const & a, T const & b ) VULKAN_HPP_NOEXCEPT + { + return *a <=> *b; + } +# else + template ::value, bool>::type = 0> + bool operator==( T const & a, T const & b ) VULKAN_HPP_NOEXCEPT + { + return *a == *b; + } + + template ::value, bool>::type = 0> + bool operator!=( T const & a, T const & b ) VULKAN_HPP_NOEXCEPT + { + return *a != *b; + } + + template ::value, bool>::type = 0> + bool operator<( T const & a, T const & b ) VULKAN_HPP_NOEXCEPT + { + return *a < *b; + } +# endif + + template ::value, bool>::type = 0> + bool operator==( const T & v, std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + return !*v; + } + + template ::value, bool>::type = 0> + bool operator==( std::nullptr_t, const T & v ) VULKAN_HPP_NOEXCEPT + { + return !*v; + } + + template ::value, bool>::type = 0> + bool operator!=( const T & v, std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + return *v; + } + + template ::value, bool>::type = 0> + bool operator!=( std::nullptr_t, const T & v ) VULKAN_HPP_NOEXCEPT + { + return *v; + } + + //=========================== + //=== COMMAND Definitions === + //=========================== + + //=== VK_VERSION_1_0 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Context::createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Instance instance; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateInstance( + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &instance ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Context::createInstance" ); +# endif + } + + return 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance( *this, *reinterpret_cast( &instance ), allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type + Instance::enumeratePhysicalDevices() const + { + std::vector physicalDevices; + uint32_t physicalDeviceCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkEnumeratePhysicalDevices( static_cast( m_instance ), &physicalDeviceCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceCount ) + { + physicalDevices.resize( physicalDeviceCount ); + result = static_cast( getDispatcher()->vkEnumeratePhysicalDevices( + static_cast( m_instance ), &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eIncomplete ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::enumeratePhysicalDevices" ); +# endif + } + + std::vector physicalDevicesRAII; + physicalDevicesRAII.reserve( physicalDevices.size() ); + for ( auto & physicalDevice : physicalDevices ) + { + physicalDevicesRAII.emplace_back( *this, *reinterpret_cast( &physicalDevice ) ); + } + return physicalDevicesRAII; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures PhysicalDevice::getFeatures() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures && "Function requires " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features; + getDispatcher()->vkGetPhysicalDeviceFeatures( static_cast( m_physicalDevice ), + reinterpret_cast( &features ) ); + + return features; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties + PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties && "Function requires " ); + + VULKAN_HPP_NAMESPACE::FormatProperties formatProperties; + getDispatcher()->vkGetPhysicalDeviceFormatProperties( + static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( &formatProperties ) ); + + return formatProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties + PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPhysicalDeviceImageFormatProperties( static_cast( m_physicalDevice ), + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + reinterpret_cast( &imageFormatProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); + + return imageFormatProperties; + } + + 
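  // --- Editorial usage sketch (not part of the generated header) -----------------------------
  // Context::createInstance and Instance::enumeratePhysicalDevices defined above are the entry
  // points of the RAII chain. A minimal sketch, assuming the default vk / vk::raii namespace
  // macros and exceptions enabled (VULKAN_HPP_RAII_NO_EXCEPTIONS not defined), so failures
  // surface as exceptions rather than the unexpected-result path. The function name is
  // purely illustrative.
  inline void exampleEnumeratePhysicalDevices()
  {
    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context context;  // loads vkGetInstanceProcAddr and the global dispatcher

    VULKAN_HPP_NAMESPACE::ApplicationInfo    applicationInfo( "example", 1, "no engine", 1, VK_API_VERSION_1_1 );
    VULKAN_HPP_NAMESPACE::InstanceCreateInfo instanceCreateInfo( {}, &applicationInfo );
    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance instance = context.createInstance( instanceCreateInfo );

    // wraps the two-call vkEnumeratePhysicalDevices idiom implemented above
    for ( auto const & physicalDevice : instance.enumeratePhysicalDevices() )
    {
      VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = physicalDevice.getProperties();
      (void)properties;  // e.g. inspect properties.deviceName / properties.apiVersion here
    }
  }
  // --------------------------------------------------------------------------------------------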
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties PhysicalDevice::getProperties() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties && "Function requires " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties; + getDispatcher()->vkGetPhysicalDeviceProperties( static_cast( m_physicalDevice ), + reinterpret_cast( &properties ) ); + + return properties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties && + "Function requires " ); + + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties( static_cast( m_physicalDevice ), &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties( static_cast( m_physicalDevice ), + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties && "Function requires " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties; + getDispatcher()->vkGetPhysicalDeviceMemoryProperties( static_cast( m_physicalDevice ), + reinterpret_cast( &memoryProperties ) ); + + return memoryProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PFN_VoidFunction Instance::getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetInstanceProcAddr && "Function requires " ); + + PFN_vkVoidFunction result = getDispatcher()->vkGetInstanceProcAddr( static_cast( m_instance ), name.c_str() ); + + return result; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PFN_VoidFunction Device::getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceProcAddr && "Function requires " ); + + PFN_vkVoidFunction result = getDispatcher()->vkGetDeviceProcAddr( static_cast( m_device ), name.c_str() ); + + return result; + } + + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + PhysicalDevice::createDevice( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Device device; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateDevice( + static_cast( m_physicalDevice ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &device ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "PhysicalDevice::createDevice" ); +# endif + } + + return 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device( *this, *reinterpret_cast( &device ), allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Context::enumerateInstanceExtensionProperties( Optional layerName ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateInstanceExtensionProperties && + "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkEnumerateInstanceExtensionProperties( + layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceExtensionProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateDeviceExtensionProperties && "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkEnumerateDeviceExtensionProperties( + static_cast( m_physicalDevice ), layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + getDispatcher()->vkEnumerateDeviceExtensionProperties( static_cast( m_physicalDevice ), + layerName ? 
layerName->c_str() : nullptr, + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Context::enumerateInstanceLayerProperties() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateInstanceLayerProperties && "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + getDispatcher()->vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceLayerProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::enumerateDeviceLayerProperties() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateDeviceLayerProperties && "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkEnumerateDeviceLayerProperties( static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkEnumerateDeviceLayerProperties( + static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Queue queue; + getDispatcher()->vkGetDeviceQueue( static_cast( m_device ), queueFamilyIndex, queueIndex, reinterpret_cast( &queue ) ); + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Queue( *this, *reinterpret_cast( &queue ) ); + } + + VULKAN_HPP_INLINE void Queue::submit( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSubmit && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkQueueSubmit( + static_cast( m_queue ), submits.size(), reinterpret_cast( submits.data() ), static_cast( fence 
) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" ); + } + + VULKAN_HPP_INLINE void Queue::waitIdle() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueWaitIdle && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkQueueWaitIdle( static_cast( m_queue ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); + } + + VULKAN_HPP_INLINE void Device::waitIdle() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkDeviceWaitIdle && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkDeviceWaitIdle( static_cast( m_device ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::allocateMemory( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DeviceMemory memory; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkAllocateMemory( + static_cast( m_device ), + reinterpret_cast( &allocateInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &memory ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::allocateMemory" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceMemory( *this, *reinterpret_cast( &memory ), allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * DeviceMemory::mapMemory( VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkMapMemory && "Function requires " ); + + void * pData; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkMapMemory( static_cast( m_device ), + static_cast( m_memory ), + static_cast( offset ), + static_cast( size ), + static_cast( flags ), + &pData ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::mapMemory" ); + + return pData; + } + + VULKAN_HPP_INLINE void DeviceMemory::unmapMemory() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkUnmapMemory && "Function requires " ); + + getDispatcher()->vkUnmapMemory( static_cast( m_device ), static_cast( m_memory ) ); + } + + VULKAN_HPP_INLINE void + Device::flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkFlushMappedMemoryRanges && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkFlushMappedMemoryRanges( + static_cast( m_device ), memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" ); + } + + VULKAN_HPP_INLINE void + Device::invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkInvalidateMappedMemoryRanges && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( 
getDispatcher()->vkInvalidateMappedMemoryRanges( + static_cast( m_device ), memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize DeviceMemory::getCommitment() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMemoryCommitment && "Function requires " ); + + VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes; + getDispatcher()->vkGetDeviceMemoryCommitment( + static_cast( m_device ), static_cast( m_memory ), reinterpret_cast( &committedMemoryInBytes ) ); + + return committedMemoryInBytes; + } + + VULKAN_HPP_INLINE void Buffer::bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBindBufferMemory && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkBindBufferMemory( static_cast( m_device ), + static_cast( m_buffer ), + static_cast( memory ), + static_cast( memoryOffset ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Buffer::bindMemory" ); + } + + VULKAN_HPP_INLINE void Image::bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBindImageMemory && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkBindImageMemory( static_cast( m_device ), + static_cast( m_image ), + static_cast( memory ), + static_cast( memoryOffset ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Image::bindMemory" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Buffer::getMemoryRequirements() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements && "Function requires " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; + getDispatcher()->vkGetBufferMemoryRequirements( + static_cast( m_device ), static_cast( m_buffer ), reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Image::getMemoryRequirements() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements && "Function requires " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; + getDispatcher()->vkGetImageMemoryRequirements( + static_cast( m_device ), static_cast( m_image ), reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Image::getSparseMemoryRequirements() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSparseMemoryRequirements && "Function requires " ); + + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + getDispatcher()->vkGetImageSparseMemoryRequirements( + static_cast( m_device ), static_cast( m_image ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + getDispatcher()->vkGetImageSparseMemoryRequirements( static_cast( m_device ), + static_cast( m_image ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( 
sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageTiling tiling ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties && + "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties( static_cast( m_physicalDevice ), + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties( static_cast( m_physicalDevice ), + static_cast( format ), + static_cast( type ), + static_cast( samples ), + static_cast( usage ), + static_cast( tiling ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + VULKAN_HPP_INLINE void Queue::bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfo, + VULKAN_HPP_NAMESPACE::Fence fence ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueBindSparse && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkQueueBindSparse( + static_cast( m_queue ), bindInfo.size(), reinterpret_cast( bindInfo.data() ), static_cast( fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); + } + + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createFence( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Fence fence; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateFence( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createFence" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence( *this, *reinterpret_cast( &fence ), allocator ); + } + + VULKAN_HPP_INLINE void Device::resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy const & fences ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkResetFences && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkResetFences( static_cast( m_device ), fences.size(), reinterpret_cast( fences.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Fence::getStatus() const + { + 
VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceStatus && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetFenceStatus( static_cast( m_device ), static_cast( m_fence ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Fence::getStatus", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + + return static_cast( result ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitForFences( + VULKAN_HPP_NAMESPACE::ArrayProxy const & fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkWaitForFences && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkWaitForFences( + static_cast( m_device ), fences.size(), reinterpret_cast( fences.data() ), static_cast( waitAll ), timeout ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); + + return static_cast( result ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Semaphore semaphore; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateSemaphore( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &semaphore ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createSemaphore" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Semaphore( *this, *reinterpret_cast( &semaphore ), allocator ); + } + + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createEvent( VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Event event; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateEvent( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &event ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createEvent" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Event( *this, *reinterpret_cast( &event ), allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Event::getStatus() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetEventStatus && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetEventStatus( static_cast( m_device ), static_cast( m_event ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Event::getStatus", { 
VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } ); + + return static_cast( result ); + } + + VULKAN_HPP_INLINE void Event::set() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSetEvent && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkSetEvent( static_cast( m_device ), static_cast( m_event ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Event::set" ); + } + + VULKAN_HPP_INLINE void Event::reset() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkResetEvent && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkResetEvent( static_cast( m_device ), static_cast( m_event ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Event::reset" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createQueryPool( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::QueryPool queryPool; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateQueryPool( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &queryPool ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createQueryPool" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::QueryPool( *this, *reinterpret_cast( &queryPool ), allocator ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair> QueryPool::getResults( + uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueryPoolResults && "Function requires " ); + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetQueryPoolResults( static_cast( m_device ), + static_cast( m_queryPool ), + firstQuery, + queryCount, + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ), + static_cast( stride ), + static_cast( flags ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::QueryPool::getResults", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + + return std::make_pair( result, std::move( data ) ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair QueryPool::getResult( + uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueryPoolResults && "Function requires " ); + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetQueryPoolResults( static_cast( m_device ), + static_cast( m_queryPool ), + firstQuery, + queryCount, + sizeof( DataType ), + reinterpret_cast( &data ), + static_cast( stride ), + static_cast( flags ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING 
"::QueryPool::getResult", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + + return std::make_pair( result, std::move( data ) ); + } + + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createBuffer( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Buffer buffer; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateBuffer( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &buffer ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createBuffer" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Buffer( *this, *reinterpret_cast( &buffer ), allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::BufferView view; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateBufferView( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &view ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createBufferView" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferView( *this, *reinterpret_cast( &view ), allocator ); + } + + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createImage( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Image image; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateImage( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &image ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createImage" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Image( *this, *reinterpret_cast( &image ), allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout + Image::getSubresourceLayout( const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout && "Function requires " ); + + VULKAN_HPP_NAMESPACE::SubresourceLayout layout; + getDispatcher()->vkGetImageSubresourceLayout( static_cast( m_device ), + static_cast( m_image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return 
layout; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ImageView view; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateImageView( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &view ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createImageView" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ImageView( *this, *reinterpret_cast( &view ), allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createShaderModule( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateShaderModule( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &shaderModule ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createShaderModule" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderModule( *this, *reinterpret_cast( &shaderModule ), allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreatePipelineCache( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipelineCache ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createPipelineCache" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache( *this, *reinterpret_cast( &pipelineCache ), allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PipelineCache::getData() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineCacheData && "Function requires " ); + + std::vector data; + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPipelineCacheData( static_cast( m_device ), static_cast( m_pipelineCache ), &dataSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( 
getDispatcher()->vkGetPipelineCacheData( + static_cast( m_device ), static_cast( m_pipelineCache ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PipelineCache::getData" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return data; + } + + VULKAN_HPP_INLINE void PipelineCache::merge( VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkMergePipelineCaches && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkMergePipelineCaches( static_cast( m_device ), + static_cast( m_pipelineCache ), + srcCaches.size(), + reinterpret_cast( srcCaches.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PipelineCache::merge" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type + Device::createGraphicsPipelines( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateGraphicsPipelines( + static_cast( m_device ), + pipelineCache ? static_cast( **pipelineCache ) : 0, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createGraphicsPipelines" ); +# endif + } + + std::vector pipelinesRAII; + pipelinesRAII.reserve( pipelines.size() ); + for ( auto & pipeline : pipelines ) + { + pipelinesRAII.emplace_back( *this, *reinterpret_cast( &pipeline ), allocator, result ); + } + return pipelinesRAII; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createGraphicsPipeline( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateGraphicsPipelines( + static_cast( m_device ), + pipelineCache ? 
static_cast( **pipelineCache ) : 0, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createGraphicsPipeline" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast( &pipeline ), allocator, result ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type + Device::createComputePipelines( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateComputePipelines( + static_cast( m_device ), + pipelineCache ? static_cast( **pipelineCache ) : 0, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createComputePipelines" ); +# endif + } + + std::vector pipelinesRAII; + pipelinesRAII.reserve( pipelines.size() ); + for ( auto & pipeline : pipelines ) + { + pipelinesRAII.emplace_back( *this, *reinterpret_cast( &pipeline ), allocator, result ); + } + return pipelinesRAII; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createComputePipeline( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateComputePipelines( + static_cast( m_device ), + pipelineCache ? 
static_cast( **pipelineCache ) : 0, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createComputePipeline" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast( &pipeline ), allocator, result ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreatePipelineLayout( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipelineLayout ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createPipelineLayout" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineLayout( *this, *reinterpret_cast( &pipelineLayout ), allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createSampler( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Sampler sampler; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateSampler( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &sampler ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createSampler" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Sampler( *this, *reinterpret_cast( &sampler ), allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateDescriptorSetLayout( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &setLayout ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createDescriptorSetLayout" ); +# endif + } + + return 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout( *this, *reinterpret_cast( &setLayout ), allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateDescriptorPool( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &descriptorPool ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createDescriptorPool" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorPool( *this, *reinterpret_cast( &descriptorPool ), allocator ); + } + + VULKAN_HPP_INLINE void DescriptorPool::reset( VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkResetDescriptorPool && "Function requires " ); + + getDispatcher()->vkResetDescriptorPool( + static_cast( m_device ), static_cast( m_descriptorPool ), static_cast( flags ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type + Device::allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const + { + std::vector descriptorSets( allocateInfo.descriptorSetCount ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkAllocateDescriptorSets( static_cast( m_device ), + reinterpret_cast( &allocateInfo ), + reinterpret_cast( descriptorSets.data() ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::allocateDescriptorSets" ); +# endif + } + + std::vector descriptorSetsRAII; + descriptorSetsRAII.reserve( descriptorSets.size() ); + for ( auto & descriptorSet : descriptorSets ) + { + descriptorSetsRAII.emplace_back( + *this, *reinterpret_cast( &descriptorSet ), static_cast( allocateInfo.descriptorPool ) ); + } + return descriptorSetsRAII; + } + + VULKAN_HPP_INLINE void Device::updateDescriptorSets( + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorCopies ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateDescriptorSets && "Function requires " ); + + getDispatcher()->vkUpdateDescriptorSets( static_cast( m_device ), + descriptorWrites.size(), + reinterpret_cast( descriptorWrites.data() ), + descriptorCopies.size(), + reinterpret_cast( descriptorCopies.data() ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; + VULKAN_HPP_NAMESPACE::Result result = static_cast( 
getDispatcher()->vkCreateFramebuffer( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &framebuffer ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createFramebuffer" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Framebuffer( *this, *reinterpret_cast( &framebuffer ), allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateRenderPass( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createRenderPass" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, *reinterpret_cast( &renderPass ), allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D RenderPass::getRenderAreaGranularity() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetRenderAreaGranularity && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Extent2D granularity; + getDispatcher()->vkGetRenderAreaGranularity( + static_cast( m_device ), static_cast( m_renderPass ), reinterpret_cast( &granularity ) ); + + return granularity; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createCommandPool( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::CommandPool commandPool; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateCommandPool( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &commandPool ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createCommandPool" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandPool( *this, *reinterpret_cast( &commandPool ), allocator ); + } + + VULKAN_HPP_INLINE void CommandPool::reset( VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkResetCommandPool && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkResetCommandPool( + static_cast( m_device ), static_cast( m_commandPool ), static_cast( flags ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandPool::reset" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type + Device::allocateCommandBuffers( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) const + { + std::vector commandBuffers( allocateInfo.commandBufferCount ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkAllocateCommandBuffers( static_cast( m_device ), + reinterpret_cast( &allocateInfo ), + reinterpret_cast( commandBuffers.data() ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::allocateCommandBuffers" ); +# endif + } + + std::vector commandBuffersRAII; + commandBuffersRAII.reserve( commandBuffers.size() ); + for ( auto & commandBuffer : commandBuffers ) + { + commandBuffersRAII.emplace_back( + *this, *reinterpret_cast( &commandBuffer ), static_cast( allocateInfo.commandPool ) ); + } + return commandBuffersRAII; + } + + VULKAN_HPP_INLINE void CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBeginCommandBuffer && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkBeginCommandBuffer( + static_cast( m_commandBuffer ), reinterpret_cast( &beginInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" ); + } + + VULKAN_HPP_INLINE void CommandBuffer::end() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkEndCommandBuffer && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkEndCommandBuffer( static_cast( m_commandBuffer ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" ); + } + + VULKAN_HPP_INLINE void CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkResetCommandBuffer && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkResetCommandBuffer( static_cast( m_commandBuffer ), static_cast( flags ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" ); + } + + VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindPipeline && "Function requires " ); + + getDispatcher()->vkCmdBindPipeline( + static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setViewport( uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewport && "Function requires " ); + + getDispatcher()->vkCmdSetViewport( + static_cast( m_commandBuffer ), firstViewport, viewports.size(), reinterpret_cast( viewports.data() ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setScissor( uint32_t firstScissor, + VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissor && "Function requires " ); + + getDispatcher()->vkCmdSetScissor( + static_cast( m_commandBuffer ), firstScissor, scissors.size(), 
+        reinterpret_cast<const VkRect2D *>( scissors.data() ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineWidth && "Function requires <VK_VERSION_1_0>" );
+
+      getDispatcher()->vkCmdSetLineWidth( static_cast<VkCommandBuffer>( m_commandBuffer ), lineWidth );
+    }
+
+    VULKAN_HPP_INLINE void
+      CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBias && "Function requires <VK_VERSION_1_0>" );
+
+      getDispatcher()->vkCmdSetDepthBias( static_cast<VkCommandBuffer>( m_commandBuffer ), depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetBlendConstants && "Function requires <VK_VERSION_1_0>" );
+
+      getDispatcher()->vkCmdSetBlendConstants( static_cast<VkCommandBuffer>( m_commandBuffer ), blendConstants );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBounds && "Function requires <VK_VERSION_1_0>" );
+
+      getDispatcher()->vkCmdSetDepthBounds( static_cast<VkCommandBuffer>( m_commandBuffer ), minDepthBounds, maxDepthBounds );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
+                                                                 uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilCompareMask && "Function requires <VK_VERSION_1_0>" );
+
+      getDispatcher()->vkCmdSetStencilCompareMask( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilWriteMask && "Function requires <VK_VERSION_1_0>" );
+
+      getDispatcher()->vkCmdSetStencilWriteMask( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilReference && "Function requires <VK_VERSION_1_0>" );
+
+      getDispatcher()->vkCmdSetStencilReference( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), reference );
+    }
+
+    VULKAN_HPP_INLINE void
+      CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
+                                         VULKAN_HPP_NAMESPACE::PipelineLayout layout,
+                                         uint32_t firstSet,
+                                         VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
+                                         VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorSets && "Function requires <VK_VERSION_1_0>" );
+
+      getDispatcher()->vkCmdBindDescriptorSets( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
+                                                static_cast<VkPipelineLayout>( layout ),
+                                                firstSet,
+                                                descriptorSets.size(),
+                                                reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ),
+                                                dynamicOffsets.size(),
+                                                dynamicOffsets.data() );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                           VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                           VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindIndexBuffer && "Function requires <VK_VERSION_1_0>" );
+
+      getDispatcher()->vkCmdBindIndexBuffer( static_cast<VkCommandBuffer>(
m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( indexType ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers && "Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdBindVertexBuffers( static_cast( m_commandBuffer ), + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDraw && "Function requires " ); + + getDispatcher()->vkCmdDraw( static_cast( m_commandBuffer ), vertexCount, instanceCount, firstVertex, firstInstance ); + } + + VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( + uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndexed && "Function requires " ); + + getDispatcher()->vkCmdDrawIndexed( static_cast( m_commandBuffer ), indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + } + + VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirect && "Function requires " ); + + getDispatcher()->vkCmdDrawIndirect( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); + } + + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndexedIndirect && "Function requires " ); + + getDispatcher()->vkCmdDrawIndexedIndirect( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); + } + + VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatch && "Function requires " ); + + getDispatcher()->vkCmdDispatch( static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); + } + + VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchIndirect && "Function requires " ); + + getDispatcher()->vkCmdDispatchIndirect( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + { + 
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBuffer && "Function requires " ); + + getDispatcher()->vkCmdCopyBuffer( static_cast( m_commandBuffer ), + static_cast( srcBuffer ), + static_cast( dstBuffer ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImage && "Function requires " ); + + getDispatcher()->vkCmdCopyImage( static_cast( m_commandBuffer ), + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, + VULKAN_HPP_NAMESPACE::Filter filter ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBlitImage && "Function requires " ); + + getDispatcher()->vkCmdBlitImage( static_cast( m_commandBuffer ), + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ), + static_cast( filter ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( + VULKAN_HPP_NAMESPACE::Buffer srcBuffer, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBufferToImage && "Function requires " ); + + getDispatcher()->vkCmdCopyBufferToImage( static_cast( m_commandBuffer ), + static_cast( srcBuffer ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( + VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImageToBuffer && "Function requires " ); + + getDispatcher()->vkCmdCopyImageToBuffer( static_cast( m_commandBuffer ), + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstBuffer ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::ArrayProxy const & data ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdUpdateBuffer && "Function requires " ); + + getDispatcher()->vkCmdUpdateBuffer( static_cast( m_commandBuffer ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + uint32_t data ) const 
VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdFillBuffer && "Function requires " ); + + getDispatcher()->vkCmdFillBuffer( static_cast( m_commandBuffer ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( size ), + data ); + } + + VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( + VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearColorValue & color, + VULKAN_HPP_NAMESPACE::ArrayProxy const & ranges ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdClearColorImage && "Function requires " ); + + getDispatcher()->vkCmdClearColorImage( static_cast( m_commandBuffer ), + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( &color ), + ranges.size(), + reinterpret_cast( ranges.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( + VULKAN_HPP_NAMESPACE::Image image, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, + const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, + VULKAN_HPP_NAMESPACE::ArrayProxy const & ranges ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdClearDepthStencilImage && "Function requires " ); + + getDispatcher()->vkCmdClearDepthStencilImage( static_cast( m_commandBuffer ), + static_cast( image ), + static_cast( imageLayout ), + reinterpret_cast( &depthStencil ), + ranges.size(), + reinterpret_cast( ranges.data() ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy const & attachments, + VULKAN_HPP_NAMESPACE::ArrayProxy const & rects ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdClearAttachments && "Function requires " ); + + getDispatcher()->vkCmdClearAttachments( static_cast( m_commandBuffer ), + attachments.size(), + reinterpret_cast( attachments.data() ), + rects.size(), + reinterpret_cast( rects.data() ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResolveImage && "Function requires " ); + + getDispatcher()->vkCmdResolveImage( static_cast( m_commandBuffer ), + static_cast( srcImage ), + static_cast( srcImageLayout ), + static_cast( dstImage ), + static_cast( dstImageLayout ), + regions.size(), + reinterpret_cast( regions.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetEvent && "Function requires " ); + + getDispatcher()->vkCmdSetEvent( + static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetEvent && "Function requires " ); + + getDispatcher()->vkCmdResetEvent( + static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::waitEvents( + VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + 
VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferMemoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWaitEvents && "Function requires " ); + + getDispatcher()->vkCmdWaitEvents( static_cast( m_commandBuffer ), + events.size(), + reinterpret_cast( events.data() ), + static_cast( srcStageMask ), + static_cast( dstStageMask ), + memoryBarriers.size(), + reinterpret_cast( memoryBarriers.data() ), + bufferMemoryBarriers.size(), + reinterpret_cast( bufferMemoryBarriers.data() ), + imageMemoryBarriers.size(), + reinterpret_cast( imageMemoryBarriers.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, + VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferMemoryBarriers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPipelineBarrier && "Function requires " ); + + getDispatcher()->vkCmdPipelineBarrier( static_cast( m_commandBuffer ), + static_cast( srcStageMask ), + static_cast( dstStageMask ), + static_cast( dependencyFlags ), + memoryBarriers.size(), + reinterpret_cast( memoryBarriers.data() ), + bufferMemoryBarriers.size(), + reinterpret_cast( bufferMemoryBarriers.data() ), + imageMemoryBarriers.size(), + reinterpret_cast( imageMemoryBarriers.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginQuery && "Function requires " ); + + getDispatcher()->vkCmdBeginQuery( + static_cast( m_commandBuffer ), static_cast( queryPool ), query, static_cast( flags ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndQuery && "Function requires " ); + + getDispatcher()->vkCmdEndQuery( static_cast( m_commandBuffer ), static_cast( queryPool ), query ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetQueryPool && "Function requires " ); + + getDispatcher()->vkCmdResetQueryPool( static_cast( m_commandBuffer ), static_cast( queryPool ), firstQuery, queryCount ); + } + + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteTimestamp && "Function requires " ); + + getDispatcher()->vkCmdWriteTimestamp( + static_cast( m_commandBuffer ), static_cast( pipelineStage ), static_cast( queryPool ), query ); + } + + VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + 
VULKAN_HPP_NAMESPACE::DeviceSize stride, + VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyQueryPoolResults && "Function requires " ); + + getDispatcher()->vkCmdCopyQueryPoolResults( static_cast( m_commandBuffer ), + static_cast( queryPool ), + firstQuery, + queryCount, + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( stride ), + static_cast( flags ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, + uint32_t offset, + VULKAN_HPP_NAMESPACE::ArrayProxy const & values ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushConstants && "Function requires " ); + + getDispatcher()->vkCmdPushConstants( static_cast( m_commandBuffer ), + static_cast( layout ), + static_cast( stageFlags ), + offset, + values.size() * sizeof( ValuesType ), + reinterpret_cast( values.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderPass && "Function requires " ); + + getDispatcher()->vkCmdBeginRenderPass( static_cast( m_commandBuffer ), + reinterpret_cast( &renderPassBegin ), + static_cast( contents ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdNextSubpass && "Function requires " ); + + getDispatcher()->vkCmdNextSubpass( static_cast( m_commandBuffer ), static_cast( contents ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderPass && "Function requires " ); + + getDispatcher()->vkCmdEndRenderPass( static_cast( m_commandBuffer ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::executeCommands( + VULKAN_HPP_NAMESPACE::ArrayProxy const & commandBuffers ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdExecuteCommands && "Function requires " ); + + getDispatcher()->vkCmdExecuteCommands( + static_cast( m_commandBuffer ), commandBuffers.size(), reinterpret_cast( commandBuffers.data() ) ); + } + + //=== VK_VERSION_1_1 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t Context::enumerateInstanceVersion() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateInstanceVersion && "Function requires " ); + + uint32_t apiVersion; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkEnumerateInstanceVersion( &apiVersion ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceVersion" ); + + return apiVersion; + } + + VULKAN_HPP_INLINE void + Device::bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBindBufferMemory2 && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkBindBufferMemory2( + static_cast( m_device ), bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" ); + } + + VULKAN_HPP_INLINE void Device::bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const + { + 
VULKAN_HPP_ASSERT( getDispatcher()->vkBindImageMemory2 && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkBindImageMemory2( + static_cast( m_device ), bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupPeerMemoryFeatures && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; + getDispatcher()->vkGetDeviceGroupPeerMemoryFeatures( static_cast( m_device ), + heapIndex, + localDeviceIndex, + remoteDeviceIndex, + reinterpret_cast( &peerMemoryFeatures ) ); + + return peerMemoryFeatures; + } + + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDeviceMask && "Function requires or " ); + + getDispatcher()->vkCmdSetDeviceMask( static_cast( m_commandBuffer ), deviceMask ); + } + + VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchBase && "Function requires or " ); + + getDispatcher()->vkCmdDispatchBase( + static_cast( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Instance::enumeratePhysicalDeviceGroups() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkEnumeratePhysicalDeviceGroups && + "Function requires or " ); + + std::vector physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkEnumeratePhysicalDeviceGroups( static_cast( m_instance ), &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( + getDispatcher()->vkEnumeratePhysicalDeviceGroups( static_cast( m_instance ), + &physicalDeviceGroupCount, + reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + return physicalDeviceGroupProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetImageMemoryRequirements2( static_cast( m_device ), + 
reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetImageMemoryRequirements2( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetBufferMemoryRequirements2( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetBufferMemoryRequirements2( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSparseMemoryRequirements2 && + "Function requires or " ); + + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + getDispatcher()->vkGetImageSparseMemoryRequirements2( + static_cast( m_device ), reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + getDispatcher()->vkGetImageSparseMemoryRequirements2( static_cast( m_device ), + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; + getDispatcher()->vkGetPhysicalDeviceFeatures2( static_cast( m_physicalDevice ), + reinterpret_cast( &features ) ); + + return features; + } + + template + 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain PhysicalDevice::getFeatures2() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceFeatures2( static_cast( m_physicalDevice ), + reinterpret_cast( &features ) ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; + getDispatcher()->vkGetPhysicalDeviceProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &properties ) ); + + return properties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain PhysicalDevice::getProperties2() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &properties ) ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 + PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; + getDispatcher()->vkGetPhysicalDeviceFormatProperties2( + static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( &formatProperties ) ); + + return formatProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceFormatProperties2( + static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( &formatProperties ) ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties2 + PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + + return 
imageFormatProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2 && + "Function requires or " ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2 && + "Function requires or " ); + + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast( m_physicalDevice ), &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast( m_physicalDevice ), + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2 && + "Function requires or " ); + + std::vector structureChains; + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast( m_physicalDevice ), &queueFamilyPropertyCount, nullptr ); + structureChains.resize( queueFamilyPropertyCount ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = structureChains[i].template get().pNext; + } + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast( m_physicalDevice ), + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + structureChains.resize( queueFamilyPropertyCount ); + } + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + structureChains[i].template get() = queueFamilyProperties[i]; + } + return structureChains; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + PhysicalDevice::getMemoryProperties2() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; + getDispatcher()->vkGetPhysicalDeviceMemoryProperties2( static_cast( m_physicalDevice ), + 
reinterpret_cast( &memoryProperties ) ); + + return memoryProperties; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain PhysicalDevice::getMemoryProperties2() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2 && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = + structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceMemoryProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &memoryProperties ) ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2 && + "Function requires or " ); + + std::vector properties; + uint32_t propertyCount; + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &formatInfo ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast( m_physicalDevice ), + reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + + VULKAN_HPP_INLINE void CommandPool::trim( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkTrimCommandPool && "Function requires or " ); + + getDispatcher()->vkTrimCommandPool( + static_cast( m_device ), static_cast( m_commandPool ), static_cast( flags ) ); + } + + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Queue queue; + getDispatcher()->vkGetDeviceQueue2( + static_cast( m_device ), reinterpret_cast( &queueInfo ), reinterpret_cast( &queue ) ); + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Queue( *this, *reinterpret_cast( &queue ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateSamplerYcbcrConversion( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createSamplerYcbcrConversion" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion( + *this, *reinterpret_cast( &ycbcrConversion ), allocator ); + } + + 
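A minimal usage sketch of the wrappers defined above (Device::getQueue2 and Device::createSamplerYcbcrConversion), assuming the default exception-enabled Vulkan-Hpp configuration and an already-initialized vk::raii::Device; the function name, queue family index, and YCbCr format chosen here are illustrative assumptions and not part of this patch.

    // Usage sketch (assumptions: exceptions enabled; `device` and `queueFamilyIndex` supplied by the caller).
    #include <vulkan/vulkan_raii.hpp>

    void exampleQueueAndYcbcrConversion( vk::raii::Device const & device, uint32_t queueFamilyIndex )
    {
      // vkGetDeviceQueue2 path: retrieve a queue and wrap it in a RAII handle (queues themselves are never destroyed).
      vk::DeviceQueueInfo2 queueInfo{};
      queueInfo.queueFamilyIndex = queueFamilyIndex;
      queueInfo.queueIndex       = 0;
      vk::raii::Queue queue = device.getQueue2( queueInfo );

      // vkCreateSamplerYcbcrConversion path: the RAII object destroys the conversion when it leaves scope.
      vk::SamplerYcbcrConversionCreateInfo conversionInfo{};
      conversionInfo.format     = vk::Format::eG8B8R82Plane420Unorm;  // hypothetical multi-planar format, for illustration only
      conversionInfo.ycbcrModel = vk::SamplerYcbcrModelConversion::eYcbcr709;
      conversionInfo.ycbcrRange = vk::SamplerYcbcrRange::eItuNarrow;
      vk::raii::SamplerYcbcrConversion conversion = device.createSamplerYcbcrConversion( conversionInfo );

      (void)queue;
      (void)conversion;
    }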
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateDescriptorUpdateTemplate( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createDescriptorUpdateTemplate" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate( + *this, *reinterpret_cast( &descriptorUpdateTemplate ), allocator ); + } + + template + VULKAN_HPP_INLINE void DescriptorSet::updateWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + DataType const & data ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateDescriptorSetWithTemplate && + "Function requires or " ); + + getDispatcher()->vkUpdateDescriptorSetWithTemplate( static_cast( m_device ), + static_cast( m_descriptorSet ), + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( &data ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties + PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalBufferProperties && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; + getDispatcher()->vkGetPhysicalDeviceExternalBufferProperties( static_cast( m_physicalDevice ), + reinterpret_cast( &externalBufferInfo ), + reinterpret_cast( &externalBufferProperties ) ); + + return externalBufferProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties + PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalFenceProperties && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; + getDispatcher()->vkGetPhysicalDeviceExternalFenceProperties( static_cast( m_physicalDevice ), + reinterpret_cast( &externalFenceInfo ), + reinterpret_cast( &externalFenceProperties ) ); + + return externalFenceProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalSemaphoreProperties && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; + getDispatcher()->vkGetPhysicalDeviceExternalSemaphoreProperties( + static_cast( m_physicalDevice ), + reinterpret_cast( &externalSemaphoreInfo 
), + reinterpret_cast( &externalSemaphoreProperties ) ); + + return externalSemaphoreProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupport && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; + getDispatcher()->vkGetDescriptorSetLayoutSupport( static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); + + return support; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupport && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get(); + getDispatcher()->vkGetDescriptorSetLayoutSupport( static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); + + return structureChain; + } + + //=== VK_VERSION_1_2 === + + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectCount && + "Function requires or or " ); + + getDispatcher()->vkCmdDrawIndirectCount( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndexedIndirectCount && + "Function requires or or " ); + + getDispatcher()->vkCmdDrawIndexedIndirectCount( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateRenderPass2( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createRenderPass2" ); +# endif + } + + return 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, *reinterpret_cast( &renderPass ), allocator ); + } + + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderPass2 && + "Function requires or " ); + + getDispatcher()->vkCmdBeginRenderPass2( static_cast( m_commandBuffer ), + reinterpret_cast( &renderPassBegin ), + reinterpret_cast( &subpassBeginInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdNextSubpass2 && "Function requires or " ); + + getDispatcher()->vkCmdNextSubpass2( static_cast( m_commandBuffer ), + reinterpret_cast( &subpassBeginInfo ), + reinterpret_cast( &subpassEndInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderPass2 && "Function requires or " ); + + getDispatcher()->vkCmdEndRenderPass2( static_cast( m_commandBuffer ), reinterpret_cast( &subpassEndInfo ) ); + } + + VULKAN_HPP_INLINE void QueryPool::reset( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkResetQueryPool && "Function requires or " ); + + getDispatcher()->vkResetQueryPool( static_cast( m_device ), static_cast( m_queryPool ), firstQuery, queryCount ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Semaphore::getCounterValue() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreCounterValue && + "Function requires or " ); + + uint64_t value; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetSemaphoreCounterValue( static_cast( m_device ), static_cast( m_semaphore ), &value ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValue" ); + + return value; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, + uint64_t timeout ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkWaitSemaphores && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkWaitSemaphores( static_cast( m_device ), reinterpret_cast( &waitInfo ), timeout ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); + + return static_cast( result ); + } + + VULKAN_HPP_INLINE void Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSignalSemaphore && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkSignalSemaphore( static_cast( m_device ), reinterpret_cast( &signalInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress + Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo 
& info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferDeviceAddress && + "Function requires or or " ); + + VkDeviceAddress result = + getDispatcher()->vkGetBufferDeviceAddress( static_cast( m_device ), reinterpret_cast( &info ) ); + + return static_cast( result ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t + Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferOpaqueCaptureAddress && + "Function requires or " ); + + uint64_t result = + getDispatcher()->vkGetBufferOpaqueCaptureAddress( static_cast( m_device ), reinterpret_cast( &info ) ); + + return result; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t + Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddress && + "Function requires or " ); + + uint64_t result = getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddress( static_cast( m_device ), + reinterpret_cast( &info ) ); + + return result; + } + + //=== VK_VERSION_1_3 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getToolProperties() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceToolProperties && + "Function requires or " ); + + std::vector toolProperties; + uint32_t toolCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceToolProperties( static_cast( m_physicalDevice ), &toolCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) + { + toolProperties.resize( toolCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceToolProperties( + static_cast( m_physicalDevice ), &toolCount, reinterpret_cast( toolProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); + VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); + if ( toolCount < toolProperties.size() ) + { + toolProperties.resize( toolCount ); + } + return toolProperties; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreatePrivateDataSlot( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &privateDataSlot ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createPrivateDataSlot" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot( *this, *reinterpret_cast( &privateDataSlot ), allocator ); + } + + VULKAN_HPP_INLINE void Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t 
data ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSetPrivateData && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkSetPrivateData( + static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPrivateData && "Function requires or " ); + + uint64_t data; + getDispatcher()->vkGetPrivateData( + static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), &data ); + + return data; + } + + VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetEvent2 && "Function requires or " ); + + getDispatcher()->vkCmdSetEvent2( + static_cast( m_commandBuffer ), static_cast( event ), reinterpret_cast( &dependencyInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetEvent2 && "Function requires or " ); + + getDispatcher()->vkCmdResetEvent2( + static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dependencyInfos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWaitEvents2 && "Function requires or " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); +# else + if ( events.size() != dependencyInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdWaitEvents2( static_cast( m_commandBuffer ), + events.size(), + reinterpret_cast( events.data() ), + reinterpret_cast( dependencyInfos.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPipelineBarrier2 && "Function requires or " ); + + getDispatcher()->vkCmdPipelineBarrier2( static_cast( m_commandBuffer ), reinterpret_cast( &dependencyInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteTimestamp2 && "Function requires or " ); + + getDispatcher()->vkCmdWriteTimestamp2( + static_cast( m_commandBuffer ), static_cast( stage ), static_cast( queryPool ), query ); + } + + VULKAN_HPP_INLINE void Queue::submit2( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, + VULKAN_HPP_NAMESPACE::Fence fence ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSubmit2 && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( 
getDispatcher()->vkQueueSubmit2(
+        static_cast<VkQueue>( m_queue ), submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) ) );
+      VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBuffer2 && "Function requires <VK_VERSION_1_3> or <VK_KHR_copy_commands2>" );
+
+      getDispatcher()->vkCmdCopyBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImage2 && "Function requires <VK_VERSION_1_3> or <VK_KHR_copy_commands2>" );
+
+      getDispatcher()->vkCmdCopyImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void
+      CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBufferToImage2 &&
+                         "Function requires <VK_VERSION_1_3> or <VK_KHR_copy_commands2>" );
+
+      getDispatcher()->vkCmdCopyBufferToImage2( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void
+      CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImageToBuffer2 &&
+                         "Function requires <VK_VERSION_1_3> or <VK_KHR_copy_commands2>" );
+
+      getDispatcher()->vkCmdCopyImageToBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBlitImage2 && "Function requires <VK_VERSION_1_3> or <VK_KHR_copy_commands2>" );
+
+      getDispatcher()->vkCmdBlitImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResolveImage2 && "Function requires <VK_VERSION_1_3> or <VK_KHR_copy_commands2>" );
+
+      getDispatcher()->vkCmdResolveImage2( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                           reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRendering && "Function requires <VK_VERSION_1_3> or <VK_KHR_dynamic_rendering>" );
+
+      getDispatcher()->vkCmdBeginRendering( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::endRendering() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRendering && "Function requires <VK_VERSION_1_3> or <VK_KHR_dynamic_rendering>" );
+
+      getDispatcher()->vkCmdEndRendering( static_cast<VkCommandBuffer>( m_commandBuffer ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullMode &&
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>" );
+
+      getDispatcher()->vkCmdSetCullMode( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFace &&
+                         "Function requires <VK_VERSION_1_3> or <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object>"
); + + getDispatcher()->vkCmdSetFrontFace( static_cast( m_commandBuffer ), static_cast( frontFace ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveTopology && + "Function requires or or " ); + + getDispatcher()->vkCmdSetPrimitiveTopology( static_cast( m_commandBuffer ), static_cast( primitiveTopology ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWithCount && + "Function requires or or " ); + + getDispatcher()->vkCmdSetViewportWithCount( + static_cast( m_commandBuffer ), viewports.size(), reinterpret_cast( viewports.data() ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissorWithCount && + "Function requires or or " ); + + getDispatcher()->vkCmdSetScissorWithCount( + static_cast( m_commandBuffer ), scissors.size(), reinterpret_cast( scissors.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes, + VULKAN_HPP_NAMESPACE::ArrayProxy const & strides ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers2 && + "Function requires or or " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); + VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" ); + } + if ( !strides.empty() && buffers.size() != strides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdBindVertexBuffers2( static_cast( m_commandBuffer ), + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ), + reinterpret_cast( sizes.data() ), + reinterpret_cast( strides.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthTestEnable && + "Function requires or or " ); + + getDispatcher()->vkCmdSetDepthTestEnable( static_cast( m_commandBuffer ), static_cast( depthTestEnable ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthWriteEnable && + "Function requires or or " ); + + getDispatcher()->vkCmdSetDepthWriteEnable( static_cast( m_commandBuffer ), static_cast( depthWriteEnable ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( 
VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthCompareOp && + "Function requires or or " ); + + getDispatcher()->vkCmdSetDepthCompareOp( static_cast( m_commandBuffer ), static_cast( depthCompareOp ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBoundsTestEnable && + "Function requires or or " ); + + getDispatcher()->vkCmdSetDepthBoundsTestEnable( static_cast( m_commandBuffer ), static_cast( depthBoundsTestEnable ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilTestEnable && + "Function requires or or " ); + + getDispatcher()->vkCmdSetStencilTestEnable( static_cast( m_commandBuffer ), static_cast( stencilTestEnable ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOp && + "Function requires or or " ); + + getDispatcher()->vkCmdSetStencilOp( static_cast( m_commandBuffer ), + static_cast( faceMask ), + static_cast( failOp ), + static_cast( passOp ), + static_cast( depthFailOp ), + static_cast( compareOp ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizerDiscardEnable && + "Function requires or or " ); + + getDispatcher()->vkCmdSetRasterizerDiscardEnable( static_cast( m_commandBuffer ), static_cast( rasterizerDiscardEnable ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBiasEnable && + "Function requires or or " ); + + getDispatcher()->vkCmdSetDepthBiasEnable( static_cast( m_commandBuffer ), static_cast( depthBiasEnable ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveRestartEnable && + "Function requires or or " ); + + getDispatcher()->vkCmdSetPrimitiveRestartEnable( static_cast( m_commandBuffer ), static_cast( primitiveRestartEnable ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirements && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetDeviceBufferMemoryRequirements( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & 
info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirements && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetDeviceBufferMemoryRequirements( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirements && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetDeviceImageMemoryRequirements( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirements && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetDeviceImageMemoryRequirements( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSparseMemoryRequirements && + "Function requires or " ); + + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + getDispatcher()->vkGetDeviceImageSparseMemoryRequirements( + static_cast( m_device ), reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + getDispatcher()->vkGetDeviceImageSparseMemoryRequirements( static_cast( m_device ), + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; + } + + //=== VK_VERSION_1_4 === + + VULKAN_HPP_INLINE void CommandBuffer::setLineStipple( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStipple && + "Function requires or or " ); + + getDispatcher()->vkCmdSetLineStipple( static_cast( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkMapMemory2 && "Function requires or " ); + + void * pData; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkMapMemory2( static_cast( m_device ), 
reinterpret_cast( &memoryMapInfo ), &pData ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2" ); + + return pData; + } + + VULKAN_HPP_INLINE void Device::unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkUnmapMemory2 && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkUnmapMemory2( static_cast( m_device ), reinterpret_cast( &memoryUnmapInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2" ); + } + + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindIndexBuffer2 && "Function requires or " ); + + getDispatcher()->vkCmdBindIndexBuffer2( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( size ), + static_cast( indexType ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D + Device::getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetRenderingAreaGranularity && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Extent2D granularity; + getDispatcher()->vkGetRenderingAreaGranularity( static_cast( m_device ), + reinterpret_cast( &renderingAreaInfo ), + reinterpret_cast( &granularity ) ); + + return granularity; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 + Device::getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayout && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; + getDispatcher()->vkGetDeviceImageSubresourceLayout( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &layout ) ); + + return layout; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayout && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); + getDispatcher()->vkGetDeviceImageSubresourceLayout( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &layout ) ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 + Image::getSubresourceLayout2( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetImageSubresourceLayout2 && + "Function requires or or or " ); + + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; + getDispatcher()->vkGetImageSubresourceLayout2( static_cast( m_device ), + static_cast( m_image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return layout; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + 
Image::getSubresourceLayout2( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetImageSubresourceLayout2 && + "Function requires or or or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); + getDispatcher()->vkGetImageSubresourceLayout2( static_cast( m_device ), + static_cast( m_image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return structureChain; + } + + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSet && "Function requires or " ); + + getDispatcher()->vkCmdPushDescriptorSet( static_cast( m_commandBuffer ), + static_cast( pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWrites.size(), + reinterpret_cast( descriptorWrites.data() ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdPushDescriptorSetWithTemplate && + "Function requires or or " ); + + getDispatcher()->vkCmdPushDescriptorSetWithTemplate( static_cast( m_commandBuffer ), + static_cast( descriptorUpdateTemplate ), + static_cast( layout ), + set, + reinterpret_cast( &data ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setRenderingAttachmentLocations( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRenderingAttachmentLocations && + "Function requires or " ); + + getDispatcher()->vkCmdSetRenderingAttachmentLocations( static_cast( m_commandBuffer ), + reinterpret_cast( &locationInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setRenderingInputAttachmentIndices( + const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRenderingInputAttachmentIndices && + "Function requires or " ); + + getDispatcher()->vkCmdSetRenderingInputAttachmentIndices( static_cast( m_commandBuffer ), + reinterpret_cast( &inputAttachmentIndexInfo ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::bindDescriptorSets2( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorSets2 && + "Function requires or " ); + + getDispatcher()->vkCmdBindDescriptorSets2( static_cast( m_commandBuffer ), + reinterpret_cast( &bindDescriptorSetsInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::pushConstants2( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushConstants2 && "Function requires or " ); + + getDispatcher()->vkCmdPushConstants2( static_cast( m_commandBuffer ), + reinterpret_cast( &pushConstantsInfo ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSet2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo ) const 
VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSet2 && "Function requires <VK_VERSION_1_4> or <VK_KHR_maintenance6>" );
+
+      getDispatcher()->vkCmdPushDescriptorSet2( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                reinterpret_cast<const VkPushDescriptorSetInfo *>( &pushDescriptorSetInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate2(
+      const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetWithTemplate2 &&
+                         "Function requires <VK_VERSION_1_4> or <VK_KHR_maintenance6>" );
+
+      getDispatcher()->vkCmdPushDescriptorSetWithTemplate2(
+        static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkPushDescriptorSetWithTemplateInfo *>( &pushDescriptorSetWithTemplateInfo ) );
+    }
+
+    VULKAN_HPP_INLINE void Device::copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToImage && "Function requires <VK_VERSION_1_4> or <VK_EXT_host_image_copy>" );
+
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCopyMemoryToImage(
+        static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyMemoryToImageInfo *>( &copyMemoryToImageInfo ) ) );
+      VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImage" );
+    }
+
+    VULKAN_HPP_INLINE void Device::copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToMemory && "Function requires <VK_VERSION_1_4> or <VK_EXT_host_image_copy>" );
+
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCopyImageToMemory(
+        static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyImageToMemoryInfo *>( &copyImageToMemoryInfo ) ) );
+      VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemory" );
+    }
+
+    VULKAN_HPP_INLINE void Device::copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToImage && "Function requires <VK_VERSION_1_4> or <VK_EXT_host_image_copy>" );
+
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+        getDispatcher()->vkCopyImageToImage( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkCopyImageToImageInfo *>( &copyImageToImageInfo ) ) );
+      VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImage" );
+    }
+
+    VULKAN_HPP_INLINE void
+      Device::transitionImageLayout( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo> const & transitions ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkTransitionImageLayout &&
+                         "Function requires <VK_VERSION_1_4> or <VK_EXT_host_image_copy>" );
+
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkTransitionImageLayout(
+        static_cast<VkDevice>( m_device ), transitions.size(), reinterpret_cast<const VkHostImageLayoutTransitionInfo *>( transitions.data() ) ) );
+      VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayout" );
+    }
+
+    //=== VK_KHR_surface ===
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex,
+                                                                                                              VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceSupportKHR && "Function requires <VK_KHR_surface>" );
+
+      VULKAN_HPP_NAMESPACE::Bool32 supported;
+      VULKAN_HPP_NAMESPACE::Result result =
+        static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceSurfaceSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+                                                                                                          queueFamilyIndex,
+                                                                                                          static_cast<VkSurfaceKHR>( surface ),
+                                                                                                          reinterpret_cast<VkBool32 *>( &supported ) ) );
+      VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" );
+
+      return supported;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR + PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilitiesKHR && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilitiesKHR( static_cast( m_physicalDevice ), + static_cast( surface ), + reinterpret_cast( &surfaceCapabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); + + return surfaceCapabilities; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR && "Function requires " ); + + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR( + static_cast( m_physicalDevice ), static_cast( surface ), &surfaceFormatCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR( static_cast( m_physicalDevice ), + static_cast( surface ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + return surfaceFormats; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR && + "Function requires " ); + + std::vector presentModes; + uint32_t presentModeCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR( + static_cast( m_physicalDevice ), static_cast( surface ), &presentModeCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR( static_cast( m_physicalDevice ), + static_cast( surface ), + &presentModeCount, + reinterpret_cast( presentModes.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) + { + presentModes.resize( presentModeCount ); + } + return presentModes; + } + + //=== VK_KHR_swapchain === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional 
allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateSwapchainKHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createSwapchainKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR( *this, *reinterpret_cast( &swapchain ), allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector SwapchainKHR::getImages() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainImagesKHR && "Function requires " ); + + std::vector swapchainImages; + uint32_t swapchainImageCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetSwapchainImagesKHR( + static_cast( m_device ), static_cast( m_swapchain ), &swapchainImageCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && swapchainImageCount ) + { + swapchainImages.resize( swapchainImageCount ); + result = + static_cast( getDispatcher()->vkGetSwapchainImagesKHR( static_cast( m_device ), + static_cast( m_swapchain ), + &swapchainImageCount, + reinterpret_cast( swapchainImages.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getImages" ); + VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); + if ( swapchainImageCount < swapchainImages.size() ) + { + swapchainImages.resize( swapchainImageCount ); + } + return swapchainImages; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair + SwapchainKHR::acquireNextImage( uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireNextImageKHR && "Function requires " ); + + uint32_t imageIndex; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkAcquireNextImageKHR( static_cast( m_device ), + static_cast( m_swapchain ), + timeout, + static_cast( semaphore ), + static_cast( fence ), + &imageIndex ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireNextImage", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eTimeout, + VULKAN_HPP_NAMESPACE::Result::eNotReady, + VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + + return std::make_pair( result, std::move( imageIndex ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkQueuePresentKHR && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkQueuePresentKHR( static_cast( m_queue ), reinterpret_cast( &presentInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + + return static_cast( result ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR Device::getGroupPresentCapabilitiesKHR() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupPresentCapabilitiesKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetDeviceGroupPresentCapabilitiesKHR( + static_cast( m_device ), reinterpret_cast( &deviceGroupPresentCapabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" ); + + return deviceGroupPresentCapabilities; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR + Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupSurfacePresentModesKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetDeviceGroupSurfacePresentModesKHR( + static_cast( m_device ), static_cast( surface ), reinterpret_cast( &modes ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); + + return modes; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR && + "Function requires or " ); + + std::vector rects; + uint32_t rectCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR( + static_cast( m_physicalDevice ), static_cast( surface ), &rectCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && rectCount ) + { + rects.resize( rectCount ); + result = static_cast( + getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR( static_cast( m_physicalDevice ), + static_cast( surface ), + &rectCount, + reinterpret_cast( rects.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); + VULKAN_HPP_ASSERT( rectCount <= rects.size() ); + if ( rectCount < rects.size() ) + { + rects.resize( rectCount ); + } + return rects; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair + Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireNextImage2KHR && "Function requires or " ); + + uint32_t imageIndex; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkAcquireNextImage2KHR( + static_cast( m_device ), reinterpret_cast( &acquireInfo ), &imageIndex ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eTimeout, + VULKAN_HPP_NAMESPACE::Result::eNotReady, + VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + + return std::make_pair( result, std::move( imageIndex ) ); + } + + //=== VK_KHR_display === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getDisplayPropertiesKHR() const + { + VULKAN_HPP_ASSERT( 
+                         "Function requires <VK_KHR_display>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR> properties;
+      uint32_t propertyCount;
+      VULKAN_HPP_NAMESPACE::Result result;
+      do
+      {
+        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+          getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr ) );
+        if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
+        {
+          properties.resize( propertyCount );
+          result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR(
+            static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) );
+        }
+      } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
+      VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      if ( propertyCount < properties.size() )
+      {
+        properties.resize( propertyCount );
+      }
+      return properties;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR> PhysicalDevice::getDisplayPlanePropertiesKHR() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR &&
+                         "Function requires <VK_KHR_display>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR> properties;
+      uint32_t propertyCount;
+      VULKAN_HPP_NAMESPACE::Result result;
+      do
+      {
+        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+          getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr ) );
+        if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
+        {
+          properties.resize( propertyCount );
+          result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
+            static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) );
+        }
+      } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
+      VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      if ( propertyCount < properties.size() )
+      {
+        properties.resize( propertyCount );
+      }
+      return properties;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR>>::Type
+      PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const
+    {
+      std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR> displays;
+      uint32_t displayCount;
+      VULKAN_HPP_NAMESPACE::Result result;
+      do
+      {
+        result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+          getDispatcher()->vkGetDisplayPlaneSupportedDisplaysKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), planeIndex, &displayCount, nullptr ) );
+        if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount )
+        {
+          displays.resize( displayCount );
+          result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetDisplayPlaneSupportedDisplaysKHR(
+            static_cast<VkPhysicalDevice>( m_physicalDevice ), planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) );
+        }
+      } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
+      if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eIncomplete ) )
+      {
+#  if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
+        return VULKAN_HPP_UNEXPECTED( result );
+#  else
+        VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
+#  endif
+      }
+
+      std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR> displaysRAII;
+      displaysRAII.reserve( displays.size() );
+      for ( auto & display : displays )
+      {
+        displaysRAII.emplace_back( *this, *reinterpret_cast<VkDisplayKHR *>( &display ) );
+      }
+      return displaysRAII;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR> DisplayKHR::getModeProperties() const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayModePropertiesKHR && "Function requires <VK_KHR_display>" );
+
+      std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR> properties;
+      uint32_t propertyCount;
+      VULKAN_HPP_NAMESPACE::Result result;
+      do
+      {
+        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetDisplayModePropertiesKHR(
+          static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ), &propertyCount, nullptr ) );
+        if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount )
+        {
+          properties.resize( propertyCount );
+          result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+            getDispatcher()->vkGetDisplayModePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+                                                            static_cast<VkDisplayKHR>( m_display ),
+                                                            &propertyCount,
+                                                            reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) );
+        }
+      } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
+      VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties" );
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      if ( propertyCount < properties.size() )
+      {
+        properties.resize( propertyCount );
+      }
+      return properties;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR>::Type
+      DisplayKHR::createMode( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo,
+                              VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
+    {
+      VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateDisplayModeKHR(
+        static_cast<VkPhysicalDevice>( m_physicalDevice ),
+        static_cast<VkDisplayKHR>( m_display ),
+        reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) );
+      if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
+      {
+#  if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
+        return VULKAN_HPP_UNEXPECTED( result );
+#  else
+        VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "DisplayKHR::createMode" );
+#  endif
+      }
+
+      return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR( *this, *reinterpret_cast<VkDisplayModeKHR *>( &mode ) );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR
+      DisplayModeKHR::getDisplayPlaneCapabilities( uint32_t planeIndex ) const
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayPlaneCapabilitiesKHR && "Function requires <VK_KHR_display>" );
+
+      VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+        getDispatcher()->vkGetDisplayPlaneCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
+                                                           static_cast<VkDisplayModeKHR>( m_displayModeKHR ),
+                                                           planeIndex,
+                                                           reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) ) );
+      VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayModeKHR::getDisplayPlaneCapabilities" );
+
+      return capabilities;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR>::Type
+      Instance::createDisplayPlaneSurfaceKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo,
+                                              VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+      VULKAN_HPP_RAII_CREATE_NOEXCEPT
+    {
+      VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateDisplayPlaneSurfaceKHR(
+        static_cast<VkInstance>( m_instance ),
+        reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
+        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+        reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
+      if ( result !=
VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createDisplayPlaneSurfaceKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); + } + + //=== VK_KHR_display_swapchain === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type + Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + std::vector swapchains( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateSharedSwapchainsKHR( + static_cast( m_device ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( swapchains.data() ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createSharedSwapchainsKHR" ); +# endif + } + + std::vector swapchainsRAII; + swapchainsRAII.reserve( swapchains.size() ); + for ( auto & swapchain : swapchains ) + { + swapchainsRAII.emplace_back( *this, *reinterpret_cast( &swapchain ), allocator ); + } + return swapchainsRAII; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createSharedSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateSharedSwapchainsKHR( + static_cast( m_device ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &swapchain ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createSharedSwapchainKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR( *this, *reinterpret_cast( &swapchain ), allocator ); + } + +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateXlibSurfaceKHR( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createXlibSurfaceKHR" ); +# endif + } + + return 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 + PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceXlibPresentationSupportKHR && + "Function requires " ); + + VkBool32 result = + getDispatcher()->vkGetPhysicalDeviceXlibPresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex, &dpy, visualID ); + + return static_cast( result ); + } +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +# if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createXcbSurfaceKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateXcbSurfaceKHR( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createXcbSurfaceKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR( + uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceXcbPresentationSupportKHR && + "Function requires " ); + + VkBool32 result = getDispatcher()->vkGetPhysicalDeviceXcbPresentationSupportKHR( + static_cast( m_physicalDevice ), queueFamilyIndex, &connection, visual_id ); + + return static_cast( result ); + } +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateWaylandSurfaceKHR( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createWaylandSurfaceKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 + PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t 
queueFamilyIndex, struct wl_display & display ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceWaylandPresentationSupportKHR && + "Function requires " ); + + VkBool32 result = + getDispatcher()->vkGetPhysicalDeviceWaylandPresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex, &display ); + + return static_cast( result ); + } +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateAndroidSurfaceKHR( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createAndroidSurfaceKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); + } +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateWin32SurfaceKHR( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createWin32SurfaceKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 + PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceWin32PresentationSupportKHR && + "Function requires " ); + + VkBool32 result = getDispatcher()->vkGetPhysicalDeviceWin32PresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex ); + + return static_cast( result ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + 
VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateDebugReportCallbackEXT( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &callback ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createDebugReportCallbackEXT" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT( + *this, *reinterpret_cast( &callback ), allocator ); + } + + VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, + uint64_t object, + size_t location, + int32_t messageCode, + const std::string & layerPrefix, + const std::string & message ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkDebugReportMessageEXT && "Function requires " ); + + getDispatcher()->vkDebugReportMessageEXT( static_cast( m_instance ), + static_cast( flags ), + static_cast( objectType_ ), + object, + location, + messageCode, + layerPrefix.c_str(), + message.c_str() ); + } + + //=== VK_EXT_debug_marker === + + VULKAN_HPP_INLINE void Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkDebugMarkerSetObjectTagEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkDebugMarkerSetObjectTagEXT( + static_cast( m_device ), reinterpret_cast( &tagInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" ); + } + + VULKAN_HPP_INLINE void Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkDebugMarkerSetObjectNameEXT && "Function requires " ); - VULKAN_HPP_INLINE void CommandPool::reset( VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags ) const + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkDebugMarkerSetObjectNameEXT( + static_cast( m_device ), reinterpret_cast( &nameInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" ); + } + + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT { - VkResult result = getDispatcher()->vkResetCommandPool( - static_cast( m_device ), static_cast( m_commandPool ), static_cast( flags ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandPool::reset" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerBeginEXT && "Function requires " ); + + getDispatcher()->vkCmdDebugMarkerBeginEXT( static_cast( m_commandBuffer ), + reinterpret_cast( &markerInfo ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Device::allocateCommandBuffers( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) const + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT() const VULKAN_HPP_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::CommandBuffers( *this, allocateInfo ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerEndEXT && "Function requires 
" ); + + getDispatcher()->vkCmdDebugMarkerEndEXT( static_cast( m_commandBuffer ) ); } - VULKAN_HPP_INLINE void CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo ) const + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT { - VkResult result = getDispatcher()->vkBeginCommandBuffer( static_cast( m_commandBuffer ), - reinterpret_cast( &beginInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerInsertEXT && "Function requires " ); + + getDispatcher()->vkCmdDebugMarkerInsertEXT( static_cast( m_commandBuffer ), + reinterpret_cast( &markerInfo ) ); } - VULKAN_HPP_INLINE void CommandBuffer::end() const + //=== VK_KHR_video_queue === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR + PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const { - VkResult result = getDispatcher()->vkEndCommandBuffer( static_cast( m_commandBuffer ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( &videoProfile ), + reinterpret_cast( &capabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + + return capabilities; } - VULKAN_HPP_INLINE void CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const { - VkResult result = - getDispatcher()->vkResetCommandBuffer( static_cast( m_commandBuffer ), static_cast( flags ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR && + "Function requires " ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( &videoProfile ), + reinterpret_cast( &capabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + + return structureChain; } - VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, - VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const { - getDispatcher()->vkCmdBindPipeline( - static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR && + "Function 
requires " ); + + std::vector videoFormatProperties; + uint32_t videoFormatPropertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + videoFormatProperties.resize( videoFormatPropertyCount ); + } + return videoFormatProperties; } - VULKAN_HPP_INLINE void - CommandBuffer::setViewport( uint32_t firstViewport, - VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const { - getDispatcher()->vkCmdSetViewport( - static_cast( m_commandBuffer ), firstViewport, viewports.size(), reinterpret_cast( viewports.data() ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR && + "Function requires " ); + + std::vector structureChains; + std::vector videoFormatProperties; + uint32_t videoFormatPropertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) + { + structureChains.resize( videoFormatPropertyCount ); + videoFormatProperties.resize( videoFormatPropertyCount ); + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + videoFormatProperties[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + structureChains.resize( videoFormatPropertyCount ); + } + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + structureChains[i].template get() = videoFormatProperties[i]; + } + return structureChains; } - VULKAN_HPP_INLINE void - CommandBuffer::setScissor( uint32_t firstScissor, - VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - getDispatcher()->vkCmdSetScissor( - static_cast( m_commandBuffer ), firstScissor, scissors.size(), reinterpret_cast( scissors.data() ) ); + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateVideoSessionKHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &videoSession ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createVideoSessionKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR( *this, *reinterpret_cast( &videoSession ), allocator ); } - VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector VideoSessionKHR::getMemoryRequirements() const { - getDispatcher()->vkCmdSetLineWidth( static_cast( m_commandBuffer ), lineWidth ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR && + "Function requires " ); + + std::vector memoryRequirements; + uint32_t memoryRequirementsCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR( + static_cast( m_device ), static_cast( m_videoSession ), &memoryRequirementsCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && memoryRequirementsCount ) + { + memoryRequirements.resize( memoryRequirementsCount ); + result = static_cast( + getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR( static_cast( m_device ), + static_cast( m_videoSession ), + &memoryRequirementsCount, + reinterpret_cast( memoryRequirements.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + + VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() ); + if ( memoryRequirementsCount < memoryRequirements.size() ) + { + memoryRequirements.resize( memoryRequirementsCount ); + } + return memoryRequirements; } - VULKAN_HPP_INLINE void - CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void VideoSessionKHR::bindMemory( + VULKAN_HPP_NAMESPACE::ArrayProxy const & bindSessionMemoryInfos ) const { - getDispatcher()->vkCmdSetDepthBias( static_cast( m_commandBuffer ), depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + VULKAN_HPP_ASSERT( getDispatcher()->vkBindVideoSessionMemoryKHR && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkBindVideoSessionMemoryKHR( static_cast( m_device ), + static_cast( m_videoSession ), + bindSessionMemoryInfos.size(), + reinterpret_cast( bindSessionMemoryInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::VideoSessionKHR::bindMemory" ); } - VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR>::Type + Device::createVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - getDispatcher()->vkCmdSetBlendConstants( static_cast( m_commandBuffer ), blendConstants ); + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateVideoSessionParametersKHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &videoSessionParameters ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createVideoSessionParametersKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR( + *this, *reinterpret_cast( &videoSessionParameters ), allocator ); } - VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void VideoSessionParametersKHR::update( const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo ) const { - getDispatcher()->vkCmdSetDepthBounds( static_cast( m_commandBuffer ), minDepthBounds, maxDepthBounds ); + VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateVideoSessionParametersKHR && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkUpdateVideoSessionParametersKHR( static_cast( m_device ), + static_cast( m_videoSessionParameters ), + reinterpret_cast( &updateInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::VideoSessionParametersKHR::update" ); } - VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, - uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo ) const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkCmdSetStencilCompareMask( static_cast( m_commandBuffer ), static_cast( faceMask ), compareMask ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginVideoCodingKHR && "Function requires " ); + + getDispatcher()->vkCmdBeginVideoCodingKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &beginInfo ) ); } - VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo ) const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkCmdSetStencilWriteMask( static_cast( m_commandBuffer ), static_cast( faceMask ), writeMask ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndVideoCodingKHR && "Function requires " ); + + getDispatcher()->vkCmdEndVideoCodingKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &endCodingInfo ) ); } - VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::controlVideoCodingKHR( 
const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo ) const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkCmdSetStencilReference( static_cast( m_commandBuffer ), static_cast( faceMask ), reference ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdControlVideoCodingKHR && "Function requires " ); + + getDispatcher()->vkCmdControlVideoCodingKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &codingControlInfo ) ); } + //=== VK_KHR_video_decode_queue === + + VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecodeVideoKHR && "Function requires " ); + + getDispatcher()->vkCmdDecodeVideoKHR( static_cast( m_commandBuffer ), reinterpret_cast( &decodeInfo ) ); + } + + //=== VK_EXT_transform_feedback === + VULKAN_HPP_INLINE void - CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, - VULKAN_HPP_NAMESPACE::PipelineLayout layout, - uint32_t firstSet, - VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorSets, - VULKAN_HPP_NAMESPACE::ArrayProxy const & dynamicOffsets ) const VULKAN_HPP_NOEXCEPT + CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes ) const { - getDispatcher()->vkCmdBindDescriptorSets( static_cast( m_commandBuffer ), - static_cast( pipelineBindPoint ), - static_cast( layout ), - firstSet, - descriptorSets.size(), - reinterpret_cast( descriptorSets.data() ), - dynamicOffsets.size(), - dynamicOffsets.data() ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindTransformFeedbackBuffersEXT && + "Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); +# else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdBindTransformFeedbackBuffersEXT( static_cast( m_commandBuffer ), + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ), + reinterpret_cast( sizes.data() ) ); } - VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBuffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBufferOffsets ) const { - getDispatcher()->vkCmdBindIndexBuffer( static_cast( m_commandBuffer ), - static_cast( buffer ), - static_cast( offset ), - static_cast( indexType ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginTransformFeedbackEXT && "Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); +# else + if ( !counterBufferOffsets.empty() && counterBuffers.size() != 
counterBufferOffsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdBeginTransformFeedbackEXT( static_cast( m_commandBuffer ), + firstCounterBuffer, + counterBuffers.size(), + reinterpret_cast( counterBuffers.data() ), + reinterpret_cast( counterBufferOffsets.data() ) ); } - VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, - VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, - VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets ) const + VULKAN_HPP_INLINE void + CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBuffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBufferOffsets ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndTransformFeedbackEXT && "Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); +# else + if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdEndTransformFeedbackEXT( static_cast( m_commandBuffer ), + firstCounterBuffer, + counterBuffers.size(), + reinterpret_cast( counterBuffers.data() ), + reinterpret_cast( counterBufferOffsets.data() ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t query, + VULKAN_HPP_NAMESPACE::QueryControlFlags flags, + uint32_t index ) const VULKAN_HPP_NOEXCEPT { - if ( buffers.size() != offsets.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginQueryIndexedEXT && "Function requires " ); - getDispatcher()->vkCmdBindVertexBuffers( static_cast( m_commandBuffer ), - firstBinding, - buffers.size(), - reinterpret_cast( buffers.data() ), - reinterpret_cast( offsets.data() ) ); + getDispatcher()->vkCmdBeginQueryIndexedEXT( + static_cast( m_commandBuffer ), static_cast( queryPool ), query, static_cast( flags ), index ); } VULKAN_HPP_INLINE void - CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT + CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkCmdDraw( static_cast( m_commandBuffer ), vertexCount, instanceCount, firstVertex, firstInstance ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndQueryIndexedEXT && "Function requires " ); - VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( - uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT - { - getDispatcher()->vkCmdDrawIndexed( static_cast( m_commandBuffer ), indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + getDispatcher()->vkCmdEndQueryIndexedEXT( static_cast( m_commandBuffer ), static_cast( queryPool ), query, index ); } - VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, - 
VULKAN_HPP_NAMESPACE::DeviceSize offset, - uint32_t drawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, + uint32_t firstInstance, + VULKAN_HPP_NAMESPACE::Buffer counterBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkCmdDrawIndirect( - static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectByteCountEXT && "Function requires " ); + + getDispatcher()->vkCmdDrawIndirectByteCountEXT( static_cast( m_commandBuffer ), + instanceCount, + firstInstance, + static_cast( counterBuffer ), + static_cast( counterBufferOffset ), + counterOffset, + vertexStride ); } - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - uint32_t drawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT + //=== VK_NVX_binary_import === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - getDispatcher()->vkCmdDrawIndexedIndirect( - static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); + VULKAN_HPP_NAMESPACE::CuModuleNVX module; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateCuModuleNVX( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &module ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createCuModuleNVX" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX( *this, *reinterpret_cast( &module ), allocator ); } - VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - getDispatcher()->vkCmdDispatch( static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); + VULKAN_HPP_NAMESPACE::CuFunctionNVX function; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateCuFunctionNVX( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &function ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createCuFunctionNVX" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX( *this, *reinterpret_cast( &function ), allocator ); } - VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( 
VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo ) const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkCmdDispatchIndirect( - static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCuLaunchKernelNVX && "Function requires " ); + + getDispatcher()->vkCmdCuLaunchKernelNVX( static_cast( m_commandBuffer ), reinterpret_cast( &launchInfo ) ); } - VULKAN_HPP_INLINE void - CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + //=== VK_NVX_image_view_handle === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t + Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkCmdCopyBuffer( static_cast( m_commandBuffer ), - static_cast( srcBuffer ), - static_cast( dstBuffer ), - regions.size(), - reinterpret_cast( regions.data() ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewHandleNVX && "Function requires " ); + + uint32_t result = + getDispatcher()->vkGetImageViewHandleNVX( static_cast( m_device ), reinterpret_cast( &info ) ); + + return result; } - VULKAN_HPP_INLINE void - CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, - VULKAN_HPP_NAMESPACE::Image dstImage, - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, - VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t + Device::getImageViewHandle64NVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkCmdCopyImage( static_cast( m_commandBuffer ), - static_cast( srcImage ), - static_cast( srcImageLayout ), - static_cast( dstImage ), - static_cast( dstImageLayout ), - regions.size(), - reinterpret_cast( regions.data() ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewHandle64NVX && "Function requires " ); + + uint64_t result = + getDispatcher()->vkGetImageViewHandle64NVX( static_cast( m_device ), reinterpret_cast( &info ) ); + + return result; } - VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, - VULKAN_HPP_NAMESPACE::Image dstImage, - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, - VULKAN_HPP_NAMESPACE::ArrayProxy const & regions, - VULKAN_HPP_NAMESPACE::Filter filter ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX ImageView::getAddressNVX() const { - getDispatcher()->vkCmdBlitImage( static_cast( m_commandBuffer ), - static_cast( srcImage ), - static_cast( srcImageLayout ), - static_cast( dstImage ), - static_cast( dstImageLayout ), - regions.size(), - reinterpret_cast( regions.data() ), - static_cast( filter ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewAddressNVX && "Function requires " ); + + VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetImageViewAddressNVX( + static_cast( m_device ), static_cast( m_imageView ), reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::ImageView::getAddressNVX" ); + + return properties; } - VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( - VULKAN_HPP_NAMESPACE::Buffer srcBuffer, - VULKAN_HPP_NAMESPACE::Image dstImage, - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, - VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + //=== VK_AMD_draw_indirect_count === + + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkCmdCopyBufferToImage( static_cast( m_commandBuffer ), - static_cast( srcBuffer ), - static_cast( dstImage ), - static_cast( dstImageLayout ), - regions.size(), - reinterpret_cast( regions.data() ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectCountAMD && + "Function requires or or " ); + + getDispatcher()->vkCmdDrawIndirectCountAMD( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } - VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( - VULKAN_HPP_NAMESPACE::Image srcImage, - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkCmdCopyImageToBuffer( static_cast( m_commandBuffer ), - static_cast( srcImage ), - static_cast( srcImageLayout ), - static_cast( dstBuffer ), - regions.size(), - reinterpret_cast( regions.data() ) ); + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdDrawIndexedIndirectCountAMD && + "Function requires or or " ); + + getDispatcher()->vkCmdDrawIndexedIndirectCountAMD( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } - template - VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, - VULKAN_HPP_NAMESPACE::ArrayProxy const & data ) const VULKAN_HPP_NOEXCEPT + //=== VK_AMD_shader_info === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Pipeline::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, + VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType ) const { - getDispatcher()->vkCmdUpdateBuffer( static_cast( m_commandBuffer ), - static_cast( dstBuffer ), - static_cast( dstOffset ), - data.size() * sizeof( DataType ), - reinterpret_cast( data.data() ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderInfoAMD && "Function requires " ); + + std::vector info; + size_t infoSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetShaderInfoAMD( static_cast( m_device ), + static_cast( m_pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && infoSize ) + { + info.resize( infoSize ); + result 
= static_cast( getDispatcher()->vkGetShaderInfoAMD( static_cast( m_device ), + static_cast( m_pipeline ), + static_cast( shaderStage ), + static_cast( infoType ), + &infoSize, + reinterpret_cast( info.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getShaderInfoAMD" ); + VULKAN_HPP_ASSERT( infoSize <= info.size() ); + if ( infoSize < info.size() ) + { + info.resize( infoSize ); + } + return info; } - VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, - VULKAN_HPP_NAMESPACE::DeviceSize size, - uint32_t data ) const VULKAN_HPP_NOEXCEPT + //=== VK_KHR_dynamic_rendering === + + VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkCmdFillBuffer( static_cast( m_commandBuffer ), - static_cast( dstBuffer ), - static_cast( dstOffset ), - static_cast( size ), - data ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderingKHR && + "Function requires or " ); + + getDispatcher()->vkCmdBeginRenderingKHR( static_cast( m_commandBuffer ), reinterpret_cast( &renderingInfo ) ); } - VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( - VULKAN_HPP_NAMESPACE::Image image, - VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, - const VULKAN_HPP_NAMESPACE::ClearColorValue & color, - VULKAN_HPP_NAMESPACE::ArrayProxy const & ranges ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR() const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkCmdClearColorImage( static_cast( m_commandBuffer ), - static_cast( image ), - static_cast( imageLayout ), - reinterpret_cast( &color ), - ranges.size(), - reinterpret_cast( ranges.data() ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderingKHR && "Function requires or " ); + + getDispatcher()->vkCmdEndRenderingKHR( static_cast( m_commandBuffer ) ); } - VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( - VULKAN_HPP_NAMESPACE::Image image, - VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, - const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, - VULKAN_HPP_NAMESPACE::ArrayProxy const & ranges ) const VULKAN_HPP_NOEXCEPT +# if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - getDispatcher()->vkCmdClearDepthStencilImage( static_cast( m_commandBuffer ), - static_cast( image ), - static_cast( imageLayout ), - reinterpret_cast( &depthStencil ), - ranges.size(), - reinterpret_cast( ranges.data() ) ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateStreamDescriptorSurfaceGGP( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, 
"Instance::createStreamDescriptorSurfaceGGP" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); } +# endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === - VULKAN_HPP_INLINE void - CommandBuffer::clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy const & attachments, - VULKAN_HPP_NAMESPACE::ArrayProxy const & rects ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV + PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, + VULKAN_HPP_NAMESPACE::ImageType type, + VULKAN_HPP_NAMESPACE::ImageTiling tiling, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType ) const { - getDispatcher()->vkCmdClearAttachments( static_cast( m_commandBuffer ), - attachments.size(), - reinterpret_cast( attachments.data() ), - rects.size(), - reinterpret_cast( rects.data() ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalImageFormatPropertiesNV && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + static_cast( m_physicalDevice ), + static_cast( format ), + static_cast( type ), + static_cast( tiling ), + static_cast( usage ), + static_cast( flags ), + static_cast( externalHandleType ), + reinterpret_cast( &externalImageFormatProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" ); + + return externalImageFormatProperties; } - VULKAN_HPP_INLINE void - CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, - VULKAN_HPP_NAMESPACE::Image dstImage, - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, - VULKAN_HPP_NAMESPACE::ArrayProxy const & regions ) const VULKAN_HPP_NOEXCEPT +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE DeviceMemory::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType ) const { - getDispatcher()->vkCmdResolveImage( static_cast( m_commandBuffer ), - static_cast( srcImage ), - static_cast( srcImageLayout ), - static_cast( dstImage ), - static_cast( dstImageLayout ), - regions.size(), - reinterpret_cast( regions.data() ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandleNV && "Function requires " ); + + HANDLE handle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetMemoryWin32HandleNV( + static_cast( m_device ), static_cast( m_memory ), static_cast( handleType ), &handle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::getMemoryWin32HandleNV" ); + + return handle; } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, - VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + //=== VK_KHR_get_physical_device_properties2 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2KHR() const VULKAN_HPP_NOEXCEPT { - 
getDispatcher()->vkCmdSetEvent( - static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2KHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; + getDispatcher()->vkGetPhysicalDeviceFeatures2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &features ) ); + + return features; } - VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, - VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain PhysicalDevice::getFeatures2KHR() const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkCmdResetEvent( - static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2KHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceFeatures2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &features ) ); + + return structureChain; } - VULKAN_HPP_INLINE void CommandBuffer::waitEvents( - VULKAN_HPP_NAMESPACE::ArrayProxy const & events, - VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, - VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, - VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryBarriers, - VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferMemoryBarriers, - VULKAN_HPP_NAMESPACE::ArrayProxy const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2KHR() const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkCmdWaitEvents( static_cast( m_commandBuffer ), - events.size(), - reinterpret_cast( events.data() ), - static_cast( srcStageMask ), - static_cast( dstStageMask ), - memoryBarriers.size(), - reinterpret_cast( memoryBarriers.data() ), - bufferMemoryBarriers.size(), - reinterpret_cast( bufferMemoryBarriers.data() ), - imageMemoryBarriers.size(), - reinterpret_cast( imageMemoryBarriers.data() ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2KHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; + getDispatcher()->vkGetPhysicalDeviceProperties2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &properties ) ); + + return properties; } - VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( - VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, - VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, - VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, - VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryBarriers, - VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferMemoryBarriers, - VULKAN_HPP_NAMESPACE::ArrayProxy const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain PhysicalDevice::getProperties2KHR() const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkCmdPipelineBarrier( static_cast( m_commandBuffer ), - static_cast( srcStageMask ), - static_cast( dstStageMask ), - static_cast( dependencyFlags ), - memoryBarriers.size(), - reinterpret_cast( memoryBarriers.data() ), - bufferMemoryBarriers.size(), - reinterpret_cast( bufferMemoryBarriers.data() ), - imageMemoryBarriers.size(), - reinterpret_cast( 
imageMemoryBarriers.data() ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2KHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceProperties2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &properties ) ); + + return structureChain; } - VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t query, - VULKAN_HPP_NAMESPACE::QueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 + PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkCmdBeginQuery( - static_cast( m_commandBuffer ), static_cast( queryPool ), query, static_cast( flags ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; + getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR( + static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( &formatProperties ) ); + + return formatProperties; } - VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkCmdEndQuery( static_cast( m_commandBuffer ), static_cast( queryPool ), query ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR( + static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( &formatProperties ) ); + + return structureChain; } - VULKAN_HPP_INLINE void - CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties2 + PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const { - getDispatcher()->vkCmdResetQueryPool( static_cast( m_commandBuffer ), static_cast( queryPool ), firstQuery, queryCount ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + + return imageFormatProperties; } - VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, - VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t query ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const { - getDispatcher()->vkCmdWriteTimestamp( - static_cast( m_commandBuffer ), static_cast( pipelineStage ), static_cast( queryPool ), query ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR && + "Function requires or " ); + + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &imageFormatInfo ), + reinterpret_cast( &imageFormatProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + + return structureChain; } - VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, - VULKAN_HPP_NAMESPACE::DeviceSize stride, - VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2KHR() const { - getDispatcher()->vkCmdCopyQueryPoolResults( static_cast( m_commandBuffer ), - static_cast( queryPool ), - firstQuery, - queryCount, - static_cast( dstBuffer ), - static_cast( dstOffset ), - static_cast( stride ), - static_cast( flags ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR && + "Function requires or " ); + + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast( m_physicalDevice ), &queueFamilyPropertyCount, nullptr ); + queueFamilyProperties.resize( queueFamilyPropertyCount ); + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast( m_physicalDevice ), + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + queueFamilyProperties.resize( queueFamilyPropertyCount ); + } + return queueFamilyProperties; } - template - VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, - VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, - uint32_t offset, - VULKAN_HPP_NAMESPACE::ArrayProxy const & values ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2KHR() const { - getDispatcher()->vkCmdPushConstants( static_cast( m_commandBuffer ), - static_cast( layout ), - static_cast( stageFlags ), - offset, - values.size() * sizeof( ValuesType ), - reinterpret_cast( values.data() ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR && + "Function requires or " ); + + std::vector structureChains; + std::vector queueFamilyProperties; + uint32_t queueFamilyPropertyCount; + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast( m_physicalDevice ), &queueFamilyPropertyCount, nullptr ); + structureChains.resize( queueFamilyPropertyCount ); + queueFamilyProperties.resize( 
queueFamilyPropertyCount ); + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + queueFamilyProperties[i].pNext = structureChains[i].template get().pNext; + } + getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast( m_physicalDevice ), + &queueFamilyPropertyCount, + reinterpret_cast( queueFamilyProperties.data() ) ); + + VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); + if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) + { + structureChains.resize( queueFamilyPropertyCount ); + } + for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + { + structureChains[i].template get() = queueFamilyProperties[i]; + } + return structureChains; } - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, - VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 + PhysicalDevice::getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkCmdBeginRenderPass( static_cast( m_commandBuffer ), - reinterpret_cast( &renderPassBegin ), - static_cast( contents ) ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; + getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &memoryProperties ) ); - VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT - { - getDispatcher()->vkCmdNextSubpass( static_cast( m_commandBuffer ), static_cast( contents ) ); + return memoryProperties; } - VULKAN_HPP_INLINE void CommandBuffer::endRenderPass() const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain PhysicalDevice::getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkCmdEndRenderPass( static_cast( m_commandBuffer ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = + structureChain.template get(); + getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &memoryProperties ) ); + + return structureChain; } - VULKAN_HPP_INLINE void CommandBuffer::executeCommands( - VULKAN_HPP_NAMESPACE::ArrayProxy const & commandBuffers ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const { - getDispatcher()->vkCmdExecuteCommands( - static_cast( m_commandBuffer ), commandBuffers.size(), reinterpret_cast( commandBuffers.data() ) ); + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR && + "Function requires or " ); + + std::vector properties; + uint32_t propertyCount; + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &formatInfo ), + &propertyCount, + nullptr ); + properties.resize( propertyCount ); + getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR( static_cast( m_physicalDevice ), + 
reinterpret_cast( &formatInfo ), + &propertyCount, + reinterpret_cast( properties.data() ) ); + + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; } - //=== VK_VERSION_1_1 === + //=== VK_KHR_device_group === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t Context::enumerateInstanceVersion() const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags + Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT { - uint32_t apiVersion; - VkResult result = getDispatcher()->vkEnumerateInstanceVersion( &apiVersion ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceVersion" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupPeerMemoryFeaturesKHR && + "Function requires or " ); - return apiVersion; - } + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; + getDispatcher()->vkGetDeviceGroupPeerMemoryFeaturesKHR( static_cast( m_device ), + heapIndex, + localDeviceIndex, + remoteDeviceIndex, + reinterpret_cast( &peerMemoryFeatures ) ); - VULKAN_HPP_INLINE void - Device::bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const - { - VkResult result = getDispatcher()->vkBindBufferMemory2( - static_cast( m_device ), bindInfos.size(), reinterpret_cast( bindInfos.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" ); + return peerMemoryFeatures; } - VULKAN_HPP_INLINE void Device::bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const + VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT { - VkResult result = getDispatcher()->vkBindImageMemory2( - static_cast( m_device ), bindInfos.size(), reinterpret_cast( bindInfos.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDeviceMaskKHR && "Function requires or " ); + + getDispatcher()->vkCmdSetDeviceMaskKHR( static_cast( m_commandBuffer ), deviceMask ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags - Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; - getDispatcher()->vkGetDeviceGroupPeerMemoryFeatures( static_cast( m_device ), - heapIndex, - localDeviceIndex, - remoteDeviceIndex, - reinterpret_cast( &peerMemoryFeatures ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchBaseKHR && "Function requires or " ); - return peerMemoryFeatures; + getDispatcher()->vkCmdDispatchBaseKHR( + static_cast( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); } - VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT +# if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + 
Instance::createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - getDispatcher()->vkCmdSetDeviceMask( static_cast( m_commandBuffer ), deviceMask ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateViSurfaceNN( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createViSurfaceNN" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); } +# endif /*VK_USE_PLATFORM_VI_NN*/ - VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, - uint32_t baseGroupY, - uint32_t baseGroupZ, - uint32_t groupCountX, - uint32_t groupCountY, - uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + //=== VK_KHR_maintenance1 === + + VULKAN_HPP_INLINE void CommandPool::trimKHR( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkCmdDispatchBase( - static_cast( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + VULKAN_HPP_ASSERT( getDispatcher()->vkTrimCommandPoolKHR && "Function requires or " ); + + getDispatcher()->vkTrimCommandPoolKHR( + static_cast( m_device ), static_cast( m_commandPool ), static_cast( flags ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Instance::enumeratePhysicalDeviceGroups() const + //=== VK_KHR_device_group_creation === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Instance::enumeratePhysicalDeviceGroupsKHR() const { + VULKAN_HPP_ASSERT( getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR && + "Function requires or " ); + std::vector physicalDeviceGroupProperties; uint32_t physicalDeviceGroupCount; - VkResult result; + VULKAN_HPP_NAMESPACE::Result result; do { - result = getDispatcher()->vkEnumeratePhysicalDeviceGroups( static_cast( m_instance ), &physicalDeviceGroupCount, nullptr ); - if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount ) + result = static_cast( + getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR( static_cast( m_instance ), &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceGroupCount ) { physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); - result = - getDispatcher()->vkEnumeratePhysicalDeviceGroups( static_cast( m_instance ), - &physicalDeviceGroupCount, - reinterpret_cast( physicalDeviceGroupProperties.data() ) ); + result = static_cast( getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR( + static_cast( m_instance ), + &physicalDeviceGroupCount, + reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); if ( 
physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) { @@ -11494,6008 +18169,6786 @@ namespace VULKAN_HPP_NAMESPACE return physicalDeviceGroupProperties; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 - Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + //=== VK_KHR_external_memory_capabilities === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; - getDispatcher()->vkGetImageMemoryRequirements2( static_cast( m_device ), - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalBufferPropertiesKHR && + "Function requires or " ); - return memoryRequirements; + VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; + getDispatcher()->vkGetPhysicalDeviceExternalBufferPropertiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( &externalBufferInfo ), + reinterpret_cast( &externalBufferProperties ) ); + + return externalBufferProperties; } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE + Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo ) const { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); - getDispatcher()->vkGetImageMemoryRequirements2( static_cast( m_device ), - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandleKHR && "Function requires " ); - return structureChain; + HANDLE handle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetMemoryWin32HandleKHR( + static_cast( m_device ), reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); + + return handle; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 - Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR + Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle ) const { - VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; - getDispatcher()->vkGetBufferMemoryRequirements2( static_cast( m_device ), - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandlePropertiesKHR && + "Function requires " ); - return memoryRequirements; + VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetMemoryWin32HandlePropertiesKHR( static_cast( m_device ), + 
static_cast( handleType ), + handle, + reinterpret_cast( &memoryWin32HandleProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" ); + + return memoryWin32HandleProperties; } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + //=== VK_KHR_external_memory_fd === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo ) const { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); - getDispatcher()->vkGetBufferMemoryRequirements2( static_cast( m_device ), - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryFdKHR && "Function requires " ); - return structureChain; + int fd; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetMemoryFdKHR( static_cast( m_device ), reinterpret_cast( &getFdInfo ), &fd ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" ); + + return fd; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR + Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd ) const { - std::vector sparseMemoryRequirements; - uint32_t sparseMemoryRequirementCount; - getDispatcher()->vkGetImageSparseMemoryRequirements2( - static_cast( m_device ), reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); - sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); - getDispatcher()->vkGetImageSparseMemoryRequirements2( static_cast( m_device ), - reinterpret_cast( &info ), - &sparseMemoryRequirementCount, - reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryFdPropertiesKHR && "Function requires " ); - VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); - if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) - { - sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); - } - return sparseMemoryRequirements; + VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetMemoryFdPropertiesKHR( static_cast( m_device ), + static_cast( handleType ), + fd, + reinterpret_cast( &memoryFdProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" ); + + return memoryFdProperties; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2() const VULKAN_HPP_NOEXCEPT + //=== VK_KHR_external_semaphore_capabilities === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; - 
getDispatcher()->vkGetPhysicalDeviceFeatures2( static_cast( m_physicalDevice ), - reinterpret_cast( &features ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalSemaphorePropertiesKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; + getDispatcher()->vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &externalSemaphoreInfo ), + reinterpret_cast( &externalSemaphoreProperties ) ); + + return externalSemaphoreProperties; + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + + VULKAN_HPP_INLINE void + Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreWin32HandleKHR && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkImportSemaphoreWin32HandleKHR( + static_cast( m_device ), reinterpret_cast( &importSemaphoreWin32HandleInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE + Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreWin32HandleKHR && + "Function requires " ); + + HANDLE handle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetSemaphoreWin32HandleKHR( + static_cast( m_device ), reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); + + return handle; + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + + VULKAN_HPP_INLINE void Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreFdKHR && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkImportSemaphoreFdKHR( + static_cast( m_device ), reinterpret_cast( &importSemaphoreFdInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreFdKHR && "Function requires " ); + + int fd; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetSemaphoreFdKHR( static_cast( m_device ), reinterpret_cast( &getFdInfo ), &fd ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" ); + + return fd; + } + + //=== VK_KHR_push_descriptor === + + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetKHR && + "Function requires or " ); - return features; + getDispatcher()->vkCmdPushDescriptorSetKHR( static_cast( m_commandBuffer ), + static_cast( 
pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWrites.size(), + reinterpret_cast( descriptorWrites.data() ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain PhysicalDevice::getFeatures2() const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data ) const VULKAN_HPP_NOEXCEPT { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get(); - getDispatcher()->vkGetPhysicalDeviceFeatures2( static_cast( m_physicalDevice ), - reinterpret_cast( &features ) ); + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdPushDescriptorSetWithTemplateKHR && + "Function requires or or " ); - return structureChain; + getDispatcher()->vkCmdPushDescriptorSetWithTemplateKHR( static_cast( m_commandBuffer ), + static_cast( descriptorUpdateTemplate ), + static_cast( layout ), + set, + reinterpret_cast( &data ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2() const VULKAN_HPP_NOEXCEPT + //=== VK_EXT_conditional_rendering === + + VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( + const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; - getDispatcher()->vkGetPhysicalDeviceProperties2( static_cast( m_physicalDevice ), - reinterpret_cast( &properties ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginConditionalRenderingEXT && + "Function requires " ); - return properties; + getDispatcher()->vkCmdBeginConditionalRenderingEXT( static_cast( m_commandBuffer ), + reinterpret_cast( &conditionalRenderingBegin ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain PhysicalDevice::getProperties2() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT() const VULKAN_HPP_NOEXCEPT { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get(); - getDispatcher()->vkGetPhysicalDeviceProperties2( static_cast( m_physicalDevice ), - reinterpret_cast( &properties ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndConditionalRenderingEXT && + "Function requires " ); - return structureChain; + getDispatcher()->vkCmdEndConditionalRenderingEXT( static_cast( m_commandBuffer ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 - PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT + //=== VK_KHR_descriptor_update_template === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; - getDispatcher()->vkGetPhysicalDeviceFormatProperties2( - static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( &formatProperties ) ); + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; + VULKAN_HPP_NAMESPACE::Result result = static_cast( 
getDispatcher()->vkCreateDescriptorUpdateTemplateKHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &descriptorUpdateTemplate ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createDescriptorUpdateTemplateKHR" ); +# endif + } - return formatProperties; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate( + *this, *reinterpret_cast( &descriptorUpdateTemplate ), allocator ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + Optional allocator ) const VULKAN_HPP_NOEXCEPT { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get(); - getDispatcher()->vkGetPhysicalDeviceFormatProperties2( - static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( &formatProperties ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkDestroyDescriptorUpdateTemplateKHR && + "Function requires or " ); - return structureChain; + getDispatcher()->vkDestroyDescriptorUpdateTemplateKHR( + static_cast( m_device ), + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( static_cast( allocator ) ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties2 - PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const + template + VULKAN_HPP_INLINE void DescriptorSet::updateWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + DataType const & data ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; - VkResult result = - getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2( static_cast( m_physicalDevice ), - reinterpret_cast( &imageFormatInfo ), - reinterpret_cast( &imageFormatProperties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateDescriptorSetWithTemplateKHR && + "Function requires or " ); - return imageFormatProperties; + getDispatcher()->vkUpdateDescriptorSetWithTemplateKHR( static_cast( m_device ), + static_cast( m_descriptorSet ), + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( &data ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const + //=== VK_NV_clip_space_w_scaling === + + VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( + uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewportWScalings ) const VULKAN_HPP_NOEXCEPT { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get(); - VkResult result = - getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2( static_cast( m_physicalDevice ), - reinterpret_cast( &imageFormatInfo ), - reinterpret_cast( &imageFormatProperties ) ); - resultCheck( 
static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWScalingNV && "Function requires " ); - return structureChain; + getDispatcher()->vkCmdSetViewportWScalingNV( static_cast( m_commandBuffer ), + firstViewport, + viewportWScalings.size(), + reinterpret_cast( viewportWScalings.data() ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2() const +# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + + VULKAN_HPP_INLINE void PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const { - std::vector queueFamilyProperties; - uint32_t queueFamilyPropertyCount; - getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast( m_physicalDevice ), &queueFamilyPropertyCount, nullptr ); - queueFamilyProperties.resize( queueFamilyPropertyCount ); - getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast( m_physicalDevice ), - &queueFamilyPropertyCount, - reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireXlibDisplayEXT && "Function requires " ); - VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); - if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) - { - queueFamilyProperties.resize( queueFamilyPropertyCount ); - } - return queueFamilyProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkAcquireXlibDisplayEXT( static_cast( m_physicalDevice ), &dpy, static_cast( display ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2() const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { - std::vector structureChains; - std::vector queueFamilyProperties; - uint32_t queueFamilyPropertyCount; - getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast( m_physicalDevice ), &queueFamilyPropertyCount, nullptr ); - structureChains.resize( queueFamilyPropertyCount ); - queueFamilyProperties.resize( queueFamilyPropertyCount ); - for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) + VULKAN_HPP_NAMESPACE::DisplayKHR display; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetRandROutputDisplayEXT( + static_cast( m_physicalDevice ), &dpy, rrOutput, reinterpret_cast( &display ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - queueFamilyProperties[i].pNext = structureChains[i].template get().pNext; +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "PhysicalDevice::getRandROutputDisplayEXT" ); +# endif } - getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast( m_physicalDevice ), - &queueFamilyPropertyCount, - reinterpret_cast( queueFamilyProperties.data() ) ); - VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); - if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) - { - structureChains.resize( queueFamilyPropertyCount ); - } - for ( uint32_t i = 0; i < 
queueFamilyPropertyCount; i++ ) - { - structureChains[i].template get() = queueFamilyProperties[i]; - } - return structureChains; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, *reinterpret_cast( &display ) ); } +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 - PhysicalDevice::getMemoryProperties2() const VULKAN_HPP_NOEXCEPT + //=== VK_EXT_display_surface_counter === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT + PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const { - VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; - getDispatcher()->vkGetPhysicalDeviceMemoryProperties2( static_cast( m_physicalDevice ), - reinterpret_cast( &memoryProperties ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2EXT && + "Function requires " ); - return memoryProperties; + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2EXT( static_cast( m_physicalDevice ), + static_cast( surface ), + reinterpret_cast( &surfaceCapabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); + + return surfaceCapabilities; } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain PhysicalDevice::getMemoryProperties2() const VULKAN_HPP_NOEXCEPT + //=== VK_EXT_display_control === + + VULKAN_HPP_INLINE void Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, + const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo ) const { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = - structureChain.template get(); - getDispatcher()->vkGetPhysicalDeviceMemoryProperties2( static_cast( m_physicalDevice ), - reinterpret_cast( &memoryProperties ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkDisplayPowerControlEXT && "Function requires " ); - return structureChain; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkDisplayPowerControlEXT( + static_cast( m_device ), static_cast( display ), reinterpret_cast( &displayPowerInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::registerEventEXT( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - std::vector properties; - uint32_t propertyCount; - getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast( m_physicalDevice ), - reinterpret_cast( &formatInfo ), - &propertyCount, - nullptr ); - properties.resize( propertyCount ); - getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast( m_physicalDevice ), - reinterpret_cast( &formatInfo ), - &propertyCount, - reinterpret_cast( properties.data() ) ); - - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( 
propertyCount < properties.size() ) + VULKAN_HPP_NAMESPACE::Fence fence; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkRegisterDeviceEventEXT( + static_cast( m_device ), + reinterpret_cast( &deviceEventInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - properties.resize( propertyCount ); +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::registerEventEXT" ); +# endif } - return properties; - } - VULKAN_HPP_INLINE void CommandPool::trim( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT - { - getDispatcher()->vkTrimCommandPool( - static_cast( m_device ), static_cast( m_commandPool ), static_cast( flags ) ); + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence( *this, *reinterpret_cast( &fence ), allocator ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Queue Device::getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, + VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::Queue( *this, queueInfo ); - } + VULKAN_HPP_NAMESPACE::Fence fence; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkRegisterDisplayEventEXT( + static_cast( m_device ), + static_cast( *display ), + reinterpret_cast( &displayEventInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &fence ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::registerDisplayEventEXT" ); +# endif + } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion - Device::createSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion( *this, createInfo, allocator ); + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence( *this, *reinterpret_cast( &fence ), allocator ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate - Device::createDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t SwapchainKHR::getCounterEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter ) const { - return VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate( *this, createInfo, allocator ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainCounterEXT && "Function requires " ); - template - VULKAN_HPP_INLINE void DescriptorSet::updateWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, - DataType const & data ) const VULKAN_HPP_NOEXCEPT - { - getDispatcher()->vkUpdateDescriptorSetWithTemplate( static_cast( m_device ), - 
static_cast( m_descriptorSet ), - static_cast( descriptorUpdateTemplate ), - reinterpret_cast( &data ) ); + uint64_t counterValue; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetSwapchainCounterEXT( + static_cast( m_device ), static_cast( m_swapchain ), static_cast( counter ), &counterValue ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getCounterEXT" ); + + return counterValue; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties - PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT + //=== VK_GOOGLE_display_timing === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE SwapchainKHR::getRefreshCycleDurationGOOGLE() const { - VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; - getDispatcher()->vkGetPhysicalDeviceExternalBufferProperties( static_cast( m_physicalDevice ), - reinterpret_cast( &externalBufferInfo ), - reinterpret_cast( &externalBufferProperties ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetRefreshCycleDurationGOOGLE && "Function requires " ); - return externalBufferProperties; + VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetRefreshCycleDurationGOOGLE( static_cast( m_device ), + static_cast( m_swapchain ), + reinterpret_cast( &displayTimingProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getRefreshCycleDurationGOOGLE" ); + + return displayTimingProperties; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties - PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector SwapchainKHR::getPastPresentationTimingGOOGLE() const { - VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; - getDispatcher()->vkGetPhysicalDeviceExternalFenceProperties( static_cast( m_physicalDevice ), - reinterpret_cast( &externalFenceInfo ), - reinterpret_cast( &externalFenceProperties ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPastPresentationTimingGOOGLE && + "Function requires " ); - return externalFenceProperties; + std::vector presentationTimings; + uint32_t presentationTimingCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPastPresentationTimingGOOGLE( + static_cast( m_device ), static_cast( m_swapchain ), &presentationTimingCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentationTimingCount ) + { + presentationTimings.resize( presentationTimingCount ); + result = static_cast( + getDispatcher()->vkGetPastPresentationTimingGOOGLE( static_cast( m_device ), + static_cast( m_swapchain ), + &presentationTimingCount, + reinterpret_cast( presentationTimings.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getPastPresentationTimingGOOGLE" ); + VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); + if ( presentationTimingCount < presentationTimings.size() ) + { + presentationTimings.resize( presentationTimingCount 
); + } + return presentationTimings; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties( - const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT + //=== VK_EXT_discard_rectangles === + + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( + uint32_t firstDiscardRectangle, VULKAN_HPP_NAMESPACE::ArrayProxy const & discardRectangles ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; - getDispatcher()->vkGetPhysicalDeviceExternalSemaphoreProperties( - static_cast( m_physicalDevice ), - reinterpret_cast( &externalSemaphoreInfo ), - reinterpret_cast( &externalSemaphoreProperties ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDiscardRectangleEXT && "Function requires " ); - return externalSemaphoreProperties; + getDispatcher()->vkCmdSetDiscardRectangleEXT( static_cast( m_commandBuffer ), + firstDiscardRectangle, + discardRectangles.size(), + reinterpret_cast( discardRectangles.data() ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport - Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; - getDispatcher()->vkGetDescriptorSetLayoutSupport( static_cast( m_device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( &support ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDiscardRectangleEnableEXT && + "Function requires " ); - return support; + getDispatcher()->vkCmdSetDiscardRectangleEnableEXT( static_cast( m_commandBuffer ), static_cast( discardRectangleEnable ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::setDiscardRectangleModeEXT( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode ) const VULKAN_HPP_NOEXCEPT { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get(); - getDispatcher()->vkGetDescriptorSetLayoutSupport( static_cast( m_device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( &support ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDiscardRectangleModeEXT && + "Function requires " ); - return structureChain; + getDispatcher()->vkCmdSetDiscardRectangleModeEXT( static_cast( m_commandBuffer ), + static_cast( discardRectangleMode ) ); } - //=== VK_VERSION_1_2 === + //=== VK_EXT_hdr_metadata === - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::Buffer countBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & swapchains, + VULKAN_HPP_NAMESPACE::ArrayProxy const & metadata ) const { - getDispatcher()->vkCmdDrawIndirectCount( static_cast( m_commandBuffer ), - static_cast( buffer ), - static_cast( offset ), - static_cast( countBuffer ), - 
static_cast( countBufferOffset ), - maxDrawCount, - stride ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkSetHdrMetadataEXT && "Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() ); +# else + if ( swapchains.size() != metadata.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::Buffer countBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - getDispatcher()->vkCmdDrawIndexedIndirectCount( static_cast( m_commandBuffer ), - static_cast( buffer ), - static_cast( offset ), - static_cast( countBuffer ), - static_cast( countBufferOffset ), - maxDrawCount, - stride ); + getDispatcher()->vkSetHdrMetadataEXT( static_cast( m_device ), + swapchains.size(), + reinterpret_cast( swapchains.data() ), + reinterpret_cast( metadata.data() ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::RenderPass - Device::createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, createInfo, allocator ); - } + //=== VK_KHR_create_renderpass2 === - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, - const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createRenderPass2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - getDispatcher()->vkCmdBeginRenderPass2( static_cast( m_commandBuffer ), - reinterpret_cast( &renderPassBegin ), - reinterpret_cast( &subpassBeginInfo ) ); - } + VULKAN_HPP_NAMESPACE::RenderPass renderPass; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateRenderPass2KHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &renderPass ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createRenderPass2KHR" ); +# endif + } - VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, - const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT - { - getDispatcher()->vkCmdNextSubpass2( static_cast( m_commandBuffer ), - reinterpret_cast( &subpassBeginInfo ), - reinterpret_cast( &subpassEndInfo ) ); + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, *reinterpret_cast( &renderPass ), allocator ); } - VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, + const 
VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkCmdEndRenderPass2( static_cast( m_commandBuffer ), reinterpret_cast( &subpassEndInfo ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderPass2KHR && + "Function requires or " ); + + getDispatcher()->vkCmdBeginRenderPass2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( &renderPassBegin ), + reinterpret_cast( &subpassBeginInfo ) ); } - VULKAN_HPP_INLINE void QueryPool::reset( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, + const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkResetQueryPool( static_cast( m_device ), static_cast( m_queryPool ), firstQuery, queryCount ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdNextSubpass2KHR && "Function requires or " ); + + getDispatcher()->vkCmdNextSubpass2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( &subpassBeginInfo ), + reinterpret_cast( &subpassEndInfo ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Semaphore::getCounterValue() const + VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT { - uint64_t value; - VkResult result = getDispatcher()->vkGetSemaphoreCounterValue( static_cast( m_device ), static_cast( m_semaphore ), &value ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValue" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderPass2KHR && + "Function requires or " ); - return value; + getDispatcher()->vkCmdEndRenderPass2KHR( static_cast( m_commandBuffer ), reinterpret_cast( &subpassEndInfo ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, - uint64_t timeout ) const + //=== VK_KHR_shared_presentable_image === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result SwapchainKHR::getStatus() const { - VkResult result = - getDispatcher()->vkWaitSemaphores( static_cast( m_device ), reinterpret_cast( &waitInfo ), timeout ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainStatusKHR && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetSwapchainStatusKHR( static_cast( m_device ), static_cast( m_swapchain ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getStatus", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); return static_cast( result ); } - VULKAN_HPP_INLINE void Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const - { - VkResult result = getDispatcher()->vkSignalSemaphore( static_cast( m_device ), reinterpret_cast( &signalInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" ); - } + //=== VK_KHR_external_fence_capabilities === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress - Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties + PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT { - VkDeviceAddress result = - getDispatcher()->vkGetBufferDeviceAddress( static_cast( m_device ), reinterpret_cast( &info ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalFencePropertiesKHR && + "Function requires or " ); - return static_cast( result ); + VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; + getDispatcher()->vkGetPhysicalDeviceExternalFencePropertiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( &externalFenceInfo ), + reinterpret_cast( &externalFenceProperties ) ); + + return externalFenceProperties; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t - Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + + VULKAN_HPP_INLINE void Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo ) const { - uint64_t result = - getDispatcher()->vkGetBufferOpaqueCaptureAddress( static_cast( m_device ), reinterpret_cast( &info ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkImportFenceWin32HandleKHR && "Function requires " ); - return result; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkImportFenceWin32HandleKHR( + static_cast( m_device ), reinterpret_cast( &importFenceWin32HandleInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t - Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE + Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo ) const { - uint64_t result = getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddress( static_cast( m_device ), - reinterpret_cast( &info ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceWin32HandleKHR && "Function requires " ); - return result; + HANDLE handle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetFenceWin32HandleKHR( + static_cast( m_device ), reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" ); + + return handle; } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_VERSION_1_3 === + //=== VK_KHR_external_fence_fd === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getToolProperties() const + VULKAN_HPP_INLINE void Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo ) const { - std::vector toolProperties; - uint32_t toolCount; - VkResult result; - do - { - result = getDispatcher()->vkGetPhysicalDeviceToolProperties( static_cast( m_physicalDevice ), &toolCount, nullptr ); - if ( ( result == VK_SUCCESS ) && toolCount ) - { - toolProperties.resize( toolCount ); - result = getDispatcher()->vkGetPhysicalDeviceToolProperties( - static_cast( m_physicalDevice ), &toolCount, reinterpret_cast( toolProperties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( 
result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); - VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); - if ( toolCount < toolProperties.size() ) - { - toolProperties.resize( toolCount ); - } - return toolProperties; - } + VULKAN_HPP_ASSERT( getDispatcher()->vkImportFenceFdKHR && "Function requires " ); - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot - Device::createPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot( *this, createInfo, allocator ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkImportFenceFdKHR( static_cast( m_device ), reinterpret_cast( &importFenceFdInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" ); } - VULKAN_HPP_INLINE void Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, - uint64_t objectHandle, - VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, - uint64_t data ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo ) const { - VkResult result = getDispatcher()->vkSetPrivateData( - static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceFdKHR && "Function requires " ); - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, - uint64_t objectHandle, - VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT - { - uint64_t data; - getDispatcher()->vkGetPrivateData( - static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), &data ); + int fd; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetFenceFdKHR( static_cast( m_device ), reinterpret_cast( &getFdInfo ), &fd ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" ); - return data; + return fd; } - VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, - const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT - { - getDispatcher()->vkCmdSetEvent2( - static_cast( m_commandBuffer ), static_cast( event ), reinterpret_cast( &dependencyInfo ) ); - } + //=== VK_KHR_performance_query === - VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event, - VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD + VULKAN_HPP_INLINE std::pair, std::vector> + PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex ) const { - getDispatcher()->vkCmdResetEvent2( - static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR && + "Function requires " ); - VULKAN_HPP_INLINE void - CommandBuffer::waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, - VULKAN_HPP_NAMESPACE::ArrayProxy const & dependencyInfos ) const - { - if ( events.size() != dependencyInfos.size() ) + std::pair, std::vector> data_; + std::vector & counters 
= data_.first; + std::vector & counterDescriptions = data_.second; + uint32_t counterCount; + VULKAN_HPP_NAMESPACE::Result result; + do { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" ); + result = static_cast( getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + static_cast( m_physicalDevice ), queueFamilyIndex, &counterCount, nullptr, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && counterCount ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); + result = static_cast( getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + static_cast( m_physicalDevice ), + queueFamilyIndex, + &counterCount, + reinterpret_cast( counters.data() ), + reinterpret_cast( counterDescriptions.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + VULKAN_HPP_ASSERT( counterCount <= counters.size() ); + if ( counterCount < counters.size() ) + { + counters.resize( counterCount ); + counterDescriptions.resize( counterCount ); } - - getDispatcher()->vkCmdWaitEvents2( static_cast( m_commandBuffer ), - events.size(), - reinterpret_cast( events.data() ), - reinterpret_cast( dependencyInfos.data() ) ); + return data_; } - VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( + const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo ) const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkCmdPipelineBarrier2( static_cast( m_commandBuffer ), reinterpret_cast( &dependencyInfo ) ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR && + "Function requires " ); - VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, - VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t query ) const VULKAN_HPP_NOEXCEPT - { - getDispatcher()->vkCmdWriteTimestamp2( - static_cast( m_commandBuffer ), static_cast( stage ), static_cast( queryPool ), query ); - } + uint32_t numPasses; + getDispatcher()->vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &performanceQueryCreateInfo ), + &numPasses ); - VULKAN_HPP_INLINE void Queue::submit2( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, - VULKAN_HPP_NAMESPACE::Fence fence ) const - { - VkResult result = getDispatcher()->vkQueueSubmit2( - static_cast( m_queue ), submits.size(), reinterpret_cast( submits.data() ), static_cast( fence ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" ); + return numPasses; } - VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info ) const { - getDispatcher()->vkCmdCopyBuffer2( static_cast( m_commandBuffer ), reinterpret_cast( ©BufferInfo ) ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireProfilingLockKHR && "Function requires " ); - VULKAN_HPP_INLINE void 
CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT - { - getDispatcher()->vkCmdCopyImage2( static_cast( m_commandBuffer ), reinterpret_cast( ©ImageInfo ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkAcquireProfilingLockKHR( static_cast( m_device ), reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" ); } - VULKAN_HPP_INLINE void - CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR() const VULKAN_HPP_NOEXCEPT { - getDispatcher()->vkCmdCopyBufferToImage2( static_cast( m_commandBuffer ), - reinterpret_cast( ©BufferToImageInfo ) ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseProfilingLockKHR && "Function requires " ); - VULKAN_HPP_INLINE void - CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT - { - getDispatcher()->vkCmdCopyImageToBuffer2( static_cast( m_commandBuffer ), - reinterpret_cast( ©ImageToBufferInfo ) ); + getDispatcher()->vkReleaseProfilingLockKHR( static_cast( m_device ) ); } - VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT - { - getDispatcher()->vkCmdBlitImage2( static_cast( m_commandBuffer ), reinterpret_cast( &blitImageInfo ) ); - } + //=== VK_KHR_get_surface_capabilities2 === - VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR + PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const { - getDispatcher()->vkCmdResolveImage2( static_cast( m_commandBuffer ), - reinterpret_cast( &resolveImageInfo ) ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR && + "Function requires " ); - VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT - { - getDispatcher()->vkCmdBeginRendering( static_cast( m_commandBuffer ), reinterpret_cast( &renderingInfo ) ); - } + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &surfaceCapabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); - VULKAN_HPP_INLINE void CommandBuffer::endRendering() const VULKAN_HPP_NOEXCEPT - { - getDispatcher()->vkCmdEndRendering( static_cast( m_commandBuffer ) ); + return surfaceCapabilities; } - VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const { - getDispatcher()->vkCmdSetCullMode( static_cast( m_commandBuffer ), static_cast( cullMode ) ); - } + 
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR && + "Function requires " ); - VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT - { - getDispatcher()->vkCmdSetFrontFace( static_cast( m_commandBuffer ), static_cast( frontFace ) ); - } + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &surfaceCapabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); - VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT - { - getDispatcher()->vkCmdSetPrimitiveTopology( static_cast( m_commandBuffer ), static_cast( primitiveTopology ) ); + return structureChain; } - VULKAN_HPP_INLINE void - CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const { - getDispatcher()->vkCmdSetViewportWithCount( - static_cast( m_commandBuffer ), viewports.size(), reinterpret_cast( viewports.data() ) ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR && + "Function requires " ); - VULKAN_HPP_INLINE void - CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors ) const VULKAN_HPP_NOEXCEPT - { - getDispatcher()->vkCmdSetScissorWithCount( - static_cast( m_commandBuffer ), scissors.size(), reinterpret_cast( scissors.data() ) ); + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) + { + surfaceFormats.resize( surfaceFormatCount ); + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) + { + surfaceFormats.resize( surfaceFormatCount ); + } + return surfaceFormats; } - VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, - VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, - VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, - VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes, - VULKAN_HPP_NAMESPACE::ArrayProxy const & strides ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const { - if ( buffers.size() != offsets.size() 
) + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR && + "Function requires " ); + + std::vector structureChains; + std::vector surfaceFormats; + uint32_t surfaceFormatCount; + VULKAN_HPP_NAMESPACE::Result result; + do { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" ); - } - if ( !sizes.empty() && buffers.size() != sizes.size() ) + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && surfaceFormatCount ) + { + structureChains.resize( surfaceFormatCount ); + surfaceFormats.resize( surfaceFormatCount ); + for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) + { + surfaceFormats[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &surfaceInfo ), + &surfaceFormatCount, + reinterpret_cast( surfaceFormats.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); + if ( surfaceFormatCount < surfaceFormats.size() ) { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" ); + structureChains.resize( surfaceFormatCount ); } - if ( !strides.empty() && buffers.size() != strides.size() ) + for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" ); + structureChains[i].template get() = surfaceFormats[i]; } - - getDispatcher()->vkCmdBindVertexBuffers2( static_cast( m_commandBuffer ), - firstBinding, - buffers.size(), - reinterpret_cast( buffers.data() ), - reinterpret_cast( offsets.data() ), - reinterpret_cast( sizes.data() ), - reinterpret_cast( strides.data() ) ); + return structureChains; } - VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT - { - getDispatcher()->vkCmdSetDepthTestEnable( static_cast( m_commandBuffer ), static_cast( depthTestEnable ) ); - } + //=== VK_KHR_get_display_properties2 === - VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getDisplayProperties2KHR() const { - getDispatcher()->vkCmdSetDepthWriteEnable( static_cast( m_commandBuffer ), static_cast( depthWriteEnable ) ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR && + "Function requires " ); - VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT - { - getDispatcher()->vkCmdSetDepthCompareOp( static_cast( m_commandBuffer ), static_cast( depthCompareOp ) ); + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR( static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && 
propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR( + static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; } - VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getDisplayPlaneProperties2KHR() const { - getDispatcher()->vkCmdSetDepthBoundsTestEnable( static_cast( m_commandBuffer ), static_cast( depthBoundsTestEnable ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR && + "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR( static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; } - VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector DisplayKHR::getModeProperties2() const { - getDispatcher()->vkCmdSetStencilTestEnable( static_cast( m_commandBuffer ), static_cast( stencilTestEnable ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayModeProperties2KHR && + "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetDisplayModeProperties2KHR( + static_cast( m_physicalDevice ), static_cast( m_display ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( + getDispatcher()->vkGetDisplayModeProperties2KHR( static_cast( m_physicalDevice ), + static_cast( m_display ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties2" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; } - VULKAN_HPP_INLINE void CommandBuffer::setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, - VULKAN_HPP_NAMESPACE::StencilOp 
failOp, - VULKAN_HPP_NAMESPACE::StencilOp passOp, - VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, - VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector DisplayKHR::getModeProperties2() const { - getDispatcher()->vkCmdSetStencilOp( static_cast( m_commandBuffer ), - static_cast( faceMask ), - static_cast( failOp ), - static_cast( passOp ), - static_cast( depthFailOp ), - static_cast( compareOp ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayModeProperties2KHR && + "Function requires " ); + + std::vector structureChains; + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetDisplayModeProperties2KHR( + static_cast( m_physicalDevice ), static_cast( m_display ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + structureChains.resize( propertyCount ); + properties.resize( propertyCount ); + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + properties[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( + getDispatcher()->vkGetDisplayModeProperties2KHR( static_cast( m_physicalDevice ), + static_cast( m_display ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties2" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + structureChains.resize( propertyCount ); + } + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + structureChains[i].template get() = properties[i]; + } + return structureChains; } - VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR + PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo ) const { - getDispatcher()->vkCmdSetRasterizerDiscardEnable( static_cast( m_commandBuffer ), static_cast( rasterizerDiscardEnable ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayPlaneCapabilities2KHR && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetDisplayPlaneCapabilities2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( &displayPlaneInfo ), + reinterpret_cast( &capabilities ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); + + return capabilities; } - VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT +# if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - getDispatcher()->vkCmdSetDepthBiasEnable( static_cast( m_commandBuffer ), static_cast( depthBiasEnable ) ); + VULKAN_HPP_NAMESPACE::SurfaceKHR 
surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateIOSSurfaceMVK( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createIOSSurfaceMVK" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); } +# endif /*VK_USE_PLATFORM_IOS_MVK*/ - VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - getDispatcher()->vkCmdSetPrimitiveRestartEnable( static_cast( m_commandBuffer ), static_cast( primitiveRestartEnable ) ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateMacOSSurfaceMVK( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createMacOSSurfaceMVK" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); } +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 - Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo ) const { - VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; - getDispatcher()->vkGetDeviceBufferMemoryRequirements( static_cast( m_device ), - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkSetDebugUtilsObjectNameEXT && "Function requires " ); - return memoryRequirements; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkSetDebugUtilsObjectNameEXT( + static_cast( m_device ), reinterpret_cast( &nameInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo ) const { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); - 
getDispatcher()->vkGetDeviceBufferMemoryRequirements( static_cast( m_device ), - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkSetDebugUtilsObjectTagEXT && "Function requires " ); - return structureChain; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkSetDebugUtilsObjectTagEXT( static_cast( m_device ), reinterpret_cast( &tagInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 - Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; - getDispatcher()->vkGetDeviceImageMemoryRequirements( static_cast( m_device ), - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueBeginDebugUtilsLabelEXT && "Function requires " ); - return memoryRequirements; + getDispatcher()->vkQueueBeginDebugUtilsLabelEXT( static_cast( m_queue ), reinterpret_cast( &labelInfo ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT { - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); - getDispatcher()->vkGetDeviceImageMemoryRequirements( static_cast( m_device ), - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueEndDebugUtilsLabelEXT && "Function requires " ); - return structureChain; + getDispatcher()->vkQueueEndDebugUtilsLabelEXT( static_cast( m_queue ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const + VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT { - std::vector sparseMemoryRequirements; - uint32_t sparseMemoryRequirementCount; - getDispatcher()->vkGetDeviceImageSparseMemoryRequirements( - static_cast( m_device ), reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); - sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); - getDispatcher()->vkGetDeviceImageSparseMemoryRequirements( static_cast( m_device ), - reinterpret_cast( &info ), - &sparseMemoryRequirementCount, - reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueInsertDebugUtilsLabelEXT && "Function requires " ); - VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); - if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) - { - sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); - } - return sparseMemoryRequirements; + getDispatcher()->vkQueueInsertDebugUtilsLabelEXT( static_cast( m_queue ), reinterpret_cast( &labelInfo ) ); } - //=== VK_KHR_surface === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 
PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, - VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceSupportKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginDebugUtilsLabelEXT && "Function requires " ); - VULKAN_HPP_NAMESPACE::Bool32 supported; - VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceSupportKHR( - static_cast( m_physicalDevice ), queueFamilyIndex, static_cast( surface ), reinterpret_cast( &supported ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" ); - - return supported; + getDispatcher()->vkCmdBeginDebugUtilsLabelEXT( static_cast( m_commandBuffer ), + reinterpret_cast( &labelInfo ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR - PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilitiesKHR && - "Function needs extension enabled!" ); - - VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities; - VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilitiesKHR( static_cast( m_physicalDevice ), - static_cast( surface ), - reinterpret_cast( &surfaceCapabilities ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndDebugUtilsLabelEXT && "Function requires " ); - return surfaceCapabilities; + getDispatcher()->vkCmdEndDebugUtilsLabelEXT( static_cast( m_commandBuffer ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdInsertDebugUtilsLabelEXT && "Function requires " ); - std::vector surfaceFormats; - uint32_t surfaceFormatCount; - VkResult result; - do - { - result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR( - static_cast( m_physicalDevice ), static_cast( surface ), &surfaceFormatCount, nullptr ); - if ( ( result == VK_SUCCESS ) && surfaceFormatCount ) - { - surfaceFormats.resize( surfaceFormatCount ); - result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR( static_cast( m_physicalDevice ), - static_cast( surface ), - &surfaceFormatCount, - reinterpret_cast( surfaceFormats.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); - VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); - if ( surfaceFormatCount < surfaceFormats.size() ) - { - surfaceFormats.resize( surfaceFormatCount ); + getDispatcher()->vkCmdInsertDebugUtilsLabelEXT( static_cast( m_commandBuffer ), + reinterpret_cast( &labelInfo ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateDebugUtilsMessengerEXT( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &messenger ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createDebugUtilsMessengerEXT" ); +# endif } - return surfaceFormats; + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT( + *this, *reinterpret_cast( &messenger ), allocator ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + VULKAN_HPP_INLINE void + Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkSubmitDebugUtilsMessageEXT && "Function requires " ); - std::vector presentModes; - uint32_t presentModeCount; - VkResult result; - do - { - result = getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR( - static_cast( m_physicalDevice ), static_cast( surface ), &presentModeCount, nullptr ); - if ( ( result == VK_SUCCESS ) && presentModeCount ) - { - presentModes.resize( presentModeCount ); - result = getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR( static_cast( m_physicalDevice ), - static_cast( surface ), - &presentModeCount, - reinterpret_cast( presentModes.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); - VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); - if ( presentModeCount < presentModes.size() ) - { - presentModes.resize( presentModeCount ); - } - return presentModes; + getDispatcher()->vkSubmitDebugUtilsMessageEXT( static_cast( m_instance ), + static_cast( messageSeverity ), + static_cast( messageTypes ), + reinterpret_cast( &callbackData ) ); } - //=== VK_KHR_swapchain === +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR - Device::createSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID + Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const { - return VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR( *this, createInfo, allocator ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID( + static_cast( m_device ), &buffer, reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + + return properties; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector SwapchainKHR::getImages() const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainImagesKHR && "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID && + "Function requires " ); - std::vector swapchainImages; - uint32_t swapchainImageCount; - VkResult result; - do - { - result = getDispatcher()->vkGetSwapchainImagesKHR( - static_cast( m_device ), static_cast( m_swapchain ), &swapchainImageCount, nullptr ); - if ( ( result == VK_SUCCESS ) && swapchainImageCount ) - { - swapchainImages.resize( swapchainImageCount ); - result = getDispatcher()->vkGetSwapchainImagesKHR( - static_cast( m_device ), static_cast( m_swapchain ), &swapchainImageCount, swapchainImages.data() ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getImages" ); - VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); - if ( swapchainImageCount < swapchainImages.size() ) - { - swapchainImages.resize( swapchainImageCount ); - } - return swapchainImages; + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties = + structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID( + static_cast( m_device ), &buffer, reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + + return structureChain; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair - SwapchainKHR::acquireNextImage( uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE struct AHardwareBuffer * + Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireNextImageKHR && "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryAndroidHardwareBufferANDROID && + "Function requires " ); - uint32_t imageIndex; - VkResult result = getDispatcher()->vkAcquireNextImageKHR( static_cast( m_device ), - static_cast( m_swapchain ), - timeout, - static_cast( semaphore ), - static_cast( fence ), - &imageIndex ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireNextImage", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eTimeout, - VULKAN_HPP_NAMESPACE::Result::eNotReady, - VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + struct AHardwareBuffer * buffer; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetMemoryAndroidHardwareBufferANDROID( + static_cast( m_device ), reinterpret_cast( &info ), &buffer ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" ); - return std::make_pair( static_cast( result ), imageIndex ); + return buffer; } +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo ) const +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type + Device::createExecutionGraphPipelinesAMDX( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkQueuePresentKHR && "Function needs extension enabled!" ); + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateExecutionGraphPipelinesAMDX( + static_cast( m_device ), + pipelineCache ? 
static_cast( **pipelineCache ) : 0, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createExecutionGraphPipelinesAMDX" ); +# endif + } + + std::vector pipelinesRAII; + pipelinesRAII.reserve( pipelines.size() ); + for ( auto & pipeline : pipelines ) + { + pipelinesRAII.emplace_back( *this, *reinterpret_cast( &pipeline ), allocator, result ); + } + return pipelinesRAII; + } - VkResult result = getDispatcher()->vkQueuePresentKHR( static_cast( m_queue ), reinterpret_cast( &presentInfo ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createExecutionGraphPipelineAMDX( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateExecutionGraphPipelinesAMDX( + static_cast( m_device ), + pipelineCache ? static_cast( **pipelineCache ) : 0, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createExecutionGraphPipelineAMDX" ); +# endif + } - return static_cast( result ); + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast( &pipeline ), allocator, result ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR Device::getGroupPresentCapabilitiesKHR() const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX Pipeline::getExecutionGraphScratchSizeAMDX() const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupPresentCapabilitiesKHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetExecutionGraphPipelineScratchSizeAMDX && + "Function requires " ); - VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities; - VkResult result = getDispatcher()->vkGetDeviceGroupPresentCapabilitiesKHR( - static_cast( m_device ), reinterpret_cast( &deviceGroupPresentCapabilities ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" ); + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX sizeInfo; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetExecutionGraphPipelineScratchSizeAMDX( + static_cast( m_device ), static_cast( m_pipeline ), reinterpret_cast( &sizeInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getExecutionGraphScratchSizeAMDX" ); - return deviceGroupPresentCapabilities; + return sizeInfo; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR - Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t + Pipeline::getExecutionGraphNodeIndexAMDX( const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupSurfacePresentModesKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetExecutionGraphPipelineNodeIndexAMDX && + "Function requires " ); - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; - VkResult result = getDispatcher()->vkGetDeviceGroupSurfacePresentModesKHR( - static_cast( m_device ), static_cast( surface ), reinterpret_cast( &modes ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); + uint32_t nodeIndex; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetExecutionGraphPipelineNodeIndexAMDX( static_cast( m_device ), + static_cast( m_pipeline ), + reinterpret_cast( &nodeInfo ), + &nodeIndex ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getExecutionGraphNodeIndexAMDX" ); - return modes; + return nodeIndex; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdInitializeGraphScratchMemoryAMDX && + "Function requires " ); - std::vector rects; - uint32_t rectCount; - VkResult result; - do - { - result = getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR( - static_cast( m_physicalDevice ), static_cast( surface ), &rectCount, nullptr ); - if ( ( result == VK_SUCCESS ) && rectCount ) - { - rects.resize( rectCount ); - result = getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR( - static_cast( m_physicalDevice ), static_cast( surface ), &rectCount, reinterpret_cast( rects.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); - VULKAN_HPP_ASSERT( rectCount <= rects.size() ); - if ( rectCount < rects.size() ) - { - rects.resize( rectCount ); - } - return rects; + getDispatcher()->vkCmdInitializeGraphScratchMemoryAMDX( static_cast( m_commandBuffer ), + static_cast( executionGraph ), + static_cast( scratch ), + static_cast( scratchSize ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair - Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo ) const + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireNextImage2KHR && "Function needs extension enabled!" ); - - uint32_t imageIndex; - VkResult result = getDispatcher()->vkAcquireNextImage2KHR( - static_cast( m_device ), reinterpret_cast( &acquireInfo ), &imageIndex ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eTimeout, - VULKAN_HPP_NAMESPACE::Result::eNotReady, - VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphAMDX && "Function requires " ); - return std::make_pair( static_cast( result ), imageIndex ); + getDispatcher()->vkCmdDispatchGraphAMDX( static_cast( m_commandBuffer ), + static_cast( scratch ), + static_cast( scratchSize ), + reinterpret_cast( &countInfo ) ); } - //=== VK_KHR_display === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getDisplayPropertiesKHR() const + VULKAN_HPP_INLINE void + CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphIndirectAMDX && "Function requires " ); - std::vector properties; - uint32_t propertyCount; - VkResult result; - do - { - result = getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR( static_cast( m_physicalDevice ), &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) - { - properties.resize( propertyCount ); - result = getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR( - static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) - { - properties.resize( propertyCount ); - } - return properties; + getDispatcher()->vkCmdDispatchGraphIndirectAMDX( static_cast( m_commandBuffer ), + static_cast( scratch ), + static_cast( scratchSize ), + reinterpret_cast( &countInfo ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getDisplayPlanePropertiesKHR() const + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, + VULKAN_HPP_NAMESPACE::DeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphIndirectCountAMDX && + "Function requires " ); - std::vector properties; - uint32_t propertyCount; - VkResult result; - do - { - result = getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR( static_cast( m_physicalDevice ), &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) - { - properties.resize( propertyCount ); - result = getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR( - static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) - { - properties.resize( propertyCount ); - } - return properties; + getDispatcher()->vkCmdDispatchGraphIndirectCountAMDX( static_cast( m_commandBuffer ), + static_cast( scratch ), + static_cast( scratchSize ), + static_cast( countInfo ) ); } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const + VULKAN_HPP_INLINE void + CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::DisplayKHRs( *this, planeIndex ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleLocationsEXT && "Function requires " ); + + getDispatcher()->vkCmdSetSampleLocationsEXT( static_cast( m_commandBuffer ), + reinterpret_cast( &sampleLocationsInfo ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector DisplayKHR::getModeProperties() const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT + PhysicalDevice::getMultisamplePropertiesEXT( 
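// Illustrative sketch of the VK_EXT_sample_locations wrappers above (not part of the generated
// header): query the multisample properties for a sample count, then program explicit sample
// positions on a command buffer.  Assumes the device enables VK_EXT_sample_locations; the
// positions are example data for a 1x1 grid at 4x MSAA.
#include <vulkan/vulkan_raii.hpp>
#include <array>

void setCustomSampleLocations( vk::raii::PhysicalDevice const & physicalDevice,
                               vk::raii::CommandBuffer const &  cmd )
{
  // Informational query: reports the maximum sample-location grid size for this sample count.
  vk::MultisamplePropertiesEXT multisampleProps = physicalDevice.getMultisamplePropertiesEXT( vk::SampleCountFlagBits::e4 );
  (void)multisampleProps;

  std::array<vk::SampleLocationEXT, 4> locations = {
    { { 0.375f, 0.125f }, { 0.875f, 0.375f }, { 0.125f, 0.625f }, { 0.625f, 0.875f } } };

  vk::SampleLocationsInfoEXT info{};
  info.sampleLocationsPerPixel = vk::SampleCountFlagBits::e4;
  info.sampleLocationGridSize  = vk::Extent2D{ 1, 1 };
  info.sampleLocationsCount    = static_cast<uint32_t>( locations.size() );
  info.pSampleLocations        = locations.data();

  cmd.setSampleLocationsEXT( info );
}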
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayModePropertiesKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMultisamplePropertiesEXT && + "Function requires " ); - std::vector properties; - uint32_t propertyCount; - VkResult result; - do - { - result = getDispatcher()->vkGetDisplayModePropertiesKHR( - static_cast( m_physicalDevice ), static_cast( m_display ), &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) - { - properties.resize( propertyCount ); - result = getDispatcher()->vkGetDisplayModePropertiesKHR( static_cast( m_physicalDevice ), - static_cast( m_display ), - &propertyCount, - reinterpret_cast( properties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) - { - properties.resize( propertyCount ); - } - return properties; + VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties; + getDispatcher()->vkGetPhysicalDeviceMultisamplePropertiesEXT( static_cast( m_physicalDevice ), + static_cast( samples ), + reinterpret_cast( &multisampleProperties ) ); + + return multisampleProperties; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR - DisplayKHR::createMode( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + //=== VK_KHR_get_memory_requirements2 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR( *this, createInfo, allocator ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2KHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetImageMemoryRequirements2KHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR - DisplayModeKHR::getDisplayPlaneCapabilities( uint32_t planeIndex ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayPlaneCapabilitiesKHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2KHR && + "Function requires or " ); - VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities; - VkResult result = getDispatcher()->vkGetDisplayPlaneCapabilitiesKHR( static_cast( m_physicalDevice ), - static_cast( m_displayModeKHR ), - planeIndex, - reinterpret_cast( &capabilities ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::DisplayModeKHR::getDisplayPlaneCapabilities" ); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetImageMemoryRequirements2KHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); - return capabilities; + return structureChain; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - Instance::createDisplayPlaneSurfaceKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2KHR && + "Function requires or " ); - //=== VK_KHR_display_swapchain === + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetBufferMemoryRequirements2KHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::SwapchainKHRs( *this, createInfos, allocator ); + return memoryRequirements; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR - Device::createSharedSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR( *this, createInfo, allocator ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2KHR && + "Function requires or " ); -# if defined( VK_USE_PLATFORM_XLIB_KHR ) - //=== VK_KHR_xlib_surface === + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetBufferMemoryRequirements2KHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - Instance::createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + return structureChain; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 - PhysicalDevice::getXlibPresentationSupportKHR( uint32_t 
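// Illustrative sketch (not part of the generated header) of the templated
// getImageMemoryRequirements2KHR overload above: MemoryDedicatedRequirements is pulled out of
// the same query via a StructureChain.  Assumes VK_KHR_get_memory_requirements2 (or Vulkan 1.1)
// and VK_KHR_dedicated_allocation are available.
#include <vulkan/vulkan_raii.hpp>

bool prefersDedicatedAllocation( vk::raii::Device const & device, vk::Image image )
{
  vk::ImageMemoryRequirementsInfo2 info{};
  info.image = image;

  auto chain = device.getImageMemoryRequirements2KHR<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>( info );

  // reqs.memoryRequirements.size / .memoryTypeBits drive the actual allocation.
  vk::MemoryRequirements2 const &         reqs      = chain.get<vk::MemoryRequirements2>();
  vk::MemoryDedicatedRequirements const & dedicated = chain.get<vk::MemoryDedicatedRequirements>();
  (void)reqs;

  return dedicated.prefersDedicatedAllocation == VK_TRUE;
}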
queueFamilyIndex, Display & dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceXlibPresentationSupportKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSparseMemoryRequirements2KHR && + "Function requires or " ); - VkBool32 result = - getDispatcher()->vkGetPhysicalDeviceXlibPresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex, &dpy, visualID ); + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + getDispatcher()->vkGetImageSparseMemoryRequirements2KHR( + static_cast( m_device ), reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + getDispatcher()->vkGetImageSparseMemoryRequirements2KHR( static_cast( m_device ), + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); - return static_cast( result ); + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + { + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + } + return sparseMemoryRequirements; } -# endif /*VK_USE_PLATFORM_XLIB_KHR*/ -# if defined( VK_USE_PLATFORM_XCB_KHR ) - //=== VK_KHR_xcb_surface === + //=== VK_KHR_acceleration_structure === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - Instance::createXcbSurfaceKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateAccelerationStructureKHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createAccelerationStructureKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR( + *this, *reinterpret_cast( &accelerationStructure ), allocator ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR( - uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pBuildRangeInfos ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceXcbPresentationSupportKHR && - "Function needs 
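// Illustrative sketch (not part of the generated header) of Device::createAccelerationStructureKHR
// above: wrapping a caller-provided backing buffer in a RAII acceleration-structure handle.
// Assumes exceptions are enabled (the non-exception path returns the expected<> shown above) and
// that `buffer` was created with eAccelerationStructureStorageKHR usage.
#include <vulkan/vulkan_raii.hpp>

vk::raii::AccelerationStructureKHR makeBlas( vk::raii::Device const & device,
                                             vk::Buffer               buffer,
                                             vk::DeviceSize           size )
{
  vk::AccelerationStructureCreateInfoKHR createInfo{};
  createInfo.buffer = buffer;
  createInfo.offset = 0;
  createInfo.size   = size;  // typically accelerationStructureSize from getAccelerationStructureBuildSizesKHR
  createInfo.type   = vk::AccelerationStructureTypeKHR::eBottomLevel;

  // The RAII wrapper destroys the handle with vkDestroyAccelerationStructureKHR on scope exit.
  return device.createAccelerationStructureKHR( createInfo );
}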
extension enabled!" ); - - VkBool32 result = getDispatcher()->vkGetPhysicalDeviceXcbPresentationSupportKHR( - static_cast( m_physicalDevice ), queueFamilyIndex, &connection, visual_id ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructuresKHR && + "Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); +# else + if ( infos.size() != pBuildRangeInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - return static_cast( result ); + getDispatcher()->vkCmdBuildAccelerationStructuresKHR( + static_cast( m_commandBuffer ), + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( pBuildRangeInfos.data() ) ); } -# endif /*VK_USE_PLATFORM_XCB_KHR*/ - -# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) - //=== VK_KHR_wayland_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - Instance::createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & indirectDeviceAddresses, + VULKAN_HPP_NAMESPACE::ArrayProxy const & indirectStrides, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pMaxPrimitiveCounts ) const { - return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructuresIndirectKHR && + "Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() ); + VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() ); + VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() ); +# else + if ( infos.size() != indirectDeviceAddresses.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" ); + } + if ( infos.size() != indirectStrides.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" ); + } + if ( infos.size() != pMaxPrimitiveCounts.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdBuildAccelerationStructuresIndirectKHR( static_cast( m_commandBuffer ), + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( indirectDeviceAddresses.data() ), + indirectStrides.data(), + pMaxPrimitiveCounts.data() ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 - PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::buildAccelerationStructuresKHR( + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, + VULKAN_HPP_NAMESPACE::ArrayProxy const & pBuildRangeInfos ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceWaylandPresentationSupportKHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkBuildAccelerationStructuresKHR && + "Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() ); +# else + if ( infos.size() != pBuildRangeInfos.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - VkBool32 result = - getDispatcher()->vkGetPhysicalDeviceWaylandPresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex, &display ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkBuildAccelerationStructuresKHR( + static_cast( m_device ), + static_cast( deferredOperation ), + infos.size(), + reinterpret_cast( infos.data() ), + reinterpret_cast( pBuildRangeInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); - return static_cast( result ); + return static_cast( result ); } -# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ - -# if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_KHR_android_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - Instance::createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const { - return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); - } -# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_win32_surface === + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyAccelerationStructureKHR && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkCopyAccelerationStructureKHR( static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - Instance::createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + return static_cast( result ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 - PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceWin32PresentationSupportKHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyAccelerationStructureToMemoryKHR && + "Function requires " ); - VkBool32 result = getDispatcher()->vkGetPhysicalDeviceWin32PresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkCopyAccelerationStructureToMemoryKHR( static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); - return static_cast( result ); + return static_cast( result ); } -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_EXT_debug_report === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT - Instance::createDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const { - return VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT( *this, createInfo, allocator ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToAccelerationStructureKHR && + "Function requires " ); - VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, - uint64_t object, - size_t location, - int32_t messageCode, - const std::string & layerPrefix, - const std::string & message ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( getDispatcher()->vkDebugReportMessageEXT && "Function needs extension enabled!" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkCopyMemoryToAccelerationStructureKHR( static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); - getDispatcher()->vkDebugReportMessageEXT( static_cast( m_instance ), - static_cast( flags ), - static_cast( objectType_ ), - object, - location, - messageCode, - layerPrefix.c_str(), - message.c_str() ); + return static_cast( result ); } - //=== VK_EXT_debug_marker === - - VULKAN_HPP_INLINE void Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::writeAccelerationStructuresPropertiesKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkDebugMarkerSetObjectTagEXT && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR && + "Function requires " ); + + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR( static_cast( m_device ), + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ), + stride ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" ); - VkResult result = - getDispatcher()->vkDebugMarkerSetObjectTagEXT( static_cast( m_device ), reinterpret_cast( &tagInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" ); + return data; } - VULKAN_HPP_INLINE void Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Device::writeAccelerationStructuresPropertyKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkDebugMarkerSetObjectNameEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR && + "Function requires " ); - VkResult result = getDispatcher()->vkDebugMarkerSetObjectNameEXT( static_cast( m_device ), - reinterpret_cast( &nameInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" ); + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR( static_cast( m_device ), + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + sizeof( DataType ), + reinterpret_cast( &data ), + stride ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" ); + + return data; } - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerBeginEXT && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureKHR && + "Function requires " ); - getDispatcher()->vkCmdDebugMarkerBeginEXT( static_cast( m_commandBuffer ), - reinterpret_cast( &markerInfo ) ); + getDispatcher()->vkCmdCopyAccelerationStructureKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &info ) ); } - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( + const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerEndEXT && "Function needs extension enabled!" 
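// Illustrative sketch (not part of the generated header) of the templated
// Device::writeAccelerationStructuresPropertyKHR above.  This is the host-query path, so it
// assumes the accelerationStructureHostCommands feature is enabled and the structure was built
// with the eAllowCompaction flag; most applications instead use the command-buffer variant with
// a query pool.
#include <vulkan/vulkan_raii.hpp>

vk::DeviceSize queryCompactedSize( vk::raii::Device const &     device,
                                   vk::AccelerationStructureKHR accelerationStructure )
{
  // DataType = vk::DeviceSize; one value per input structure, tightly packed (stride = sizeof).
  return device.writeAccelerationStructuresPropertyKHR<vk::DeviceSize>(
    accelerationStructure, vk::QueryType::eAccelerationStructureCompactedSizeKHR, sizeof( vk::DeviceSize ) );
}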
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureToMemoryKHR && + "Function requires " ); - getDispatcher()->vkCmdDebugMarkerEndEXT( static_cast( m_commandBuffer ) ); + getDispatcher()->vkCmdCopyAccelerationStructureToMemoryKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &info ) ); } - VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( + const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerInsertEXT && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryToAccelerationStructureKHR && + "Function requires " ); - getDispatcher()->vkCmdDebugMarkerInsertEXT( static_cast( m_commandBuffer ), - reinterpret_cast( &markerInfo ) ); + getDispatcher()->vkCmdCopyMemoryToAccelerationStructureKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &info ) ); } -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_queue === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR - PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress + Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureDeviceAddressKHR && + "Function requires " ); - VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities; - VkResult result = getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR( static_cast( m_physicalDevice ), - reinterpret_cast( &videoProfile ), - reinterpret_cast( &capabilities ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + VkDeviceAddress result = getDispatcher()->vkGetAccelerationStructureDeviceAddressKHR( + static_cast( m_device ), reinterpret_cast( &info ) ); - return capabilities; + return static_cast( result ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR && - "Function needs extension enabled!" 
); - - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get(); - VkResult result = getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR( static_cast( m_physicalDevice ), - reinterpret_cast( &videoProfile ), - reinterpret_cast( &capabilities ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesKHR && + "Function requires " ); - return structureChain; + getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesKHR( static_cast( m_commandBuffer ), + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR Device::getAccelerationStructureCompatibilityKHR( + const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceAccelerationStructureCompatibilityKHR && + "Function requires " ); - std::vector videoFormatProperties; - uint32_t videoFormatPropertyCount; - VkResult result; - do - { - result = getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast( m_physicalDevice ), - reinterpret_cast( &videoFormatInfo ), - &videoFormatPropertyCount, - nullptr ); - if ( ( result == VK_SUCCESS ) && videoFormatPropertyCount ) - { - videoFormatProperties.resize( videoFormatPropertyCount ); - result = - getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast( m_physicalDevice ), - reinterpret_cast( &videoFormatInfo ), - &videoFormatPropertyCount, - reinterpret_cast( videoFormatProperties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); - VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); - if ( videoFormatPropertyCount < videoFormatProperties.size() ) - { - videoFormatProperties.resize( videoFormatPropertyCount ); - } - return videoFormatProperties; - } + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility; + getDispatcher()->vkGetDeviceAccelerationStructureCompatibilityKHR( static_cast( m_device ), + reinterpret_cast( &versionInfo ), + reinterpret_cast( &compatibility ) ); - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR - Device::createVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR( *this, createInfo, allocator ); + return compatibility; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector VideoSessionKHR::getMemoryRequirements() const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR + Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const 
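// Illustrative sketch (not part of the generated header) of
// Device::getAccelerationStructureAddressKHR above: the returned address is what goes into the
// accelerationStructureReference field of a TLAS instance record.
#include <vulkan/vulkan_raii.hpp>
#include <cstdint>

vk::AccelerationStructureInstanceKHR makeInstance( vk::raii::Device const &     device,
                                                   vk::AccelerationStructureKHR blas )
{
  vk::AccelerationStructureDeviceAddressInfoKHR addressInfo{};
  addressInfo.accelerationStructure = blas;

  vk::AccelerationStructureInstanceKHR instance{};
  instance.mask                           = 0xFF;
  instance.accelerationStructureReference = static_cast<uint64_t>( device.getAccelerationStructureAddressKHR( addressInfo ) );
  // instance.transform is left zero-initialised here; real code must supply a valid transform.
  return instance;
}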
VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, + VULKAN_HPP_NAMESPACE::ArrayProxy const & maxPrimitiveCounts ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR && - "Function needs extension enabled!" ); - - std::vector memoryRequirements; - uint32_t memoryRequirementsCount; - VkResult result; - do - { - result = getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR( - static_cast( m_device ), static_cast( m_videoSession ), &memoryRequirementsCount, nullptr ); - if ( ( result == VK_SUCCESS ) && memoryRequirementsCount ) - { - memoryRequirements.resize( memoryRequirementsCount ); - result = - getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR( static_cast( m_device ), - static_cast( m_videoSession ), - &memoryRequirementsCount, - reinterpret_cast( memoryRequirements.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::VideoSessionKHR::getMemoryRequirements" ); - VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() ); - if ( memoryRequirementsCount < memoryRequirements.size() ) + VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureBuildSizesKHR && + "Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount ); +# else + if ( maxPrimitiveCounts.size() != buildInfo.geometryCount ) { - memoryRequirements.resize( memoryRequirementsCount ); + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" ); } - return memoryRequirements; - } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - VULKAN_HPP_INLINE void VideoSessionKHR::bindMemory( - VULKAN_HPP_NAMESPACE::ArrayProxy const & bindSessionMemoryInfos ) const - { - VULKAN_HPP_ASSERT( getDispatcher()->vkBindVideoSessionMemoryKHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo; + getDispatcher()->vkGetAccelerationStructureBuildSizesKHR( static_cast( m_device ), + static_cast( buildType ), + reinterpret_cast( &buildInfo ), + maxPrimitiveCounts.data(), + reinterpret_cast( &sizeInfo ) ); - VkResult result = - getDispatcher()->vkBindVideoSessionMemoryKHR( static_cast( m_device ), - static_cast( m_videoSession ), - bindSessionMemoryInfos.size(), - reinterpret_cast( bindSessionMemoryInfos.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::VideoSessionKHR::bindMemory" ); + return sizeInfo; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR - Device::createVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR( *this, createInfo, allocator ); - } + //=== VK_KHR_ray_tracing_pipeline === - VULKAN_HPP_INLINE void VideoSessionParametersKHR::update( const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo ) const + VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateVideoSessionParametersKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysKHR && "Function requires " ); - VkResult result = getDispatcher()->vkUpdateVideoSessionParametersKHR( static_cast( m_device ), - static_cast( m_videoSessionParameters ), - reinterpret_cast( &updateInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::VideoSessionParametersKHR::update" ); + getDispatcher()->vkCmdTraceRaysKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &raygenShaderBindingTable ), + reinterpret_cast( &missShaderBindingTable ), + reinterpret_cast( &hitShaderBindingTable ), + reinterpret_cast( &callableShaderBindingTable ), + width, + height, + depth ); } - VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type + Device::createRayTracingPipelinesKHR( + VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginVideoCodingKHR && "Function needs extension enabled!" ); + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateRayTracingPipelinesKHR( + static_cast( m_device ), + deferredOperation ? static_cast( **deferredOperation ) : 0, + pipelineCache ? 
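// Illustrative sketch (not part of the generated header) of
// Device::getAccelerationStructureBuildSizesKHR above.  The wrapper checks that
// maxPrimitiveCounts.size() matches buildInfo.geometryCount, so exactly one count is passed for
// the single geometry used here.
#include <vulkan/vulkan_raii.hpp>

vk::AccelerationStructureBuildSizesInfoKHR queryBuildSizes( vk::raii::Device const &                     device,
                                                            vk::AccelerationStructureGeometryKHR const & geometry,
                                                            uint32_t                                     maxPrimitiveCount )
{
  vk::AccelerationStructureBuildGeometryInfoKHR buildInfo{};
  buildInfo.type          = vk::AccelerationStructureTypeKHR::eBottomLevel;
  buildInfo.mode          = vk::BuildAccelerationStructureModeKHR::eBuild;
  buildInfo.geometryCount = 1;
  buildInfo.pGeometries   = &geometry;

  // The result's accelerationStructureSize / buildScratchSize drive the buffer allocations.
  return device.getAccelerationStructureBuildSizesKHR( vk::AccelerationStructureBuildTypeKHR::eDevice, buildInfo, maxPrimitiveCount );
}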
static_cast( **pipelineCache ) : 0, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createRayTracingPipelinesKHR" ); +# endif + } - getDispatcher()->vkCmdBeginVideoCodingKHR( static_cast( m_commandBuffer ), - reinterpret_cast( &beginInfo ) ); + std::vector pipelinesRAII; + pipelinesRAII.reserve( pipelines.size() ); + for ( auto & pipeline : pipelines ) + { + pipelinesRAII.emplace_back( *this, *reinterpret_cast( &pipeline ), allocator, result ); + } + return pipelinesRAII; } - VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createRayTracingPipelineKHR( + VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndVideoCodingKHR && "Function needs extension enabled!" ); + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateRayTracingPipelinesKHR( + static_cast( m_device ), + deferredOperation ? static_cast( **deferredOperation ) : 0, + pipelineCache ? static_cast( **pipelineCache ) : 0, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) && + ( result != VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createRayTracingPipelineKHR" ); +# endif + } - getDispatcher()->vkCmdEndVideoCodingKHR( static_cast( m_commandBuffer ), - reinterpret_cast( &endCodingInfo ) ); + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast( &pipeline ), allocator, result ); } - VULKAN_HPP_INLINE void - CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Pipeline::getRayTracingShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdControlVideoCodingKHR && "Function needs extension enabled!" 
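// Illustrative sketch (not part of the generated header) of the RAII
// Device::createRayTracingPipelinesKHR above.  Passing nullptr for the deferred operation and
// pipeline cache is allowed; with a deferred operation the call may legitimately return
// eOperationDeferredKHR / eOperationNotDeferredKHR, which the wrapper does not treat as errors.
#include <vulkan/vulkan_raii.hpp>
#include <vector>

std::vector<vk::raii::Pipeline> createRtPipelines( vk::raii::Device const &                                 device,
                                                   std::vector<vk::RayTracingPipelineCreateInfoKHR> const & createInfos )
{
  // Each returned element owns its VkPipeline through the RAII wrapper.
  return device.createRayTracingPipelinesKHR( nullptr, nullptr, createInfos );
}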
); - - getDispatcher()->vkCmdControlVideoCodingKHR( static_cast( m_commandBuffer ), - reinterpret_cast( &codingControlInfo ) ); - } -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_decode_queue === + VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR && + "Function requires or " ); - VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & frameInfo ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecodeVideoKHR && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR( static_cast( m_device ), + static_cast( m_pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesKHR" ); - getDispatcher()->vkCmdDecodeVideoKHR( static_cast( m_commandBuffer ), reinterpret_cast( &frameInfo ) ); + return data; } -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - //=== VK_EXT_transform_feedback === - - VULKAN_HPP_INLINE void - CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, - VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, - VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, - VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Pipeline::getRayTracingShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindTransformFeedbackBuffersEXT && - "Function needs extension enabled!" ); - if ( buffers.size() != offsets.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" ); - } - if ( !sizes.empty() && buffers.size() != sizes.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR && + "Function requires or " ); + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR( static_cast( m_device ), + static_cast( m_pipeline ), + firstGroup, + groupCount, + sizeof( DataType ), + reinterpret_cast( &data ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleKHR" ); - getDispatcher()->vkCmdBindTransformFeedbackBuffersEXT( static_cast( m_commandBuffer ), - firstBinding, - buffers.size(), - reinterpret_cast( buffers.data() ), - reinterpret_cast( offsets.data() ), - reinterpret_cast( sizes.data() ) ); + return data; } - VULKAN_HPP_INLINE void - CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, - VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBuffers, - VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBufferOffsets ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Pipeline::getRayTracingCaptureReplayShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginTransformFeedbackEXT && - "Function needs extension enabled!" 
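// Illustrative sketch (not part of the generated header) of the templated
// Pipeline::getRayTracingShaderGroupHandlesKHR above: fetching all group handles as raw bytes,
// ready to be copied into a shader binding table at the device's aligned stride.
#include <vulkan/vulkan_raii.hpp>
#include <cstdint>
#include <vector>

std::vector<uint8_t> fetchGroupHandles( vk::raii::PhysicalDevice const & physicalDevice,
                                        vk::raii::Pipeline const &       pipeline,
                                        uint32_t                         groupCount )
{
  auto     props      = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceRayTracingPipelinePropertiesKHR>();
  uint32_t handleSize = props.get<vk::PhysicalDeviceRayTracingPipelinePropertiesKHR>().shaderGroupHandleSize;

  // dataSize must be groupCount * shaderGroupHandleSize; the wrapper asserts it is a multiple of
  // sizeof( DataType ), which is trivially true for uint8_t.
  return pipeline.getRayTracingShaderGroupHandlesKHR<uint8_t>( 0, groupCount, static_cast<size_t>( groupCount ) * handleSize );
}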
); - if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); - } - - getDispatcher()->vkCmdBeginTransformFeedbackEXT( static_cast( m_commandBuffer ), - firstCounterBuffer, - counterBuffers.size(), - reinterpret_cast( counterBuffers.data() ), - reinterpret_cast( counterBufferOffsets.data() ) ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR && + "Function requires " ); - VULKAN_HPP_INLINE void - CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, - VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBuffers, - VULKAN_HPP_NAMESPACE::ArrayProxy const & counterBufferOffsets ) const - { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndTransformFeedbackEXT && - "Function needs extension enabled!" ); - if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); - } + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( static_cast( m_device ), + static_cast( m_pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); - getDispatcher()->vkCmdEndTransformFeedbackEXT( static_cast( m_commandBuffer ), - firstCounterBuffer, - counterBuffers.size(), - reinterpret_cast( counterBuffers.data() ), - reinterpret_cast( counterBufferOffsets.data() ) ); + return data; } - VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t query, - VULKAN_HPP_NAMESPACE::QueryControlFlags flags, - uint32_t index ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Pipeline::getRayTracingCaptureReplayShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginQueryIndexedEXT && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR && + "Function requires " ); + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( static_cast( m_device ), + static_cast( m_pipeline ), + firstGroup, + groupCount, + sizeof( DataType ), + reinterpret_cast( &data ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandleKHR" ); - getDispatcher()->vkCmdBeginQueryIndexedEXT( - static_cast( m_commandBuffer ), static_cast( queryPool ), query, static_cast( flags ), index ); + return data; } - VULKAN_HPP_INLINE void - CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, + const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndQueryIndexedEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysIndirectKHR && "Function requires " ); - getDispatcher()->vkCmdEndQueryIndexedEXT( static_cast( m_commandBuffer ), static_cast( queryPool ), query, index ); + getDispatcher()->vkCmdTraceRaysIndirectKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &raygenShaderBindingTable ), + reinterpret_cast( &missShaderBindingTable ), + reinterpret_cast( &hitShaderBindingTable ), + reinterpret_cast( &callableShaderBindingTable ), + static_cast( indirectDeviceAddress ) ); } - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, - uint32_t firstInstance, - VULKAN_HPP_NAMESPACE::Buffer counterBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, - uint32_t counterOffset, - uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize + Pipeline::getRayTracingShaderGroupStackSizeKHR( uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectByteCountEXT && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupStackSizeKHR && + "Function requires " ); - getDispatcher()->vkCmdDrawIndirectByteCountEXT( static_cast( m_commandBuffer ), - instanceCount, - firstInstance, - static_cast( counterBuffer ), - static_cast( counterBufferOffset ), - counterOffset, - vertexStride ); - } + VkDeviceSize result = getDispatcher()->vkGetRayTracingShaderGroupStackSizeKHR( + static_cast( m_device ), static_cast( m_pipeline ), group, static_cast( groupShader ) ); - //=== VK_NVX_binary_import === + return static_cast( result ); + } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX - Device::createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX( *this, createInfo, allocator ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRayTracingPipelineStackSizeKHR && + "Function requires " ); + + getDispatcher()->vkCmdSetRayTracingPipelineStackSizeKHR( static_cast( m_commandBuffer ), pipelineStackSize ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX - Device::createCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + //=== VK_KHR_sampler_ycbcr_conversion === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createSamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX( *this, createInfo, allocator ); + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateSamplerYcbcrConversionKHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &ycbcrConversion ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createSamplerYcbcrConversionKHR" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion( + *this, *reinterpret_cast( &ycbcrConversion ), allocator ); } - VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, + Optional allocator ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCuLaunchKernelNVX && "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkDestroySamplerYcbcrConversionKHR && + "Function requires or " ); - getDispatcher()->vkCmdCuLaunchKernelNVX( static_cast( m_commandBuffer ), reinterpret_cast( &launchInfo ) ); + getDispatcher()->vkDestroySamplerYcbcrConversionKHR( + static_cast( m_device ), + static_cast( ycbcrConversion ), + reinterpret_cast( static_cast( allocator ) ) ); } - //=== VK_NVX_image_view_handle === + //=== VK_KHR_bind_memory2 === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t - Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + Device::bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewHandleNVX && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkBindBufferMemory2KHR && "Function requires or " ); - uint32_t result = - getDispatcher()->vkGetImageViewHandleNVX( static_cast( m_device ), reinterpret_cast( &info ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkBindBufferMemory2KHR( + static_cast( m_device ), bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" ); + } - return result; + VULKAN_HPP_INLINE void + Device::bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBindImageMemory2KHR && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkBindImageMemory2KHR( + static_cast( m_device ), bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX ImageView::getAddressNVX() const + //=== VK_EXT_image_drm_format_modifier === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT Image::getDrmFormatModifierPropertiesEXT() const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewAddressNVX && - "Function needs extension enabled!" 
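// Illustrative sketch (not part of the generated header) of Device::bindImageMemory2KHR above:
// several images can be bound in one call by handing the wrapper an array of BindImageMemoryInfo
// structures.  Assumes VK_KHR_bind_memory2 (or Vulkan 1.1) is available.
#include <vulkan/vulkan_raii.hpp>
#include <array>

void bindTwoImages( vk::raii::Device const & device,
                    vk::Image image0, vk::DeviceMemory memory0, vk::DeviceSize offset0,
                    vk::Image image1, vk::DeviceMemory memory1, vk::DeviceSize offset1 )
{
  std::array<vk::BindImageMemoryInfo, 2> bindInfos = { vk::BindImageMemoryInfo{ image0, memory0, offset0 },
                                                       vk::BindImageMemoryInfo{ image1, memory1, offset1 } };

  // On failure the wrapper reports through resultCheck (throws unless exceptions are disabled).
  device.bindImageMemory2KHR( bindInfos );
}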
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageDrmFormatModifierPropertiesEXT && + "Function requires " ); - VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties; - VkResult result = getDispatcher()->vkGetImageViewAddressNVX( - static_cast( m_device ), static_cast( m_imageView ), reinterpret_cast( &properties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::ImageView::getAddressNVX" ); + VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetImageDrmFormatModifierPropertiesEXT( + static_cast( m_device ), static_cast( m_image ), reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Image::getDrmFormatModifierPropertiesEXT" ); return properties; } - //=== VK_AMD_draw_indirect_count === + //=== VK_EXT_validation_cache === - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::Buffer countBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectCountAMD && - "Function needs extension enabled!" ); + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateValidationCacheEXT( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &validationCache ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createValidationCacheEXT" ); +# endif + } - getDispatcher()->vkCmdDrawIndirectCountAMD( static_cast( m_commandBuffer ), - static_cast( buffer ), - static_cast( offset ), - static_cast( countBuffer ), - static_cast( countBufferOffset ), - maxDrawCount, - stride ); + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT( + *this, *reinterpret_cast( &validationCache ), allocator ); } - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::Buffer countBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void ValidationCacheEXT::merge( VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndexedIndirectCountAMD && - "Function needs extension enabled!" 
);
+      VULKAN_HPP_ASSERT( getDispatcher()->vkMergeValidationCachesEXT && "Function requires <VK_EXT_validation_cache>" );

-      getDispatcher()->vkCmdDrawIndexedIndirectCountAMD( static_cast<VkCommandBuffer>( m_commandBuffer ),
-                                                         static_cast<VkBuffer>( buffer ),
-                                                         static_cast<VkDeviceSize>( offset ),
-                                                         static_cast<VkBuffer>( countBuffer ),
-                                                         static_cast<VkDeviceSize>( countBufferOffset ),
-                                                         maxDrawCount,
-                                                         stride );
+      VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
+        getDispatcher()->vkMergeValidationCachesEXT( static_cast<VkDevice>( m_device ),
+                                                     static_cast<VkValidationCacheEXT>( m_validationCache ),
+                                                     srcCaches.size(),
+                                                     reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) ) );
+      VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::ValidationCacheEXT::merge" );
     }

-    //=== VK_AMD_shader_info ===
-
-    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<uint8_t> Pipeline::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
-                                                                                            VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD   infoType ) const
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<uint8_t> ValidationCacheEXT::getData() const
     {
-      VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderInfoAMD && "Function needs extension <VK_AMD_shader_info> enabled!" );
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetValidationCacheDataEXT && "Function requires <VK_EXT_validation_cache>" );

-      std::vector<uint8_t> info;
-      size_t               infoSize;
-      VkResult             result;
+      std::vector<uint8_t>         data;
+      size_t                       dataSize;
+      VULKAN_HPP_NAMESPACE::Result result;
       do
       {
-        result = getDispatcher()->vkGetShaderInfoAMD( static_cast<VkDevice>( m_device ),
-                                                      static_cast<VkPipeline>( m_pipeline ),
-                                                      static_cast<VkShaderStageFlagBits>( shaderStage ),
-                                                      static_cast<VkShaderInfoTypeAMD>( infoType ),
-                                                      &infoSize,
-                                                      nullptr );
-        if ( ( result == VK_SUCCESS ) && infoSize )
+        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetValidationCacheDataEXT(
+          static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( m_validationCache ), &dataSize, nullptr ) );
+        if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
         {
-          info.resize( infoSize );
-          result = getDispatcher()->vkGetShaderInfoAMD( static_cast<VkDevice>( m_device ),
-                                                        static_cast<VkPipeline>( m_pipeline ),
-                                                        static_cast<VkShaderStageFlagBits>( shaderStage ),
-                                                        static_cast<VkShaderInfoTypeAMD>( infoType ),
-                                                        &infoSize,
-                                                        reinterpret_cast<void *>( info.data() ) );
+          data.resize( dataSize );
+          result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetValidationCacheDataEXT(
+            static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( m_validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) );
         }
-      } while ( result == VK_INCOMPLETE );
-      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getShaderInfoAMD" );
-      VULKAN_HPP_ASSERT( infoSize <= info.size() );
-      if ( infoSize < info.size() )
+      } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
+      VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::ValidationCacheEXT::getData" );
+      VULKAN_HPP_ASSERT( dataSize <= data.size() );
+      if ( dataSize < data.size() )
       {
-        info.resize( infoSize );
+        data.resize( dataSize );
       }
-      return info;
+      return data;
     }

-    //=== VK_KHR_dynamic_rendering ===
+    //=== VK_NV_shading_rate_image ===

-    VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView   imageView,
+                                                                  VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT
     {
-      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderingKHR && "Function needs extension <VK_KHR_dynamic_rendering> enabled!"
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindShadingRateImageNV && "Function requires " ); - getDispatcher()->vkCmdBeginRenderingKHR( static_cast( m_commandBuffer ), reinterpret_cast( &renderingInfo ) ); + getDispatcher()->vkCmdBindShadingRateImageNV( + static_cast( m_commandBuffer ), static_cast( imageView ), static_cast( imageLayout ) ); } - VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( + uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & shadingRatePalettes ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderingKHR && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportShadingRatePaletteNV && + "Function requires " ); - getDispatcher()->vkCmdEndRenderingKHR( static_cast( m_commandBuffer ) ); + getDispatcher()->vkCmdSetViewportShadingRatePaletteNV( static_cast( m_commandBuffer ), + firstViewport, + shadingRatePalettes.size(), + reinterpret_cast( shadingRatePalettes.data() ) ); } -# if defined( VK_USE_PLATFORM_GGP ) - //=== VK_GGP_stream_descriptor_surface === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - Instance::createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, + VULKAN_HPP_NAMESPACE::ArrayProxy const & customSampleOrders ) const VULKAN_HPP_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoarseSampleOrderNV && "Function requires " ); + + getDispatcher()->vkCmdSetCoarseSampleOrderNV( static_cast( m_commandBuffer ), + static_cast( sampleOrderType ), + customSampleOrders.size(), + reinterpret_cast( customSampleOrders.data() ) ); } -# endif /*VK_USE_PLATFORM_GGP*/ - //=== VK_NV_external_memory_capabilities === + //=== VK_NV_ray_tracing === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV - PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, - VULKAN_HPP_NAMESPACE::ImageType type, - VULKAN_HPP_NAMESPACE::ImageTiling tiling, - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, - VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalImageFormatPropertiesNV && - "Function needs extension enabled!" 
); - - VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties; - VkResult result = getDispatcher()->vkGetPhysicalDeviceExternalImageFormatPropertiesNV( - static_cast( m_physicalDevice ), - static_cast( format ), - static_cast( type ), - static_cast( tiling ), - static_cast( usage ), - static_cast( flags ), - static_cast( externalHandleType ), - reinterpret_cast( &externalImageFormatProperties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" ); + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateAccelerationStructureNV( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &accelerationStructure ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createAccelerationStructureNV" ); +# endif + } - return externalImageFormatProperties; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV( + *this, *reinterpret_cast( &accelerationStructure ), allocator ); } -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_external_memory_win32 === + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR Device::getAccelerationStructureMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV && + "Function requires " ); - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE DeviceMemory::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType ) const + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements; + getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::getAccelerationStructureMemoryRequirementsNV( + const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandleNV && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV && + "Function requires " ); - HANDLE handle; - VkResult result = getDispatcher()->vkGetMemoryWin32HandleNV( - static_cast( m_device ), static_cast( m_memory ), static_cast( handleType ), &handle ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::getMemoryWin32HandleNV" ); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); - return handle; + return structureChain; } -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_get_physical_device_properties2 === + VULKAN_HPP_INLINE void Device::bindAccelerationStructureMemoryNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBindAccelerationStructureMemoryNV && + "Function requires " ); - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2KHR() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkBindAccelerationStructureMemoryNV( + static_cast( m_device ), bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" ); + } + + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info, + VULKAN_HPP_NAMESPACE::Buffer instanceData, + VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, + VULKAN_HPP_NAMESPACE::Bool32 update, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::Buffer scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2KHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructureNV && "Function requires " ); - VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; - getDispatcher()->vkGetPhysicalDeviceFeatures2KHR( static_cast( m_physicalDevice ), - reinterpret_cast( &features ) ); + getDispatcher()->vkCmdBuildAccelerationStructureNV( static_cast( m_commandBuffer ), + reinterpret_cast( &info ), + static_cast( instanceData ), + static_cast( instanceOffset ), + static_cast( update ), + static_cast( dst ), + static_cast( src ), + static_cast( scratch ), + static_cast( scratchOffset ) ); + } - return features; + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureNV && "Function requires " ); + + getDispatcher()->vkCmdCopyAccelerationStructureNV( static_cast( m_commandBuffer ), + static_cast( dst ), + static_cast( src ), + static_cast( mode ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain PhysicalDevice::getFeatures2KHR() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, + VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, + VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2KHR && - "Function needs extension enabled!" 
); - - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get(); - getDispatcher()->vkGetPhysicalDeviceFeatures2KHR( static_cast( m_physicalDevice ), - reinterpret_cast( &features ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysNV && "Function requires " ); - return structureChain; + getDispatcher()->vkCmdTraceRaysNV( static_cast( m_commandBuffer ), + static_cast( raygenShaderBindingTableBuffer ), + static_cast( raygenShaderBindingOffset ), + static_cast( missShaderBindingTableBuffer ), + static_cast( missShaderBindingOffset ), + static_cast( missShaderBindingStride ), + static_cast( hitShaderBindingTableBuffer ), + static_cast( hitShaderBindingOffset ), + static_cast( hitShaderBindingStride ), + static_cast( callableShaderBindingTableBuffer ), + static_cast( callableShaderBindingOffset ), + static_cast( callableShaderBindingStride ), + width, + height, + depth ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2KHR() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type + Device::createRayTracingPipelinesNV( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2KHR && - "Function needs extension enabled!" ); - - VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; - getDispatcher()->vkGetPhysicalDeviceProperties2KHR( static_cast( m_physicalDevice ), - reinterpret_cast( &properties ) ); + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateRayTracingPipelinesNV( + static_cast( m_device ), + pipelineCache ? static_cast( **pipelineCache ) : 0, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createRayTracingPipelinesNV" ); +# endif + } - return properties; + std::vector pipelinesRAII; + pipelinesRAII.reserve( pipelines.size() ); + for ( auto & pipeline : pipelines ) + { + pipelinesRAII.emplace_back( *this, *reinterpret_cast( &pipeline ), allocator, result ); + } + return pipelinesRAII; } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain PhysicalDevice::getProperties2KHR() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createRayTracingPipelineNV( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2KHR && - "Function needs extension enabled!" 
); - - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get(); - getDispatcher()->vkGetPhysicalDeviceProperties2KHR( static_cast( m_physicalDevice ), - reinterpret_cast( &properties ) ); + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateRayTracingPipelinesNV( + static_cast( m_device ), + pipelineCache ? static_cast( **pipelineCache ) : 0, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createRayTracingPipelineNV" ); +# endif + } - return structureChain; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast( &pipeline ), allocator, result ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 - PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Pipeline::getRayTracingShaderGroupHandlesNV( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesNV && + "Function requires or " ); - VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; - getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR( - static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( &formatProperties ) ); + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetRayTracingShaderGroupHandlesNV( static_cast( m_device ), + static_cast( m_pipeline ), + firstGroup, + groupCount, + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesNV" ); - return formatProperties; + return data; } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Pipeline::getRayTracingShaderGroupHandleNV( uint32_t firstGroup, uint32_t groupCount ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR && - "Function needs extension enabled!" 
); - - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get(); - getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR( - static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( &formatProperties ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesNV && + "Function requires or " ); + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetRayTracingShaderGroupHandlesNV( static_cast( m_device ), + static_cast( m_pipeline ), + firstGroup, + groupCount, + sizeof( DataType ), + reinterpret_cast( &data ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleNV" ); - return structureChain; + return data; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties2 - PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector AccelerationStructureNV::getHandle( size_t dataSize ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureHandleNV && "Function requires " ); - VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; - VkResult result = - getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast( m_physicalDevice ), - reinterpret_cast( &imageFormatInfo ), - reinterpret_cast( &imageFormatProperties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetAccelerationStructureHandleNV( static_cast( m_device ), + static_cast( m_accelerationStructure ), + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" ); - return imageFormatProperties; + return data; } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType AccelerationStructureNV::getHandle() const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureHandleNV && "Function requires " ); - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get(); - VkResult result = - getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast( m_physicalDevice ), - reinterpret_cast( &imageFormatInfo ), - reinterpret_cast( &imageFormatProperties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetAccelerationStructureHandleNV( static_cast( m_device ), + static_cast( m_accelerationStructure ), + sizeof( DataType ), + reinterpret_cast( &data ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" ); - return structureChain; + return data; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2KHR() const + VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR && - "Function needs extension enabled!" ); - - std::vector queueFamilyProperties; - uint32_t queueFamilyPropertyCount; - getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast( m_physicalDevice ), &queueFamilyPropertyCount, nullptr ); - queueFamilyProperties.resize( queueFamilyPropertyCount ); - getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast( m_physicalDevice ), - &queueFamilyPropertyCount, - reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesNV && + "Function requires " ); - VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); - if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) - { - queueFamilyProperties.resize( queueFamilyPropertyCount ); - } - return queueFamilyProperties; + getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesNV( static_cast( m_commandBuffer ), + accelerationStructures.size(), + reinterpret_cast( accelerationStructures.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2KHR() const + VULKAN_HPP_INLINE void Pipeline::compileDeferredNV( uint32_t shader ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR && - "Function needs extension enabled!" 
); - - std::vector structureChains; - std::vector queueFamilyProperties; - uint32_t queueFamilyPropertyCount; - getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast( m_physicalDevice ), &queueFamilyPropertyCount, nullptr ); - structureChains.resize( queueFamilyPropertyCount ); - queueFamilyProperties.resize( queueFamilyPropertyCount ); - for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) - { - queueFamilyProperties[i].pNext = structureChains[i].template get().pNext; - } - getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast( m_physicalDevice ), - &queueFamilyPropertyCount, - reinterpret_cast( queueFamilyProperties.data() ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCompileDeferredNV && "Function requires " ); - VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); - if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) - { - structureChains.resize( queueFamilyPropertyCount ); - } - for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) - { - structureChains[i].template get() = queueFamilyProperties[i]; - } - return structureChains; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkCompileDeferredNV( static_cast( m_device ), static_cast( m_pipeline ), shader ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::compileDeferredNV" ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 - PhysicalDevice::getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT + //=== VK_KHR_maintenance3 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport + Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupportKHR && + "Function requires or " ); - VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; - getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR( static_cast( m_physicalDevice ), - reinterpret_cast( &memoryProperties ) ); + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; + getDispatcher()->vkGetDescriptorSetLayoutSupportKHR( static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); - return memoryProperties; + return support; } template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain PhysicalDevice::getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupportKHR && + "Function requires or " ); - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = - structureChain.template get(); - getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR( static_cast( m_physicalDevice ), - reinterpret_cast( &memoryProperties ) ); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get(); + getDispatcher()->vkGetDescriptorSetLayoutSupportKHR( static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &support ) ); return structureChain; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const - { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR && - "Function needs extension enabled!" ); + //=== VK_KHR_draw_indirect_count === - std::vector properties; - uint32_t propertyCount; - getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR( static_cast( m_physicalDevice ), - reinterpret_cast( &formatInfo ), - &propertyCount, - nullptr ); - properties.resize( propertyCount ); - getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR( static_cast( m_physicalDevice ), - reinterpret_cast( &formatInfo ), - &propertyCount, - reinterpret_cast( properties.data() ) ); + VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectCountKHR && + "Function requires or or " ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) - { - properties.resize( propertyCount ); - } - return properties; + getDispatcher()->vkCmdDrawIndirectCountKHR( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } - //=== VK_KHR_device_group === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags - Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupPeerMemoryFeaturesKHR && - "Function needs extension enabled!" 
); - - VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; - getDispatcher()->vkGetDeviceGroupPeerMemoryFeaturesKHR( static_cast( m_device ), - heapIndex, - localDeviceIndex, - remoteDeviceIndex, - reinterpret_cast( &peerMemoryFeatures ) ); + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdDrawIndexedIndirectCountKHR && + "Function requires or or " ); - return peerMemoryFeatures; + getDispatcher()->vkCmdDrawIndexedIndirectCountKHR( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } - VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDeviceMaskKHR && "Function needs extension enabled!" ); - - getDispatcher()->vkCmdSetDeviceMaskKHR( static_cast( m_commandBuffer ), deviceMask ); - } + //=== VK_EXT_external_memory_host === - VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, - uint32_t baseGroupY, - uint32_t baseGroupZ, - uint32_t groupCountX, - uint32_t groupCountY, - uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT + Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchBaseKHR && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryHostPointerPropertiesEXT && + "Function requires " ); - getDispatcher()->vkCmdDispatchBaseKHR( - static_cast( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetMemoryHostPointerPropertiesEXT( static_cast( m_device ), + static_cast( handleType ), + pHostPointer, + reinterpret_cast( &memoryHostPointerProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" ); + + return memoryHostPointerProperties; } -# if defined( VK_USE_PLATFORM_VI_NN ) - //=== VK_NN_vi_surface === + //=== VK_AMD_buffer_marker === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - Instance::createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); - } -# endif /*VK_USE_PLATFORM_VI_NN*/ + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteBufferMarkerAMD && "Function requires " ); - //=== VK_KHR_maintenance1 === + getDispatcher()->vkCmdWriteBufferMarkerAMD( static_cast( m_commandBuffer ), + static_cast( pipelineStage ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + marker ); + } - VULKAN_HPP_INLINE void CommandPool::trimKHR( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::Buffer 
dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkTrimCommandPoolKHR && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteBufferMarker2AMD && "Function requires " ); - getDispatcher()->vkTrimCommandPoolKHR( - static_cast( m_device ), static_cast( m_commandPool ), static_cast( flags ) ); + getDispatcher()->vkCmdWriteBufferMarker2AMD( static_cast( m_commandBuffer ), + static_cast( stage ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + marker ); } - //=== VK_KHR_device_group_creation === + //=== VK_EXT_calibrated_timestamps === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Instance::enumeratePhysicalDeviceGroupsKHR() const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getCalibrateableTimeDomainsEXT() const { - VULKAN_HPP_ASSERT( getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT && + "Function requires or " ); - std::vector physicalDeviceGroupProperties; - uint32_t physicalDeviceGroupCount; - VkResult result; + std::vector timeDomains; + uint32_t timeDomainCount; + VULKAN_HPP_NAMESPACE::Result result; do { - result = getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR( static_cast( m_instance ), &physicalDeviceGroupCount, nullptr ); - if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount ) + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( static_cast( m_physicalDevice ), &timeDomainCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) { - physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); - result = - getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR( static_cast( m_instance ), - &physicalDeviceGroupCount, - reinterpret_cast( physicalDeviceGroupProperties.data() ) ); + timeDomains.resize( timeDomainCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + static_cast( m_physicalDevice ), &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); - VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); - if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + if ( timeDomainCount < timeDomains.size() ) { - physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + timeDomains.resize( timeDomainCount ); } - return physicalDeviceGroupProperties; + return timeDomains; } - //=== VK_KHR_external_memory_capabilities === + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair, uint64_t> Device::getCalibratedTimestampsEXT( + VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsEXT && + "Function requires or " ); - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferPropertiesKHR( - const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & 
externalBufferInfo ) const VULKAN_HPP_NOEXCEPT + std::pair, uint64_t> data_( std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); + std::vector & timestamps = data_.first; + uint64_t & maxDeviation = data_.second; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetCalibratedTimestampsEXT( static_cast( m_device ), + timestampInfos.size(), + reinterpret_cast( timestampInfos.data() ), + timestamps.data(), + &maxDeviation ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); + + return data_; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair + Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalBufferPropertiesKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsEXT && + "Function requires or " ); - VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; - getDispatcher()->vkGetPhysicalDeviceExternalBufferPropertiesKHR( static_cast( m_physicalDevice ), - reinterpret_cast( &externalBufferInfo ), - reinterpret_cast( &externalBufferProperties ) ); + std::pair data_; + uint64_t & timestamp = data_.first; + uint64_t & maxDeviation = data_.second; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetCalibratedTimestampsEXT( + static_cast( m_device ), 1, reinterpret_cast( ×tampInfo ), ×tamp, &maxDeviation ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" ); - return externalBufferProperties; + return data_; } -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_memory_win32 === + //=== VK_NV_mesh_shader === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE - Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo ) const + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandleKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksNV && "Function requires " ); - HANDLE handle; - VkResult result = getDispatcher()->vkGetMemoryWin32HandleKHR( - static_cast( m_device ), reinterpret_cast( &getWin32HandleInfo ), &handle ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); - - return handle; + getDispatcher()->vkCmdDrawMeshTasksNV( static_cast( m_commandBuffer ), taskCount, firstTask ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR - Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle ) const + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + uint32_t drawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandlePropertiesKHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectNV && "Function requires " ); - VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties; - VkResult result = - getDispatcher()->vkGetMemoryWin32HandlePropertiesKHR( static_cast( m_device ), - static_cast( handleType ), - handle, - reinterpret_cast( &memoryWin32HandleProperties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" ); + getDispatcher()->vkCmdDrawMeshTasksIndirectNV( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); + } - return memoryWin32HandleProperties; + VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectCountNV && "Function requires " ); + + getDispatcher()->vkCmdDrawMeshTasksIndirectCountNV( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_external_memory_fd === + //=== VK_NV_scissor_exclusive === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo ) const + VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorEnableNV( + uint32_t firstExclusiveScissor, + VULKAN_HPP_NAMESPACE::ArrayProxy const & exclusiveScissorEnables ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryFdKHR && "Function needs extension enabled!" ); - - int fd; - VkResult result = - getDispatcher()->vkGetMemoryFdKHR( static_cast( m_device ), reinterpret_cast( &getFdInfo ), &fd ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExclusiveScissorEnableNV && + "Function requires " ); - return fd; + getDispatcher()->vkCmdSetExclusiveScissorEnableNV( static_cast( m_commandBuffer ), + firstExclusiveScissor, + exclusiveScissorEnables.size(), + reinterpret_cast( exclusiveScissorEnables.data() ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR - Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd ) const + VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( + uint32_t firstExclusiveScissor, VULKAN_HPP_NAMESPACE::ArrayProxy const & exclusiveScissors ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryFdPropertiesKHR && - "Function needs extension enabled!" 
); - - VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties; - VkResult result = getDispatcher()->vkGetMemoryFdPropertiesKHR( static_cast( m_device ), - static_cast( handleType ), - fd, - reinterpret_cast( &memoryFdProperties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExclusiveScissorNV && "Function requires " ); - return memoryFdProperties; + getDispatcher()->vkCmdSetExclusiveScissorNV( static_cast( m_commandBuffer ), + firstExclusiveScissor, + exclusiveScissors.size(), + reinterpret_cast( exclusiveScissors.data() ) ); } - //=== VK_KHR_external_semaphore_capabilities === + //=== VK_NV_device_diagnostic_checkpoints === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR( - const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalSemaphorePropertiesKHR && - "Function needs extension enabled!" ); - - VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; - getDispatcher()->vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( - static_cast( m_physicalDevice ), - reinterpret_cast( &externalSemaphoreInfo ), - reinterpret_cast( &externalSemaphoreProperties ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCheckpointNV && "Function requires " ); - return externalSemaphoreProperties; + getDispatcher()->vkCmdSetCheckpointNV( static_cast( m_commandBuffer ), reinterpret_cast( &checkpointMarker ) ); } -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_semaphore_win32 === - - VULKAN_HPP_INLINE void - Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Queue::getCheckpointDataNV() const { - VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreWin32HandleKHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueueCheckpointDataNV && + "Function requires " ); + + std::vector checkpointData; + uint32_t checkpointDataCount; + getDispatcher()->vkGetQueueCheckpointDataNV( static_cast( m_queue ), &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + getDispatcher()->vkGetQueueCheckpointDataNV( + static_cast( m_queue ), &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); - VkResult result = getDispatcher()->vkImportSemaphoreWin32HandleKHR( - static_cast( m_device ), reinterpret_cast( &importSemaphoreWin32HandleInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + if ( checkpointDataCount < checkpointData.size() ) + { + checkpointData.resize( checkpointDataCount ); + } + return checkpointData; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE - Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Queue::getCheckpointData2NV() const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreWin32HandleKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueueCheckpointData2NV && + "Function requires " ); - HANDLE handle; - VkResult result = getDispatcher()->vkGetSemaphoreWin32HandleKHR( - static_cast( m_device ), reinterpret_cast( &getWin32HandleInfo ), &handle ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); + std::vector checkpointData; + uint32_t checkpointDataCount; + getDispatcher()->vkGetQueueCheckpointData2NV( static_cast( m_queue ), &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + getDispatcher()->vkGetQueueCheckpointData2NV( + static_cast( m_queue ), &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); - return handle; + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + if ( checkpointDataCount < checkpointData.size() ) + { + checkpointData.resize( checkpointDataCount ); + } + return checkpointData; } -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_external_semaphore_fd === + //=== VK_KHR_timeline_semaphore === - VULKAN_HPP_INLINE void Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Semaphore::getCounterValueKHR() const { - VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreFdKHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreCounterValueKHR && + "Function requires or " ); - VkResult result = getDispatcher()->vkImportSemaphoreFdKHR( static_cast( m_device ), - reinterpret_cast( &importSemaphoreFdInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); + uint64_t value; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetSemaphoreCounterValueKHR( static_cast( m_device ), static_cast( m_semaphore ), &value ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValueKHR" ); + + return value; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, + uint64_t timeout ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreFdKHR && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkWaitSemaphoresKHR && "Function requires or " ); - int fd; - VkResult result = - getDispatcher()->vkGetSemaphoreFdKHR( static_cast( m_device ), reinterpret_cast( &getFdInfo ), &fd ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkWaitSemaphoresKHR( static_cast( m_device ), reinterpret_cast( &waitInfo ), timeout ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); - return fd; + return static_cast( result ); } - //=== VK_KHR_push_descriptor === - - VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, - VULKAN_HPP_NAMESPACE::PipelineLayout layout, - uint32_t set, - VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkSignalSemaphoreKHR && "Function requires or " ); - getDispatcher()->vkCmdPushDescriptorSetKHR( static_cast( m_commandBuffer ), - static_cast( pipelineBindPoint ), - static_cast( layout ), - set, - descriptorWrites.size(), - reinterpret_cast( descriptorWrites.data() ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkSignalSemaphoreKHR( static_cast( m_device ), reinterpret_cast( &signalInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" ); } - template - VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, - VULKAN_HPP_NAMESPACE::PipelineLayout layout, - uint32_t set, - DataType const & data ) const VULKAN_HPP_NOEXCEPT + //=== VK_INTEL_performance_query === + + VULKAN_HPP_INLINE void Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetWithTemplateKHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkInitializePerformanceApiINTEL && + "Function requires " ); - getDispatcher()->vkCmdPushDescriptorSetWithTemplateKHR( static_cast( m_commandBuffer ), - static_cast( descriptorUpdateTemplate ), - static_cast( layout ), - set, - reinterpret_cast( &data ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkInitializePerformanceApiINTEL( + static_cast( m_device ), reinterpret_cast( &initializeInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" ); } - //=== VK_EXT_conditional_rendering === - - VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( - const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL() const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginConditionalRenderingEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkUninitializePerformanceApiINTEL && + "Function requires " ); - getDispatcher()->vkCmdBeginConditionalRenderingEXT( static_cast( m_commandBuffer ), - reinterpret_cast( &conditionalRenderingBegin ) ); + getDispatcher()->vkUninitializePerformanceApiINTEL( static_cast( m_device ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceMarkerINTEL && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCmdSetPerformanceMarkerINTEL( + static_cast( m_commandBuffer ), reinterpret_cast( &markerInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" ); } - VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndConditionalRenderingEXT && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceStreamMarkerINTEL && + "Function requires " ); - getDispatcher()->vkCmdEndConditionalRenderingEXT( static_cast( m_commandBuffer ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCmdSetPerformanceStreamMarkerINTEL( + static_cast( m_commandBuffer ), reinterpret_cast( &markerInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" ); } - //=== VK_KHR_descriptor_update_template === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate - Device::createDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_INLINE void CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo ) const { - return VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate( *this, createInfo, allocator ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceOverrideINTEL && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCmdSetPerformanceOverrideINTEL( + static_cast( m_commandBuffer ), reinterpret_cast( &overrideInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" ); } - VULKAN_HPP_INLINE void - Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, - Optional allocator ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL>::Type + Device::acquirePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkDestroyDescriptorUpdateTemplateKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkAcquirePerformanceConfigurationINTEL( static_cast( m_device ), + reinterpret_cast( &acquireInfo ), + reinterpret_cast( &configuration ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::acquirePerformanceConfigurationINTEL" ); +# endif + } - getDispatcher()->vkDestroyDescriptorUpdateTemplateKHR( - static_cast( m_device ), - static_cast( descriptorUpdateTemplate ), - reinterpret_cast( static_cast( allocator ) ) ); + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL( + *this, *reinterpret_cast( &configuration ) ); } - template - VULKAN_HPP_INLINE void DescriptorSet::updateWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, - DataType const & data ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateDescriptorSetWithTemplateKHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSetPerformanceConfigurationINTEL && + "Function requires " ); - getDispatcher()->vkUpdateDescriptorSetWithTemplateKHR( static_cast( m_device ), - static_cast( m_descriptorSet ), - static_cast( descriptorUpdateTemplate ), - reinterpret_cast( &data ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkQueueSetPerformanceConfigurationINTEL( + static_cast( m_queue ), static_cast( configuration ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" ); } - //=== VK_NV_clip_space_w_scaling === - - VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( - uint32_t firstViewport, - VULKAN_HPP_NAMESPACE::ArrayProxy const & viewportWScalings ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PerformanceValueINTEL + Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWScalingNV && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPerformanceParameterINTEL && "Function requires " ); - getDispatcher()->vkCmdSetViewportWScalingNV( static_cast( m_commandBuffer ), - firstViewport, - viewportWScalings.size(), - reinterpret_cast( viewportWScalings.data() ) ); + VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetPerformanceParameterINTEL( + static_cast( m_device ), static_cast( parameter ), reinterpret_cast( &value ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" ); + + return value; } -# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) - //=== VK_EXT_acquire_xlib_display === + //=== VK_AMD_display_native_hdr === - VULKAN_HPP_INLINE void PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const + VULKAN_HPP_INLINE void SwapchainKHR::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireXlibDisplayEXT && - "Function needs extension enabled!" 
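  // Illustrative usage sketch, not part of the generated bindings: exercising the
  // VK_INTEL_performance_query RAII wrappers defined above. The helper name and the
  // pre-existing vk::raii handles are hypothetical; error handling follows the wrappers'
  // default behaviour (throw on failure unless exceptions are disabled).
  void recordIntelPerformanceMarkers( vk::raii::Device const &        device,
                                      vk::raii::CommandBuffer const & commandBuffer,
                                      vk::raii::Queue const &         queue )
  {
    // The Intel performance API must be brought up before any marker/override commands.
    device.initializePerformanceApiINTEL( vk::InitializePerformanceApiInfoINTEL{} );

    // Markers are recorded into the command buffer.
    commandBuffer.setPerformanceMarkerINTEL( vk::PerformanceMarkerInfoINTEL{ 1ull } );
    commandBuffer.setPerformanceStreamMarkerINTEL( vk::PerformanceStreamMarkerInfoINTEL{ 1u } );

    // Acquire a configuration and attach it to the queue that executes the workload.
    vk::raii::PerformanceConfigurationINTEL configuration = device.acquirePerformanceConfigurationINTEL(
      vk::PerformanceConfigurationAcquireInfoINTEL{ vk::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated } );
    queue.setPerformanceConfigurationINTEL( *configuration );

    // Query a parameter and shut the API down again.
    vk::PerformanceValueINTEL hwCounters = device.getPerformanceParameterINTEL( vk::PerformanceParameterTypeINTEL::eHwCountersSupported );
    (void)hwCounters;
    device.uninitializePerformanceApiINTEL();
  }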
); + VULKAN_HPP_ASSERT( getDispatcher()->vkSetLocalDimmingAMD && "Function requires " ); - VkResult result = - getDispatcher()->vkAcquireXlibDisplayEXT( static_cast( m_physicalDevice ), &dpy, static_cast( display ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); + getDispatcher()->vkSetLocalDimmingAMD( + static_cast( m_device ), static_cast( m_swapchain ), static_cast( localDimmingEnable ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayKHR PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, - RROutput rrOutput ) const +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, dpy, rrOutput ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateImagePipeSurfaceFUCHSIA( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createImagePipeSurfaceFUCHSIA" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); } -# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ +# endif /*VK_USE_PLATFORM_FUCHSIA*/ - //=== VK_EXT_display_surface_counter === +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT - PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createMetalSurfaceEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2EXT && - "Function needs extension enabled!" 
); - - VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities; - VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2EXT( static_cast( m_physicalDevice ), - static_cast( surface ), - reinterpret_cast( &surfaceCapabilities ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateMetalSurfaceEXT( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createMetalSurfaceEXT" ); +# endif + } - return surfaceCapabilities; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_EXT_display_control === + //=== VK_KHR_fragment_shading_rate === - VULKAN_HPP_INLINE void Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, - const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getFragmentShadingRatesKHR() const { - VULKAN_HPP_ASSERT( getDispatcher()->vkDisplayPowerControlEXT && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR && + "Function requires " ); - VkResult result = getDispatcher()->vkDisplayPowerControlEXT( - static_cast( m_device ), static_cast( display ), reinterpret_cast( &displayPowerInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" ); + std::vector fragmentShadingRates; + uint32_t fragmentShadingRateCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR( + static_cast( m_physicalDevice ), &fragmentShadingRateCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && fragmentShadingRateCount ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR( + static_cast( m_physicalDevice ), + &fragmentShadingRateCount, + reinterpret_cast( fragmentShadingRates.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); + VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); + if ( fragmentShadingRateCount < fragmentShadingRates.size() ) + { + fragmentShadingRates.resize( fragmentShadingRateCount ); + } + return fragmentShadingRates; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Fence - Device::registerEventEXT( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_INLINE void + CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::Fence( *this, 
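  // Illustrative usage sketch, not part of the generated bindings: the wrapper above hides
  // the count/fill enumeration loop (retrying on eIncomplete) and returns a std::vector
  // directly. Helper names are hypothetical; a valid vk::raii::PhysicalDevice and a
  // recording vk::raii::CommandBuffer are assumed.
  std::vector<vk::PhysicalDeviceFragmentShadingRateKHR> enumerateShadingRates( vk::raii::PhysicalDevice const & physicalDevice )
  {
    return physicalDevice.getFragmentShadingRatesKHR();
  }

  void setCoarseShadingRate( vk::raii::CommandBuffer const & commandBuffer )
  {
    // 2x2 pipeline shading rate, keeping the primitive and attachment rates untouched.
    vk::FragmentShadingRateCombinerOpKHR combinerOps[2] = { vk::FragmentShadingRateCombinerOpKHR::eKeep,
                                                            vk::FragmentShadingRateCombinerOpKHR::eKeep };
    commandBuffer.setFragmentShadingRateKHR( vk::Extent2D{ 2, 2 }, combinerOps );
  }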
deviceEventInfo, allocator ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFragmentShadingRateKHR && + "Function requires " ); + + getDispatcher()->vkCmdSetFragmentShadingRateKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &fragmentSize ), + reinterpret_cast( combinerOps ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Fence - Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, - VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + //=== VK_KHR_dynamic_rendering_local_read === + + VULKAN_HPP_INLINE void + CommandBuffer::setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo ) const VULKAN_HPP_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::Fence( *this, display, displayEventInfo, allocator ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRenderingAttachmentLocationsKHR && + "Function requires or " ); + + getDispatcher()->vkCmdSetRenderingAttachmentLocationsKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &locationInfo ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t SwapchainKHR::getCounterEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter ) const + VULKAN_HPP_INLINE void CommandBuffer::setRenderingInputAttachmentIndicesKHR( + const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainCounterEXT && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRenderingInputAttachmentIndicesKHR && + "Function requires or " ); - uint64_t counterValue; - VkResult result = getDispatcher()->vkGetSwapchainCounterEXT( - static_cast( m_device ), static_cast( m_swapchain ), static_cast( counter ), &counterValue ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getCounterEXT" ); - - return counterValue; + getDispatcher()->vkCmdSetRenderingInputAttachmentIndicesKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &inputAttachmentIndexInfo ) ); } - //=== VK_GOOGLE_display_timing === + //=== VK_EXT_buffer_device_address === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE SwapchainKHR::getRefreshCycleDurationGOOGLE() const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress + Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetRefreshCycleDurationGOOGLE && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetBufferDeviceAddressEXT && + "Function requires or or " ); - VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties; - VkResult result = getDispatcher()->vkGetRefreshCycleDurationGOOGLE( static_cast( m_device ), - static_cast( m_swapchain ), - reinterpret_cast( &displayTimingProperties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getRefreshCycleDurationGOOGLE" ); + VkDeviceAddress result = + getDispatcher()->vkGetBufferDeviceAddressEXT( static_cast( m_device ), reinterpret_cast( &info ) ); - return displayTimingProperties; + return static_cast( result ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector SwapchainKHR::getPastPresentationTimingGOOGLE() const + //=== VK_EXT_tooling_info === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getToolPropertiesEXT() const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPastPresentationTimingGOOGLE && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT && + "Function requires or " ); - std::vector presentationTimings; - uint32_t presentationTimingCount; - VkResult result; + std::vector toolProperties; + uint32_t toolCount; + VULKAN_HPP_NAMESPACE::Result result; do { - result = getDispatcher()->vkGetPastPresentationTimingGOOGLE( - static_cast( m_device ), static_cast( m_swapchain ), &presentationTimingCount, nullptr ); - if ( ( result == VK_SUCCESS ) && presentationTimingCount ) + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT( static_cast( m_physicalDevice ), &toolCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && toolCount ) { - presentationTimings.resize( presentationTimingCount ); - result = getDispatcher()->vkGetPastPresentationTimingGOOGLE( static_cast( m_device ), - static_cast( m_swapchain ), - &presentationTimingCount, - reinterpret_cast( presentationTimings.data() ) ); + toolProperties.resize( toolCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT( + static_cast( m_physicalDevice ), &toolCount, reinterpret_cast( toolProperties.data() ) ) ); } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getPastPresentationTimingGOOGLE" ); - VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); - if ( presentationTimingCount < presentationTimings.size() ) + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); + VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); + if ( toolCount < toolProperties.size() ) { - presentationTimings.resize( presentationTimingCount ); + toolProperties.resize( toolCount ); } - return presentationTimings; + return toolProperties; } - //=== VK_EXT_discard_rectangles === + //=== VK_KHR_present_wait === - VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( - uint32_t firstDiscardRectangle, VULKAN_HPP_NAMESPACE::ArrayProxy const & discardRectangles ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result SwapchainKHR::waitForPresent( uint64_t presentId, uint64_t timeout ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDiscardRectangleEXT && - "Function needs extension enabled!" 
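  // Illustrative usage sketch, not part of the generated bindings: checking whether a
  // validation tool (e.g. the validation layers) is active via the VK_EXT_tooling_info
  // wrapper above. The helper name is hypothetical.
  bool validationToolActive( vk::raii::PhysicalDevice const & physicalDevice )
  {
    for ( vk::PhysicalDeviceToolProperties const & tool : physicalDevice.getToolPropertiesEXT() )
    {
      if ( tool.purposes & vk::ToolPurposeFlagBits::eValidation )
      {
        return true;
      }
    }
    return false;
  }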
); - - getDispatcher()->vkCmdSetDiscardRectangleEXT( static_cast( m_commandBuffer ), - firstDiscardRectangle, - discardRectangles.size(), - reinterpret_cast( discardRectangles.data() ) ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkWaitForPresentKHR && "Function requires " ); - //=== VK_EXT_hdr_metadata === - - VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & swapchains, - VULKAN_HPP_NAMESPACE::ArrayProxy const & metadata ) const - { - VULKAN_HPP_ASSERT( getDispatcher()->vkSetHdrMetadataEXT && "Function needs extension enabled!" ); - if ( swapchains.size() != metadata.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); - } + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkWaitForPresentKHR( static_cast( m_device ), static_cast( m_swapchain ), presentId, timeout ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::waitForPresent", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); - getDispatcher()->vkSetHdrMetadataEXT( static_cast( m_device ), - swapchains.size(), - reinterpret_cast( swapchains.data() ), - reinterpret_cast( metadata.data() ) ); + return static_cast( result ); } - //=== VK_KHR_create_renderpass2 === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::RenderPass - Device::createRenderPass2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, createInfo, allocator ); - } + //=== VK_NV_cooperative_matrix === - VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, - const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getCooperativeMatrixPropertiesNV() const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderPass2KHR && - "Function needs extension enabled!" 
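  // Illustrative usage sketch, not part of the generated bindings: SwapchainKHR::waitForPresent
  // treats eSuccess, eTimeout and eSuboptimalKHR as non-exceptional (see the resultCheck call
  // above), so the returned result still needs to be inspected. The helper name and timeout
  // value are hypothetical.
  bool waitForFramePresented( vk::raii::SwapchainKHR const & swapchain, uint64_t presentId )
  {
    constexpr uint64_t timeoutNs = 1'000'000'000ull;  // one second
    vk::Result result = swapchain.waitForPresent( presentId, timeoutNs );
    return result != vk::Result::eTimeout;  // eSuccess and eSuboptimalKHR both mean the frame was presented
  }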
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV && + "Function requires " ); - getDispatcher()->vkCmdBeginRenderPass2KHR( static_cast( m_commandBuffer ), - reinterpret_cast( &renderPassBegin ), - reinterpret_cast( &subpassBeginInfo ) ); + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; } - VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, - const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdNextSubpass2KHR && "Function needs extension enabled!" ); - - getDispatcher()->vkCmdNextSubpass2KHR( static_cast( m_commandBuffer ), - reinterpret_cast( &subpassBeginInfo ), - reinterpret_cast( &subpassEndInfo ) ); - } + //=== VK_NV_coverage_reduction_mode === - VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV() const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderPass2KHR && "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV && + "Function requires " ); - getDispatcher()->vkCmdEndRenderPass2KHR( static_cast( m_commandBuffer ), reinterpret_cast( &subpassEndInfo ) ); + std::vector combinations; + uint32_t combinationCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + static_cast( m_physicalDevice ), &combinationCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && combinationCount ) + { + combinations.resize( combinationCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + static_cast( m_physicalDevice ), + &combinationCount, + reinterpret_cast( combinations.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); + VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); + if ( combinationCount < combinations.size() ) + { + combinations.resize( combinationCount ); + } + return combinations; } - //=== VK_KHR_shared_presentable_image === +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result SwapchainKHR::getStatus() const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainStatusKHR && - "Function needs extension enabled!" 
); - - VkResult result = getDispatcher()->vkGetSwapchainStatusKHR( static_cast( m_device ), static_cast( m_swapchain ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getStatus", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT && + "Function requires " ); - return static_cast( result ); + std::vector presentModes; + uint32_t presentModeCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast( m_physicalDevice ), + reinterpret_cast( &surfaceInfo ), + &presentModeCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && presentModeCount ) + { + presentModes.resize( presentModeCount ); + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast( m_physicalDevice ), + reinterpret_cast( &surfaceInfo ), + &presentModeCount, + reinterpret_cast( presentModes.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); + VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); + if ( presentModeCount < presentModes.size() ) + { + presentModes.resize( presentModeCount ); + } + return presentModes; } - //=== VK_KHR_external_fence_capabilities === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties - PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void SwapchainKHR::acquireFullScreenExclusiveModeEXT() const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalFencePropertiesKHR && - "Function needs extension enabled!" ); - - VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; - getDispatcher()->vkGetPhysicalDeviceExternalFencePropertiesKHR( static_cast( m_physicalDevice ), - reinterpret_cast( &externalFenceInfo ), - reinterpret_cast( &externalFenceProperties ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireFullScreenExclusiveModeEXT && + "Function requires " ); - return externalFenceProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkAcquireFullScreenExclusiveModeEXT( static_cast( m_device ), static_cast( m_swapchain ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireFullScreenExclusiveModeEXT" ); } -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_fence_win32 === - - VULKAN_HPP_INLINE void Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo ) const + VULKAN_HPP_INLINE void SwapchainKHR::releaseFullScreenExclusiveModeEXT() const { - VULKAN_HPP_ASSERT( getDispatcher()->vkImportFenceWin32HandleKHR && - "Function needs extension enabled!" 
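  // Illustrative usage sketch, not part of the generated bindings: toggling exclusive
  // full-screen mode (Win32 only, inside the VK_USE_PLATFORM_WIN32_KHR guard above). Both
  // wrappers report failure through the usual result handling, so the hypothetical helpers
  // below need no extra checks; the swapchain is assumed to have been created with
  // VkSurfaceFullScreenExclusiveInfoEXT chained into its create info.
  void enterFullScreen( vk::raii::SwapchainKHR const & swapchain )
  {
    swapchain.acquireFullScreenExclusiveModeEXT();
  }

  void leaveFullScreen( vk::raii::SwapchainKHR const & swapchain )
  {
    swapchain.releaseFullScreenExclusiveModeEXT();
  }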
); + VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseFullScreenExclusiveModeEXT && + "Function requires " ); - VkResult result = getDispatcher()->vkImportFenceWin32HandleKHR( - static_cast( m_device ), reinterpret_cast( &importFenceWin32HandleInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkReleaseFullScreenExclusiveModeEXT( static_cast( m_device ), static_cast( m_swapchain ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::releaseFullScreenExclusiveModeEXT" ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE - Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR + Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceWin32HandleKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupSurfacePresentModes2EXT && + "Function requires " ); - HANDLE handle; - VkResult result = getDispatcher()->vkGetFenceWin32HandleKHR( - static_cast( m_device ), reinterpret_cast( &getWin32HandleInfo ), &handle ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" ); + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetDeviceGroupSurfacePresentModes2EXT( static_cast( m_device ), + reinterpret_cast( &surfaceInfo ), + reinterpret_cast( &modes ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" ); - return handle; + return modes; } # endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_external_fence_fd === + //=== VK_EXT_headless_surface === - VULKAN_HPP_INLINE void Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo ) const - { - VULKAN_HPP_ASSERT( getDispatcher()->vkImportFenceFdKHR && "Function needs extension enabled!" 
); + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateHeadlessSurfaceEXT( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createHeadlessSurfaceEXT" ); +# endif + } - VkResult result = - getDispatcher()->vkImportFenceFdKHR( static_cast( m_device ), reinterpret_cast( &importFenceFdInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" ); + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo ) const + //=== VK_KHR_buffer_device_address === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress + Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceFdKHR && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetBufferDeviceAddressKHR && + "Function requires or or " ); - int fd; - VkResult result = getDispatcher()->vkGetFenceFdKHR( static_cast( m_device ), reinterpret_cast( &getFdInfo ), &fd ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" ); + VkDeviceAddress result = + getDispatcher()->vkGetBufferDeviceAddressKHR( static_cast( m_device ), reinterpret_cast( &info ) ); - return fd; + return static_cast( result ); } - //=== VK_KHR_performance_query === - - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE std::pair, std::vector> - PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t + Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR && - "Function needs extension enabled!" 
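  // Illustrative usage sketch, not part of the generated bindings: creating a headless
  // surface for off-screen or CI rendering. With exceptions enabled, the CreateReturnType
  // alias used above is simply vk::raii::SurfaceKHR; under VULKAN_HPP_RAII_NO_EXCEPTIONS it
  // becomes an expected-like wrapper that must be checked instead. The helper name is
  // hypothetical.
  vk::raii::SurfaceKHR createOffscreenSurface( vk::raii::Instance const & instance )
  {
    return instance.createHeadlessSurfaceEXT( vk::HeadlessSurfaceCreateInfoEXT{} );
  }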
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferOpaqueCaptureAddressKHR && + "Function requires or " ); - std::pair, std::vector> data; - std::vector & counters = data.first; - std::vector & counterDescriptions = data.second; - uint32_t counterCount; - VkResult result; - do - { - result = getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( - static_cast( m_physicalDevice ), queueFamilyIndex, &counterCount, nullptr, nullptr ); - if ( ( result == VK_SUCCESS ) && counterCount ) - { - counters.resize( counterCount ); - counterDescriptions.resize( counterCount ); - result = getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( - static_cast( m_physicalDevice ), - queueFamilyIndex, - &counterCount, - reinterpret_cast( counters.data() ), - reinterpret_cast( counterDescriptions.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); - VULKAN_HPP_ASSERT( counterCount <= counters.size() ); - if ( counterCount < counters.size() ) - { - counters.resize( counterCount ); - counterDescriptions.resize( counterCount ); - } - return data; + uint64_t result = + getDispatcher()->vkGetBufferOpaqueCaptureAddressKHR( static_cast( m_device ), reinterpret_cast( &info ) ); + + return result; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( - const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t + Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddressKHR && + "Function requires or " ); - uint32_t numPasses; - getDispatcher()->vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( - static_cast( m_physicalDevice ), - reinterpret_cast( &performanceQueryCreateInfo ), - &numPasses ); + uint64_t result = getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddressKHR( static_cast( m_device ), + reinterpret_cast( &info ) ); - return numPasses; + return result; } - VULKAN_HPP_INLINE void Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info ) const + //=== VK_EXT_line_rasterization === + + VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireProfilingLockKHR && - "Function needs extension enabled!" 
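  // Illustrative usage sketch, not part of the generated bindings: querying a buffer's GPU
  // virtual address through the VK_KHR_buffer_device_address wrappers above. The buffer is
  // assumed to have been created with vk::BufferUsageFlagBits::eShaderDeviceAddress (and,
  // for the opaque capture address, the device-address capture/replay create flag).
  vk::DeviceAddress queryBufferAddress( vk::raii::Device const & device, vk::raii::Buffer const & buffer )
  {
    vk::BufferDeviceAddressInfo addressInfo{ *buffer };
    uint64_t captureAddress = device.getBufferOpaqueCaptureAddressKHR( addressInfo );  // for trace capture/replay tooling
    (void)captureAddress;
    return device.getBufferAddressKHR( addressInfo );
  }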
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEXT && + "Function requires or or " ); - VkResult result = - getDispatcher()->vkAcquireProfilingLockKHR( static_cast( m_device ), reinterpret_cast( &info ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" ); + getDispatcher()->vkCmdSetLineStippleEXT( static_cast( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); } - VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR() const VULKAN_HPP_NOEXCEPT + //=== VK_EXT_host_query_reset === + + VULKAN_HPP_INLINE void QueryPool::resetEXT( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseProfilingLockKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkResetQueryPoolEXT && "Function requires or " ); - getDispatcher()->vkReleaseProfilingLockKHR( static_cast( m_device ) ); + getDispatcher()->vkResetQueryPoolEXT( static_cast( m_device ), static_cast( m_queryPool ), firstQuery, queryCount ); } - //=== VK_KHR_get_surface_capabilities2 === + //=== VK_EXT_extended_dynamic_state === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR - PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const + VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR && - "Function needs extension enabled!" ); - - VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities; - VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast( m_physicalDevice ), - reinterpret_cast( &surfaceInfo ), - reinterpret_cast( &surfaceCapabilities ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullModeEXT && + "Function requires or or " ); - return surfaceCapabilities; + getDispatcher()->vkCmdSetCullModeEXT( static_cast( m_commandBuffer ), static_cast( cullMode ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const + VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFaceEXT && + "Function requires or or " ); - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get(); - VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast( m_physicalDevice ), - reinterpret_cast( &surfaceInfo ), - reinterpret_cast( &surfaceCapabilities ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); + getDispatcher()->vkCmdSetFrontFaceEXT( static_cast( m_commandBuffer ), static_cast( frontFace ) ); + } - return structureChain; + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveTopologyEXT && + "Function requires or or " ); + + getDispatcher()->vkCmdSetPrimitiveTopologyEXT( static_cast( m_commandBuffer ), static_cast( primitiveTopology ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const + VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWithCountEXT && + "Function requires or or " ); - std::vector surfaceFormats; - uint32_t surfaceFormatCount; - VkResult result; - do - { - result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast( m_physicalDevice ), - reinterpret_cast( &surfaceInfo ), - &surfaceFormatCount, - nullptr ); - if ( ( result == VK_SUCCESS ) && surfaceFormatCount ) - { - surfaceFormats.resize( surfaceFormatCount ); - result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast( m_physicalDevice ), - reinterpret_cast( &surfaceInfo ), - &surfaceFormatCount, - reinterpret_cast( surfaceFormats.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); - VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); - if ( surfaceFormatCount < surfaceFormats.size() ) - { - surfaceFormats.resize( surfaceFormatCount ); - } - return surfaceFormats; + getDispatcher()->vkCmdSetViewportWithCountEXT( + static_cast( m_commandBuffer ), viewports.size(), reinterpret_cast( viewports.data() ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const + VULKAN_HPP_INLINE void + CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissorWithCountEXT && + "Function requires or or " ); - std::vector structureChains; - std::vector surfaceFormats; - uint32_t surfaceFormatCount; - VkResult result; - do + getDispatcher()->vkCmdSetScissorWithCountEXT( + static_cast( m_commandBuffer ), scissors.size(), reinterpret_cast( scissors.data() ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, + VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes, + VULKAN_HPP_NAMESPACE::ArrayProxy const & strides ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers2EXT && + "Function requires or or " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); + VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); + VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); +# else + if ( buffers.size() != offsets.size() ) { - result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast( m_physicalDevice ), - reinterpret_cast( &surfaceInfo ), - &surfaceFormatCount, - nullptr ); - if ( ( result == VK_SUCCESS ) && surfaceFormatCount ) - { - structureChains.resize( surfaceFormatCount ); - surfaceFormats.resize( surfaceFormatCount ); - for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) - { - surfaceFormats[i].pNext = structureChains[i].template get().pNext; - } - result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast( m_physicalDevice ), - reinterpret_cast( &surfaceInfo ), - &surfaceFormatCount, - reinterpret_cast( surfaceFormats.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); - VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); - if ( surfaceFormatCount < surfaceFormats.size() ) + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" ); + } + if ( !sizes.empty() && buffers.size() != sizes.size() ) { - structureChains.resize( surfaceFormatCount ); + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" ); } - for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) + if ( !strides.empty() && buffers.size() != strides.size() ) { - structureChains[i].template get() = surfaceFormats[i]; + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" ); } - return structureChains; - } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - //=== VK_KHR_get_display_properties2 === + getDispatcher()->vkCmdBindVertexBuffers2EXT( static_cast( m_commandBuffer ), + firstBinding, + buffers.size(), + reinterpret_cast( buffers.data() ), + reinterpret_cast( offsets.data() ), + reinterpret_cast( sizes.data() ), + reinterpret_cast( strides.data() ) ); + } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getDisplayProperties2KHR() const + VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR && - "Function needs extension enabled!" 
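  // Illustrative usage sketch, not part of the generated bindings: recording a few
  // VK_EXT_extended_dynamic_state commands. Note that bindVertexBuffers2EXT requires
  // offsets (and, when non-empty, sizes/strides) to match buffers in length; as shown
  // above, the wrapper asserts or throws a LogicError otherwise. The helper name, buffer
  // and command buffer are hypothetical.
  void recordDynamicState( vk::raii::CommandBuffer const & commandBuffer, vk::Buffer vertexBuffer )
  {
    commandBuffer.setCullModeEXT( vk::CullModeFlagBits::eBack );
    commandBuffer.setFrontFaceEXT( vk::FrontFace::eCounterClockwise );
    commandBuffer.setPrimitiveTopologyEXT( vk::PrimitiveTopology::eTriangleList );
    commandBuffer.setDepthTestEnableEXT( VK_TRUE );
    commandBuffer.setDepthWriteEnableEXT( VK_TRUE );

    // One buffer, one offset; sizes and strides may be left empty.
    vk::DeviceSize offset = 0;
    commandBuffer.bindVertexBuffers2EXT( 0, vertexBuffer, offset, nullptr, nullptr );
  }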
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthTestEnableEXT && + "Function requires or or " ); - std::vector properties; - uint32_t propertyCount; - VkResult result; - do - { - result = getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR( static_cast( m_physicalDevice ), &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) - { - properties.resize( propertyCount ); - result = getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR( - static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) - { - properties.resize( propertyCount ); - } - return properties; + getDispatcher()->vkCmdSetDepthTestEnableEXT( static_cast( m_commandBuffer ), static_cast( depthTestEnable ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getDisplayPlaneProperties2KHR() const + VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthWriteEnableEXT && + "Function requires or or " ); - std::vector properties; - uint32_t propertyCount; - VkResult result; - do - { - result = getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR( static_cast( m_physicalDevice ), &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) - { - properties.resize( propertyCount ); - result = getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR( - static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) - { - properties.resize( propertyCount ); - } - return properties; + getDispatcher()->vkCmdSetDepthWriteEnableEXT( static_cast( m_commandBuffer ), static_cast( depthWriteEnable ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector DisplayKHR::getModeProperties2() const - { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayModeProperties2KHR && - "Function needs extension enabled!" 
); - - std::vector properties; - uint32_t propertyCount; - VkResult result; - do - { - result = getDispatcher()->vkGetDisplayModeProperties2KHR( - static_cast( m_physicalDevice ), static_cast( m_display ), &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) - { - properties.resize( propertyCount ); - result = getDispatcher()->vkGetDisplayModeProperties2KHR( static_cast( m_physicalDevice ), - static_cast( m_display ), - &propertyCount, - reinterpret_cast( properties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties2" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) - { - properties.resize( propertyCount ); - } - return properties; + VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthCompareOpEXT && + "Function requires or or " ); + + getDispatcher()->vkCmdSetDepthCompareOpEXT( static_cast( m_commandBuffer ), static_cast( depthCompareOp ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR - PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo ) const + VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayPlaneCapabilities2KHR && - "Function needs extension enabled!" ); - - VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities; - VkResult result = getDispatcher()->vkGetDisplayPlaneCapabilities2KHR( static_cast( m_physicalDevice ), - reinterpret_cast( &displayPlaneInfo ), - reinterpret_cast( &capabilities ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT && + "Function requires or or " ); - return capabilities; + getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT( static_cast( m_commandBuffer ), static_cast( depthBoundsTestEnable ) ); } -# if defined( VK_USE_PLATFORM_IOS_MVK ) - //=== VK_MVK_ios_surface === + VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilTestEnableEXT && + "Function requires or or " ); - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - Instance::createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + getDispatcher()->vkCmdSetStencilTestEnableEXT( static_cast( m_commandBuffer ), static_cast( stencilTestEnable ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, + VULKAN_HPP_NAMESPACE::StencilOp failOp, + VULKAN_HPP_NAMESPACE::StencilOp passOp, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, + VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOpEXT && + "Function requires or or " ); + + getDispatcher()->vkCmdSetStencilOpEXT( static_cast( m_commandBuffer ), + static_cast( faceMask ), + 
static_cast( failOp ), + static_cast( passOp ), + static_cast( depthFailOp ), + static_cast( compareOp ) ); } -# endif /*VK_USE_PLATFORM_IOS_MVK*/ -# if defined( VK_USE_PLATFORM_MACOS_MVK ) - //=== VK_MVK_macos_surface === + //=== VK_KHR_deferred_host_operations === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - Instance::createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createDeferredOperationKHR( VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); - } -# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateDeferredOperationKHR( + static_cast( m_device ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &deferredOperation ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createDeferredOperationKHR" ); +# endif + } - //=== VK_EXT_debug_utils === + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR( + *this, *reinterpret_cast( &deferredOperation ), allocator ); + } - VULKAN_HPP_INLINE void Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t DeferredOperationKHR::getMaxConcurrency() const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkSetDebugUtilsObjectNameEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeferredOperationMaxConcurrencyKHR && + "Function requires " ); - VkResult result = getDispatcher()->vkSetDebugUtilsObjectNameEXT( static_cast( m_device ), - reinterpret_cast( &nameInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" ); + uint32_t result = + getDispatcher()->vkGetDeferredOperationMaxConcurrencyKHR( static_cast( m_device ), static_cast( m_operation ) ); + + return result; } - VULKAN_HPP_INLINE void Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result DeferredOperationKHR::getResult() const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkSetDebugUtilsObjectTagEXT && - "Function needs extension enabled!" 
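  // Illustrative usage sketch, not part of the generated bindings: driving a deferred host
  // operation. join() forwards eThreadDoneKHR and eThreadIdleKHR to the caller (see the
  // resultCheck call above); getResult() then yields the outcome of the deferred work
  // itself. The helper name is hypothetical and the default allocator is used.
  vk::Result driveDeferredOperation( vk::raii::Device const & device )
  {
    vk::raii::DeferredOperationKHR operation = device.createDeferredOperationKHR();

    // ... hand `operation` to a deferrable call, e.g. a ray-tracing pipeline build ...

    uint32_t maxThreads = operation.getMaxConcurrency();  // how many threads can usefully join
    (void)maxThreads;

    vk::Result joinResult;
    do
    {
      joinResult = operation.join();
    } while ( joinResult == vk::Result::eThreadIdleKHR );

    return operation.getResult();
  }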
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeferredOperationResultKHR && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetDeferredOperationResultKHR( static_cast( m_device ), static_cast( m_operation ) ) ); - VkResult result = - getDispatcher()->vkSetDebugUtilsObjectTagEXT( static_cast( m_device ), reinterpret_cast( &tagInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" ); + return static_cast( result ); } - VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result DeferredOperationKHR::join() const { - VULKAN_HPP_ASSERT( getDispatcher()->vkQueueBeginDebugUtilsLabelEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkDeferredOperationJoinKHR && "Function requires " ); - getDispatcher()->vkQueueBeginDebugUtilsLabelEXT( static_cast( m_queue ), reinterpret_cast( &labelInfo ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkDeferredOperationJoinKHR( static_cast( m_device ), static_cast( m_operation ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::DeferredOperationKHR::join", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } ); + + return static_cast( result ); } - VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT + //=== VK_KHR_pipeline_executable_properties === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkQueueEndDebugUtilsLabelEXT && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutablePropertiesKHR && + "Function requires " ); - getDispatcher()->vkQueueEndDebugUtilsLabelEXT( static_cast( m_queue ) ); + std::vector properties; + uint32_t executableCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPipelineExecutablePropertiesKHR( + static_cast( m_device ), reinterpret_cast( &pipelineInfo ), &executableCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && executableCount ) + { + properties.resize( executableCount ); + result = static_cast( + getDispatcher()->vkGetPipelineExecutablePropertiesKHR( static_cast( m_device ), + reinterpret_cast( &pipelineInfo ), + &executableCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); + VULKAN_HPP_ASSERT( executableCount <= properties.size() ); + if ( executableCount < properties.size() ) + { + properties.resize( executableCount ); + } + return properties; } - VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkQueueInsertDebugUtilsLabelEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutableStatisticsKHR && + "Function requires " ); - getDispatcher()->vkQueueInsertDebugUtilsLabelEXT( static_cast( m_queue ), reinterpret_cast( &labelInfo ) ); + std::vector statistics; + uint32_t statisticCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPipelineExecutableStatisticsKHR( + static_cast( m_device ), reinterpret_cast( &executableInfo ), &statisticCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && statisticCount ) + { + statistics.resize( statisticCount ); + result = static_cast( + getDispatcher()->vkGetPipelineExecutableStatisticsKHR( static_cast( m_device ), + reinterpret_cast( &executableInfo ), + &statisticCount, + reinterpret_cast( statistics.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); + VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); + if ( statisticCount < statistics.size() ) + { + statistics.resize( statisticCount ); + } + return statistics; } - VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginDebugUtilsLabelEXT && - "Function needs extension enabled!" 
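  // Illustrative usage sketch, not part of the generated bindings: walking the executables
  // of a pipeline and fetching their statistics through the wrappers above. Requires a
  // device created with the pipelineExecutableInfo feature enabled; the helper name and the
  // pipeline handle are hypothetical.
  void inspectPipeline( vk::raii::Device const & device, vk::Pipeline pipeline )
  {
    std::vector<vk::PipelineExecutablePropertiesKHR> executables =
      device.getPipelineExecutablePropertiesKHR( vk::PipelineInfoKHR{ pipeline } );

    for ( uint32_t i = 0; i < static_cast<uint32_t>( executables.size() ); ++i )
    {
      vk::PipelineExecutableInfoKHR executableInfo{ pipeline, i };
      std::vector<vk::PipelineExecutableStatisticKHR> statistics = device.getPipelineExecutableStatisticsKHR( executableInfo );
      (void)statistics;  // e.g. register pressure, instruction counts, ...
    }
  }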
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR && + "Function requires " ); - getDispatcher()->vkCmdBeginDebugUtilsLabelEXT( static_cast( m_commandBuffer ), - reinterpret_cast( &labelInfo ) ); + std::vector internalRepresentations; + uint32_t internalRepresentationCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR( static_cast( m_device ), + reinterpret_cast( &executableInfo ), + &internalRepresentationCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && internalRepresentationCount ) + { + internalRepresentations.resize( internalRepresentationCount ); + result = static_cast( getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR( + static_cast( m_device ), + reinterpret_cast( &executableInfo ), + &internalRepresentationCount, + reinterpret_cast( internalRepresentations.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); + VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); + if ( internalRepresentationCount < internalRepresentations.size() ) + { + internalRepresentations.resize( internalRepresentationCount ); + } + return internalRepresentations; } - VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT + //=== VK_EXT_host_image_copy === + + VULKAN_HPP_INLINE void Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndDebugUtilsLabelEXT && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToImageEXT && "Function requires or " ); - getDispatcher()->vkCmdEndDebugUtilsLabelEXT( static_cast( m_commandBuffer ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCopyMemoryToImageEXT( + static_cast( m_device ), reinterpret_cast( ©MemoryToImageInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" ); } - VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdInsertDebugUtilsLabelEXT && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToMemoryEXT && "Function requires or " ); - getDispatcher()->vkCmdInsertDebugUtilsLabelEXT( static_cast( m_commandBuffer ), - reinterpret_cast( &labelInfo ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCopyImageToMemoryEXT( + static_cast( m_device ), reinterpret_cast( ©ImageToMemoryInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT - Instance::createDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_INLINE void Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo ) const { - return VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT( *this, createInfo, allocator ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToImageEXT && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCopyImageToImageEXT( + static_cast( m_device ), reinterpret_cast( ©ImageToImageInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" ); } VULKAN_HPP_INLINE void - Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, - VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, - const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData ) const VULKAN_HPP_NOEXCEPT + Device::transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkSubmitDebugUtilsMessageEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkTransitionImageLayoutEXT && + "Function requires or " ); - getDispatcher()->vkSubmitDebugUtilsMessageEXT( static_cast( m_instance ), - static_cast( messageSeverity ), - static_cast( messageTypes ), - reinterpret_cast( &callbackData ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkTransitionImageLayoutEXT( + static_cast( m_device ), transitions.size(), reinterpret_cast( transitions.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" ); } -# if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_ANDROID_external_memory_android_hardware_buffer === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID - Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 + Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( - getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID && - "Function needs extension enabled!" 
); + getDispatcher()->vkGetImageSubresourceLayout2EXT && + "Function requires or or or " ); - VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties; - VkResult result = getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID( - static_cast( m_device ), &buffer, reinterpret_cast( &properties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; + getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast( m_device ), + static_cast( m_image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); - return properties; + return layout; } template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( - getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID && - "Function needs extension enabled!" ); + getDispatcher()->vkGetImageSubresourceLayout2EXT && + "Function requires or or or " ); - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties = - structureChain.template get(); - VkResult result = getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID( - static_cast( m_device ), &buffer, reinterpret_cast( &properties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); + getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast( m_device ), + static_cast( m_image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); return structureChain; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE struct AHardwareBuffer * - Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info ) const + //=== VK_KHR_map_memory2 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryAndroidHardwareBufferANDROID && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkMapMemory2KHR && "Function requires or " ); - struct AHardwareBuffer * buffer; - VkResult result = getDispatcher()->vkGetMemoryAndroidHardwareBufferANDROID( - static_cast( m_device ), reinterpret_cast( &info ), &buffer ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" ); + void * pData; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkMapMemory2KHR( static_cast( m_device ), reinterpret_cast( &memoryMapInfo ), &pData ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2KHR" ); - return buffer; + return pData; } -# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - - //=== VK_EXT_sample_locations === - VULKAN_HPP_INLINE void - CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleLocationsEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkUnmapMemory2KHR && "Function requires or " ); - getDispatcher()->vkCmdSetSampleLocationsEXT( static_cast( m_commandBuffer ), - reinterpret_cast( &sampleLocationsInfo ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkUnmapMemory2KHR( static_cast( m_device ), reinterpret_cast( &memoryUnmapInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2KHR" ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT - PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMultisamplePropertiesEXT && - "Function needs extension enabled!" ); + //=== VK_EXT_swapchain_maintenance1 === - VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties; - getDispatcher()->vkGetPhysicalDeviceMultisamplePropertiesEXT( static_cast( m_physicalDevice ), - static_cast( samples ), - reinterpret_cast( &multisampleProperties ) ); + VULKAN_HPP_INLINE void Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseSwapchainImagesEXT && "Function requires " ); - return multisampleProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkReleaseSwapchainImagesEXT( + static_cast( m_device ), reinterpret_cast( &releaseInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseSwapchainImagesEXT" ); } - //=== VK_KHR_get_memory_requirements2 === + //=== VK_NV_device_generated_commands === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 - Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2KHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV && + "Function requires " ); VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; - getDispatcher()->vkGetImageMemoryRequirements2KHR( static_cast( m_device ), - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); + getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); return memoryRequirements; } template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2KHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV && + "Function requires " ); - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); - getDispatcher()->vkGetImageMemoryRequirements2KHR( static_cast( m_device ), - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); return structureChain; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 - Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2KHR && - "Function needs extension enabled!" ); - - VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; - getDispatcher()->vkGetBufferMemoryRequirements2KHR( static_cast( m_device ), - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPreprocessGeneratedCommandsNV && + "Function requires " ); - return memoryRequirements; + getDispatcher()->vkCmdPreprocessGeneratedCommandsNV( static_cast( m_commandBuffer ), + reinterpret_cast( &generatedCommandsInfo ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2KHR && - "Function needs extension enabled!" 
); - - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); - getDispatcher()->vkGetBufferMemoryRequirements2KHR( static_cast( m_device ), - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdExecuteGeneratedCommandsNV && + "Function requires " ); - return structureChain; + getDispatcher()->vkCmdExecuteGeneratedCommandsNV( static_cast( m_commandBuffer ), + static_cast( isPreprocessed ), + reinterpret_cast( &generatedCommandsInfo ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const + VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSparseMemoryRequirements2KHR && - "Function needs extension enabled!" ); - - std::vector sparseMemoryRequirements; - uint32_t sparseMemoryRequirementCount; - getDispatcher()->vkGetImageSparseMemoryRequirements2KHR( - static_cast( m_device ), reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); - sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); - getDispatcher()->vkGetImageSparseMemoryRequirements2KHR( static_cast( m_device ), - reinterpret_cast( &info ), - &sparseMemoryRequirementCount, - reinterpret_cast( sparseMemoryRequirements.data() ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindPipelineShaderGroupNV && + "Function requires " ); - VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); - if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + getDispatcher()->vkCmdBindPipelineShaderGroupNV( static_cast( m_commandBuffer ), + static_cast( pipelineBindPoint ), + static_cast( pipeline ), + groupIndex ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateIndirectCommandsLayoutNV( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectCommandsLayout ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createIndirectCommandsLayoutNV" ); +# endif } - return sparseMemoryRequirements; + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV( + *this, *reinterpret_cast( &indirectCommandsLayout ), allocator ); } - //=== VK_KHR_acceleration_structure === + //=== VK_EXT_depth_bias_control === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR - Device::createAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo, - 
VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo ) const VULKAN_HPP_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR( *this, createInfo, allocator ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBias2EXT && "Function requires " ); + + getDispatcher()->vkCmdSetDepthBias2EXT( static_cast( m_commandBuffer ), reinterpret_cast( &depthBiasInfo ) ); } - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( - VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, - VULKAN_HPP_NAMESPACE::ArrayProxy const & pBuildRangeInfos ) const + //=== VK_EXT_acquire_drm_display === + + VULKAN_HPP_INLINE void PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructuresKHR && - "Function needs extension enabled!" ); - if ( infos.size() != pBuildRangeInfos.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireDrmDisplayEXT && "Function requires " ); - getDispatcher()->vkCmdBuildAccelerationStructuresKHR( - static_cast( m_commandBuffer ), - infos.size(), - reinterpret_cast( infos.data() ), - reinterpret_cast( pBuildRangeInfos.data() ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkAcquireDrmDisplayEXT( static_cast( m_physicalDevice ), drmFd, static_cast( display ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" ); } - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( - VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, - VULKAN_HPP_NAMESPACE::ArrayProxy const & indirectDeviceAddresses, - VULKAN_HPP_NAMESPACE::ArrayProxy const & indirectStrides, - VULKAN_HPP_NAMESPACE::ArrayProxy const & pMaxPrimitiveCounts ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructuresIndirectKHR && - "Function needs extension enabled!" 
); - if ( infos.size() != indirectDeviceAddresses.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING - "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" ); - } - if ( infos.size() != indirectStrides.size() ) + VULKAN_HPP_NAMESPACE::DisplayKHR display; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetDrmDisplayEXT( + static_cast( m_physicalDevice ), drmFd, connectorId, reinterpret_cast( &display ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" ); - } - if ( infos.size() != pMaxPrimitiveCounts.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" ); +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "PhysicalDevice::getDrmDisplayEXT" ); +# endif } - getDispatcher()->vkCmdBuildAccelerationStructuresIndirectKHR( static_cast( m_commandBuffer ), - infos.size(), - reinterpret_cast( infos.data() ), - reinterpret_cast( indirectDeviceAddresses.data() ), - indirectStrides.data(), - pMaxPrimitiveCounts.data() ); + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, *reinterpret_cast( &display ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::buildAccelerationStructuresKHR( - VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - VULKAN_HPP_NAMESPACE::ArrayProxy const & infos, - VULKAN_HPP_NAMESPACE::ArrayProxy const & pBuildRangeInfos ) const + //=== VK_EXT_private_data === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkBuildAccelerationStructuresKHR && - "Function needs extension enabled!" 
);
-     if ( infos.size() != pBuildRangeInfos.size() )
+     VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
+     VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreatePrivateDataSlotEXT(
+       static_cast<VkDevice>( m_device ),
+       reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
+       reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
+       reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ) );
+     if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
      {
-       throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
+# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
+       return VULKAN_HPP_UNEXPECTED( result );
+# else
+       VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createPrivateDataSlotEXT" );
+# endif
      }
-     VkResult result = getDispatcher()->vkBuildAccelerationStructuresKHR(
-       static_cast<VkDevice>( m_device ),
-       static_cast<VkDeferredOperationKHR>( deferredOperation ),
-       infos.size(),
-       reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
-       reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
-     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
-                  VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR",
-                  { VULKAN_HPP_NAMESPACE::Result::eSuccess,
-                    VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
-                    VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
-
-     return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+     return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot( *this, *reinterpret_cast<VULKAN_HPP_NAMESPACE::PrivateDataSlot *>( &privateDataSlot ), allocator );
    }
-   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
-   Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
-                                         const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const
+   VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
+                                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_NOEXCEPT
    {
-     VULKAN_HPP_ASSERT( getDispatcher()->vkCopyAccelerationStructureKHR &&
-                        "Function needs <VK_KHR_acceleration_structure> extension enabled!"
); + VULKAN_HPP_ASSERT( getDispatcher()->vkDestroyPrivateDataSlotEXT && + "Function requires or " ); - VkResult result = getDispatcher()->vkCopyAccelerationStructureKHR( static_cast( m_device ), - static_cast( deferredOperation ), - reinterpret_cast( &info ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, - VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + getDispatcher()->vkDestroyPrivateDataSlotEXT( + static_cast( m_device ), + static_cast( privateDataSlot ), + reinterpret_cast( static_cast( allocator ) ) ); + } - return static_cast( result ); + VULKAN_HPP_INLINE void Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, + uint64_t data ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkSetPrivateDataEXT && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkSetPrivateDataEXT( + static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result - Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle, + VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCopyAccelerationStructureToMemoryKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPrivateDataEXT && "Function requires or " ); - VkResult result = - getDispatcher()->vkCopyAccelerationStructureToMemoryKHR( static_cast( m_device ), - static_cast( deferredOperation ), - reinterpret_cast( &info ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, - VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + uint64_t data; + getDispatcher()->vkGetPrivateDataEXT( + static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), &data ); - return static_cast( result ); + return data; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result - Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, - const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const + //=== VK_KHR_video_encode_queue === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToAccelerationStructureKHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR && + "Function requires " ); - VkResult result = - getDispatcher()->vkCopyMemoryToAccelerationStructureKHR( static_cast( m_device ), - static_cast( deferredOperation ), - reinterpret_cast( &info ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, - VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR qualityLevelProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &qualityLevelInfo ), + reinterpret_cast( &qualityLevelProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); - return static_cast( result ); + return qualityLevelProperties; } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::writeAccelerationStructuresPropertiesKHR( - VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, - VULKAN_HPP_NAMESPACE::QueryType queryType, - size_t dataSize, - size_t stride ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( + const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR && + "Function requires " ); - VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); - std::vector data( dataSize / sizeof( DataType ) ); - VkResult result = - getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR( static_cast( m_device ), - accelerationStructures.size(), - reinterpret_cast( accelerationStructures.data() ), - static_cast( queryType ), - data.size() * sizeof( DataType ), - reinterpret_cast( data.data() ), - stride ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR & qualityLevelProperties = + structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( + static_cast( m_physicalDevice ), + reinterpret_cast( &qualityLevelInfo ), + reinterpret_cast( &qualityLevelProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); - return data; + return structureChain; } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Device::writeAccelerationStructuresPropertyKHR( - VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, - VULKAN_HPP_NAMESPACE::QueryType queryType, - size_t stride ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair> + Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo ) const { - VULKAN_HPP_ASSERT( 
getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR &&
-                        "Function needs <VK_KHR_acceleration_structure> extension enabled!" );
+     VULKAN_HPP_ASSERT( getDispatcher()->vkGetEncodedVideoSessionParametersKHR &&
+                        "Function requires <VK_KHR_video_encode_queue>" );
-     DataType data;
-     VkResult result =
-       getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR( static_cast<VkDevice>( m_device ),
-                                                                    accelerationStructures.size(),
-                                                                    reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
-                                                                    static_cast<VkQueryType>( queryType ),
-                                                                    sizeof( DataType ),
-                                                                    reinterpret_cast<void *>( &data ),
-                                                                    stride );
-     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" );
+     std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t>> data_;
+     VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first;
+     std::vector<uint8_t> & data = data_.second;
+     size_t dataSize;
+     VULKAN_HPP_NAMESPACE::Result result;
+     do
+     {
+       result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetEncodedVideoSessionParametersKHR(
+         static_cast<VkDevice>( m_device ),
+         reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
+         reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
+         &dataSize,
+         nullptr ) );
+       if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize )
+       {
+         data.resize( dataSize );
+         result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkGetEncodedVideoSessionParametersKHR(
+           static_cast<VkDevice>( m_device ),
+           reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
+           reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
+           &dataSize,
+           reinterpret_cast<void *>( data.data() ) ) );
+       }
+     } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
+     VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" );
-     return data;
+     return data_;
    }
-   VULKAN_HPP_INLINE void
-   CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
-   {
-     VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureKHR &&
-                        "Function needs <VK_KHR_acceleration_structure> extension enabled!"
); + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair, std::vector> + Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetEncodedVideoSessionParametersKHR && + "Function requires " ); + + std::pair, std::vector> data_; + VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = + data_.first.template get(); + std::vector & data = data_.second; + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetEncodedVideoSessionParametersKHR( + static_cast( m_device ), + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( getDispatcher()->vkGetEncodedVideoSessionParametersKHR( + static_cast( m_device ), + reinterpret_cast( &videoSessionParametersInfo ), + reinterpret_cast( &feedbackInfo ), + &dataSize, + reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); - getDispatcher()->vkCmdCopyAccelerationStructureKHR( static_cast( m_commandBuffer ), - reinterpret_cast( &info ) ); + return data_; } - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( - const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureToMemoryKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEncodeVideoKHR && "Function requires " ); - getDispatcher()->vkCmdCopyAccelerationStructureToMemoryKHR( static_cast( m_commandBuffer ), - reinterpret_cast( &info ) ); + getDispatcher()->vkCmdEncodeVideoKHR( static_cast( m_commandBuffer ), reinterpret_cast( &encodeInfo ) ); } - VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( - const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryToAccelerationStructureKHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_NAMESPACE::CudaModuleNV module; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateCudaModuleNV( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &module ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createCudaModuleNV" ); +# endif + } - getDispatcher()->vkCmdCopyMemoryToAccelerationStructureKHR( static_cast( m_commandBuffer ), - reinterpret_cast( &info ) ); + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CudaModuleNV( *this, *reinterpret_cast( &module ), allocator ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress - Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector CudaModuleNV::getCache() const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureDeviceAddressKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetCudaModuleCacheNV && "Function requires " ); - VkDeviceAddress result = getDispatcher()->vkGetAccelerationStructureDeviceAddressKHR( - static_cast( m_device ), reinterpret_cast( &info ) ); + std::vector cacheData; + size_t cacheSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetCudaModuleCacheNV( static_cast( m_device ), static_cast( m_module ), &cacheSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && cacheSize ) + { + cacheData.resize( cacheSize ); + result = static_cast( getDispatcher()->vkGetCudaModuleCacheNV( + static_cast( m_device ), static_cast( m_module ), &cacheSize, reinterpret_cast( cacheData.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CudaModuleNV::getCache" ); + VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() ); + if ( cacheSize < cacheData.size() ) + { + cacheData.resize( cacheSize ); + } + return cacheData; + } - return static_cast( result ); + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::CudaFunctionNV function; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateCudaFunctionNV( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &function ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createCudaFunctionNV" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CudaFunctionNV( *this, *reinterpret_cast( &function ), allocator ); } - VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( - VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, - VULKAN_HPP_NAMESPACE::QueryType queryType, - 
VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::cudaLaunchKernelNV( const VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV & launchInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCudaLaunchKernelNV && "Function requires " ); - getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesKHR( static_cast( m_commandBuffer ), - accelerationStructures.size(), - reinterpret_cast( accelerationStructures.data() ), - static_cast( queryType ), - static_cast( queryPool ), - firstQuery ); + getDispatcher()->vkCmdCudaLaunchKernelNV( static_cast( m_commandBuffer ), reinterpret_cast( &launchInfo ) ); } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR Device::getAccelerationStructureCompatibilityKHR( - const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo ) const VULKAN_HPP_NOEXCEPT +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT Device::exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceAccelerationStructureCompatibilityKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkExportMetalObjectsEXT && "Function requires " ); - VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility; - getDispatcher()->vkGetDeviceAccelerationStructureCompatibilityKHR( static_cast( m_device ), - reinterpret_cast( &versionInfo ), - reinterpret_cast( &compatibility ) ); + VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT metalObjectsInfo; + getDispatcher()->vkExportMetalObjectsEXT( static_cast( m_device ), reinterpret_cast( &metalObjectsInfo ) ); - return compatibility; + return metalObjectsInfo; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR - Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, - const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo, - VULKAN_HPP_NAMESPACE::ArrayProxy const & maxPrimitiveCounts ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureBuildSizesKHR && - "Function needs extension enabled!" 
); - if ( maxPrimitiveCounts.size() != buildInfo.geometryCount ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkExportMetalObjectsEXT && "Function requires " ); - VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo; - getDispatcher()->vkGetAccelerationStructureBuildSizesKHR( static_cast( m_device ), - static_cast( buildType ), - reinterpret_cast( &buildInfo ), - maxPrimitiveCounts.data(), - reinterpret_cast( &sizeInfo ) ); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT & metalObjectsInfo = structureChain.template get(); + getDispatcher()->vkExportMetalObjectsEXT( static_cast( m_device ), reinterpret_cast( &metalObjectsInfo ) ); + + return structureChain; + } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + + VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetEvent2KHR && "Function requires or " ); - return sizeInfo; + getDispatcher()->vkCmdSetEvent2KHR( + static_cast( m_commandBuffer ), static_cast( event ), reinterpret_cast( &dependencyInfo ) ); } - //=== VK_KHR_sampler_ycbcr_conversion === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion - Device::createSamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion( *this, createInfo, allocator ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetEvent2KHR && "Function requires or " ); + + getDispatcher()->vkCmdResetEvent2KHR( + static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); } VULKAN_HPP_INLINE void - Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, - Optional allocator ) const VULKAN_HPP_NOEXCEPT + CommandBuffer::waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, + VULKAN_HPP_NAMESPACE::ArrayProxy const & dependencyInfos ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkDestroySamplerYcbcrConversionKHR && - "Function needs extension enabled!" 
);
+     VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWaitEvents2KHR && "Function requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+     VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
+# else
+     if ( events.size() != dependencyInfos.size() )
+     {
+       throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" );
+     }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
-     getDispatcher()->vkDestroySamplerYcbcrConversionKHR(
-       static_cast<VkDevice>( m_device ),
-       static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
-       reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+     getDispatcher()->vkCmdWaitEvents2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+       events.size(),
+       reinterpret_cast<const VkEvent *>( events.data() ),
+       reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
    }
-   //=== VK_KHR_bind_memory2 ===
+   VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT
+   {
+     VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPipelineBarrier2KHR &&
+                        "Function requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
-   VULKAN_HPP_INLINE void
-   Device::bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos ) const
+     getDispatcher()->vkCmdPipelineBarrier2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
+   }
+
+   VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
+                                                             VULKAN_HPP_NAMESPACE::QueryPool queryPool,
+                                                             uint32_t query ) const VULKAN_HPP_NOEXCEPT
    {
-     VULKAN_HPP_ASSERT( getDispatcher()->vkBindBufferMemory2KHR && "Function needs <VK_KHR_bind_memory2> extension enabled!" );
+     VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteTimestamp2KHR &&
+                        "Function requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
-     VkResult result = getDispatcher()->vkBindBufferMemory2KHR(
-       static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) );
-     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" );
+     getDispatcher()->vkCmdWriteTimestamp2KHR(
+       static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
    }
-   VULKAN_HPP_INLINE void
-   Device::bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos ) const
+   VULKAN_HPP_INLINE void Queue::submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
+                                             VULKAN_HPP_NAMESPACE::Fence fence ) const
    {
-     VULKAN_HPP_ASSERT( getDispatcher()->vkBindImageMemory2KHR && "Function needs <VK_KHR_bind_memory2> extension enabled!" );
+     VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSubmit2KHR && "Function requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
-     VkResult result = getDispatcher()->vkBindImageMemory2KHR(
-       static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) );
-     resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" );
+     VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkQueueSubmit2KHR(
+       static_cast<VkQueue>( m_queue ), submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) ) );
+     VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" );
    }
-   //=== VK_EXT_image_drm_format_modifier ===
+   //=== VK_EXT_descriptor_buffer ===
-   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT Image::getDrmFormatModifierPropertiesEXT() const
+   VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize DescriptorSetLayout::getSizeEXT() const VULKAN_HPP_NOEXCEPT
    {
-     VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageDrmFormatModifierPropertiesEXT &&
-                        "Function needs <VK_EXT_image_drm_format_modifier> extension enabled!"
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSizeEXT && "Function requires " ); - VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties; - VkResult result = getDispatcher()->vkGetImageDrmFormatModifierPropertiesEXT( - static_cast( m_device ), static_cast( m_image ), reinterpret_cast( &properties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Image::getDrmFormatModifierPropertiesEXT" ); + VULKAN_HPP_NAMESPACE::DeviceSize layoutSizeInBytes; + getDispatcher()->vkGetDescriptorSetLayoutSizeEXT( static_cast( m_device ), + static_cast( m_descriptorSetLayout ), + reinterpret_cast( &layoutSizeInBytes ) ); - return properties; + return layoutSizeInBytes; } - //=== VK_EXT_validation_cache === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT - Device::createValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize + DescriptorSetLayout::getBindingOffsetEXT( uint32_t binding ) const VULKAN_HPP_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT( *this, createInfo, allocator ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutBindingOffsetEXT && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::DeviceSize offset; + getDispatcher()->vkGetDescriptorSetLayoutBindingOffsetEXT( + static_cast( m_device ), static_cast( m_descriptorSetLayout ), binding, reinterpret_cast( &offset ) ); + + return offset; } - VULKAN_HPP_INLINE void ValidationCacheEXT::merge( VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches ) const + VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, + size_t dataSize, + void * pDescriptor ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkMergeValidationCachesEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorEXT && "Function requires " ); - VkResult result = getDispatcher()->vkMergeValidationCachesEXT( static_cast( m_device ), - static_cast( m_validationCache ), - srcCaches.size(), - reinterpret_cast( srcCaches.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::ValidationCacheEXT::merge" ); + getDispatcher()->vkGetDescriptorEXT( + static_cast( m_device ), reinterpret_cast( &descriptorInfo ), dataSize, pDescriptor ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector ValidationCacheEXT::getData() const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DescriptorType + Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetValidationCacheDataEXT && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorEXT && "Function requires " ); - std::vector data; - size_t dataSize; - VkResult result; - do - { - result = getDispatcher()->vkGetValidationCacheDataEXT( - static_cast( m_device ), static_cast( m_validationCache ), &dataSize, nullptr ); - if ( ( result == VK_SUCCESS ) && dataSize ) - { - data.resize( dataSize ); - result = getDispatcher()->vkGetValidationCacheDataEXT( - static_cast( m_device ), static_cast( m_validationCache ), &dataSize, reinterpret_cast( data.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::ValidationCacheEXT::getData" ); - VULKAN_HPP_ASSERT( dataSize <= data.size() ); - if ( dataSize < data.size() ) - { - data.resize( dataSize ); - } - return data; - } + DescriptorType descriptor; + getDispatcher()->vkGetDescriptorEXT( static_cast( m_device ), + reinterpret_cast( &descriptorInfo ), + sizeof( DescriptorType ), + reinterpret_cast( &descriptor ) ); - //=== VK_NV_shading_rate_image === + return descriptor; + } - VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, - VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBuffersEXT( + VULKAN_HPP_NAMESPACE::ArrayProxy const & bindingInfos ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindShadingRateImageNV && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorBuffersEXT && "Function requires " ); - getDispatcher()->vkCmdBindShadingRateImageNV( - static_cast( m_commandBuffer ), static_cast( imageView ), static_cast( imageLayout ) ); + getDispatcher()->vkCmdBindDescriptorBuffersEXT( static_cast( m_commandBuffer ), + bindingInfos.size(), + reinterpret_cast( bindingInfos.data() ) ); } - VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( - uint32_t firstViewport, - VULKAN_HPP_NAMESPACE::ArrayProxy const & shadingRatePalettes ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportShadingRatePaletteNV && - "Function needs extension enabled!" 
); + VULKAN_HPP_INLINE void + CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t firstSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & bufferIndices, + VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDescriptorBufferOffsetsEXT && + "Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( bufferIndices.size() == offsets.size() ); +# else + if ( bufferIndices.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setDescriptorBufferOffsetsEXT: bufferIndices.size() != offsets.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - getDispatcher()->vkCmdSetViewportShadingRatePaletteNV( static_cast( m_commandBuffer ), - firstViewport, - shadingRatePalettes.size(), - reinterpret_cast( shadingRatePalettes.data() ) ); + getDispatcher()->vkCmdSetDescriptorBufferOffsetsEXT( static_cast( m_commandBuffer ), + static_cast( pipelineBindPoint ), + static_cast( layout ), + firstSet, + bufferIndices.size(), + bufferIndices.data(), + reinterpret_cast( offsets.data() ) ); } - VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( - VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, - VULKAN_HPP_NAMESPACE::ArrayProxy const & customSampleOrders ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoarseSampleOrderNV && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorBufferEmbeddedSamplersEXT && + "Function requires " ); - getDispatcher()->vkCmdSetCoarseSampleOrderNV( static_cast( m_commandBuffer ), - static_cast( sampleOrderType ), - customSampleOrders.size(), - reinterpret_cast( customSampleOrders.data() ) ); + getDispatcher()->vkCmdBindDescriptorBufferEmbeddedSamplersEXT( + static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( layout ), set ); } - //=== VK_NV_ray_tracing === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV - Device::createAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType + Device::getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info ) const { - return VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV( *this, createInfo, allocator ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferOpaqueCaptureDescriptorDataEXT && + "Function requires " ); + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetBufferOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( &info ), &data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferOpaqueCaptureDescriptorDataEXT" ); + + return data; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR Device::getAccelerationStructureMemoryRequirementsNV( - const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT + template + 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType + Device::getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageOpaqueCaptureDescriptorDataEXT && + "Function requires " ); - VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements; - getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV( static_cast( m_device ), - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetImageOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( &info ), &data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDescriptorDataEXT" ); - return memoryRequirements; + return data; } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain Device::getAccelerationStructureMemoryRequirementsNV( - const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType + Device::getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewOpaqueCaptureDescriptorDataEXT && + "Function requires " ); - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = structureChain.template get(); - getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV( static_cast( m_device ), - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetImageViewOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( &info ), &data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewOpaqueCaptureDescriptorDataEXT" ); - return structureChain; + return data; } - VULKAN_HPP_INLINE void Device::bindAccelerationStructureMemoryNV( - VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType + Device::getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkBindAccelerationStructureMemoryNV && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSamplerOpaqueCaptureDescriptorDataEXT && + "Function requires " ); + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetSamplerOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( &info ), &data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSamplerOpaqueCaptureDescriptorDataEXT" ); - VkResult result = getDispatcher()->vkBindAccelerationStructureMemoryNV( - static_cast( m_device ), bindInfos.size(), reinterpret_cast( bindInfos.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" ); + return data; } - VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info, - VULKAN_HPP_NAMESPACE::Buffer instanceData, - VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, - VULKAN_HPP_NAMESPACE::Bool32 update, - VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, - VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, - VULKAN_HPP_NAMESPACE::Buffer scratch, - VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( + const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructureNV && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT && + "Function requires " ); - getDispatcher()->vkCmdBuildAccelerationStructureNV( static_cast( m_commandBuffer ), - reinterpret_cast( &info ), - static_cast( instanceData ), - static_cast( instanceOffset ), - static_cast( update ), - static_cast( dst ), - static_cast( src ), - static_cast( scratch ), - static_cast( scratchOffset ) ); + DataType data; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( &info ), &data ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT" ); + + return data; } - VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, - VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, - VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT + //=== VK_NV_fragment_shading_rate_enums === + + VULKAN_HPP_INLINE void + CommandBuffer::setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, + const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureNV && - "Function needs extension enabled!" 
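    // Illustrative usage sketch (not part of the generated header): selecting a coarse shading rate
    // with VK_NV_fragment_shading_rate_enums. The combiner operations keep the pipeline/primitive
    // rates untouched; the corresponding device feature is assumed to be enabled.
    //
    //   #include <vulkan/vulkan_raii.hpp>
    //
    //   void useCoarseShading( vk::raii::CommandBuffer const & cmd )
    //   {
    //     vk::FragmentShadingRateCombinerOpKHR combinerOps[2] = { vk::FragmentShadingRateCombinerOpKHR::eKeep,
    //                                                             vk::FragmentShadingRateCombinerOpKHR::eKeep };
    //     cmd.setFragmentShadingRateEnumNV( vk::FragmentShadingRateNV::e1InvocationPer2X2Pixels, combinerOps );
    //   }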
 );
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFragmentShadingRateEnumNV &&
+                         "Function requires <VK_NV_fragment_shading_rate_enums>" );

-      getDispatcher()->vkCmdCopyAccelerationStructureNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
-                                                         static_cast<VkAccelerationStructureNV>( dst ),
-                                                         static_cast<VkAccelerationStructureNV>( src ),
-                                                         static_cast<VkCopyAccelerationStructureModeKHR>( mode ) );
+      getDispatcher()->vkCmdSetFragmentShadingRateEnumNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                          static_cast<VkFragmentShadingRateNV>( shadingRate ),
+                                                          reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
     }

-    VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer,
-                                                       VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset,
-                                                       VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer,
-                                                       VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset,
-                                                       VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride,
-                                                       VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer,
-                                                       VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset,
-                                                       VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride,
-                                                       VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer,
-                                                       VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset,
-                                                       VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride,
-                                                       uint32_t width,
-                                                       uint32_t height,
-                                                       uint32_t depth ) const VULKAN_HPP_NOEXCEPT
+    //=== VK_EXT_mesh_shader ===
+
+    VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
     {
-      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysNV && "Function needs extension <VK_NV_ray_tracing> enabled!" );
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksEXT && "Function requires <VK_EXT_mesh_shader>" );

-      getDispatcher()->vkCmdTraceRaysNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
-                                         static_cast<VkBuffer>( raygenShaderBindingTableBuffer ),
-                                         static_cast<VkDeviceSize>( raygenShaderBindingOffset ),
-                                         static_cast<VkBuffer>( missShaderBindingTableBuffer ),
-                                         static_cast<VkDeviceSize>( missShaderBindingOffset ),
-                                         static_cast<VkDeviceSize>( missShaderBindingStride ),
-                                         static_cast<VkBuffer>( hitShaderBindingTableBuffer ),
-                                         static_cast<VkDeviceSize>( hitShaderBindingOffset ),
-                                         static_cast<VkDeviceSize>( hitShaderBindingStride ),
-                                         static_cast<VkBuffer>( callableShaderBindingTableBuffer ),
-                                         static_cast<VkDeviceSize>( callableShaderBindingOffset ),
-                                         static_cast<VkDeviceSize>( callableShaderBindingStride ),
-                                         width,
-                                         height,
-                                         depth );
+      getDispatcher()->vkCmdDrawMeshTasksEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ );
     }

-    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> Device::createRayTracingPipelinesNV(
-      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
-      VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
-      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
+                                                                    VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                                    uint32_t drawCount,
+                                                                    uint32_t stride ) const VULKAN_HPP_NOEXCEPT
     {
-      return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, pipelineCache, createInfos, allocator );
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectEXT && "Function requires <VK_EXT_mesh_shader>" );
+
+      getDispatcher()->vkCmdDrawMeshTasksIndirectEXT(
+        static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
     }

-    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createRayTracingPipelineNV(
-      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
-      VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo,
-      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
+    VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
+
VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::Buffer countBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, pipelineCache, createInfo, allocator ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectCountEXT && "Function requires " ); + + getDispatcher()->vkCmdDrawMeshTasksIndirectCountEXT( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( countBuffer ), + static_cast( countBufferOffset ), + maxDrawCount, + stride ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Pipeline::getRayTracingShaderGroupHandlesNV( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const - { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesNV && - "Function needs extension enabled!" ); + //=== VK_KHR_copy_commands2 === - VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); - std::vector data( dataSize / sizeof( DataType ) ); - VkResult result = getDispatcher()->vkGetRayTracingShaderGroupHandlesNV( static_cast( m_device ), - static_cast( m_pipeline ), - firstGroup, - groupCount, - data.size() * sizeof( DataType ), - reinterpret_cast( data.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesNV" ); + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBuffer2KHR && "Function requires or " ); - return data; + getDispatcher()->vkCmdCopyBuffer2KHR( static_cast( m_commandBuffer ), reinterpret_cast( ©BufferInfo ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Pipeline::getRayTracingShaderGroupHandleNV( uint32_t firstGroup, uint32_t groupCount ) const + VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesNV && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImage2KHR && "Function requires or " ); - DataType data; - VkResult result = getDispatcher()->vkGetRayTracingShaderGroupHandlesNV( static_cast( m_device ), - static_cast( m_pipeline ), - firstGroup, - groupCount, - sizeof( DataType ), - reinterpret_cast( &data ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleNV" ); - - return data; + getDispatcher()->vkCmdCopyImage2KHR( static_cast( m_commandBuffer ), reinterpret_cast( ©ImageInfo ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector AccelerationStructureNV::getHandle( size_t dataSize ) const + VULKAN_HPP_INLINE void + CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureHandleNV && - "Function needs extension enabled!" 
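    // Illustrative usage sketch (not part of the generated header): the three VK_EXT_mesh_shader
    // draw paths added above. A mesh-shader graphics pipeline is assumed to be bound; the indirect
    // variants read VkDrawMeshTasksIndirectCommandEXT records from the given buffers.
    //
    //   #include <vulkan/vulkan_raii.hpp>
    //
    //   void drawMeshlets( vk::raii::CommandBuffer const & cmd, vk::Buffer indirectBuffer, vk::Buffer countBuffer )
    //   {
    //     cmd.drawMeshTasksEXT( 64, 1, 1 );  // direct dispatch of task/mesh workgroups
    //     cmd.drawMeshTasksIndirectEXT( indirectBuffer, 0, 1, sizeof( vk::DrawMeshTasksIndirectCommandEXT ) );
    //     cmd.drawMeshTasksIndirectCountEXT( indirectBuffer, 0, countBuffer, 0, 16, sizeof( vk::DrawMeshTasksIndirectCommandEXT ) );
    //   }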
); - - VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); - std::vector data( dataSize / sizeof( DataType ) ); - VkResult result = getDispatcher()->vkGetAccelerationStructureHandleNV( static_cast( m_device ), - static_cast( m_accelerationStructure ), - data.size() * sizeof( DataType ), - reinterpret_cast( data.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBufferToImage2KHR && + "Function requires or " ); - return data; + getDispatcher()->vkCmdCopyBufferToImage2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( ©BufferToImageInfo ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType AccelerationStructureNV::getHandle() const + VULKAN_HPP_INLINE void + CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureHandleNV && - "Function needs extension enabled!" ); - - DataType data; - VkResult result = getDispatcher()->vkGetAccelerationStructureHandleNV( static_cast( m_device ), - static_cast( m_accelerationStructure ), - sizeof( DataType ), - reinterpret_cast( &data ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImageToBuffer2KHR && + "Function requires or " ); - return data; + getDispatcher()->vkCmdCopyImageToBuffer2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( ©ImageToBufferInfo ) ); } - VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( - VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, - VULKAN_HPP_NAMESPACE::QueryType queryType, - VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesNV && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBlitImage2KHR && "Function requires or " ); - getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesNV( static_cast( m_commandBuffer ), - accelerationStructures.size(), - reinterpret_cast( accelerationStructures.data() ), - static_cast( queryType ), - static_cast( queryPool ), - firstQuery ); + getDispatcher()->vkCmdBlitImage2KHR( static_cast( m_commandBuffer ), reinterpret_cast( &blitImageInfo ) ); } - VULKAN_HPP_INLINE void Pipeline::compileDeferredNV( uint32_t shader ) const + VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCompileDeferredNV && "Function needs extension enabled!" 
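    // Illustrative usage sketch (not part of the generated header): the *2KHR copy path shown above,
    // using the promoted VkCopyBufferInfo2 structures. Buffers and sizes are assumptions of the
    // sketch; on a Vulkan 1.3 device the non-KHR copyBuffer2 entry point is equivalent.
    //
    //   #include <vulkan/vulkan_raii.hpp>
    //
    //   void copyStagingToDevice( vk::raii::CommandBuffer const & cmd, vk::Buffer src, vk::Buffer dst, vk::DeviceSize size )
    //   {
    //     vk::BufferCopy2 region;
    //     region.srcOffset = 0;
    //     region.dstOffset = 0;
    //     region.size      = size;
    //
    //     vk::CopyBufferInfo2 copyInfo;
    //     copyInfo.srcBuffer = src;
    //     copyInfo.dstBuffer = dst;
    //     copyInfo.setRegions( region );
    //
    //     cmd.copyBuffer2KHR( copyInfo );
    //   }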
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResolveImage2KHR && "Function requires or " ); - VkResult result = getDispatcher()->vkCompileDeferredNV( static_cast( m_device ), static_cast( m_pipeline ), shader ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::compileDeferredNV" ); + getDispatcher()->vkCmdResolveImage2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( &resolveImageInfo ) ); } - //=== VK_KHR_maintenance3 === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport - Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + //=== VK_EXT_device_fault === + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts, + VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupportKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDeviceFaultInfoEXT( static_cast( m_device ), + reinterpret_cast( pFaultCounts ), + reinterpret_cast( pFaultInfo ) ) ); + } +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === - VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; - getDispatcher()->vkGetDescriptorSetLayoutSupportKHR( static_cast( m_device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( &support ) ); + VULKAN_HPP_INLINE void DisplayKHR::acquireWinrtNV() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireWinrtDisplayNV && "Function requires " ); - return support; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkAcquireWinrtDisplayNV( static_cast( m_physicalDevice ), static_cast( m_display ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::acquireWinrtNV" ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupportKHR && - "Function needs extension enabled!" 
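    // Illustrative usage sketch (not part of the generated header): querying fault data after a
    // VK_ERROR_DEVICE_LOST with VK_EXT_device_fault, using the usual two-call pattern on the C entry
    // point (the dispatcher-based wrapper above forwards to the same function). The function pointer
    // is assumed to have been loaded by the caller.
    //
    //   #include <vector>
    //   #include <vulkan/vulkan.h>
    //
    //   void dumpDeviceFault( VkDevice device, PFN_vkGetDeviceFaultInfoEXT pfnGetDeviceFaultInfoEXT )
    //   {
    //     VkDeviceFaultCountsEXT counts{ VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT };
    //     pfnGetDeviceFaultInfoEXT( device, &counts, nullptr );  // first call: sizes only
    //
    //     std::vector<VkDeviceFaultAddressInfoEXT> addressInfos( counts.addressInfoCount );
    //     std::vector<VkDeviceFaultVendorInfoEXT>  vendorInfos( counts.vendorInfoCount );
    //
    //     VkDeviceFaultInfoEXT info{ VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT };
    //     info.pAddressInfos = addressInfos.data();
    //     info.pVendorInfos  = vendorInfos.data();
    //     pfnGetDeviceFaultInfoEXT( device, &counts, &info );    // second call: description + arrays
    //   }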
); - - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get(); - getDispatcher()->vkGetDescriptorSetLayoutSupportKHR( static_cast( m_device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( &support ) ); + VULKAN_HPP_NAMESPACE::DisplayKHR display; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetWinrtDisplayNV( + static_cast( m_physicalDevice ), deviceRelativeId, reinterpret_cast( &display ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "PhysicalDevice::getWinrtDisplayNV" ); +# endif + } - return structureChain; + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, *reinterpret_cast( &display ) ); } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_draw_indirect_count === +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === - VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::Buffer countBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectCountKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateDirectFBSurfaceEXT( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createDirectFBSurfaceEXT" ); +# endif + } - getDispatcher()->vkCmdDrawIndirectCountKHR( static_cast( m_commandBuffer ), - static_cast( buffer ), - static_cast( offset ), - static_cast( countBuffer ), - static_cast( countBufferOffset ), - maxDrawCount, - stride ); + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); } - VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::Buffer countBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 + PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndexedIndirectCountKHR && - "Function needs extension enabled!" 
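    // Illustrative usage sketch (not part of the generated header): creating a surface for a DirectFB
    // target through the RAII Instance wrapper above. Only meaningful when compiled with
    // VK_USE_PLATFORM_DIRECTFB_EXT; the dfb/window pointers come from the DirectFB runtime, and the
    // direct return of vk::raii::SurfaceKHR assumes the default exception-enabled configuration.
    //
    //   #if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
    //   #  include <vulkan/vulkan_raii.hpp>
    //
    //   vk::raii::SurfaceKHR makeDirectFBSurface( vk::raii::Instance const & instance, IDirectFB * dfb, IDirectFBSurface * window )
    //   {
    //     vk::DirectFBSurfaceCreateInfoEXT createInfo;
    //     createInfo.dfb     = dfb;
    //     createInfo.surface = window;
    //     return instance.createDirectFBSurfaceEXT( createInfo );
    //   }
    //   #endif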
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDirectFBPresentationSupportEXT && + "Function requires " ); - getDispatcher()->vkCmdDrawIndexedIndirectCountKHR( static_cast( m_commandBuffer ), - static_cast( buffer ), - static_cast( offset ), - static_cast( countBuffer ), - static_cast( countBufferOffset ), - maxDrawCount, - stride ); + VkBool32 result = + getDispatcher()->vkGetPhysicalDeviceDirectFBPresentationSupportEXT( static_cast( m_physicalDevice ), queueFamilyIndex, &dfb ); + + return static_cast( result ); } +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ - //=== VK_EXT_external_memory_host === + //=== VK_EXT_vertex_input_dynamic_state === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT - Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer ) const + VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( + VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexBindingDescriptions, + VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexAttributeDescriptions ) const + VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryHostPointerPropertiesEXT && - "Function needs extension enabled!" ); - - VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties; - VkResult result = - getDispatcher()->vkGetMemoryHostPointerPropertiesEXT( static_cast( m_device ), - static_cast( handleType ), - pHostPointer, - reinterpret_cast( &memoryHostPointerProperties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetVertexInputEXT && + "Function requires or " ); - return memoryHostPointerProperties; + getDispatcher()->vkCmdSetVertexInputEXT( static_cast( m_commandBuffer ), + vertexBindingDescriptions.size(), + reinterpret_cast( vertexBindingDescriptions.data() ), + vertexAttributeDescriptions.size(), + reinterpret_cast( vertexAttributeDescriptions.data() ) ); } - //=== VK_AMD_buffer_marker === +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === - VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, - uint32_t marker ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE zx_handle_t + Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteBufferMarkerAMD && "Function needs extension enabled!" 
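    // Illustrative usage sketch (not part of the generated header): supplying the vertex layout
    // dynamically with VK_EXT_vertex_input_dynamic_state instead of baking it into the pipeline.
    // Binding 0 with a single vec2 position attribute is an assumption of the sketch.
    //
    //   #include <vulkan/vulkan_raii.hpp>
    //
    //   void setDynamicVertexLayout( vk::raii::CommandBuffer const & cmd )
    //   {
    //     vk::VertexInputBindingDescription2EXT binding;
    //     binding.binding   = 0;
    //     binding.stride    = 2 * sizeof( float );
    //     binding.inputRate = vk::VertexInputRate::eVertex;
    //     binding.divisor   = 1;
    //
    //     vk::VertexInputAttributeDescription2EXT attribute;
    //     attribute.location = 0;
    //     attribute.binding  = 0;
    //     attribute.format   = vk::Format::eR32G32Sfloat;
    //     attribute.offset   = 0;
    //
    //     cmd.setVertexInputEXT( binding, attribute );
    //   }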
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryZirconHandleFUCHSIA && "Function requires " ); - getDispatcher()->vkCmdWriteBufferMarkerAMD( static_cast( m_commandBuffer ), - static_cast( pipelineStage ), - static_cast( dstBuffer ), - static_cast( dstOffset ), - marker ); - } + zx_handle_t zirconHandle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetMemoryZirconHandleFUCHSIA( + static_cast( m_device ), reinterpret_cast( &getZirconHandleInfo ), &zirconHandle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" ); - //=== VK_EXT_calibrated_timestamps === + return zirconHandle; + } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getCalibrateableTimeDomainsEXT() const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA + Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryZirconHandlePropertiesFUCHSIA && + "Function requires " ); - std::vector timeDomains; - uint32_t timeDomainCount; - VkResult result; - do - { - result = - getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( static_cast( m_physicalDevice ), &timeDomainCount, nullptr ); - if ( ( result == VK_SUCCESS ) && timeDomainCount ) - { - timeDomains.resize( timeDomainCount ); - result = getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( - static_cast( m_physicalDevice ), &timeDomainCount, reinterpret_cast( timeDomains.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); - VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); - if ( timeDomainCount < timeDomains.size() ) - { - timeDomains.resize( timeDomainCount ); - } - return timeDomains; + VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetMemoryZirconHandlePropertiesFUCHSIA( + static_cast( m_device ), + static_cast( handleType ), + zirconHandle, + reinterpret_cast( &memoryZirconHandleProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" ); + + return memoryZirconHandleProperties; } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair, uint64_t> Device::getCalibratedTimestampsEXT( - VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos ) const +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + + VULKAN_HPP_INLINE void + Device::importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsEXT && - "Function needs extension enabled!" 
); - - std::pair, uint64_t> data( std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); - std::vector & timestamps = data.first; - uint64_t & maxDeviation = data.second; - VkResult result = getDispatcher()->vkGetCalibratedTimestampsEXT( static_cast( m_device ), - timestampInfos.size(), - reinterpret_cast( timestampInfos.data() ), - timestamps.data(), - &maxDeviation ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreZirconHandleFUCHSIA && + "Function requires " ); - return data; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkImportSemaphoreZirconHandleFUCHSIA( + static_cast( m_device ), reinterpret_cast( &importSemaphoreZirconHandleInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair - Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE zx_handle_t + Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreZirconHandleFUCHSIA && + "Function requires " ); - std::pair data; - uint64_t & timestamp = data.first; - uint64_t & maxDeviation = data.second; - VkResult result = getDispatcher()->vkGetCalibratedTimestampsEXT( - static_cast( m_device ), 1, reinterpret_cast( ×tampInfo ), ×tamp, &maxDeviation ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" ); + zx_handle_t zirconHandle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetSemaphoreZirconHandleFUCHSIA( + static_cast( m_device ), reinterpret_cast( &getZirconHandleInfo ), &zirconHandle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" ); - return data; + return zirconHandle; } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ - //=== VK_NV_mesh_shader === +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksNV && "Function needs extension enabled!" 
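    // Illustrative usage sketch (not part of the generated header): exporting a memory allocation and
    // a semaphore as zircon handles on Fuchsia through the wrappers above. Guarded by
    // VK_USE_PLATFORM_FUCHSIA; the memory/semaphore objects are assumed to have been created with the
    // matching export handle types.
    //
    //   #if defined( VK_USE_PLATFORM_FUCHSIA )
    //   #  include <vulkan/vulkan_raii.hpp>
    //
    //   void exportFuchsiaHandles( vk::raii::Device const & device, vk::DeviceMemory memory, vk::Semaphore semaphore )
    //   {
    //     vk::MemoryGetZirconHandleInfoFUCHSIA memoryInfo;
    //     memoryInfo.memory     = memory;
    //     memoryInfo.handleType = vk::ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA;
    //     zx_handle_t vmo       = device.getMemoryZirconHandleFUCHSIA( memoryInfo );
    //
    //     vk::SemaphoreGetZirconHandleInfoFUCHSIA semaphoreInfo;
    //     semaphoreInfo.semaphore  = semaphore;
    //     semaphoreInfo.handleType = vk::ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA;
    //     zx_handle_t event        = device.getSemaphoreZirconHandleFUCHSIA( semaphoreInfo );
    //
    //     (void)vmo;
    //     (void)event;  // hand these off to the importing process as needed
    //   }
    //   #endif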
); + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateBufferCollectionFUCHSIA( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &collection ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createBufferCollectionFUCHSIA" ); +# endif + } - getDispatcher()->vkCmdDrawMeshTasksNV( static_cast( m_commandBuffer ), taskCount, firstTask ); + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA( + *this, *reinterpret_cast( &collection ), allocator ); } - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - uint32_t drawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void BufferCollectionFUCHSIA::setImageConstraints( const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectNV && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkSetBufferCollectionImageConstraintsFUCHSIA && + "Function requires " ); - getDispatcher()->vkCmdDrawMeshTasksIndirectNV( - static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkSetBufferCollectionImageConstraintsFUCHSIA( static_cast( m_device ), + static_cast( m_collection ), + reinterpret_cast( &imageConstraintsInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setImageConstraints" ); } - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::Buffer countBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + BufferCollectionFUCHSIA::setBufferConstraints( const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectCountNV && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkSetBufferCollectionBufferConstraintsFUCHSIA && + "Function requires " ); - getDispatcher()->vkCmdDrawMeshTasksIndirectCountNV( static_cast( m_commandBuffer ), - static_cast( buffer ), - static_cast( offset ), - static_cast( countBuffer ), - static_cast( countBufferOffset ), - maxDrawCount, - stride ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkSetBufferCollectionBufferConstraintsFUCHSIA( static_cast( m_device ), + static_cast( m_collection ), + reinterpret_cast( &bufferConstraintsInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setBufferConstraints" ); } - //=== VK_NV_scissor_exclusive === - - VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( - uint32_t firstExclusiveScissor, VULKAN_HPP_NAMESPACE::ArrayProxy const & exclusiveScissors ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA BufferCollectionFUCHSIA::getProperties() const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExclusiveScissorNV && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferCollectionPropertiesFUCHSIA && + "Function requires " ); - getDispatcher()->vkCmdSetExclusiveScissorNV( static_cast( m_commandBuffer ), - firstExclusiveScissor, - exclusiveScissors.size(), - reinterpret_cast( exclusiveScissors.data() ) ); + VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetBufferCollectionPropertiesFUCHSIA( static_cast( m_device ), + static_cast( m_collection ), + reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::getProperties" ); + + return properties; } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ - //=== VK_NV_device_diagnostic_checkpoints === + //=== VK_HUAWEI_subpass_shading === - template - VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI() const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCheckpointNV && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI && + "Function requires " ); - getDispatcher()->vkCmdSetCheckpointNV( static_cast( m_commandBuffer ), reinterpret_cast( &checkpointMarker ) ); + VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( + static_cast( m_device ), static_cast( m_renderPass ), reinterpret_cast( &maxWorkgroupSize ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI" ); + + return maxWorkgroupSize; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Queue::getCheckpointDataNV() const + VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI() const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueueCheckpointDataNV && - "Function needs extension enabled!" 
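    // Illustrative usage sketch (not part of the generated header): the VK_FUCHSIA_buffer_collection
    // flow implemented above - create the collection from a sysmem token, constrain it, then read the
    // negotiated properties. The token and the image constraints are assumptions of the sketch, and
    // the direct return of the RAII collection assumes the exception-enabled configuration.
    //
    //   #if defined( VK_USE_PLATFORM_FUCHSIA )
    //   #  include <vulkan/vulkan_raii.hpp>
    //
    //   void negotiateCollection( vk::raii::Device const & device,
    //                             zx_handle_t collectionToken,
    //                             vk::ImageConstraintsInfoFUCHSIA const & imageConstraints )
    //   {
    //     vk::BufferCollectionCreateInfoFUCHSIA createInfo;
    //     createInfo.collectionToken = collectionToken;
    //
    //     vk::raii::BufferCollectionFUCHSIA collection = device.createBufferCollectionFUCHSIA( createInfo );
    //     collection.setImageConstraints( imageConstraints );
    //
    //     vk::BufferCollectionPropertiesFUCHSIA properties = collection.getProperties();
    //     (void)properties.memoryTypeBits;  // feed into the actual allocation
    //   }
    //   #endif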
); - - std::vector checkpointData; - uint32_t checkpointDataCount; - getDispatcher()->vkGetQueueCheckpointDataNV( static_cast( m_queue ), &checkpointDataCount, nullptr ); - checkpointData.resize( checkpointDataCount ); - getDispatcher()->vkGetQueueCheckpointDataNV( - static_cast( m_queue ), &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSubpassShadingHUAWEI && "Function requires " ); - VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); - if ( checkpointDataCount < checkpointData.size() ) - { - checkpointData.resize( checkpointDataCount ); - } - return checkpointData; + getDispatcher()->vkCmdSubpassShadingHUAWEI( static_cast( m_commandBuffer ) ); } - //=== VK_KHR_timeline_semaphore === + //=== VK_HUAWEI_invocation_mask === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Semaphore::getCounterValueKHR() const + VULKAN_HPP_INLINE void CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreCounterValueKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindInvocationMaskHUAWEI && "Function requires " ); - uint64_t value; - VkResult result = getDispatcher()->vkGetSemaphoreCounterValueKHR( static_cast( m_device ), static_cast( m_semaphore ), &value ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValueKHR" ); - - return value; + getDispatcher()->vkCmdBindInvocationMaskHUAWEI( + static_cast( m_commandBuffer ), static_cast( imageView ), static_cast( imageLayout ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, - uint64_t timeout ) const + //=== VK_NV_external_memory_rdma === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::RemoteAddressNV + Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkWaitSemaphoresKHR && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryRemoteAddressNV && "Function requires " ); - VkResult result = - getDispatcher()->vkWaitSemaphoresKHR( static_cast( m_device ), reinterpret_cast( &waitInfo ), timeout ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); + VULKAN_HPP_NAMESPACE::RemoteAddressNV address; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetMemoryRemoteAddressNV( static_cast( m_device ), + reinterpret_cast( &memoryGetRemoteAddressInfo ), + reinterpret_cast( &address ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" ); - return static_cast( result ); + return address; } - VULKAN_HPP_INLINE void Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const + //=== VK_EXT_pipeline_properties === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::BaseOutStructure + Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkSignalSemaphoreKHR && "Function needs extension enabled!" 
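    // Illustrative usage sketch (not part of the generated header): querying the maximum
    // subpass-shading workgroup size for a render pass and issuing the dispatch inside the subpass
    // that uses a VK_HUAWEI_subpass_shading pipeline.
    //
    //   #include <vulkan/vulkan_raii.hpp>
    //
    //   void runSubpassShading( vk::raii::RenderPass const & renderPass, vk::raii::CommandBuffer const & cmd )
    //   {
    //     vk::Extent2D maxWorkgroupSize = renderPass.getSubpassShadingMaxWorkgroupSizeHUAWEI();
    //     (void)maxWorkgroupSize;  // used when creating the subpass-shading compute pipeline
    //
    //     cmd.subpassShadingHUAWEI();  // recorded inside the appropriate subpass
    //   }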
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelinePropertiesEXT && "Function requires " ); - VkResult result = - getDispatcher()->vkSignalSemaphoreKHR( static_cast( m_device ), reinterpret_cast( &signalInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" ); + VULKAN_HPP_NAMESPACE::BaseOutStructure pipelineProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPipelinePropertiesEXT( static_cast( m_device ), + reinterpret_cast( &pipelineInfo ), + reinterpret_cast( &pipelineProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" ); + + return pipelineProperties; } - //=== VK_INTEL_performance_query === + //=== VK_EXT_extended_dynamic_state2 === - VULKAN_HPP_INLINE void Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo ) const + VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkInitializePerformanceApiINTEL && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPatchControlPointsEXT && + "Function requires or " ); - VkResult result = getDispatcher()->vkInitializePerformanceApiINTEL( static_cast( m_device ), - reinterpret_cast( &initializeInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" ); + getDispatcher()->vkCmdSetPatchControlPointsEXT( static_cast( m_commandBuffer ), patchControlPoints ); } - VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkUninitializePerformanceApiINTEL && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT && + "Function requires or or " ); - getDispatcher()->vkUninitializePerformanceApiINTEL( static_cast( m_device ) ); + getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT( static_cast( m_commandBuffer ), static_cast( rasterizerDiscardEnable ) ); } - VULKAN_HPP_INLINE void CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo ) const + VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceMarkerINTEL && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBiasEnableEXT && + "Function requires or or " ); - VkResult result = getDispatcher()->vkCmdSetPerformanceMarkerINTEL( static_cast( m_commandBuffer ), - reinterpret_cast( &markerInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" ); + getDispatcher()->vkCmdSetDepthBiasEnableEXT( static_cast( m_commandBuffer ), static_cast( depthBiasEnable ) ); } - VULKAN_HPP_INLINE void CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo ) const + VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceStreamMarkerINTEL && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEXT && + "Function requires or " ); - VkResult result = getDispatcher()->vkCmdSetPerformanceStreamMarkerINTEL( static_cast( m_commandBuffer ), - reinterpret_cast( &markerInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" ); + getDispatcher()->vkCmdSetLogicOpEXT( static_cast( m_commandBuffer ), static_cast( logicOp ) ); } - VULKAN_HPP_INLINE void CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo ) const + VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceOverrideINTEL && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT && + "Function requires or or " ); - VkResult result = getDispatcher()->vkCmdSetPerformanceOverrideINTEL( static_cast( m_commandBuffer ), - reinterpret_cast( &overrideInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" ); + getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT( static_cast( m_commandBuffer ), static_cast( primitiveRestartEnable ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL - Device::acquirePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const - { - return VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL( *this, acquireInfo ); - } +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === - VULKAN_HPP_INLINE void Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSetPerformanceConfigurationINTEL && - "Function needs extension enabled!" 
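    // Illustrative usage sketch (not part of the generated header): the VK_EXT_extended_dynamic_state2
    // setters implemented above, as they would be recorded for a pipeline that declared the matching
    // dynamic states. setLogicOpEXT and setPatchControlPointsEXT additionally require the optional
    // logic-op / patch-control-point features to be enabled.
    //
    //   #include <vulkan/vulkan_raii.hpp>
    //
    //   void setDynamicState2( vk::raii::CommandBuffer const & cmd )
    //   {
    //     cmd.setPatchControlPointsEXT( 3 );
    //     cmd.setRasterizerDiscardEnableEXT( VK_FALSE );
    //     cmd.setDepthBiasEnableEXT( VK_FALSE );
    //     cmd.setLogicOpEXT( vk::LogicOp::eCopy );
    //     cmd.setPrimitiveRestartEnableEXT( VK_FALSE );
    //   }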
); + VULKAN_HPP_NAMESPACE::SurfaceKHR surface; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateScreenSurfaceQNX( + static_cast( m_instance ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &surface ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Instance::createScreenSurfaceQNX" ); +# endif + } - VkResult result = getDispatcher()->vkQueueSetPerformanceConfigurationINTEL( static_cast( m_queue ), - static_cast( configuration ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" ); + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, *reinterpret_cast( &surface ), allocator ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PerformanceValueINTEL - Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 + PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPerformanceParameterINTEL && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceScreenPresentationSupportQNX && + "Function requires " ); - VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value; - VkResult result = getDispatcher()->vkGetPerformanceParameterINTEL( - static_cast( m_device ), static_cast( parameter ), reinterpret_cast( &value ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" ); + VkBool32 result = + getDispatcher()->vkGetPhysicalDeviceScreenPresentationSupportQNX( static_cast( m_physicalDevice ), queueFamilyIndex, &window ); - return value; + return static_cast( result ); } +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - //=== VK_AMD_display_native_hdr === + //=== VK_EXT_color_write_enable === - VULKAN_HPP_INLINE void SwapchainKHR::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorWriteEnables ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkSetLocalDimmingAMD && "Function needs extension enabled!" 
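    // Illustrative usage sketch (not part of the generated header): checking presentation support and
    // creating a surface for a QNX screen window with the wrappers above. Only meaningful under
    // VK_USE_PLATFORM_SCREEN_QNX; context/window come from the screen API, and the direct return of
    // vk::raii::SurfaceKHR assumes the exception-enabled configuration.
    //
    //   #if defined( VK_USE_PLATFORM_SCREEN_QNX )
    //   #  include <stdexcept>
    //   #  include <vulkan/vulkan_raii.hpp>
    //
    //   vk::raii::SurfaceKHR makeScreenSurface( vk::raii::Instance const & instance,
    //                                           vk::raii::PhysicalDevice const & physicalDevice,
    //                                           uint32_t queueFamilyIndex,
    //                                           _screen_context * context,
    //                                           _screen_window * window )
    //   {
    //     if ( !physicalDevice.getScreenPresentationSupportQNX( queueFamilyIndex, *window ) )
    //     {
    //       throw std::runtime_error( "queue family cannot present to this screen window" );
    //     }
    //
    //     vk::ScreenSurfaceCreateInfoQNX createInfo;
    //     createInfo.context = context;
    //     createInfo.window  = window;
    //     return instance.createScreenSurfaceQNX( createInfo );
    //   }
    //   #endif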
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorWriteEnableEXT && "Function requires " ); - getDispatcher()->vkSetLocalDimmingAMD( - static_cast( m_device ), static_cast( m_swapchain ), static_cast( localDimmingEnable ) ); + getDispatcher()->vkCmdSetColorWriteEnableEXT( + static_cast( m_commandBuffer ), colorWriteEnables.size(), reinterpret_cast( colorWriteEnables.data() ) ); } -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_imagepipe_surface === + //=== VK_KHR_ray_tracing_maintenance1 === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - Instance::createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); - } -# endif /*VK_USE_PLATFORM_FUCHSIA*/ - -# if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_surface === + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysIndirect2KHR && "Function requires " ); - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - Instance::createMetalSurfaceEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + getDispatcher()->vkCmdTraceRaysIndirect2KHR( static_cast( m_commandBuffer ), static_cast( indirectDeviceAddress ) ); } -# endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_KHR_fragment_shading_rate === + //=== VK_EXT_multi_draw === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - PhysicalDevice::getFragmentShadingRatesKHR() const + VULKAN_HPP_INLINE void + CommandBuffer::drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy const & vertexInfo, + uint32_t instanceCount, + uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR && - "Function needs extension enabled!" 
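    // Illustrative usage sketch (not part of the generated header): toggling per-attachment writes
    // with VK_EXT_color_write_enable for a pipeline with two color attachments, and launching an
    // indirect ray trace with VK_KHR_ray_tracing_maintenance1. The device address is assumed to point
    // at a VkTraceRaysIndirectCommand2KHR record; the two calls are combined here only for brevity.
    //
    //   #include <vulkan/vulkan_raii.hpp>
    //
    //   void recordMisc( vk::raii::CommandBuffer const & cmd, vk::DeviceAddress indirectTraceRays )
    //   {
    //     vk::Bool32 colorWriteEnables[] = { VK_TRUE, VK_FALSE };  // write attachment 0, mask attachment 1
    //     cmd.setColorWriteEnableEXT( colorWriteEnables );
    //
    //     cmd.traceRaysIndirect2KHR( indirectTraceRays );
    //   }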
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMultiEXT && "Function requires " ); - std::vector fragmentShadingRates; - uint32_t fragmentShadingRateCount; - VkResult result; - do - { - result = - getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR( static_cast( m_physicalDevice ), &fragmentShadingRateCount, nullptr ); - if ( ( result == VK_SUCCESS ) && fragmentShadingRateCount ) - { - fragmentShadingRates.resize( fragmentShadingRateCount ); - result = getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR( - static_cast( m_physicalDevice ), - &fragmentShadingRateCount, - reinterpret_cast( fragmentShadingRates.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); - VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); - if ( fragmentShadingRateCount < fragmentShadingRates.size() ) - { - fragmentShadingRates.resize( fragmentShadingRateCount ); - } - return fragmentShadingRates; + getDispatcher()->vkCmdDrawMultiEXT( static_cast( m_commandBuffer ), + vertexInfo.size(), + reinterpret_cast( vertexInfo.data() ), + instanceCount, + firstInstance, + vertexInfo.stride() ); } VULKAN_HPP_INLINE void - CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize, - const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT + CommandBuffer::drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy const & indexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + Optional vertexOffset ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFragmentShadingRateKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMultiIndexedEXT && "Function requires " ); - getDispatcher()->vkCmdSetFragmentShadingRateKHR( static_cast( m_commandBuffer ), - reinterpret_cast( &fragmentSize ), - reinterpret_cast( combinerOps ) ); + getDispatcher()->vkCmdDrawMultiIndexedEXT( static_cast( m_commandBuffer ), + indexInfo.size(), + reinterpret_cast( indexInfo.data() ), + instanceCount, + firstInstance, + indexInfo.stride(), + static_cast( vertexOffset ) ); } - //=== VK_EXT_buffer_device_address === + //=== VK_EXT_opacity_micromap === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress - Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferDeviceAddressEXT && - "Function needs extension enabled!" 
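    // Illustrative usage sketch (not part of the generated header): issuing several draws in one call
    // with VK_EXT_multi_draw. The tightly packed array below relies on the StridedArrayProxy
    // accepting ArrayProxy-style sources with the default stride of sizeof(vk::MultiDrawInfoEXT),
    // which is an assumption of the sketch.
    //
    //   #include <vulkan/vulkan_raii.hpp>
    //
    //   void drawBatches( vk::raii::CommandBuffer const & cmd )
    //   {
    //     vk::MultiDrawInfoEXT draws[2];
    //     draws[0].firstVertex = 0;
    //     draws[0].vertexCount = 3;
    //     draws[1].firstVertex = 3;
    //     draws[1].vertexCount = 6;
    //
    //     cmd.drawMultiEXT( draws, /*instanceCount=*/1, /*firstInstance=*/0 );
    //   }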
); - - VkDeviceAddress result = - getDispatcher()->vkGetBufferDeviceAddressEXT( static_cast( m_device ), reinterpret_cast( &info ) ); + VULKAN_HPP_NAMESPACE::MicromapEXT micromap; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateMicromapEXT( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( µmap ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createMicromapEXT" ); +# endif + } - return static_cast( result ); + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::MicromapEXT( *this, *reinterpret_cast( µmap ), allocator ); } - //=== VK_EXT_tooling_info === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getToolPropertiesEXT() const + VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildMicromapsEXT && "Function requires " ); - std::vector toolProperties; - uint32_t toolCount; - VkResult result; - do - { - result = getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT( static_cast( m_physicalDevice ), &toolCount, nullptr ); - if ( ( result == VK_SUCCESS ) && toolCount ) - { - toolProperties.resize( toolCount ); - result = getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT( - static_cast( m_physicalDevice ), &toolCount, reinterpret_cast( toolProperties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); - VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); - if ( toolCount < toolProperties.size() ) - { - toolProperties.resize( toolCount ); - } - return toolProperties; + getDispatcher()->vkCmdBuildMicromapsEXT( + static_cast( m_commandBuffer ), infos.size(), reinterpret_cast( infos.data() ) ); } - //=== VK_KHR_present_wait === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result SwapchainKHR::waitForPresent( uint64_t presentId, uint64_t timeout ) const - { - VULKAN_HPP_ASSERT( getDispatcher()->vkWaitForPresentKHR && "Function needs extension enabled!" 
); - - VkResult result = - getDispatcher()->vkWaitForPresentKHR( static_cast( m_device ), static_cast( m_swapchain ), presentId, timeout ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::waitForPresent", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkBuildMicromapsEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkBuildMicromapsEXT( static_cast( m_device ), + static_cast( deferredOperation ), + infos.size(), + reinterpret_cast( infos.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); return static_cast( result ); } - //=== VK_NV_cooperative_matrix === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - PhysicalDevice::getCooperativeMatrixPropertiesNV() const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV && - "Function needs extension enabled!" ); - - std::vector properties; - uint32_t propertyCount; - VkResult result; - do - { - result = - getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( static_cast( m_physicalDevice ), &propertyCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertyCount ) - { - properties.resize( propertyCount ); - result = getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( - static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) - { - properties.resize( propertyCount ); - } - return properties; - } + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMicromapEXT && "Function requires " ); - //=== VK_NV_coverage_reduction_mode === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV() const - { - VULKAN_HPP_ASSERT( - getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV && - "Function needs extension enabled!" 
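    // Illustrative usage sketch (not part of the generated header): creating a VK_EXT_opacity_micromap
    // object over caller-provided storage and recording a build with the commands above. Sizing the
    // storage/scratch via vkGetMicromapBuildSizesEXT and filling the triangle and usage data of the
    // build info are assumed to have happened already; the direct return of the RAII micromap assumes
    // the exception-enabled configuration.
    //
    //   #include <vulkan/vulkan_raii.hpp>
    //
    //   void buildOpacityMicromap( vk::raii::Device const & device,
    //                              vk::raii::CommandBuffer const & cmd,
    //                              vk::Buffer storage,
    //                              vk::DeviceSize storageSize,
    //                              vk::MicromapBuildInfoEXT buildInfo )  // prepared by the caller
    //   {
    //     vk::MicromapCreateInfoEXT createInfo;
    //     createInfo.buffer = storage;
    //     createInfo.offset = 0;
    //     createInfo.size   = storageSize;
    //     createInfo.type   = vk::MicromapTypeEXT::eOpacityMicromap;
    //
    //     vk::raii::MicromapEXT micromap = device.createMicromapEXT( createInfo );
    //
    //     buildInfo.dstMicromap = *micromap;
    //     cmd.buildMicromapsEXT( buildInfo );
    //   }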
); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkCopyMicromapEXT( static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); - std::vector combinations; - uint32_t combinationCount; - VkResult result; - do - { - result = getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( - static_cast( m_physicalDevice ), &combinationCount, nullptr ); - if ( ( result == VK_SUCCESS ) && combinationCount ) - { - combinations.resize( combinationCount ); - result = getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( - static_cast( m_physicalDevice ), - &combinationCount, - reinterpret_cast( combinations.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); - VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); - if ( combinationCount < combinations.size() ) - { - combinations.resize( combinationCount ); - } - return combinations; + return static_cast( result ); } -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_EXT_full_screen_exclusive === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT && - "Function needs extension enabled!" ); - - std::vector presentModes; - uint32_t presentModeCount; - VkResult result; - do - { - result = getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast( m_physicalDevice ), - reinterpret_cast( &surfaceInfo ), - &presentModeCount, - nullptr ); - if ( ( result == VK_SUCCESS ) && presentModeCount ) - { - presentModes.resize( presentModeCount ); - result = getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast( m_physicalDevice ), - reinterpret_cast( &surfaceInfo ), - &presentModeCount, - reinterpret_cast( presentModes.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); - VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); - if ( presentModeCount < presentModes.size() ) - { - presentModes.resize( presentModeCount ); - } - return presentModes; - } + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMicromapToMemoryEXT && "Function requires " ); - VULKAN_HPP_INLINE void SwapchainKHR::acquireFullScreenExclusiveModeEXT() const - { - VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireFullScreenExclusiveModeEXT && - "Function needs extension enabled!" 
); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkCopyMicromapToMemoryEXT( static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); - VkResult result = getDispatcher()->vkAcquireFullScreenExclusiveModeEXT( static_cast( m_device ), static_cast( m_swapchain ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireFullScreenExclusiveModeEXT" ); + return static_cast( result ); } - VULKAN_HPP_INLINE void SwapchainKHR::releaseFullScreenExclusiveModeEXT() const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, + const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseFullScreenExclusiveModeEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToMicromapEXT && "Function requires " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkCopyMemoryToMicromapEXT( static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); - VkResult result = getDispatcher()->vkReleaseFullScreenExclusiveModeEXT( static_cast( m_device ), static_cast( m_swapchain ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::releaseFullScreenExclusiveModeEXT" ); + return static_cast( result ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR - Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t dataSize, + size_t stride ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupSurfacePresentModes2EXT && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkWriteMicromapsPropertiesEXT && "Function requires " ); - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; - VkResult result = getDispatcher()->vkGetDeviceGroupSurfacePresentModes2EXT( static_cast( m_device ), - reinterpret_cast( &surfaceInfo ), - reinterpret_cast( &modes ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" ); + VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); + std::vector data( dataSize / sizeof( DataType ) ); + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkWriteMicromapsPropertiesEXT( static_cast( m_device ), + micromaps.size(), + reinterpret_cast( micromaps.data() ), + static_cast( queryType ), + data.size() * sizeof( DataType ), + reinterpret_cast( data.data() ), + stride ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" ); - return modes; + return data; } -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_EXT_headless_surface === + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType + Device::writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + size_t stride ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkWriteMicromapsPropertiesEXT && "Function requires " ); + + DataType data; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( getDispatcher()->vkWriteMicromapsPropertiesEXT( static_cast( m_device ), + micromaps.size(), + reinterpret_cast( micromaps.data() ), + static_cast( queryType ), + sizeof( DataType ), + reinterpret_cast( &data ), + stride ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" ); + + return data; + } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - Instance::createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMicromapEXT && "Function requires " ); + + getDispatcher()->vkCmdCopyMicromapEXT( static_cast( m_commandBuffer ), reinterpret_cast( &info ) ); } - //=== VK_KHR_buffer_device_address === + VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMicromapToMemoryEXT && "Function requires " ); - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress - Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + getDispatcher()->vkCmdCopyMicromapToMemoryEXT( static_cast( m_commandBuffer ), + reinterpret_cast( &info ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferDeviceAddressKHR && - "Function needs extension enabled!" 
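// Illustrative usage sketch (not part of the generated header): querying the
// compacted sizes of already-built micromaps through the host-side
// writeMicromapsPropertiesEXT wrapper above. Assumes the default vk / vk::raii
// namespaces, a device with VK_EXT_opacity_micromap (host commands) enabled,
// and micromaps built with the ALLOW_COMPACTION flag.
#include <vulkan/vulkan_raii.hpp>

std::vector<vk::DeviceSize> queryCompactedSizes( vk::raii::Device const &             device,
                                                 std::vector<vk::MicromapEXT> const & micromaps )
{
  // One VkDeviceSize result per micromap; stride is the size of a single element.
  return device.writeMicromapsPropertiesEXT<vk::DeviceSize>( micromaps,
                                                             vk::QueryType::eMicromapCompactedSizeEXT,
                                                             micromaps.size() * sizeof( vk::DeviceSize ),
                                                             sizeof( vk::DeviceSize ) );
}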
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryToMicromapEXT && "Function requires " ); - VkDeviceAddress result = - getDispatcher()->vkGetBufferDeviceAddressKHR( static_cast( m_device ), reinterpret_cast( &info ) ); + getDispatcher()->vkCmdCopyMemoryToMicromapEXT( static_cast( m_commandBuffer ), + reinterpret_cast( &info ) ); + } - return static_cast( result ); + VULKAN_HPP_INLINE void + CommandBuffer::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, + VULKAN_HPP_NAMESPACE::QueryType queryType, + VULKAN_HPP_NAMESPACE::QueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteMicromapsPropertiesEXT && + "Function requires " ); + + getDispatcher()->vkCmdWriteMicromapsPropertiesEXT( static_cast( m_commandBuffer ), + micromaps.size(), + reinterpret_cast( micromaps.data() ), + static_cast( queryType ), + static_cast( queryPool ), + firstQuery ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t - Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR + Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferOpaqueCaptureAddressKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMicromapCompatibilityEXT && + "Function requires " ); - uint64_t result = - getDispatcher()->vkGetBufferOpaqueCaptureAddressKHR( static_cast( m_device ), reinterpret_cast( &info ) ); + VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility; + getDispatcher()->vkGetDeviceMicromapCompatibilityEXT( static_cast( m_device ), + reinterpret_cast( &versionInfo ), + reinterpret_cast( &compatibility ) ); - return result; + return compatibility; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t - Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT + Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, + const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddressKHR && - "Function needs extension enabled!" 
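// Illustrative usage sketch (not part of the generated header): asking the
// implementation how large a micromap and its scratch memory must be before
// allocating anything. `buildInfo` is assumed to be fully populated (usage
// counts, mode, flags) by the caller.
#include <vulkan/vulkan_raii.hpp>

vk::MicromapBuildSizesInfoEXT queryMicromapSizes( vk::raii::Device const &         device,
                                                  vk::MicromapBuildInfoEXT const & buildInfo )
{
  return device.getMicromapBuildSizesEXT( vk::AccelerationStructureBuildTypeKHR::eDevice, buildInfo );
}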
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMicromapBuildSizesEXT && "Function requires " ); - uint64_t result = getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddressKHR( static_cast( m_device ), - reinterpret_cast( &info ) ); + VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT sizeInfo; + getDispatcher()->vkGetMicromapBuildSizesEXT( static_cast( m_device ), + static_cast( buildType ), + reinterpret_cast( &buildInfo ), + reinterpret_cast( &sizeInfo ) ); - return result; + return sizeInfo; } - //=== VK_EXT_line_rasterization === + //=== VK_HUAWEI_cluster_culling_shader === - VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEXT && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawClusterHUAWEI && "Function requires " ); - getDispatcher()->vkCmdSetLineStippleEXT( static_cast( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); + getDispatcher()->vkCmdDrawClusterHUAWEI( static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); } - //=== VK_EXT_host_query_reset === - - VULKAN_HPP_INLINE void QueryPool::resetEXT( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkResetQueryPoolEXT && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawClusterIndirectHUAWEI && + "Function requires " ); - getDispatcher()->vkResetQueryPoolEXT( static_cast( m_device ), static_cast( m_queryPool ), firstQuery, queryCount ); + getDispatcher()->vkCmdDrawClusterIndirectHUAWEI( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ) ); } - //=== VK_EXT_extended_dynamic_state === + //=== VK_EXT_pageable_device_local_memory === - VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void DeviceMemory::setPriorityEXT( float priority ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullModeEXT && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkSetDeviceMemoryPriorityEXT && + "Function requires " ); - getDispatcher()->vkCmdSetCullModeEXT( static_cast( m_commandBuffer ), static_cast( cullMode ) ); + getDispatcher()->vkSetDeviceMemoryPriorityEXT( static_cast( m_device ), static_cast( m_memory ), priority ); } - VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT + //=== VK_KHR_maintenance4 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFaceEXT && "Function needs extension enabled!" 
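// Illustrative usage sketch (not part of the generated header): lowering the
// residency priority of an existing allocation via the RAII DeviceMemory
// wrapper from VK_EXT_pageable_device_local_memory. The value is an arbitrary
// example; 0.5f is the implementation default.
#include <vulkan/vulkan_raii.hpp>

void demoteAllocation( vk::raii::DeviceMemory const & memory )
{
  memory.setPriorityEXT( 0.2f );
}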
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR && + "Function requires or " ); - getDispatcher()->vkCmdSetFrontFaceEXT( static_cast( m_commandBuffer ), static_cast( frontFace ) ); + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; } - VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveTopologyEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR && + "Function requires or " ); - getDispatcher()->vkCmdSetPrimitiveTopologyEXT( static_cast( m_commandBuffer ), static_cast( primitiveTopology ) ); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; } - VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( - VULKAN_HPP_NAMESPACE::ArrayProxy const & viewports ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWithCountEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR && + "Function requires or " ); - getDispatcher()->vkCmdSetViewportWithCountEXT( - static_cast( m_commandBuffer ), viewports.size(), reinterpret_cast( viewports.data() ) ); + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; } - VULKAN_HPP_INLINE void - CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & scissors ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissorWithCountEXT && - "Function needs extension enabled!" 
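// Illustrative usage sketch (not part of the generated header): with
// VK_KHR_maintenance4 (or core Vulkan 1.3) memory requirements can be queried
// from a creation-info struct alone, before any VkBuffer exists. Size and
// usage below are arbitrary example values.
#include <vulkan/vulkan_raii.hpp>

vk::MemoryRequirements2 queryBufferRequirements( vk::raii::Device const & device )
{
  vk::BufferCreateInfo               bufferInfo( {}, 64 * 1024, vk::BufferUsageFlagBits::eStorageBuffer );
  vk::DeviceBufferMemoryRequirements info( &bufferInfo );
  return device.getBufferMemoryRequirementsKHR( info );
}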
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR && + "Function requires or " ); - getDispatcher()->vkCmdSetScissorWithCountEXT( - static_cast( m_commandBuffer ), scissors.size(), reinterpret_cast( scissors.data() ) ); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; } - VULKAN_HPP_INLINE void - CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, - VULKAN_HPP_NAMESPACE::ArrayProxy const & buffers, - VULKAN_HPP_NAMESPACE::ArrayProxy const & offsets, - VULKAN_HPP_NAMESPACE::ArrayProxy const & sizes, - VULKAN_HPP_NAMESPACE::ArrayProxy const & strides ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers2EXT && - "Function needs extension enabled!" ); - if ( buffers.size() != offsets.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" ); - } - if ( !sizes.empty() && buffers.size() != sizes.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" ); - } - if ( !strides.empty() && buffers.size() != strides.size() ) + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR && + "Function requires or " ); + + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR( + static_cast( m_device ), reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR( static_cast( m_device ), + reinterpret_cast( &info ), + &sparseMemoryRequirementCount, + reinterpret_cast( sparseMemoryRequirements.data() ) ); + + VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); + if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); } - - getDispatcher()->vkCmdBindVertexBuffers2EXT( static_cast( m_commandBuffer ), - firstBinding, - buffers.size(), - reinterpret_cast( buffers.data() ), - reinterpret_cast( offsets.data() ), - reinterpret_cast( sizes.data() ), - reinterpret_cast( strides.data() ) ); + return sparseMemoryRequirements; } - VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT + //=== VK_VALVE_descriptor_set_host_mapping === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE Device::getDescriptorSetLayoutHostMappingInfoVALVE( + const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthTestEnableEXT && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutHostMappingInfoVALVE && + "Function requires " ); - getDispatcher()->vkCmdSetDepthTestEnableEXT( static_cast( m_commandBuffer ), static_cast( depthTestEnable ) ); + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE hostMapping; + getDispatcher()->vkGetDescriptorSetLayoutHostMappingInfoVALVE( static_cast( m_device ), + reinterpret_cast( &bindingReference ), + reinterpret_cast( &hostMapping ) ); + + return hostMapping; } - VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * DescriptorSet::getHostMappingVALVE() const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthWriteEnableEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetHostMappingVALVE && + "Function requires " ); - getDispatcher()->vkCmdSetDepthWriteEnableEXT( static_cast( m_commandBuffer ), static_cast( depthWriteEnable ) ); + void * pData; + getDispatcher()->vkGetDescriptorSetHostMappingVALVE( static_cast( m_device ), static_cast( m_descriptorSet ), &pData ); + + return pData; } - VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT + //=== VK_NV_copy_memory_indirect === + + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthCompareOpEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryIndirectNV && "Function requires " ); - getDispatcher()->vkCmdSetDepthCompareOpEXT( static_cast( m_commandBuffer ), static_cast( depthCompareOp ) ); + getDispatcher()->vkCmdCopyMemoryIndirectNV( + static_cast( m_commandBuffer ), static_cast( copyBufferAddress ), copyCount, stride ); } - VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToImageIndirectNV( + VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, + uint32_t stride, + VULKAN_HPP_NAMESPACE::Image dstImage, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, + VULKAN_HPP_NAMESPACE::ArrayProxy const & imageSubresources ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT && - "Function needs extension enabled!" 
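// Illustrative usage sketch (not part of the generated header): resolving the
// CPU-visible address of one binding in a host-mapped descriptor set
// (VK_VALVE_descriptor_set_host_mapping). The layout and set are assumed to
// come from a layout / pool created with the corresponding HOST_ONLY_VALVE
// creation flags.
#include <vulkan/vulkan_raii.hpp>

void * hostPointerForBinding( vk::raii::Device const &        device,
                              vk::raii::DescriptorSet const & set,
                              vk::DescriptorSetLayout         layout,
                              uint32_t                        binding )
{
  vk::DescriptorSetBindingReferenceVALVE      ref( layout, binding );
  vk::DescriptorSetLayoutHostMappingInfoVALVE mapping = device.getDescriptorSetLayoutHostMappingInfoVALVE( ref );
  // Base pointer of the whole set plus the binding's byte offset within it.
  return static_cast<char *>( set.getHostMappingVALVE() ) + mapping.descriptorOffset;
}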
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryToImageIndirectNV && + "Function requires " ); - getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT( static_cast( m_commandBuffer ), static_cast( depthBoundsTestEnable ) ); + getDispatcher()->vkCmdCopyMemoryToImageIndirectNV( static_cast( m_commandBuffer ), + static_cast( copyBufferAddress ), + imageSubresources.size(), + stride, + static_cast( dstImage ), + static_cast( dstImageLayout ), + reinterpret_cast( imageSubresources.data() ) ); } - VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT + //=== VK_NV_memory_decompression === + + VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & decompressMemoryRegions ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilTestEnableEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecompressMemoryNV && "Function requires " ); - getDispatcher()->vkCmdSetStencilTestEnableEXT( static_cast( m_commandBuffer ), static_cast( stencilTestEnable ) ); + getDispatcher()->vkCmdDecompressMemoryNV( static_cast( m_commandBuffer ), + decompressMemoryRegions.size(), + reinterpret_cast( decompressMemoryRegions.data() ) ); } - VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, - VULKAN_HPP_NAMESPACE::StencilOp failOp, - VULKAN_HPP_NAMESPACE::StencilOp passOp, - VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, - VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOpEXT && "Function needs extension enabled!" 
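// Illustrative usage sketch (not part of the generated header): recording a
// single GPU decompression with VK_NV_memory_decompression. Addresses, sizes
// and the method are placeholders supplied by the caller; the command accepts
// an ArrayProxy, so a single region can be passed directly.
#include <vulkan/vulkan_raii.hpp>

void recordDecompress( vk::raii::CommandBuffer const &      cmd,
                       vk::DeviceAddress                    srcAddress,
                       vk::DeviceSize                       compressedSize,
                       vk::DeviceAddress                    dstAddress,
                       vk::DeviceSize                       decompressedSize,
                       vk::MemoryDecompressionMethodFlagsNV method )
{
  vk::DecompressMemoryRegionNV region( srcAddress, dstAddress, compressedSize, decompressedSize, method );
  cmd.decompressMemoryNV( region );
}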
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecompressMemoryIndirectCountNV && + "Function requires " ); - getDispatcher()->vkCmdSetStencilOpEXT( static_cast( m_commandBuffer ), - static_cast( faceMask ), - static_cast( failOp ), - static_cast( passOp ), - static_cast( depthFailOp ), - static_cast( compareOp ) ); + getDispatcher()->vkCmdDecompressMemoryIndirectCountNV( static_cast( m_commandBuffer ), + static_cast( indirectCommandsAddress ), + static_cast( indirectCommandsCountAddress ), + stride ); } - //=== VK_KHR_deferred_host_operations === + //=== VK_NV_device_generated_commands_compute === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR - Device::createDeferredOperationKHR( VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR( *this, allocator ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t DeferredOperationKHR::getMaxConcurrency() const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeferredOperationMaxConcurrencyKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV && + "Function requires " ); - uint32_t result = - getDispatcher()->vkGetDeferredOperationMaxConcurrencyKHR( static_cast( m_device ), static_cast( m_operation ) ); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &memoryRequirements ) ); - return result; + return structureChain; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result DeferredOperationKHR::getResult() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeferredOperationResultKHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdUpdatePipelineIndirectBufferNV && + "Function requires " ); - VkResult result = - getDispatcher()->vkGetDeferredOperationResultKHR( static_cast( m_device ), static_cast( m_operation ) ); - - return static_cast( result ); + getDispatcher()->vkCmdUpdatePipelineIndirectBufferNV( + static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( pipeline ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result DeferredOperationKHR::join() const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress + Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkDeferredOperationJoinKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineIndirectDeviceAddressNV && + "Function requires " ); - VkResult result = getDispatcher()->vkDeferredOperationJoinKHR( static_cast( m_device ), static_cast( m_operation ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::DeferredOperationKHR::join", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } ); + VkDeviceAddress result = getDispatcher()->vkGetPipelineIndirectDeviceAddressNV( + static_cast( m_device ), reinterpret_cast( &info ) ); - return static_cast( result ); + return static_cast( result ); } - //=== VK_KHR_pipeline_executable_properties === + //=== VK_EXT_extended_dynamic_state3 === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo ) const + VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutablePropertiesKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClampEnableEXT && + "Function requires or " ); - std::vector properties; - uint32_t executableCount; - VkResult result; - do - { - result = getDispatcher()->vkGetPipelineExecutablePropertiesKHR( - static_cast( m_device ), reinterpret_cast( &pipelineInfo ), &executableCount, nullptr ); - if ( ( result == VK_SUCCESS ) && executableCount ) - { - properties.resize( executableCount ); - result = getDispatcher()->vkGetPipelineExecutablePropertiesKHR( static_cast( m_device ), - reinterpret_cast( &pipelineInfo ), - &executableCount, - reinterpret_cast( properties.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); - VULKAN_HPP_ASSERT( executableCount <= properties.size() ); - if ( executableCount < properties.size() ) - { - properties.resize( executableCount ); - } - return properties; + getDispatcher()->vkCmdSetDepthClampEnableEXT( static_cast( m_commandBuffer ), static_cast( depthClampEnable ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const + VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutableStatisticsKHR && - "Function needs extension enabled!" 
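// Illustrative usage sketch (not part of the generated header): sizing the
// metadata buffer a compute pipeline needs before it can be dispatched through
// device-generated commands (VK_NV_device_generated_commands_compute). The
// create-info is assumed to carry VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV.
#include <vulkan/vulkan_raii.hpp>

vk::DeviceSize pipelineIndirectMetadataSize( vk::raii::Device const &              device,
                                             vk::ComputePipelineCreateInfo const & createInfo )
{
  vk::MemoryRequirements2 requirements = device.getPipelineIndirectMemoryRequirementsNV( createInfo );
  return requirements.memoryRequirements.size;
}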
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPolygonModeEXT && + "Function requires or " ); - std::vector statistics; - uint32_t statisticCount; - VkResult result; - do - { - result = getDispatcher()->vkGetPipelineExecutableStatisticsKHR( - static_cast( m_device ), reinterpret_cast( &executableInfo ), &statisticCount, nullptr ); - if ( ( result == VK_SUCCESS ) && statisticCount ) - { - statistics.resize( statisticCount ); - result = getDispatcher()->vkGetPipelineExecutableStatisticsKHR( static_cast( m_device ), - reinterpret_cast( &executableInfo ), - &statisticCount, - reinterpret_cast( statistics.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); - VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); - if ( statisticCount < statistics.size() ) - { - statistics.resize( statisticCount ); - } - return statistics; + getDispatcher()->vkCmdSetPolygonModeEXT( static_cast( m_commandBuffer ), static_cast( polygonMode ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const + VULKAN_HPP_INLINE void CommandBuffer::setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizationSamplesEXT && + "Function requires or " ); - std::vector internalRepresentations; - uint32_t internalRepresentationCount; - VkResult result; - do - { - result = getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR( - static_cast( m_device ), reinterpret_cast( &executableInfo ), &internalRepresentationCount, nullptr ); - if ( ( result == VK_SUCCESS ) && internalRepresentationCount ) - { - internalRepresentations.resize( internalRepresentationCount ); - result = getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR( - static_cast( m_device ), - reinterpret_cast( &executableInfo ), - &internalRepresentationCount, - reinterpret_cast( internalRepresentations.data() ) ); - } - } while ( result == VK_INCOMPLETE ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); - VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); - if ( internalRepresentationCount < internalRepresentations.size() ) + getDispatcher()->vkCmdSetRasterizationSamplesEXT( static_cast( m_commandBuffer ), + static_cast( rasterizationSamples ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, + VULKAN_HPP_NAMESPACE::ArrayProxy const & sampleMask ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleMaskEXT && + "Function requires or " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( sampleMask.size() == ( static_cast( samples ) + 31 ) / 32 ); +# else + if ( sampleMask.size() != ( static_cast( samples ) + 31 ) / 32 ) { - internalRepresentations.resize( internalRepresentationCount ); + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::CommandBuffer::setSampleMaskEXT: sampleMask.size() != ( static_cast( samples ) + 31 ) / 32" ); } - return internalRepresentations; +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + 
getDispatcher()->vkCmdSetSampleMaskEXT( static_cast( m_commandBuffer ), + static_cast( samples ), + reinterpret_cast( sampleMask.data() ) ); } - //=== VK_NV_device_generated_commands === + VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToCoverageEnableEXT && + "Function requires or " ); - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 - Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT + getDispatcher()->vkCmdSetAlphaToCoverageEnableEXT( static_cast( m_commandBuffer ), static_cast( alphaToCoverageEnable ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToOneEnableEXT && + "Function requires or " ); - VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; - getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV( static_cast( m_device ), - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); + getDispatcher()->vkCmdSetAlphaToOneEnableEXT( static_cast( m_commandBuffer ), static_cast( alphaToOneEnable ) ); + } - return memoryRequirements; + VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEnableEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetLogicOpEnableEXT( static_cast( m_commandBuffer ), static_cast( logicOpEnable ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( + uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendEnables ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV && - "Function needs extension enabled!" 
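// Illustrative usage sketch (not part of the generated header): the number of
// 32-bit sample-mask words handed to setSampleMaskEXT must equal
// ( rasterizationSamples + 31 ) / 32, which the wrapper above checks. For 8x
// MSAA that is exactly one word.
#include <vulkan/vulkan_raii.hpp>

void recordMsaaState( vk::raii::CommandBuffer const & cmd )
{
  cmd.setRasterizationSamplesEXT( vk::SampleCountFlagBits::e8 );
  vk::SampleMask mask = 0xFFu;  // enable all 8 samples
  cmd.setSampleMaskEXT( vk::SampleCountFlagBits::e8, mask );
}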
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEnableEXT && + "Function requires or " ); - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); - getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV( static_cast( m_device ), - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); + getDispatcher()->vkCmdSetColorBlendEnableEXT( static_cast( m_commandBuffer ), + firstAttachment, + colorBlendEnables.size(), + reinterpret_cast( colorBlendEnables.data() ) ); + } - return structureChain; + VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEquationEXT( + uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendEquations ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEquationEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetColorBlendEquationEXT( static_cast( m_commandBuffer ), + firstAttachment, + colorBlendEquations.size(), + reinterpret_cast( colorBlendEquations.data() ) ); } - VULKAN_HPP_INLINE void - CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setColorWriteMaskEXT( + uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorWriteMasks ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPreprocessGeneratedCommandsNV && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorWriteMaskEXT && + "Function requires or " ); - getDispatcher()->vkCmdPreprocessGeneratedCommandsNV( static_cast( m_commandBuffer ), - reinterpret_cast( &generatedCommandsInfo ) ); + getDispatcher()->vkCmdSetColorWriteMaskEXT( static_cast( m_commandBuffer ), + firstAttachment, + colorWriteMasks.size(), + reinterpret_cast( colorWriteMasks.data() ) ); } VULKAN_HPP_INLINE void - CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, - const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + CommandBuffer::setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdExecuteGeneratedCommandsNV && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetTessellationDomainOriginEXT && + "Function requires or " ); - getDispatcher()->vkCmdExecuteGeneratedCommandsNV( static_cast( m_commandBuffer ), - static_cast( isPreprocessed ), - reinterpret_cast( &generatedCommandsInfo ) ); + getDispatcher()->vkCmdSetTessellationDomainOriginEXT( static_cast( m_commandBuffer ), + static_cast( domainOrigin ) ); } - VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, - VULKAN_HPP_NAMESPACE::Pipeline pipeline, - uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindPipelineShaderGroupNV && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizationStreamEXT && + "Function requires or " ); - getDispatcher()->vkCmdBindPipelineShaderGroupNV( static_cast( m_commandBuffer ), - static_cast( pipelineBindPoint ), - static_cast( pipeline ), - groupIndex ); + getDispatcher()->vkCmdSetRasterizationStreamEXT( static_cast( m_commandBuffer ), rasterizationStream ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV - Device::createIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_INLINE void CommandBuffer::setConservativeRasterizationModeEXT( + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV( *this, createInfo, allocator ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetConservativeRasterizationModeEXT && + "Function requires or " ); - //=== VK_EXT_acquire_drm_display === + getDispatcher()->vkCmdSetConservativeRasterizationModeEXT( static_cast( m_commandBuffer ), + static_cast( conservativeRasterizationMode ) ); + } - VULKAN_HPP_INLINE void PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const + VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireDrmDisplayEXT && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExtraPrimitiveOverestimationSizeEXT && + "Function requires or " ); - VkResult result = - getDispatcher()->vkAcquireDrmDisplayEXT( static_cast( m_physicalDevice ), drmFd, static_cast( display ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" ); + getDispatcher()->vkCmdSetExtraPrimitiveOverestimationSizeEXT( static_cast( m_commandBuffer ), extraPrimitiveOverestimationSize ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayKHR PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const + VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, drmFd, connectorId ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipEnableEXT && + "Function requires or " ); - //=== VK_EXT_private_data === + getDispatcher()->vkCmdSetDepthClipEnableEXT( static_cast( m_commandBuffer ), static_cast( depthClipEnable ) ); + } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot - Device::createPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot( *this, createInfo, allocator ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleLocationsEnableEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetSampleLocationsEnableEXT( static_cast( m_commandBuffer ), static_cast( sampleLocationsEnable ) ); } - VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, - Optional 
allocator ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setColorBlendAdvancedEXT( + uint32_t firstAttachment, + VULKAN_HPP_NAMESPACE::ArrayProxy const & colorBlendAdvanced ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkDestroyPrivateDataSlotEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendAdvancedEXT && + "Function requires or " ); - getDispatcher()->vkDestroyPrivateDataSlotEXT( - static_cast( m_device ), - static_cast( privateDataSlot ), - reinterpret_cast( static_cast( allocator ) ) ); + getDispatcher()->vkCmdSetColorBlendAdvancedEXT( static_cast( m_commandBuffer ), + firstAttachment, + colorBlendAdvanced.size(), + reinterpret_cast( colorBlendAdvanced.data() ) ); } - VULKAN_HPP_INLINE void Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, - uint64_t objectHandle, - VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, - uint64_t data ) const + VULKAN_HPP_INLINE void + CommandBuffer::setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkSetPrivateDataEXT && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetProvokingVertexModeEXT && + "Function requires or " ); - VkResult result = getDispatcher()->vkSetPrivateDataEXT( - static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" ); + getDispatcher()->vkCmdSetProvokingVertexModeEXT( static_cast( m_commandBuffer ), + static_cast( provokingVertexMode ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, - uint64_t objectHandle, - VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPrivateDataEXT && "Function needs extension enabled!" ); - - uint64_t data; - getDispatcher()->vkGetPrivateDataEXT( - static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), &data ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineRasterizationModeEXT && + "Function requires or " ); - return data; + getDispatcher()->vkCmdSetLineRasterizationModeEXT( static_cast( m_commandBuffer ), + static_cast( lineRasterizationMode ) ); } -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_encode_queue === - - VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEncodeVideoKHR && "Function needs extension enabled!" 
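// Illustrative usage sketch (not part of the generated header): a few of the
// VK_EXT_extended_dynamic_state3 rasterization controls recorded together on a
// command buffer whose pipeline declared the matching dynamic states. All
// values are arbitrary examples.
#include <vulkan/vulkan_raii.hpp>

void recordWireframeState( vk::raii::CommandBuffer const & cmd )
{
  cmd.setPolygonModeEXT( vk::PolygonMode::eLine );
  cmd.setLineRasterizationModeEXT( vk::LineRasterizationModeEXT::eBresenham );
  cmd.setProvokingVertexModeEXT( vk::ProvokingVertexModeEXT::eFirstVertex );
  cmd.setDepthClipEnableEXT( VK_FALSE );
}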
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEnableEXT && + "Function requires or " ); - getDispatcher()->vkCmdEncodeVideoKHR( static_cast( m_commandBuffer ), reinterpret_cast( &encodeInfo ) ); + getDispatcher()->vkCmdSetLineStippleEnableEXT( static_cast( m_commandBuffer ), static_cast( stippledLineEnable ) ); } -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ -# if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_objects === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT Device::exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkExportMetalObjectsEXT && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipNegativeOneToOneEXT && + "Function requires or " ); - VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT metalObjectsInfo; - getDispatcher()->vkExportMetalObjectsEXT( static_cast( m_device ), reinterpret_cast( &metalObjectsInfo ) ); - - return metalObjectsInfo; + getDispatcher()->vkCmdSetDepthClipNegativeOneToOneEXT( static_cast( m_commandBuffer ), static_cast( negativeOneToOne ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain Device::exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkExportMetalObjectsEXT && "Function needs extension enabled!" ); - - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT & metalObjectsInfo = structureChain.template get(); - getDispatcher()->vkExportMetalObjectsEXT( static_cast( m_device ), reinterpret_cast( &metalObjectsInfo ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWScalingEnableNV && + "Function requires or " ); - return structureChain; + getDispatcher()->vkCmdSetViewportWScalingEnableNV( static_cast( m_commandBuffer ), static_cast( viewportWScalingEnable ) ); } -# endif /*VK_USE_PLATFORM_METAL_EXT*/ - - //=== VK_KHR_synchronization2 === - VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, - const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setViewportSwizzleNV( + uint32_t firstViewport, + VULKAN_HPP_NAMESPACE::ArrayProxy const & viewportSwizzles ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetEvent2KHR && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportSwizzleNV && + "Function requires or " ); - getDispatcher()->vkCmdSetEvent2KHR( - static_cast( m_commandBuffer ), static_cast( event ), reinterpret_cast( &dependencyInfo ) ); + getDispatcher()->vkCmdSetViewportSwizzleNV( static_cast( m_commandBuffer ), + firstViewport, + viewportSwizzles.size(), + reinterpret_cast( viewportSwizzles.data() ) ); } - VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, - VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetEvent2KHR && "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageToColorEnableNV && + "Function requires or " ); - getDispatcher()->vkCmdResetEvent2KHR( - static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); + getDispatcher()->vkCmdSetCoverageToColorEnableNV( static_cast( m_commandBuffer ), static_cast( coverageToColorEnable ) ); } - VULKAN_HPP_INLINE void - CommandBuffer::waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & events, - VULKAN_HPP_NAMESPACE::ArrayProxy const & dependencyInfos ) const + VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWaitEvents2KHR && "Function needs extension enabled!" ); - if ( events.size() != dependencyInfos.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageToColorLocationNV && + "Function requires or " ); - getDispatcher()->vkCmdWaitEvents2KHR( static_cast( m_commandBuffer ), - events.size(), - reinterpret_cast( events.data() ), - reinterpret_cast( dependencyInfos.data() ) ); + getDispatcher()->vkCmdSetCoverageToColorLocationNV( static_cast( m_commandBuffer ), coverageToColorLocation ); } - VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPipelineBarrier2KHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationModeNV && + "Function requires or " ); - getDispatcher()->vkCmdPipelineBarrier2KHR( static_cast( m_commandBuffer ), - reinterpret_cast( &dependencyInfo ) ); + getDispatcher()->vkCmdSetCoverageModulationModeNV( static_cast( m_commandBuffer ), + static_cast( coverageModulationMode ) ); } - VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, - VULKAN_HPP_NAMESPACE::QueryPool queryPool, - uint32_t query ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteTimestamp2KHR && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationTableEnableNV && + "Function requires or " ); - getDispatcher()->vkCmdWriteTimestamp2KHR( - static_cast( m_commandBuffer ), static_cast( stage ), static_cast( queryPool ), query ); + getDispatcher()->vkCmdSetCoverageModulationTableEnableNV( static_cast( m_commandBuffer ), + static_cast( coverageModulationTableEnable ) ); } - VULKAN_HPP_INLINE void Queue::submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, - VULKAN_HPP_NAMESPACE::Fence fence ) const + VULKAN_HPP_INLINE void + CommandBuffer::setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy const & coverageModulationTable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSubmit2KHR && "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationTableNV && + "Function requires or " ); - VkResult result = getDispatcher()->vkQueueSubmit2KHR( - static_cast( m_queue ), submits.size(), reinterpret_cast( submits.data() ), static_cast( fence ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" ); + getDispatcher()->vkCmdSetCoverageModulationTableNV( + static_cast( m_commandBuffer ), coverageModulationTable.size(), coverageModulationTable.data() ); } - VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, - uint32_t marker ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteBufferMarker2AMD && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetShadingRateImageEnableNV && + "Function requires or " ); - getDispatcher()->vkCmdWriteBufferMarker2AMD( static_cast( m_commandBuffer ), - static_cast( stage ), - static_cast( dstBuffer ), - static_cast( dstOffset ), - marker ); + getDispatcher()->vkCmdSetShadingRateImageEnableNV( static_cast( m_commandBuffer ), static_cast( shadingRateImageEnable ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Queue::getCheckpointData2NV() const + VULKAN_HPP_INLINE void + CommandBuffer::setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueueCheckpointData2NV && - "Function needs extension enabled!" ); - - std::vector checkpointData; - uint32_t checkpointDataCount; - getDispatcher()->vkGetQueueCheckpointData2NV( static_cast( m_queue ), &checkpointDataCount, nullptr ); - checkpointData.resize( checkpointDataCount ); - getDispatcher()->vkGetQueueCheckpointData2NV( - static_cast( m_queue ), &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRepresentativeFragmentTestEnableNV && + "Function requires or " ); - VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); - if ( checkpointDataCount < checkpointData.size() ) - { - checkpointData.resize( checkpointDataCount ); - } - return checkpointData; + getDispatcher()->vkCmdSetRepresentativeFragmentTestEnableNV( static_cast( m_commandBuffer ), + static_cast( representativeFragmentTestEnable ) ); } - //=== VK_NV_fragment_shading_rate_enums === - VULKAN_HPP_INLINE void - CommandBuffer::setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, - const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT + CommandBuffer::setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFragmentShadingRateEnumNV && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageReductionModeNV && + "Function requires or " ); - getDispatcher()->vkCmdSetFragmentShadingRateEnumNV( static_cast( m_commandBuffer ), - static_cast( shadingRate ), - reinterpret_cast( combinerOps ) ); + getDispatcher()->vkCmdSetCoverageReductionModeNV( static_cast( m_commandBuffer ), + static_cast( coverageReductionMode ) ); } - //=== VK_EXT_mesh_shader === + //=== VK_EXT_shader_module_identifier === - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT ShaderModule::getIdentifierEXT() const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksEXT && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderModuleIdentifierEXT && + "Function requires " ); - getDispatcher()->vkCmdDrawMeshTasksEXT( static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); + VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier; + getDispatcher()->vkGetShaderModuleIdentifierEXT( + static_cast( m_device ), static_cast( m_shaderModule ), reinterpret_cast( &identifier ) ); + + return identifier; } - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - uint32_t drawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT + Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderModuleCreateInfoIdentifierEXT && + "Function requires " ); - getDispatcher()->vkCmdDrawMeshTasksIndirectEXT( - static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); + VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier; + getDispatcher()->vkGetShaderModuleCreateInfoIdentifierEXT( static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &identifier ) ); + + return identifier; } - VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer, - VULKAN_HPP_NAMESPACE::DeviceSize offset, - VULKAN_HPP_NAMESPACE::Buffer countBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT + //=== VK_NV_optical_flow === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectCountEXT && - "Function needs extension enabled!" 
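// Illustrative usage sketch (not part of the generated header): deriving a
// stable pipeline-cache key from SPIR-V without ever creating a VkShaderModule,
// using VK_EXT_shader_module_identifier. `spirv` is assumed to contain valid
// SPIR-V words.
#include <vulkan/vulkan_raii.hpp>

std::vector<uint8_t> shaderCacheKey( vk::raii::Device const & device, std::vector<uint32_t> const & spirv )
{
  vk::ShaderModuleCreateInfo    createInfo( {}, spirv.size() * sizeof( uint32_t ), spirv.data() );
  vk::ShaderModuleIdentifierEXT id = device.getShaderModuleCreateInfoIdentifierEXT( createInfo );
  return std::vector<uint8_t>( id.identifier.begin(), id.identifier.begin() + id.identifierSize );
}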
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV && + "Function requires " ); - getDispatcher()->vkCmdDrawMeshTasksIndirectCountEXT( static_cast( m_commandBuffer ), - static_cast( buffer ), - static_cast( offset ), - static_cast( countBuffer ), - static_cast( countBufferOffset ), - maxDrawCount, - stride ); + std::vector imageFormatProperties; + uint32_t formatCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV( + static_cast( m_physicalDevice ), + reinterpret_cast( &opticalFlowImageFormatInfo ), + &formatCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && formatCount ) + { + imageFormatProperties.resize( formatCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV( + static_cast( m_physicalDevice ), + reinterpret_cast( &opticalFlowImageFormatInfo ), + &formatCount, + reinterpret_cast( imageFormatProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" ); + VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() ); + if ( formatCount < imageFormatProperties.size() ) + { + imageFormatProperties.resize( formatCount ); + } + return imageFormatProperties; } - //=== VK_KHR_copy_commands2 === - - VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBuffer2KHR && "Function needs extension enabled!" ); + VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateOpticalFlowSessionNV( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &session ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createOpticalFlowSessionNV" ); +# endif + } - getDispatcher()->vkCmdCopyBuffer2KHR( static_cast( m_commandBuffer ), reinterpret_cast( ©BufferInfo ) ); + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV( *this, *reinterpret_cast( &session ), allocator ); } - VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void OpticalFlowSessionNV::bindImage( VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, + VULKAN_HPP_NAMESPACE::ImageView view, + VULKAN_HPP_NAMESPACE::ImageLayout layout ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImage2KHR && "Function needs extension enabled!" 
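
// --------------------------------------------------------------------------------------
// Illustrative usage sketch (editorial aside, not part of the patch): enumerating the
// image formats usable as optical-flow input and creating a session with the RAII
// wrappers above. Assumes handles created with VK_NV_optical_flow enabled; the chosen
// resolution and the helper name are hypothetical, and a real application would fill the
// remaining session-create fields as well.
#include <vector>
#include <vulkan/vulkan_raii.hpp>

vk::raii::OpticalFlowSessionNV makeFlowSession( vk::raii::PhysicalDevice const & physicalDevice,
                                                vk::raii::Device const &         device )
{
  // First ask which formats the implementation accepts for flow input images.
  vk::OpticalFlowImageFormatInfoNV formatInfo;
  formatInfo.setUsage( vk::OpticalFlowUsageFlagBitsNV::eInput );
  std::vector<vk::OpticalFlowImageFormatPropertiesNV> formats =
    physicalDevice.getOpticalFlowImageFormatsNV( formatInfo );

  // Pick the first reported format for the sketch; a real application would match it
  // against the images it intends to feed into the session.
  vk::OpticalFlowSessionCreateInfoNV createInfo;
  createInfo.setWidth( 1920 ).setHeight( 1080 ).setImageFormat( formats.front().format );

  return device.createOpticalFlowSessionNV( createInfo );
}
// --------------------------------------------------------------------------------------
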
); + VULKAN_HPP_ASSERT( getDispatcher()->vkBindOpticalFlowSessionImageNV && "Function requires " ); - getDispatcher()->vkCmdCopyImage2KHR( static_cast( m_commandBuffer ), reinterpret_cast( ©ImageInfo ) ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkBindOpticalFlowSessionImageNV( static_cast( m_device ), + static_cast( m_session ), + static_cast( bindingPoint ), + static_cast( view ), + static_cast( layout ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::OpticalFlowSessionNV::bindImage" ); } - VULKAN_HPP_INLINE void - CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBufferToImage2KHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdOpticalFlowExecuteNV && "Function requires " ); - getDispatcher()->vkCmdCopyBufferToImage2KHR( static_cast( m_commandBuffer ), - reinterpret_cast( ©BufferToImageInfo ) ); + getDispatcher()->vkCmdOpticalFlowExecuteNV( static_cast( m_commandBuffer ), + static_cast( session ), + reinterpret_cast( &executeInfo ) ); } - VULKAN_HPP_INLINE void - CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT + //=== VK_KHR_maintenance5 === + + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImageToBuffer2KHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindIndexBuffer2KHR && + "Function requires or " ); - getDispatcher()->vkCmdCopyImageToBuffer2KHR( static_cast( m_commandBuffer ), - reinterpret_cast( ©ImageToBufferInfo ) ); + getDispatcher()->vkCmdBindIndexBuffer2KHR( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( size ), + static_cast( indexType ) ); } - VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D + Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBlitImage2KHR && "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetRenderingAreaGranularityKHR && + "Function requires or " ); - getDispatcher()->vkCmdBlitImage2KHR( static_cast( m_commandBuffer ), reinterpret_cast( &blitImageInfo ) ); + VULKAN_HPP_NAMESPACE::Extent2D granularity; + getDispatcher()->vkGetRenderingAreaGranularityKHR( static_cast( m_device ), + reinterpret_cast( &renderingAreaInfo ), + reinterpret_cast( &granularity ) ); + + return granularity; } - VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 + Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResolveImage2KHR && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR && + "Function requires or " ); - getDispatcher()->vkCmdResolveImage2KHR( static_cast( m_commandBuffer ), - reinterpret_cast( &resolveImageInfo ) ); + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; + getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &layout ) ); + + return layout; } - //=== VK_EXT_image_compression_control === + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); + getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &layout ) ); - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT - Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 + Image::getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2EXT && - "Function needs extension enabled!" 
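
// --------------------------------------------------------------------------------------
// Illustrative usage sketch (editorial aside, not part of the patch): two of the
// VK_KHR_maintenance5 entry points wrapped above. Assumes RAII handles created with the
// extension (or a core version that includes it) enabled; the buffer, format, and values
// below are placeholders.
#include <vulkan/vulkan_raii.hpp>

void maintenance5Examples( vk::raii::CommandBuffer const & commandBuffer,
                           vk::raii::Device const &        device,
                           vk::Buffer                      indexBuffer )
{
  // bindIndexBuffer2KHR additionally takes the bound range, so out-of-range index
  // fetches can be diagnosed; VK_WHOLE_SIZE binds everything after the offset.
  commandBuffer.bindIndexBuffer2KHR( indexBuffer, 0, VK_WHOLE_SIZE, vk::IndexType::eUint32 );

  // Query the optimal render-area granularity for a dynamic-rendering pass up front,
  // without having to create a VkRenderPass object first.
  vk::Format colorFormat = vk::Format::eB8G8R8A8Unorm;
  vk::RenderingAreaInfo areaInfo;
  areaInfo.setColorAttachmentFormats( colorFormat );
  vk::Extent2D granularity = device.getRenderingAreaGranularityKHR( areaInfo );
  (void)granularity;
}
// --------------------------------------------------------------------------------------
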
); + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetImageSubresourceLayout2KHR && + "Function requires or or or " ); - VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT layout; - getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast( m_device ), + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; + getDispatcher()->vkGetImageSubresourceLayout2KHR( static_cast( m_device ), static_cast( m_image ), - reinterpret_cast( &subresource ), - reinterpret_cast( &layout ) ); + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); return layout; } template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Image::getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2EXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetImageSubresourceLayout2KHR && + "Function requires or or or " ); - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT & layout = structureChain.template get(); - getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast( m_device ), + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); + getDispatcher()->vkGetImageSubresourceLayout2KHR( static_cast( m_device ), static_cast( m_image ), - reinterpret_cast( &subresource ), - reinterpret_cast( &layout ) ); + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); return structureChain; } -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_acquire_winrt_display === + //=== VK_AMD_anti_lag === - VULKAN_HPP_INLINE void DisplayKHR::acquireWinrtNV() const + VULKAN_HPP_INLINE void Device::antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD & data ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireWinrtDisplayNV && - "Function needs extension enabled!" 
); - - VkResult result = getDispatcher()->vkAcquireWinrtDisplayNV( static_cast( m_physicalDevice ), static_cast( m_display ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::acquireWinrtNV" ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkAntiLagUpdateAMD && "Function requires " ); - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayKHR PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId ) const - { - return VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, deviceRelativeId ); + getDispatcher()->vkAntiLagUpdateAMD( static_cast( m_device ), reinterpret_cast( &data ) ); } -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ -# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) - //=== VK_EXT_directfb_surface === + //=== VK_EXT_shader_object === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - Instance::createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type + Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const { - return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + std::vector shaders( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateShadersEXT( + static_cast( m_device ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( shaders.data() ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createShadersEXT" ); +# endif + } + + std::vector shadersRAII; + shadersRAII.reserve( shaders.size() ); + for ( auto & shader : shaders ) + { + shadersRAII.emplace_back( *this, *reinterpret_cast( &shader ), allocator, result ); + } + return shadersRAII; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 - PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDirectFBPresentationSupportEXT && - "Function needs extension enabled!" 
); - - VkBool32 result = - getDispatcher()->vkGetPhysicalDeviceDirectFBPresentationSupportEXT( static_cast( m_physicalDevice ), queueFamilyIndex, &dfb ); + VULKAN_HPP_NAMESPACE::ShaderEXT shader; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateShadersEXT( + static_cast( m_device ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &shader ) ) ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createShaderEXT" ); +# endif + } - return static_cast( result ); + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT( *this, *reinterpret_cast( &shader ), allocator, result ); } -# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ - //=== VK_KHR_ray_tracing_pipeline === - - VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, - uint32_t width, - uint32_t height, - uint32_t depth ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector ShaderEXT::getBinaryData() const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysKHR && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderBinaryDataEXT && "Function requires " ); - getDispatcher()->vkCmdTraceRaysKHR( static_cast( m_commandBuffer ), - reinterpret_cast( &raygenShaderBindingTable ), - reinterpret_cast( &missShaderBindingTable ), - reinterpret_cast( &hitShaderBindingTable ), - reinterpret_cast( &callableShaderBindingTable ), - width, - height, - depth ); + std::vector data; + size_t dataSize; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetShaderBinaryDataEXT( static_cast( m_device ), static_cast( m_shader ), &dataSize, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( getDispatcher()->vkGetShaderBinaryDataEXT( + static_cast( m_device ), static_cast( m_shader ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::ShaderEXT::getBinaryData" ); + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + if ( dataSize < data.size() ) + { + data.resize( dataSize ); + } + return data; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::createRayTracingPipelinesKHR( - VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, - VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, - VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & stages, + VULKAN_HPP_NAMESPACE::ArrayProxy const & shaders ) const { - return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, deferredOperation, pipelineCache, createInfos, allocator ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindShadersEXT && 
"Function requires " ); +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( stages.size() == shaders.size() ); +# else + if ( stages.size() != shaders.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindShadersEXT: stages.size() != shaders.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + getDispatcher()->vkCmdBindShadersEXT( static_cast( m_commandBuffer ), + stages.size(), + reinterpret_cast( stages.data() ), + reinterpret_cast( shaders.data() ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createRayTracingPipelineKHR( - VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, - VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, - VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_INLINE void + CommandBuffer::setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode, + Optional depthClampRange ) const VULKAN_HPP_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, deferredOperation, pipelineCache, createInfo, allocator ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClampRangeEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetDepthClampRangeEXT( + static_cast( m_commandBuffer ), + static_cast( depthClampMode ), + reinterpret_cast( static_cast( depthClampRange ) ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Pipeline::getRayTracingShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const + //=== VK_KHR_pipeline_binary === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type + Device::createPipelineBinariesKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR && - "Function needs extension enabled!" 
); + std::vector pipelineBinaries; + VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries; + VULKAN_HPP_NAMESPACE::Result result; + if ( createInfo.pKeysAndDataInfo ) + { + VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); + pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); + binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( getDispatcher()->vkCreatePipelineBinariesKHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + else + { + VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); + result = static_cast( getDispatcher()->vkCreatePipelineBinariesKHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + pipelineBinaries.resize( binaries.pipelineBinaryCount ); + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( getDispatcher()->vkCreatePipelineBinariesKHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + } - VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); - std::vector data( dataSize / sizeof( DataType ) ); - VkResult result = getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR( static_cast( m_device ), - static_cast( m_pipeline ), - firstGroup, - groupCount, - data.size() * sizeof( DataType ), - reinterpret_cast( data.data() ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesKHR" ); + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eIncomplete ) && + ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createPipelineBinariesKHR" ); +# endif + } - return data; + std::vector pipelineBinariesRAII; + pipelineBinariesRAII.reserve( pipelineBinaries.size() ); + for ( auto & pipelineBinary : pipelineBinaries ) + { + pipelineBinariesRAII.emplace_back( *this, *reinterpret_cast( &pipelineBinary ), allocator, result ); + } + return pipelineBinariesRAII; } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Pipeline::getRayTracingShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR + Device::getPipelineKeyKHR( Optional pipelineCreateInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineKeyKHR && "Function requires " ); - DataType data; - VkResult result = getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR( static_cast( m_device ), - static_cast( m_pipeline ), - firstGroup, - groupCount, - sizeof( DataType ), - reinterpret_cast( &data ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleKHR" ); + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR pipelineKey; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetPipelineKeyKHR( + static_cast( m_device ), + reinterpret_cast( static_cast( pipelineCreateInfo ) ), + reinterpret_cast( &pipelineKey ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineKeyKHR" ); - return data; + return pipelineKey; } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Pipeline::getRayTracingCaptureReplayShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair> + Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineBinaryDataKHR && "Function requires " ); - VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); - std::vector data( dataSize / sizeof( DataType ) ); - VkResult result = getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( static_cast( m_device ), - static_cast( m_pipeline ), - firstGroup, - groupCount, - data.size() * sizeof( DataType ), - reinterpret_cast( data.data() ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); + std::pair> data_; + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR & pipelineBinaryKey = data_.first; + std::vector & pipelineBinaryData = data_.second; + size_t pipelineBinaryDataSize; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPipelineBinaryDataKHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &pipelineBinaryKey ), + &pipelineBinaryDataSize, + nullptr ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + pipelineBinaryData.resize( pipelineBinaryDataSize ); + result = static_cast( + getDispatcher()->vkGetPipelineBinaryDataKHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &pipelineBinaryKey ), + &pipelineBinaryDataSize, + reinterpret_cast( pipelineBinaryData.data() ) ) ); + } - return data; + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineBinaryDataKHR" ); + + return data_; } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Pipeline::getRayTracingCaptureReplayShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const + VULKAN_HPP_INLINE void + Device::releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR & info, + Optional allocator ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR && - "Function needs extension enabled!" 
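
// --------------------------------------------------------------------------------------
// Illustrative usage sketch (editorial aside, not part of the patch): capturing the
// binaries of an existing pipeline via the VK_KHR_pipeline_binary wrappers above.
// Assumes the pipeline was created with VK_PIPELINE_CREATE_2_CAPTURE_DATA_BIT_KHR on a
// device with the extension enabled; the helper name is hypothetical.
#include <cstdint>
#include <vector>
#include <vulkan/vulkan_raii.hpp>

std::vector<std::vector<uint8_t>> capturePipelineBinaries( vk::raii::Device const & device,
                                                           vk::Pipeline             pipeline )
{
  // Create one VkPipelineBinaryKHR per binary the driver captured for this pipeline.
  vk::PipelineBinaryCreateInfoKHR createInfo;
  createInfo.setPipeline( pipeline );
  std::vector<vk::raii::PipelineBinaryKHR> binaries = device.createPipelineBinariesKHR( createInfo );

  // Read back the key/data pair of each binary; the data blob can be stored on disk and
  // handed back to pipeline creation on a later run.
  std::vector<std::vector<uint8_t>> blobs;
  for ( auto const & binary : binaries )
  {
    vk::PipelineBinaryDataInfoKHR dataInfo;
    dataInfo.setPipelineBinary( *binary );
    blobs.push_back( device.getPipelineBinaryDataKHR( dataInfo ).second );
  }
  return blobs;
}
// --------------------------------------------------------------------------------------
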
); - - DataType data; - VkResult result = getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( static_cast( m_device ), - static_cast( m_pipeline ), - firstGroup, - groupCount, - sizeof( DataType ), - reinterpret_cast( &data ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandleKHR" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseCapturedPipelineDataKHR && "Function requires " ); - return data; + getDispatcher()->vkReleaseCapturedPipelineDataKHR( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( static_cast( allocator ) ) ); } - VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable, - const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable, - VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT + //=== VK_QCOM_tile_properties === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Framebuffer::getTilePropertiesQCOM() const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysIndirectKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetFramebufferTilePropertiesQCOM && + "Function requires " ); - getDispatcher()->vkCmdTraceRaysIndirectKHR( static_cast( m_commandBuffer ), - reinterpret_cast( &raygenShaderBindingTable ), - reinterpret_cast( &missShaderBindingTable ), - reinterpret_cast( &hitShaderBindingTable ), - reinterpret_cast( &callableShaderBindingTable ), - static_cast( indirectDeviceAddress ) ); + std::vector properties; + uint32_t propertiesCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetFramebufferTilePropertiesQCOM( + static_cast( m_device ), static_cast( m_framebuffer ), &propertiesCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertiesCount ) + { + properties.resize( propertiesCount ); + result = static_cast( + getDispatcher()->vkGetFramebufferTilePropertiesQCOM( static_cast( m_device ), + static_cast( m_framebuffer ), + &propertiesCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + + VULKAN_HPP_ASSERT( propertiesCount <= properties.size() ); + if ( propertiesCount < properties.size() ) + { + properties.resize( propertiesCount ); + } + return properties; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize - Pipeline::getRayTracingShaderGroupStackSizeKHR( uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::TilePropertiesQCOM + Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupStackSizeKHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDynamicRenderingTilePropertiesQCOM && + "Function requires " ); - VkDeviceSize result = getDispatcher()->vkGetRayTracingShaderGroupStackSizeKHR( - static_cast( m_device ), static_cast( m_pipeline ), group, static_cast( groupShader ) ); + VULKAN_HPP_NAMESPACE::TilePropertiesQCOM properties; + getDispatcher()->vkGetDynamicRenderingTilePropertiesQCOM( static_cast( m_device ), + reinterpret_cast( &renderingInfo ), + reinterpret_cast( &properties ) ); - return static_cast( result ); + return properties; } - VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT + //=== VK_NV_cooperative_vector === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getCooperativeVectorPropertiesNV() const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRayTracingPipelineStackSizeKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCooperativeVectorPropertiesNV && + "Function requires " ); - getDispatcher()->vkCmdSetRayTracingPipelineStackSizeKHR( static_cast( m_commandBuffer ), pipelineStackSize ); + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceCooperativeVectorPropertiesNV( static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceCooperativeVectorPropertiesNV( + static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeVectorPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; } - //=== VK_EXT_vertex_input_dynamic_state === + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result + Device::convertCooperativeVectorMatrixNV( const VULKAN_HPP_NAMESPACE::ConvertCooperativeVectorMatrixInfoNV & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkConvertCooperativeVectorMatrixNV && + "Function requires " ); - VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( - VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexBindingDescriptions, - VULKAN_HPP_NAMESPACE::ArrayProxy const & vertexAttributeDescriptions ) const - VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkConvertCooperativeVectorMatrixNV( + static_cast( m_device ), reinterpret_cast( &info ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::convertCooperativeVectorMatrixNV", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } ); + + return static_cast( result ); + } + + VULKAN_HPP_INLINE void CommandBuffer::convertCooperativeVectorMatrixNV( + VULKAN_HPP_NAMESPACE::ArrayProxy const & infos ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetVertexInputEXT && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdConvertCooperativeVectorMatrixNV && + "Function requires " ); - getDispatcher()->vkCmdSetVertexInputEXT( static_cast( m_commandBuffer ), - vertexBindingDescriptions.size(), - reinterpret_cast( vertexBindingDescriptions.data() ), - vertexAttributeDescriptions.size(), - reinterpret_cast( vertexAttributeDescriptions.data() ) ); + getDispatcher()->vkCmdConvertCooperativeVectorMatrixNV( + static_cast( m_commandBuffer ), infos.size(), reinterpret_cast( infos.data() ) ); } -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_memory === + //=== VK_NV_low_latency2 === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE zx_handle_t - Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const + VULKAN_HPP_INLINE void SwapchainKHR::setLatencySleepModeNV( const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryZirconHandleFUCHSIA && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkSetLatencySleepModeNV && "Function requires " ); - zx_handle_t zirconHandle; - VkResult result = getDispatcher()->vkGetMemoryZirconHandleFUCHSIA( - static_cast( m_device ), reinterpret_cast( &getZirconHandleInfo ), &zirconHandle ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" ); - - return zirconHandle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkSetLatencySleepModeNV( + static_cast( m_device ), static_cast( m_swapchain ), reinterpret_cast( &sleepModeInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::setLatencySleepModeNV" ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA - Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle ) const + VULKAN_HPP_INLINE void SwapchainKHR::latencySleepNV( const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryZirconHandlePropertiesFUCHSIA && - "Function needs extension enabled!" ); - - VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties; - VkResult result = - getDispatcher()->vkGetMemoryZirconHandlePropertiesFUCHSIA( static_cast( m_device ), - static_cast( handleType ), - zirconHandle, - reinterpret_cast( &memoryZirconHandleProperties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkLatencySleepNV && "Function requires " ); - return memoryZirconHandleProperties; + getDispatcher()->vkLatencySleepNV( + static_cast( m_device ), static_cast( m_swapchain ), reinterpret_cast( &sleepInfo ) ); } -# endif /*VK_USE_PLATFORM_FUCHSIA*/ - -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_semaphore === - VULKAN_HPP_INLINE void - Device::importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo ) const + VULKAN_HPP_INLINE void SwapchainKHR::setLatencyMarkerNV( const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreZirconHandleFUCHSIA && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkSetLatencyMarkerNV && "Function requires " ); - VkResult result = getDispatcher()->vkImportSemaphoreZirconHandleFUCHSIA( - static_cast( m_device ), reinterpret_cast( &importSemaphoreZirconHandleInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" ); + getDispatcher()->vkSetLatencyMarkerNV( static_cast( m_device ), + static_cast( m_swapchain ), + reinterpret_cast( &latencyMarkerInfo ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE zx_handle_t - Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector SwapchainKHR::getLatencyTimingsNV() const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreZirconHandleFUCHSIA && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetLatencyTimingsNV && "Function requires " ); - zx_handle_t zirconHandle; - VkResult result = getDispatcher()->vkGetSemaphoreZirconHandleFUCHSIA( - static_cast( m_device ), reinterpret_cast( &getZirconHandleInfo ), &zirconHandle ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" ); + std::vector timings; + VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo; + getDispatcher()->vkGetLatencyTimingsNV( + static_cast( m_device ), static_cast( m_swapchain ), reinterpret_cast( &latencyMarkerInfo ) ); + timings.resize( latencyMarkerInfo.timingCount ); + latencyMarkerInfo.pTimings = timings.data(); + getDispatcher()->vkGetLatencyTimingsNV( + static_cast( m_device ), static_cast( m_swapchain ), reinterpret_cast( &latencyMarkerInfo ) ); - return zirconHandle; + return timings; } -# endif /*VK_USE_PLATFORM_FUCHSIA*/ -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_buffer_collection === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA - Device::createBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo ) const VULKAN_HPP_NOEXCEPT { - return VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA( *this, createInfo, allocator ); + VULKAN_HPP_ASSERT( getDispatcher()->vkQueueNotifyOutOfBandNV && "Function requires " ); + + getDispatcher()->vkQueueNotifyOutOfBandNV( static_cast( m_queue ), reinterpret_cast( &queueTypeInfo ) ); } - VULKAN_HPP_INLINE void BufferCollectionFUCHSIA::setImageConstraints( const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo ) const + //=== VK_KHR_cooperative_matrix === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getCooperativeMatrixPropertiesKHR() const { - VULKAN_HPP_ASSERT( getDispatcher()->vkSetBufferCollectionImageConstraintsFUCHSIA && - "Function needs extension enabled!" 
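
// --------------------------------------------------------------------------------------
// Illustrative usage sketch (editorial aside, not part of the patch): driving the
// VK_NV_low_latency2 swapchain wrappers defined above. Assumes a vk::raii::SwapchainKHR
// on a device with the extension enabled and a timeline semaphore the driver signals to
// wake the application; the frame id and function name are hypothetical.
#include <cstdint>
#include <vector>
#include <vulkan/vulkan_raii.hpp>

void lowLatencyFrame( vk::raii::SwapchainKHR const & swapchain,
                      vk::Semaphore                  latencySemaphore,
                      uint64_t                       frameId )
{
  // Opt the swapchain into low-latency mode (typically done once, not every frame).
  vk::LatencySleepModeInfoNV modeInfo;
  modeInfo.setLowLatencyMode( VK_TRUE ).setLowLatencyBoost( VK_TRUE );
  swapchain.setLatencySleepModeNV( modeInfo );

  // Each frame: ask the driver to delay the CPU until input sampling should start ...
  vk::LatencySleepInfoNV sleepInfo;
  sleepInfo.setSignalSemaphore( latencySemaphore ).setValue( frameId );
  swapchain.latencySleepNV( sleepInfo );

  // ... and bracket the frame with latency markers so per-frame timings can be reported.
  vk::SetLatencyMarkerInfoNV markerInfo;
  markerInfo.setPresentID( frameId ).setMarker( vk::LatencyMarkerNV::eSimulationStart );
  swapchain.setLatencyMarkerNV( markerInfo );

  std::vector<vk::LatencyTimingsFrameReportNV> timings = swapchain.getLatencyTimingsNV();
  (void)timings;
}
// --------------------------------------------------------------------------------------
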
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR && + "Function requires " ); - VkResult result = - getDispatcher()->vkSetBufferCollectionImageConstraintsFUCHSIA( static_cast( m_device ), - static_cast( m_collection ), - reinterpret_cast( &imageConstraintsInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setImageConstraints" ); + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( + static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; } - VULKAN_HPP_INLINE void - BufferCollectionFUCHSIA::setBufferConstraints( const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo ) const + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + + VULKAN_HPP_INLINE void CommandBuffer::setAttachmentFeedbackLoopEnableEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkSetBufferCollectionBufferConstraintsFUCHSIA && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAttachmentFeedbackLoopEnableEXT && + "Function requires " ); - VkResult result = - getDispatcher()->vkSetBufferCollectionBufferConstraintsFUCHSIA( static_cast( m_device ), - static_cast( m_collection ), - reinterpret_cast( &bufferConstraintsInfo ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setBufferConstraints" ); + getDispatcher()->vkCmdSetAttachmentFeedbackLoopEnableEXT( static_cast( m_commandBuffer ), + static_cast( aspectMask ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA BufferCollectionFUCHSIA::getProperties() const +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX + Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferCollectionPropertiesFUCHSIA && - "Function needs extension enabled!" 
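
// --------------------------------------------------------------------------------------
// Illustrative usage sketch (editorial aside, not part of the patch): the wrapper above
// hides the usual count/fill eIncomplete loop, so enumerating the supported cooperative
// matrix configurations is a single call. Assumes VK_KHR_cooperative_matrix is enabled.
#include <cstdint>
#include <cstdio>
#include <vector>
#include <vulkan/vulkan_raii.hpp>

void listCooperativeMatrixShapes( vk::raii::PhysicalDevice const & physicalDevice )
{
  std::vector<vk::CooperativeMatrixPropertiesKHR> properties =
    physicalDevice.getCooperativeMatrixPropertiesKHR();
  for ( auto const & p : properties )
  {
    // Each entry describes one supported (M x N x K) shape / component-type combination.
    std::printf( "MxNxK = %ux%ux%u, scope = %u\n", p.MSize, p.NSize, p.KSize, static_cast<uint32_t>( p.scope ) );
  }
}
// --------------------------------------------------------------------------------------
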
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetScreenBufferPropertiesQNX && + "Function requires " ); - VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties; - VkResult result = getDispatcher()->vkGetBufferCollectionPropertiesFUCHSIA( static_cast( m_device ), - static_cast( m_collection ), - reinterpret_cast( &properties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::getProperties" ); + VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX properties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetScreenBufferPropertiesQNX( + static_cast( m_device ), &buffer, reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); return properties; } -# endif /*VK_USE_PLATFORM_FUCHSIA*/ - - //=== VK_HUAWEI_subpass_shading === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair - RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI() const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetScreenBufferPropertiesQNX && + "Function requires " ); - VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize; - VkResult result = getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( - static_cast( m_device ), static_cast( m_renderPass ), reinterpret_cast( &maxWorkgroupSize ) ); - resultCheck( static_cast( result ), - VULKAN_HPP_NAMESPACE_STRING "::RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } ); + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX & properties = structureChain.template get(); + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetScreenBufferPropertiesQNX( + static_cast( m_device ), &buffer, reinterpret_cast( &properties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); - return std::make_pair( static_cast( result ), maxWorkgroupSize ); + return structureChain; } +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI() const VULKAN_HPP_NOEXCEPT + //=== VK_KHR_line_rasterization === + + VULKAN_HPP_INLINE void CommandBuffer::setLineStippleKHR( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSubpassShadingHUAWEI && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleKHR && + "Function requires or or " ); - getDispatcher()->vkCmdSubpassShadingHUAWEI( static_cast( m_commandBuffer ) ); + getDispatcher()->vkCmdSetLineStippleKHR( static_cast( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); } - //=== VK_HUAWEI_invocation_mask === + //=== VK_KHR_calibrated_timestamps === - VULKAN_HPP_INLINE void CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, - VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getCalibrateableTimeDomainsKHR() const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindInvocationMaskHUAWEI && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsKHR && + "Function requires or " ); - getDispatcher()->vkCmdBindInvocationMaskHUAWEI( - static_cast( m_commandBuffer ), static_cast( imageView ), static_cast( imageLayout ) ); + std::vector timeDomains; + uint32_t timeDomainCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( static_cast( m_physicalDevice ), &timeDomainCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( + static_cast( m_physicalDevice ), &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + if ( timeDomainCount < timeDomains.size() ) + { + timeDomains.resize( timeDomainCount ); + } + return timeDomains; } - //=== VK_NV_external_memory_rdma === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::RemoteAddressNV - Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair, uint64_t> Device::getCalibratedTimestampsKHR( + VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryRemoteAddressNV && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsKHR && + "Function requires or " ); - VULKAN_HPP_NAMESPACE::RemoteAddressNV address; - VkResult result = getDispatcher()->vkGetMemoryRemoteAddressNV( static_cast( m_device ), - reinterpret_cast( &memoryGetRemoteAddressInfo ), - reinterpret_cast( &address ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" ); + std::pair, uint64_t> data_( std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) ); + std::vector & timestamps = data_.first; + uint64_t & maxDeviation = data_.second; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetCalibratedTimestampsKHR( static_cast( m_device ), + timestampInfos.size(), + reinterpret_cast( timestampInfos.data() ), + timestamps.data(), + &maxDeviation ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); - return address; + return data_; } - //=== VK_EXT_pipeline_properties === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::BaseOutStructure - Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair + Device::getCalibratedTimestampKHR( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelinePropertiesEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsKHR && + "Function requires or " ); - VULKAN_HPP_NAMESPACE::BaseOutStructure pipelineProperties; - VkResult result = getDispatcher()->vkGetPipelinePropertiesEXT( static_cast( m_device ), - reinterpret_cast( &pipelineInfo ), - reinterpret_cast( &pipelineProperties ) ); - resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" ); + std::pair data_; + uint64_t & timestamp = data_.first; + uint64_t & maxDeviation = data_.second; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetCalibratedTimestampsKHR( + static_cast( m_device ), 1, reinterpret_cast( ×tampInfo ), ×tamp, &maxDeviation ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampKHR" ); - return pipelineProperties; + return data_; } - //=== VK_EXT_extended_dynamic_state2 === + //=== VK_KHR_maintenance6 === - VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPatchControlPointsEXT && - "Function needs extension enabled!" 
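
// --------------------------------------------------------------------------------------
// Illustrative usage sketch (editorial aside, not part of the patch): correlating GPU
// and host clocks through the VK_KHR_calibrated_timestamps wrappers above. Assumes the
// extension is enabled and that the host exposes the clock-monotonic time domain (e.g.
// on Linux); the helper name is hypothetical.
#include <array>
#include <cstdint>
#include <utility>
#include <vulkan/vulkan_raii.hpp>

std::pair<uint64_t, uint64_t> sampleDeviceAndHostClocks( vk::raii::Device const & device )
{
  std::array<vk::CalibratedTimestampInfoKHR, 2> infos = {
    vk::CalibratedTimestampInfoKHR{ vk::TimeDomainKHR::eDevice },
    vk::CalibratedTimestampInfoKHR{ vk::TimeDomainKHR::eClockMonotonic }
  };

  // Returns one timestamp per info plus the maximum deviation of the sampling itself.
  auto [timestamps, maxDeviation] = device.getCalibratedTimestampsKHR( infos );
  (void)maxDeviation;
  return { timestamps[0], timestamps[1] };
}
// --------------------------------------------------------------------------------------
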
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorSets2KHR && + "Function requires or " ); - getDispatcher()->vkCmdSetPatchControlPointsEXT( static_cast( m_commandBuffer ), patchControlPoints ); + getDispatcher()->vkCmdBindDescriptorSets2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( &bindDescriptorSetsInfo ) ); } - VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushConstants2KHR && "Function requires or " ); - getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT( static_cast( m_commandBuffer ), static_cast( rasterizerDiscardEnable ) ); + getDispatcher()->vkCmdPushConstants2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( &pushConstantsInfo ) ); } - VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBiasEnableEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSet2KHR && + "Function requires or " ); - getDispatcher()->vkCmdSetDepthBiasEnableEXT( static_cast( m_commandBuffer ), static_cast( depthBiasEnable ) ); + getDispatcher()->vkCmdPushDescriptorSet2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( &pushDescriptorSetInfo ) ); } - VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate2KHR( + const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEXT && "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetWithTemplate2KHR && + "Function requires or " ); - getDispatcher()->vkCmdSetLogicOpEXT( static_cast( m_commandBuffer ), static_cast( logicOp ) ); + getDispatcher()->vkCmdPushDescriptorSetWithTemplate2KHR( + static_cast( m_commandBuffer ), reinterpret_cast( &pushDescriptorSetWithTemplateInfo ) ); } - VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsets2EXT( + const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT && - "Function needs extension enabled!" 
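
// --------------------------------------------------------------------------------------
// Illustrative usage sketch (editorial aside, not part of the patch): the
// VK_KHR_maintenance6 "2" entry points wrapped above move all parameters into structs,
// which makes them extensible via pNext. Assumes the extension (or a core version that
// includes it) is enabled; the handles and values are supplied by the caller.
#include <cstdint>
#include <vulkan/vulkan_raii.hpp>

void pushFrameConstants( vk::raii::CommandBuffer const & commandBuffer,
                         vk::PipelineLayout              layout,
                         vk::DescriptorSet               set,
                         float                           timeSeconds )
{
  vk::BindDescriptorSetsInfo bindInfo;
  bindInfo.setStageFlags( vk::ShaderStageFlagBits::eAllGraphics )
    .setLayout( layout )
    .setFirstSet( 0 )
    .setDescriptorSets( set );
  commandBuffer.bindDescriptorSets2KHR( bindInfo );

  vk::PushConstantsInfo pushInfo;
  pushInfo.setStageFlags( vk::ShaderStageFlagBits::eAllGraphics )
    .setLayout( layout )
    .setOffset( 0 )
    .setSize( sizeof( float ) )
    .setPValues( &timeSeconds );
  commandBuffer.pushConstants2KHR( pushInfo );
}
// --------------------------------------------------------------------------------------
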
); - - getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT( static_cast( m_commandBuffer ), static_cast( primitiveRestartEnable ) ); - } + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDescriptorBufferOffsets2EXT && + "Function requires " ); -# if defined( VK_USE_PLATFORM_SCREEN_QNX ) - //=== VK_QNX_screen_surface === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR - Instance::createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator ) const - { - return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator ); + getDispatcher()->vkCmdSetDescriptorBufferOffsets2EXT( static_cast( m_commandBuffer ), + reinterpret_cast( &setDescriptorBufferOffsetsInfo ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 - PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplers2EXT( + const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceScreenPresentationSupportQNX && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorBufferEmbeddedSamplers2EXT && + "Function requires " ); - VkBool32 result = - getDispatcher()->vkGetPhysicalDeviceScreenPresentationSupportQNX( static_cast( m_physicalDevice ), queueFamilyIndex, &window ); - - return static_cast( result ); + getDispatcher()->vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( + static_cast( m_commandBuffer ), + reinterpret_cast( &bindDescriptorBufferEmbeddedSamplersInfo ) ); } -# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - //=== VK_EXT_color_write_enable === + //=== VK_NV_cluster_acceleration_structure === - VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( - VULKAN_HPP_NAMESPACE::ArrayProxy const & colorWriteEnables ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR Device::getClusterAccelerationStructureBuildSizesNV( + const VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV & info ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorWriteEnableEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetClusterAccelerationStructureBuildSizesNV && + "Function requires " ); - getDispatcher()->vkCmdSetColorWriteEnableEXT( - static_cast( m_commandBuffer ), colorWriteEnables.size(), reinterpret_cast( colorWriteEnables.data() ) ); - } + VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo; + getDispatcher()->vkGetClusterAccelerationStructureBuildSizesNV( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &sizeInfo ) ); - //=== VK_KHR_ray_tracing_maintenance1 === + return sizeInfo; + } - VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::buildClusterAccelerationStructureIndirectNV( + const VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureCommandsInfoNV & commandInfos ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysIndirect2KHR && - "Function needs extension enabled!" 
                         );
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildClusterAccelerationStructureIndirectNV &&
+                         "Function requires <VK_NV_cluster_acceleration_structure>" );
-      getDispatcher()->vkCmdTraceRaysIndirect2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
+      getDispatcher()->vkCmdBuildClusterAccelerationStructureIndirectNV(
+        static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkClusterAccelerationStructureCommandsInfoNV *>( &commandInfos ) );
     }

-    //=== VK_EXT_multi_draw ===
+    //=== VK_NV_partitioned_acceleration_structure ===

-    VULKAN_HPP_INLINE void
-      CommandBuffer::drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo,
-                                   uint32_t                                                                                      instanceCount,
-                                   uint32_t                                                                                      firstInstance ) const VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR
+      Device::getPartitionedAccelerationStructuresBuildSizesNV( const VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV & info ) const
+      VULKAN_HPP_NOEXCEPT
     {
-      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMultiEXT && "Function needs extension <VK_EXT_multi_draw> enabled!" );
-
-      getDispatcher()->vkCmdDrawMultiEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
-                                          vertexInfo.size(),
-                                          reinterpret_cast<const VkMultiDrawInfoEXT *>( vertexInfo.data() ),
-                                          instanceCount,
-                                          firstInstance,
-                                          vertexInfo.stride() );
-    }
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPartitionedAccelerationStructuresBuildSizesNV &&
+                         "Function requires <VK_NV_partitioned_acceleration_structure>" );
-    VULKAN_HPP_INLINE void
-      CommandBuffer::drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo,
-                                          uint32_t                                                                                             instanceCount,
-                                          uint32_t                                                                                             firstInstance,
-                                          Optional<const int32_t>                                                                              vertexOffset ) const VULKAN_HPP_NOEXCEPT
-    {
-      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMultiIndexedEXT && "Function needs extension <VK_EXT_multi_draw> enabled!" );
+      VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo;
+      getDispatcher()->vkGetPartitionedAccelerationStructuresBuildSizesNV(
+        static_cast<VkDevice>( m_device ),
+        reinterpret_cast<const VkPartitionedAccelerationStructureInstancesInputNV *>( &info ),
+        reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) );
-      getDispatcher()->vkCmdDrawMultiIndexedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
-                                                 indexInfo.size(),
-                                                 reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( indexInfo.data() ),
-                                                 instanceCount,
-                                                 firstInstance,
-                                                 indexInfo.stride(),
-                                                 static_cast<const int32_t *>( vertexOffset ) );
+      return sizeInfo;
     }

-    //=== VK_EXT_pageable_device_local_memory ===
-
-    VULKAN_HPP_INLINE void DeviceMemory::setPriorityEXT( float priority ) const VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_INLINE void CommandBuffer::buildPartitionedAccelerationStructuresNV(
+      const VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureInfoNV & buildInfo ) const VULKAN_HPP_NOEXCEPT
     {
-      VULKAN_HPP_ASSERT( getDispatcher()->vkSetDeviceMemoryPriorityEXT &&
-                         "Function needs extension <VK_EXT_pageable_device_local_memory> enabled!"
); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildPartitionedAccelerationStructuresNV && + "Function requires " ); - getDispatcher()->vkSetDeviceMemoryPriorityEXT( static_cast( m_device ), static_cast( m_memory ), priority ); + getDispatcher()->vkCmdBuildPartitionedAccelerationStructuresNV( static_cast( m_commandBuffer ), + reinterpret_cast( &buildInfo ) ); } - //=== VK_KHR_maintenance4 === + //=== VK_EXT_device_generated_commands === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 - Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getGeneratedCommandsMemoryRequirementsEXT( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsEXT && + "Function requires " ); VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; - getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR( static_cast( m_device ), - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); + getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsEXT( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); return memoryRequirements; } template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::getGeneratedCommandsMemoryRequirementsEXT( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsEXT && + "Function requires " ); - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); - getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR( static_cast( m_device ), - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsEXT( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); return structureChain; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 - Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR && - "Function needs extension enabled!" 
); - - VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; - getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR( static_cast( m_device ), - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPreprocessGeneratedCommandsEXT && + "Function requires " ); - return memoryRequirements; + getDispatcher()->vkCmdPreprocessGeneratedCommandsEXT( static_cast( m_commandBuffer ), + reinterpret_cast( &generatedCommandsInfo ), + static_cast( stateCommandBuffer ) ); } - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain - Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR && - "Function needs extension enabled!" ); - - StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); - getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR( static_cast( m_device ), - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdExecuteGeneratedCommandsEXT && + "Function requires " ); - return structureChain; + getDispatcher()->vkCmdExecuteGeneratedCommandsEXT( static_cast( m_commandBuffer ), + static_cast( isPreprocessed ), + reinterpret_cast( &generatedCommandsInfo ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutEXT>::Type + Device::createIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR && - "Function needs extension enabled!" 
); + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateIndirectCommandsLayoutEXT( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectCommandsLayout ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createIndirectCommandsLayoutEXT" ); +# endif + } - std::vector sparseMemoryRequirements; - uint32_t sparseMemoryRequirementCount; - getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR( - static_cast( m_device ), reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); - sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); - getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR( static_cast( m_device ), - reinterpret_cast( &info ), - &sparseMemoryRequirementCount, - reinterpret_cast( sparseMemoryRequirements.data() ) ); + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutEXT( + *this, *reinterpret_cast( &indirectCommandsLayout ), allocator ); + } - VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); - if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateIndirectExecutionSetEXT( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectExecutionSet ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createIndirectExecutionSetEXT" ); +# endif } - return sparseMemoryRequirements; - } - //=== VK_VALVE_descriptor_set_host_mapping === + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectExecutionSetEXT( + *this, *reinterpret_cast( &indirectExecutionSet ), allocator ); + } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE Device::getDescriptorSetLayoutHostMappingInfoVALVE( - const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void IndirectExecutionSetEXT::updatePipeline( + VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutHostMappingInfoVALVE && - "Function needs extension enabled!" 
); - - VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE hostMapping; - getDispatcher()->vkGetDescriptorSetLayoutHostMappingInfoVALVE( static_cast( m_device ), - reinterpret_cast( &bindingReference ), - reinterpret_cast( &hostMapping ) ); + VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateIndirectExecutionSetPipelineEXT && + "Function requires " ); - return hostMapping; + getDispatcher()->vkUpdateIndirectExecutionSetPipelineEXT( static_cast( m_device ), + static_cast( m_indirectExecutionSet ), + executionSetWrites.size(), + reinterpret_cast( executionSetWrites.data() ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * DescriptorSet::getHostMappingVALVE() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void IndirectExecutionSetEXT::updateShader( + VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetHostMappingVALVE && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateIndirectExecutionSetShaderEXT && + "Function requires " ); - void * pData; - getDispatcher()->vkGetDescriptorSetHostMappingVALVE( static_cast( m_device ), static_cast( m_descriptorSet ), &pData ); + getDispatcher()->vkUpdateIndirectExecutionSetShaderEXT( static_cast( m_device ), + static_cast( m_indirectExecutionSet ), + executionSetWrites.size(), + reinterpret_cast( executionSetWrites.data() ) ); + } - return pData; + //=== VK_NV_cooperative_matrix2 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV && + "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + static_cast( m_physicalDevice ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; } - //=== VK_EXT_shader_module_identifier === +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_external_memory_metal === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT ShaderModule::getIdentifierEXT() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * + Device::getMemoryMetalHandleEXT( const VULKAN_HPP_NAMESPACE::MemoryGetMetalHandleInfoEXT & getMetalHandleInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderModuleIdentifierEXT && - "Function needs extension enabled!" 
); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryMetalHandleEXT && "Function requires " ); - VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier; - getDispatcher()->vkGetShaderModuleIdentifierEXT( - static_cast( m_device ), static_cast( m_shaderModule ), reinterpret_cast( &identifier ) ); + void * handle; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetMemoryMetalHandleEXT( + static_cast( m_device ), reinterpret_cast( &getMetalHandleInfo ), &handle ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryMetalHandleEXT" ); - return identifier; + return handle; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT - Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryMetalHandlePropertiesEXT + Device::getMemoryMetalHandlePropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HandleType const & handle ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderModuleCreateInfoIdentifierEXT && - "Function needs extension enabled!" ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryMetalHandlePropertiesEXT && + "Function requires " ); - VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier; - getDispatcher()->vkGetShaderModuleCreateInfoIdentifierEXT( static_cast( m_device ), - reinterpret_cast( &createInfo ), - reinterpret_cast( &identifier ) ); + VULKAN_HPP_NAMESPACE::MemoryMetalHandlePropertiesEXT memoryMetalHandleProperties; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetMemoryMetalHandlePropertiesEXT( static_cast( m_device ), + static_cast( handleType ), + reinterpret_cast( &handle ), + reinterpret_cast( &memoryMetalHandleProperties ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryMetalHandlePropertiesEXT" ); - return identifier; + return memoryMetalHandleProperties; } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_QCOM_tile_properties === + //==================== + //=== RAII Helpers === + //==================== - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Framebuffer::getTilePropertiesQCOM() const + template + std::vector filterCppTypes( std::vector const & raiiTypes ) { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetFramebufferTilePropertiesQCOM && - "Function needs extension enabled!" 
); + std::vector cppTypes( raiiTypes.size() ); + std::transform( raiiTypes.begin(), raiiTypes.end(), cppTypes.begin(), []( RAIIType const & d ) { return *d; } ); + return cppTypes; + } - std::vector properties; - uint32_t propertiesCount; - VkResult result; - do + template + std::vector filterCppTypes( std::vector const & raiiTypes, UnaryPredicate p ) + { + std::vector cppTypes; + for ( auto const & t : raiiTypes ) { - result = getDispatcher()->vkGetFramebufferTilePropertiesQCOM( - static_cast( m_device ), static_cast( m_framebuffer ), &propertiesCount, nullptr ); - if ( ( result == VK_SUCCESS ) && propertiesCount ) + if ( p( t ) ) { - properties.resize( propertiesCount ); - result = getDispatcher()->vkGetFramebufferTilePropertiesQCOM( static_cast( m_device ), - static_cast( m_framebuffer ), - &propertiesCount, - reinterpret_cast( properties.data() ) ); + cppTypes.push_back( *t ); } - } while ( result == VK_INCOMPLETE ); - - VULKAN_HPP_ASSERT( propertiesCount <= properties.size() ); - if ( propertiesCount < properties.size() ) - { - properties.resize( propertiesCount ); } - return properties; - } - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::TilePropertiesQCOM - Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDynamicRenderingTilePropertiesQCOM && - "Function needs extension enabled!" ); - - VULKAN_HPP_NAMESPACE::TilePropertiesQCOM properties; - getDispatcher()->vkGetDynamicRenderingTilePropertiesQCOM( static_cast( m_device ), - reinterpret_cast( &renderingInfo ), - reinterpret_cast( &properties ) ); - - return properties; + return cppTypes; } -#endif } // namespace VULKAN_HPP_RAII_NAMESPACE } // namespace VULKAN_HPP_NAMESPACE #endif +#endif diff --git a/src/libraries/vulkanheaders/vulkan_screen.h b/src/libraries/vulkanheaders/vulkan_screen.h index f0ef40a6c..6c8bfd257 100644 --- a/src/libraries/vulkanheaders/vulkan_screen.h +++ b/src/libraries/vulkanheaders/vulkan_screen.h @@ -2,7 +2,7 @@ #define VULKAN_SCREEN_H_ 1 /* -** Copyright 2015-2022 The Khronos Group Inc. +** Copyright 2015-2025 The Khronos Group Inc. ** ** SPDX-License-Identifier: Apache-2.0 */ @@ -19,6 +19,7 @@ extern "C" { +// VK_QNX_screen_surface is a preprocessor guard. Do not pass it to API calls. #define VK_QNX_screen_surface 1 #define VK_QNX_SCREEN_SURFACE_SPEC_VERSION 1 #define VK_QNX_SCREEN_SURFACE_EXTENSION_NAME "VK_QNX_screen_surface" @@ -47,6 +48,59 @@ VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceScreenPresentationSupportQNX( struct _screen_window* window); #endif + +// VK_QNX_external_memory_screen_buffer is a preprocessor guard. Do not pass it to API calls. 
+#define VK_QNX_external_memory_screen_buffer 1 +#define VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_SPEC_VERSION 1 +#define VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_EXTENSION_NAME "VK_QNX_external_memory_screen_buffer" +typedef struct VkScreenBufferPropertiesQNX { + VkStructureType sType; + void* pNext; + VkDeviceSize allocationSize; + uint32_t memoryTypeBits; +} VkScreenBufferPropertiesQNX; + +typedef struct VkScreenBufferFormatPropertiesQNX { + VkStructureType sType; + void* pNext; + VkFormat format; + uint64_t externalFormat; + uint64_t screenUsage; + VkFormatFeatureFlags formatFeatures; + VkComponentMapping samplerYcbcrConversionComponents; + VkSamplerYcbcrModelConversion suggestedYcbcrModel; + VkSamplerYcbcrRange suggestedYcbcrRange; + VkChromaLocation suggestedXChromaOffset; + VkChromaLocation suggestedYChromaOffset; +} VkScreenBufferFormatPropertiesQNX; + +typedef struct VkImportScreenBufferInfoQNX { + VkStructureType sType; + const void* pNext; + struct _screen_buffer* buffer; +} VkImportScreenBufferInfoQNX; + +typedef struct VkExternalFormatQNX { + VkStructureType sType; + void* pNext; + uint64_t externalFormat; +} VkExternalFormatQNX; + +typedef struct VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX { + VkStructureType sType; + void* pNext; + VkBool32 screenBufferImport; +} VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; + +typedef VkResult (VKAPI_PTR *PFN_vkGetScreenBufferPropertiesQNX)(VkDevice device, const struct _screen_buffer* buffer, VkScreenBufferPropertiesQNX* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetScreenBufferPropertiesQNX( + VkDevice device, + const struct _screen_buffer* buffer, + VkScreenBufferPropertiesQNX* pProperties); +#endif + #ifdef __cplusplus } #endif diff --git a/src/libraries/vulkanheaders/vulkan_shared.hpp b/src/libraries/vulkanheaders/vulkan_shared.hpp new file mode 100644 index 000000000..ab37dd973 --- /dev/null +++ b/src/libraries/vulkanheaders/vulkan_shared.hpp @@ -0,0 +1,1159 @@ +// Copyright 2015-2025 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
+ +#ifndef VULKAN_SHARED_HPP +#define VULKAN_SHARED_HPP + +#include <vulkan/vulkan.hpp> + +#if !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) ) +# include <atomic> // std::atomic_size_t +#endif + +namespace VULKAN_HPP_NAMESPACE +{ +#if !defined( VULKAN_HPP_NO_SMART_HANDLE ) + + template <typename HandleType> + class SharedHandleTraits; + + class NoDestructor + { + }; + + template <typename HandleType, typename = void> + struct HasDestructorType : std::false_type + { + }; + + template <typename HandleType> + struct HasDestructorType<HandleType, decltype( (void)typename SharedHandleTraits<HandleType>::DestructorType() )> : std::true_type + { + }; + + template <typename HandleType, typename Enable = void> + struct GetDestructorType + { + using type = NoDestructor; + }; + + template <typename HandleType> + struct GetDestructorType<HandleType, typename std::enable_if<HasDestructorType<HandleType>::value>::type> + { + using type = typename SharedHandleTraits<HandleType>::DestructorType; + }; + + template <typename HandleType> + using DestructorTypeOf = typename GetDestructorType<HandleType>::type; + + template <typename HandleType> + struct HasDestructor : std::integral_constant<bool, !std::is_same<DestructorTypeOf<HandleType>, NoDestructor>::value> + { + }; + + template <typename HandleType, typename = void> + struct HasPoolType : std::false_type + { + }; + + template <typename HandleType> + struct HasPoolType<HandleType, decltype( (void)typename SharedHandleTraits<HandleType>::deleter::PoolTypeExport() )> : std::true_type + { + }; + + template <typename HandleType, typename Enable = void> + struct GetPoolType + { + using type = NoDestructor; + }; + + template <typename HandleType> + struct GetPoolType<HandleType, typename std::enable_if<HasPoolType<HandleType>::value>::type> + { + using type = typename SharedHandleTraits<HandleType>::deleter::PoolTypeExport; + }; + + //===================================================================================================================== + + template <typename HandleType> + class SharedHandle; + + template <typename ParentType, typename Deleter> + struct SharedHeader + { + SharedHeader( SharedHandle<ParentType> parent, Deleter deleter = Deleter() ) VULKAN_HPP_NOEXCEPT + : parent( std::move( parent ) ) + , deleter( std::move( deleter ) ) + { + } + + SharedHandle<ParentType> parent; + Deleter deleter; + }; + + template <typename Deleter> + struct SharedHeader<NoDestructor, Deleter> + { + SharedHeader( Deleter deleter = Deleter() ) VULKAN_HPP_NOEXCEPT : deleter( std::move( deleter ) ) {} + + Deleter deleter; + }; + + //===================================================================================================================== + + template <typename HeaderType> + class ReferenceCounter + { + public: + template <typename... Args> + ReferenceCounter( Args &&... control_args ) : m_header( std::forward<Args>( control_args )... ) + { + } + + ReferenceCounter( const ReferenceCounter & ) = delete; + ReferenceCounter & operator=( const ReferenceCounter & ) = delete; + + public: + size_t addRef() VULKAN_HPP_NOEXCEPT + { + // Relaxed memory order is sufficient since this does not impose any ordering on other operations + return m_ref_cnt.fetch_add( 1, std::memory_order_relaxed ); + } + + size_t release() VULKAN_HPP_NOEXCEPT + { + // A release memory order to ensure that all releases are ordered + return m_ref_cnt.fetch_sub( 1, std::memory_order_release ); + } + + public: + std::atomic_size_t m_ref_cnt{ 1 }; + HeaderType m_header{}; + }; + + //===================================================================================================================== + + template <typename HandleType, typename HeaderType, typename ForwardType = SharedHandleBase<HandleType, HeaderType>> + class SharedHandleBase + { + public: + SharedHandleBase() = default; + + template <typename... Args> + SharedHandleBase( HandleType handle, Args &&... control_args ) + : m_control( new ReferenceCounter<HeaderType>( std::forward<Args>( control_args )... 
) ), m_handle( handle ) + { + } + + SharedHandleBase( const SharedHandleBase & o ) VULKAN_HPP_NOEXCEPT + { + o.addRef(); + m_handle = o.m_handle; + m_control = o.m_control; + } + + SharedHandleBase( SharedHandleBase && o ) VULKAN_HPP_NOEXCEPT + : m_control( o.m_control ) + , m_handle( o.m_handle ) + { + o.m_handle = nullptr; + o.m_control = nullptr; + } + + SharedHandleBase & operator=( const SharedHandleBase & o ) VULKAN_HPP_NOEXCEPT + { + SharedHandleBase( o ).swap( *this ); + return *this; + } + + SharedHandleBase & operator=( SharedHandleBase && o ) VULKAN_HPP_NOEXCEPT + { + SharedHandleBase( std::move( o ) ).swap( *this ); + return *this; + } + + ~SharedHandleBase() + { + // only this function owns the last reference to the control block + // the same principle is used in the default deleter of std::shared_ptr + if ( m_control && ( m_control->release() == 1 ) ) + { + // noop in x86, but does thread synchronization in ARM + // it is required to ensure that last thread is getting to destroy the control block + // by ordering all atomic operations before this fence + std::atomic_thread_fence( std::memory_order_acquire ); + ForwardType::internalDestroy( getHeader(), m_handle ); + delete m_control; + } + } + + public: + HandleType get() const VULKAN_HPP_NOEXCEPT + { + return m_handle; + } + + HandleType operator*() const VULKAN_HPP_NOEXCEPT + { + return m_handle; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return bool( m_handle ); + } + +# if defined( VULKAN_HPP_SMART_HANDLE_IMPLICIT_CAST ) + operator HandleType() const VULKAN_HPP_NOEXCEPT + { + return m_handle; + } +# endif + + const HandleType * operator->() const VULKAN_HPP_NOEXCEPT + { + return &m_handle; + } + + HandleType * operator->() VULKAN_HPP_NOEXCEPT + { + return &m_handle; + } + + void reset() VULKAN_HPP_NOEXCEPT + { + SharedHandleBase().swap( *this ); + } + + void swap( SharedHandleBase & o ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_handle, o.m_handle ); + std::swap( m_control, o.m_control ); + } + + template + typename std::enable_if::value, const SharedHandle> &>::type getDestructorType() const VULKAN_HPP_NOEXCEPT + { + return getHeader().parent; + } + + protected: + template + static typename std::enable_if::value, void>::type internalDestroy( const HeaderType & control, HandleType handle ) VULKAN_HPP_NOEXCEPT + { + control.deleter.destroy( handle ); + } + + template + static typename std::enable_if::value, void>::type internalDestroy( const HeaderType & control, HandleType handle ) VULKAN_HPP_NOEXCEPT + { + control.deleter.destroy( control.parent.get(), handle ); + } + + const HeaderType & getHeader() const VULKAN_HPP_NOEXCEPT + { + return m_control->m_header; + } + + private: + void addRef() const VULKAN_HPP_NOEXCEPT + { + if ( m_control ) + m_control->addRef(); + } + + protected: + ReferenceCounter * m_control = nullptr; + HandleType m_handle{}; + }; + + template + class SharedHandle : public SharedHandleBase, typename SharedHandleTraits::deleter>> + { + private: + using BaseType = SharedHandleBase, typename SharedHandleTraits::deleter>>; + using DeleterType = typename SharedHandleTraits::deleter; + friend BaseType; + + public: + SharedHandle() = default; + + template ::value && !HasPoolType::value>::type> + explicit SharedHandle( HandleType handle, SharedHandle> parent, DeleterType deleter = DeleterType() ) VULKAN_HPP_NOEXCEPT + : BaseType( handle, std::move( parent ), std::move( deleter ) ) + { + } + + template ::value && HasPoolType::value>::type> + explicit SharedHandle( HandleType handle, + 
SharedHandle> parent, + SharedHandle::type> pool, + const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT + : BaseType( handle, std::move( parent ), DeleterType{ std::move( pool ), dispatch } ) + { + } + + template ::value>::type> + explicit SharedHandle( HandleType handle, DeleterType deleter = DeleterType() ) VULKAN_HPP_NOEXCEPT : BaseType( handle, std::move( deleter ) ) + { + } + + protected: + using BaseType::internalDestroy; + }; + + namespace detail + { + +// Silence the function cast warnings. +# if defined( __GNUC__ ) && !defined( __clang__ ) && !defined( __INTEL_COMPILER ) +# pragma GCC diagnostic push +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + + template + class ObjectDestroyShared + { + public: + using DestructorType = typename SharedHandleTraits::DestructorType; + + template + using DestroyFunctionPointerType = + typename std::conditional::value, + void ( DestructorType::* )( HandleType, const AllocationCallbacks *, const Dispatcher & ) const, + void ( HandleType::* )( const AllocationCallbacks *, const Dispatcher & ) const>::type; + + using SelectorType = typename std::conditional::value, DestructorType, HandleType>::type; + + template + ObjectDestroyShared( Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) + : m_destroy( reinterpret_cast( static_cast>( &SelectorType::destroy ) ) ) + , m_dispatch( &dispatch ) + , m_allocationCallbacks( allocationCallbacks ) + { + } + + public: + template + typename std::enable_if::value, void>::type destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); + ( parent.*m_destroy )( handle, m_allocationCallbacks, *m_dispatch ); + } + + template + typename std::enable_if::value, void>::type destroy( HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); + ( handle.*m_destroy )( m_allocationCallbacks, *m_dispatch ); + } + + private: + DestroyFunctionPointerType m_destroy = nullptr; + const detail::DispatchLoaderBase * m_dispatch = nullptr; + Optional m_allocationCallbacks = nullptr; + }; + + template + class ObjectFreeShared + { + public: + using DestructorType = typename SharedHandleTraits::DestructorType; + + template + using DestroyFunctionPointerType = void ( DestructorType::* )( HandleType, const AllocationCallbacks *, const Dispatcher & ) const; + + template + ObjectFreeShared( Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) + : m_destroy( reinterpret_cast( static_cast>( &DestructorType::free ) ) ) + , m_dispatch( &dispatch ) + , m_allocationCallbacks( allocationCallbacks ) + { + } + + public: + void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); + ( parent.*m_destroy )( handle, m_allocationCallbacks, *m_dispatch ); + } + + private: + DestroyFunctionPointerType m_destroy = nullptr; + const detail::DispatchLoaderBase * m_dispatch = nullptr; + Optional m_allocationCallbacks = nullptr; + }; + + template + class ObjectReleaseShared + { + public: + using DestructorType = typename SharedHandleTraits::DestructorType; + + template + using DestroyFunctionPointerType = void ( DestructorType::* )( HandleType, const Dispatcher & ) const; + + template + ObjectReleaseShared( const Dispatcher & dispatch 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) + : m_destroy( reinterpret_cast( static_cast>( &DestructorType::release ) ) ) + , m_dispatch( &dispatch ) + { + } + + public: + void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); + ( parent.*m_destroy )( handle, *m_dispatch ); + } + + private: + DestroyFunctionPointerType m_destroy = nullptr; + const detail::DispatchLoaderBase * m_dispatch = nullptr; + }; + + template + class PoolFreeShared + { + public: + using DestructorType = typename SharedHandleTraits::DestructorType; + + using PoolTypeExport = PoolType; + + template + using ReturnType = decltype( std::declval().free( PoolType(), 0u, nullptr, Dispatcher() ) ); + + template + using DestroyFunctionPointerType = ReturnType ( DestructorType::* )( PoolType, uint32_t, const HandleType *, const Dispatcher & ) const; + + PoolFreeShared() = default; + + template + PoolFreeShared( SharedHandle pool, const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) + : m_destroy( reinterpret_cast( static_cast>( &DestructorType::free ) ) ) + , m_dispatch( &dispatch ) + , m_pool( std::move( pool ) ) + { + } + + public: + void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch && m_pool ); + ( parent.*m_destroy )( m_pool.get(), 1u, &handle, *m_dispatch ); + } + + private: + DestroyFunctionPointerType m_destroy = nullptr; + const detail::DispatchLoaderBase * m_dispatch = nullptr; + SharedHandle m_pool{}; + }; + +# if defined( __GNUC__ ) && !defined( __clang__ ) && !defined( __INTEL_COMPILER ) +# pragma GCC diagnostic pop +# endif + + } // namespace detail + + //====================== + //=== SHARED HANDLEs === + //====================== + + //=== VK_VERSION_1_0 === + template <> + class SharedHandleTraits + { + public: + using DestructorType = NoDestructor; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedInstance = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = NoDestructor; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedDevice = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectFreeShared; + }; + + using SharedDeviceMemory = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedFence = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedSemaphore = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedEvent = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedQueryPool = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedBuffer = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedBufferView = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + 
using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<Image>; + }; + + using SharedImage = SharedHandle<Image>; + + template <> + class SharedHandleTraits<ImageView> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<ImageView>; + }; + + using SharedImageView = SharedHandle<ImageView>; + + template <> + class SharedHandleTraits<ShaderModule> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<ShaderModule>; + }; + + using SharedShaderModule = SharedHandle<ShaderModule>; + + template <> + class SharedHandleTraits<PipelineCache> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<PipelineCache>; + }; + + using SharedPipelineCache = SharedHandle<PipelineCache>; + + template <> + class SharedHandleTraits<Pipeline> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<Pipeline>; + }; + + using SharedPipeline = SharedHandle<Pipeline>; + + template <> + class SharedHandleTraits<PipelineLayout> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<PipelineLayout>; + }; + + using SharedPipelineLayout = SharedHandle<PipelineLayout>; + + template <> + class SharedHandleTraits<Sampler> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<Sampler>; + }; + + using SharedSampler = SharedHandle<Sampler>; + + template <> + class SharedHandleTraits<DescriptorPool> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<DescriptorPool>; + }; + + using SharedDescriptorPool = SharedHandle<DescriptorPool>; + + template <> + class SharedHandleTraits<DescriptorSet> + { + public: + using DestructorType = Device; + using deleter = detail::PoolFreeShared<DescriptorSet, DescriptorPool>; + }; + + using SharedDescriptorSet = SharedHandle<DescriptorSet>; + + template <> + class SharedHandleTraits<DescriptorSetLayout> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<DescriptorSetLayout>; + }; + + using SharedDescriptorSetLayout = SharedHandle<DescriptorSetLayout>; + + template <> + class SharedHandleTraits<Framebuffer> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<Framebuffer>; + }; + + using SharedFramebuffer = SharedHandle<Framebuffer>; + + template <> + class SharedHandleTraits<RenderPass> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<RenderPass>; + }; + + using SharedRenderPass = SharedHandle<RenderPass>; + + template <> + class SharedHandleTraits<CommandPool> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<CommandPool>; + }; + + using SharedCommandPool = SharedHandle<CommandPool>; + + template <> + class SharedHandleTraits<CommandBuffer> + { + public: + using DestructorType = Device; + using deleter = detail::PoolFreeShared<CommandBuffer, CommandPool>; + }; + + using SharedCommandBuffer = SharedHandle<CommandBuffer>; + + //=== VK_VERSION_1_1 === + template <> + class SharedHandleTraits<SamplerYcbcrConversion> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<SamplerYcbcrConversion>; + }; + + using SharedSamplerYcbcrConversion = SharedHandle<SamplerYcbcrConversion>; + using SharedSamplerYcbcrConversionKHR = SharedHandle<SamplerYcbcrConversion>; + + template <> + class SharedHandleTraits<DescriptorUpdateTemplate> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<DescriptorUpdateTemplate>; + }; + + using SharedDescriptorUpdateTemplate = SharedHandle<DescriptorUpdateTemplate>; + using SharedDescriptorUpdateTemplateKHR = SharedHandle<DescriptorUpdateTemplate>; + + //=== VK_VERSION_1_3 === + template <> + class SharedHandleTraits<PrivateDataSlot> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<PrivateDataSlot>; + }; + + using SharedPrivateDataSlot = SharedHandle<PrivateDataSlot>; + using SharedPrivateDataSlotEXT = SharedHandle<PrivateDataSlot>; + + //=== VK_KHR_surface === + template <> + class SharedHandleTraits<SurfaceKHR> + { + public: + using DestructorType = Instance; + using deleter = detail::ObjectDestroyShared<SurfaceKHR>; + }; + + using SharedSurfaceKHR = 
SharedHandle<SurfaceKHR>; + + //=== VK_KHR_swapchain === + template <> + class SharedHandleTraits<SwapchainKHR> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<SwapchainKHR>; + }; + + using SharedSwapchainKHR = SharedHandle<SwapchainKHR>; + + //=== VK_KHR_display === + template <> + class SharedHandleTraits<DisplayKHR> + { + public: + using DestructorType = PhysicalDevice; + using deleter = detail::ObjectDestroyShared<DisplayKHR>; + }; + + using SharedDisplayKHR = SharedHandle<DisplayKHR>; + + //=== VK_EXT_debug_report === + template <> + class SharedHandleTraits<DebugReportCallbackEXT> + { + public: + using DestructorType = Instance; + using deleter = detail::ObjectDestroyShared<DebugReportCallbackEXT>; + }; + + using SharedDebugReportCallbackEXT = SharedHandle<DebugReportCallbackEXT>; + + //=== VK_KHR_video_queue === + template <> + class SharedHandleTraits<VideoSessionKHR> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<VideoSessionKHR>; + }; + + using SharedVideoSessionKHR = SharedHandle<VideoSessionKHR>; + + template <> + class SharedHandleTraits<VideoSessionParametersKHR> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<VideoSessionParametersKHR>; + }; + + using SharedVideoSessionParametersKHR = SharedHandle<VideoSessionParametersKHR>; + + //=== VK_NVX_binary_import === + template <> + class SharedHandleTraits<CuModuleNVX> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<CuModuleNVX>; + }; + + using SharedCuModuleNVX = SharedHandle<CuModuleNVX>; + + template <> + class SharedHandleTraits<CuFunctionNVX> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<CuFunctionNVX>; + }; + + using SharedCuFunctionNVX = SharedHandle<CuFunctionNVX>; + + //=== VK_EXT_debug_utils === + template <> + class SharedHandleTraits<DebugUtilsMessengerEXT> + { + public: + using DestructorType = Instance; + using deleter = detail::ObjectDestroyShared<DebugUtilsMessengerEXT>; + }; + + using SharedDebugUtilsMessengerEXT = SharedHandle<DebugUtilsMessengerEXT>; + + //=== VK_KHR_acceleration_structure === + template <> + class SharedHandleTraits<AccelerationStructureKHR> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<AccelerationStructureKHR>; + }; + + using SharedAccelerationStructureKHR = SharedHandle<AccelerationStructureKHR>; + + //=== VK_EXT_validation_cache === + template <> + class SharedHandleTraits<ValidationCacheEXT> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<ValidationCacheEXT>; + }; + + using SharedValidationCacheEXT = SharedHandle<ValidationCacheEXT>; + + //=== VK_NV_ray_tracing === + template <> + class SharedHandleTraits<AccelerationStructureNV> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<AccelerationStructureNV>; + }; + + using SharedAccelerationStructureNV = SharedHandle<AccelerationStructureNV>; + + //=== VK_INTEL_performance_query === + template <> + class SharedHandleTraits<PerformanceConfigurationINTEL> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<PerformanceConfigurationINTEL>; + }; + + using SharedPerformanceConfigurationINTEL = SharedHandle<PerformanceConfigurationINTEL>; + + //=== VK_KHR_deferred_host_operations === + template <> + class SharedHandleTraits<DeferredOperationKHR> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<DeferredOperationKHR>; + }; + + using SharedDeferredOperationKHR = SharedHandle<DeferredOperationKHR>; + + //=== VK_NV_device_generated_commands === + template <> + class SharedHandleTraits<IndirectCommandsLayoutNV> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<IndirectCommandsLayoutNV>; + }; + + using SharedIndirectCommandsLayoutNV = SharedHandle<IndirectCommandsLayoutNV>; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + template <> + class SharedHandleTraits<CudaModuleNV> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<CudaModuleNV>; + }; + + using SharedCudaModuleNV = SharedHandle<CudaModuleNV>; + + template <> + class SharedHandleTraits<CudaFunctionNV> + { + public: + using DestructorType = Device; + using deleter = 
detail::ObjectDestroyShared<CudaFunctionNV>; + }; + + using SharedCudaFunctionNV = SharedHandle<CudaFunctionNV>; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + template <> + class SharedHandleTraits<BufferCollectionFUCHSIA> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<BufferCollectionFUCHSIA>; + }; + + using SharedBufferCollectionFUCHSIA = SharedHandle<BufferCollectionFUCHSIA>; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + template <> + class SharedHandleTraits<MicromapEXT> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<MicromapEXT>; + }; + + using SharedMicromapEXT = SharedHandle<MicromapEXT>; + + //=== VK_NV_optical_flow === + template <> + class SharedHandleTraits<OpticalFlowSessionNV> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<OpticalFlowSessionNV>; + }; + + using SharedOpticalFlowSessionNV = SharedHandle<OpticalFlowSessionNV>; + + //=== VK_EXT_shader_object === + template <> + class SharedHandleTraits<ShaderEXT> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<ShaderEXT>; + }; + + using SharedShaderEXT = SharedHandle<ShaderEXT>; + + //=== VK_KHR_pipeline_binary === + template <> + class SharedHandleTraits<PipelineBinaryKHR> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<PipelineBinaryKHR>; + }; + + using SharedPipelineBinaryKHR = SharedHandle<PipelineBinaryKHR>; + + //=== VK_EXT_device_generated_commands === + template <> + class SharedHandleTraits<IndirectCommandsLayoutEXT> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<IndirectCommandsLayoutEXT>; + }; + + using SharedIndirectCommandsLayoutEXT = SharedHandle<IndirectCommandsLayoutEXT>; + + template <> + class SharedHandleTraits<IndirectExecutionSetEXT> + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared<IndirectExecutionSetEXT>; + }; + + using SharedIndirectExecutionSetEXT = SharedHandle<IndirectExecutionSetEXT>; + + enum class SwapchainOwns + { + no, + yes, + }; + + struct ImageHeader : SharedHeader<DestructorTypeOf<VULKAN_HPP_NAMESPACE::Image>, typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::Image>::deleter> + { + ImageHeader( + SharedHandle<DestructorTypeOf<VULKAN_HPP_NAMESPACE::Image>> parent, + typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::Image>::deleter deleter = typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::Image>::deleter(), + SwapchainOwns swapchainOwned = SwapchainOwns::no ) VULKAN_HPP_NOEXCEPT + : SharedHeader<DestructorTypeOf<VULKAN_HPP_NAMESPACE::Image>, typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::Image>::deleter>( std::move( parent ), + std::move( deleter ) ) + , swapchainOwned( swapchainOwned ) + { + } + + SwapchainOwns swapchainOwned = SwapchainOwns::no; + }; + + template <> + class SharedHandle<VULKAN_HPP_NAMESPACE::Image> : public SharedHandleBase<VULKAN_HPP_NAMESPACE::Image, ImageHeader> + { + using BaseType = SharedHandleBase<VULKAN_HPP_NAMESPACE::Image, ImageHeader>; + using DeleterType = typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::Image>::deleter; + friend BaseType; + + public: + SharedHandle() = default; + + explicit SharedHandle( VULKAN_HPP_NAMESPACE::Image handle, + SharedHandle<DestructorTypeOf<VULKAN_HPP_NAMESPACE::Image>> parent, + SwapchainOwns swapchain_owned = SwapchainOwns::no, + DeleterType deleter = DeleterType() ) VULKAN_HPP_NOEXCEPT + : BaseType( handle, std::move( parent ), std::move( deleter ), swapchain_owned ) + { + } + + protected: + static void internalDestroy( const ImageHeader & control, VULKAN_HPP_NAMESPACE::Image handle ) VULKAN_HPP_NOEXCEPT + { + if ( control.swapchainOwned == SwapchainOwns::no ) + { + control.deleter.destroy( control.parent.get(), handle ); + } + } + }; + + struct SwapchainHeader + { + SwapchainHeader( SharedHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR> surface, + SharedHandle<DestructorTypeOf<VULKAN_HPP_NAMESPACE::SwapchainKHR>> parent, + typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::SwapchainKHR>::deleter deleter = + typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::SwapchainKHR>::deleter() ) VULKAN_HPP_NOEXCEPT + : surface( std::move( surface ) ) + , parent( std::move( parent ) ) + , deleter( std::move( deleter ) ) + { + } + + SharedHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR> surface{}; + SharedHandle<DestructorTypeOf<VULKAN_HPP_NAMESPACE::SwapchainKHR>> parent{}; + typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::SwapchainKHR>::deleter deleter{}; + }; + + template <> + class 
SharedHandle : public SharedHandleBase + { + using BaseType = SharedHandleBase; + using DeleterType = typename SharedHandleTraits::deleter; + friend BaseType; + + public: + SharedHandle() = default; + + explicit SharedHandle( VULKAN_HPP_NAMESPACE::SwapchainKHR handle, + SharedHandle> parent, + SharedHandle surface, + DeleterType deleter = DeleterType() ) VULKAN_HPP_NOEXCEPT + : BaseType( handle, std::move( surface ), std::move( parent ), std::move( deleter ) ) + { + } + + public: + const SharedHandle & getSurface() const VULKAN_HPP_NOEXCEPT + { + return getHeader().surface; + } + + protected: + using BaseType::internalDestroy; + }; + + template + class SharedHandleBaseNoDestroy : public SharedHandleBase + { + public: + using SharedHandleBase::SharedHandleBase; + + const DestructorType & getDestructorType() const VULKAN_HPP_NOEXCEPT + { + return SharedHandleBase::getHeader(); + } + + protected: + static void internalDestroy( const DestructorType &, HandleType ) VULKAN_HPP_NOEXCEPT {} + }; + + //=== VK_VERSION_1_0 === + + template <> + class SharedHandle : public SharedHandleBaseNoDestroy + { + friend SharedHandleBase; + + public: + SharedHandle() = default; + + explicit SharedHandle( PhysicalDevice handle, SharedInstance parent ) noexcept + : SharedHandleBaseNoDestroy( handle, std::move( parent ) ) + { + } + }; + + using SharedPhysicalDevice = SharedHandle; + + template <> + class SharedHandle : public SharedHandleBaseNoDestroy + { + friend SharedHandleBase; + + public: + SharedHandle() = default; + + explicit SharedHandle( Queue handle, SharedDevice parent ) noexcept : SharedHandleBaseNoDestroy( handle, std::move( parent ) ) {} + }; + + using SharedQueue = SharedHandle; + + //=== VK_KHR_display === + + template <> + class SharedHandle : public SharedHandleBaseNoDestroy + { + friend SharedHandleBase; + + public: + SharedHandle() = default; + + explicit SharedHandle( DisplayModeKHR handle, SharedDisplayKHR parent ) noexcept + : SharedHandleBaseNoDestroy( handle, std::move( parent ) ) + { + } + }; + + using SharedDisplayModeKHR = SharedHandle; +#endif // !VULKAN_HPP_NO_SMART_HANDLE +} // namespace VULKAN_HPP_NAMESPACE +#endif // VULKAN_SHARED_HPP diff --git a/src/libraries/vulkanheaders/vulkan_static_assertions.hpp b/src/libraries/vulkanheaders/vulkan_static_assertions.hpp index ebcb5a699..88c12a997 100644 --- a/src/libraries/vulkanheaders/vulkan_static_assertions.hpp +++ b/src/libraries/vulkanheaders/vulkan_static_assertions.hpp @@ -1,12 +1,12 @@ -// Copyright 2015-2022 The Khronos Group Inc. +// Copyright 2015-2025 The Khronos Group Inc. // // SPDX-License-Identifier: Apache-2.0 OR MIT // // This header is generated from the Khronos Vulkan XML API Registry. -#ifndef VULKAN_STRUCTS_HPP -#define VULKAN_STRUCTS_HPP +#ifndef VULKAN_STATIC_ASSERTIONS_HPP +#define VULKAN_STATIC_ASSERTIONS_HPP #include @@ -105,6 +105,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Instance is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Instance is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" ); @@ -121,6 +122,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "MemoryType is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "PhysicalDevice is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "PhysicalDevice is not nothrow_move_constructible!" ); @@ -160,6 +162,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Device is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Device is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" ); @@ -184,6 +187,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Queue is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Queue is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" ); @@ -201,6 +205,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DeviceMemory is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DeviceMemory is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" ); @@ -260,6 +265,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Fence is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Fence is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" ); @@ -268,6 +274,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Semaphore is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Semaphore is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" ); @@ -276,6 +283,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Event is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Event is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" ); @@ -284,6 +292,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "QueryPool is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "QueryPool is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" ); @@ -292,6 +301,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Buffer is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Buffer is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" 
); @@ -300,6 +310,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "BufferView is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "BufferView is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" ); @@ -308,6 +319,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Image is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Image is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" ); @@ -332,6 +344,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ImageView is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ImageView is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" ); @@ -340,6 +353,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ShaderModule is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ShaderModule is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), @@ -349,6 +363,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "PipelineCache is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "PipelineCache is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), @@ -370,6 +385,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Pipeline is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Pipeline is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), @@ -477,6 +493,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "Viewport is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "PipelineLayout is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "PipelineLayout is not nothrow_move_constructible!" ); @@ -492,6 +509,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Sampler is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Sampler is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" ); @@ -515,6 +533,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DescriptorPool is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DescriptorPool is not nothrow_move_constructible!" 
); @@ -530,6 +549,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DescriptorSet is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DescriptorSet is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), @@ -539,6 +559,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DescriptorSetLayout is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DescriptorSetLayout is not nothrow_move_constructible!" ); @@ -571,6 +592,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Framebuffer is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Framebuffer is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), @@ -580,6 +602,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "RenderPass is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "RenderPass is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" ); @@ -598,6 +621,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "CommandPool is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "CommandPool is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), @@ -607,6 +631,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "CommandBuffer is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "CommandBuffer is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), @@ -983,11 +1008,15 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SamplerYcbcrConversion is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "SamplerYcbcrConversion is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, + "DescriptorUpdateTemplate is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DescriptorUpdateTemplate is not nothrow_move_constructible!" ); @@ -1516,6 +1545,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "PrivateDataSlot is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "PrivateDataSlot is not nothrow_move_constructible!" ); @@ -1781,9 +1811,337 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "DeviceImageMemoryRequirements is not nothrow_move_constructible!" ); +//=== VK_VERSION_1_4 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Features ) == sizeof( VkPhysicalDeviceVulkan14Features ), + "struct and wrapper have different size!" 
 );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Features>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Features>::value,
+                          "PhysicalDeviceVulkan14Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Properties ) == sizeof( VkPhysicalDeviceVulkan14Properties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Properties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Properties>::value,
+                          "PhysicalDeviceVulkan14Properties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfo ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfo>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfo>::value,
+                          "DeviceQueueGlobalPriorityCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeatures ) == sizeof( VkPhysicalDeviceGlobalPriorityQueryFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeatures>::value,
+                          "PhysicalDeviceGlobalPriorityQueryFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityProperties ) == sizeof( VkQueueFamilyGlobalPriorityProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityProperties>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityProperties>::value,
+                          "QueueFamilyGlobalPriorityProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupRotateFeatures ) == sizeof( VkPhysicalDeviceShaderSubgroupRotateFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupRotateFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupRotateFeatures>::value,
+                          "PhysicalDeviceShaderSubgroupRotateFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloatControls2Features ) == sizeof( VkPhysicalDeviceShaderFloatControls2Features ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloatControls2Features>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloatControls2Features>::value,
+                          "PhysicalDeviceShaderFloatControls2Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderExpectAssumeFeatures ) == sizeof( VkPhysicalDeviceShaderExpectAssumeFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderExpectAssumeFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderExpectAssumeFeatures>::value,
+                          "PhysicalDeviceShaderExpectAssumeFeatures is not nothrow_move_constructible!" );
+
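// Illustrative sketch, not part of the generated header or of this patch: every wrapper type above
// gets the same three compile-time guarantees -- identical size to the C struct, standard layout,
// and a non-throwing move constructor. The stand-in types below are hypothetical and only mirror
// that pattern; the real checks use VULKAN_HPP_NAMESPACE types and VULKAN_HPP_STATIC_ASSERT.
#include <type_traits>

extern "C" {
typedef struct VkFakeExtent2D  // stand-in for a C API struct such as VkExtent2D
{
  unsigned int width;
  unsigned int height;
} VkFakeExtent2D;
}

namespace wrapper
{
  class FakeExtent2D  // stand-in for the corresponding C++ wrapper type
  {
  public:
    unsigned int width  = 0;
    unsigned int height = 0;
  };
}  // namespace wrapper

// the same trio of guarantees that the assertions above enforce for every wrapper type:
static_assert( sizeof( wrapper::FakeExtent2D ) == sizeof( VkFakeExtent2D ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<wrapper::FakeExtent2D>::value, "struct wrapper is not a standard layout!" );
static_assert( std::is_nothrow_move_constructible<wrapper::FakeExtent2D>::value, "FakeExtent2D is not nothrow_move_constructible!" );

int main() { return 0; }  // nothing to do at run time; all checks are evaluated at compile time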
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeatures ) == sizeof( VkPhysicalDeviceLineRasterizationFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeatures>::value,
+                          "PhysicalDeviceLineRasterizationFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationProperties ) == sizeof( VkPhysicalDeviceLineRasterizationProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationProperties>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationProperties>::value,
+                          "PhysicalDeviceLineRasterizationProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfo ) == sizeof( VkPipelineRasterizationLineStateCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfo>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfo>::value,
+                          "PipelineRasterizationLineStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorProperties ) ==
+                            sizeof( VkPhysicalDeviceVertexAttributeDivisorProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorProperties>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorProperties>::value,
+                          "PhysicalDeviceVertexAttributeDivisorProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription ) == sizeof( VkVertexInputBindingDivisorDescription ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription>::value,
+                          "VertexInputBindingDivisorDescription is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfo ) == sizeof( VkPipelineVertexInputDivisorStateCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfo>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfo>::value,
+                          "PipelineVertexInputDivisorStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeatures ) ==
+                            sizeof( VkPhysicalDeviceVertexAttributeDivisorFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeatures>::value,
+                          "PhysicalDeviceVertexAttributeDivisorFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8Features ) == sizeof( VkPhysicalDeviceIndexTypeUint8Features ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8Features>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8Features>::value,
+                          "PhysicalDeviceIndexTypeUint8Features is not nothrow_move_constructible!" );
+
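// Illustrative sketch, not part of this patch: the size/layout guarantees asserted above are what
// allow a C++ wrapper object to be handed to a C-style entry point. The types and the entry point
// below are hypothetical stand-ins (not the real Vulkan API); they only demonstrate the interop
// pattern that the generated assertions protect.
#include <cstring>

extern "C" {
typedef struct VkFakeMapInfo  // stand-in for a C API info struct
{
  void *             pNext;
  unsigned long long offset;
  unsigned long long size;
} VkFakeMapInfo;

inline int vkFakeMap( const VkFakeMapInfo * pInfo )  // stand-in C entry point
{
  return ( pInfo && pInfo->size != 0 ) ? 0 : -1;
}
}

struct FakeMapInfo  // stand-in C++ wrapper with the same member layout
{
  void *             pNext  = nullptr;
  unsigned long long offset = 0;
  unsigned long long size   = 0;
};
static_assert( sizeof( FakeMapInfo ) == sizeof( VkFakeMapInfo ), "struct and wrapper have different size!" );

inline int mapWithWrapper( const FakeMapInfo & info )
{
  VkFakeMapInfo c = {};
  std::memcpy( &c, &info, sizeof( c ) );  // well-defined round trip, enabled by the size check above
  return vkFakeMap( &c );
}

int main()
{
  FakeMapInfo info;
  info.size = 256;
  return mapWithWrapper( info );  // returns 0
}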
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryMapInfo ) == sizeof( VkMemoryMapInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryMapInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryMapInfo>::value, "MemoryMapInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryUnmapInfo ) == sizeof( VkMemoryUnmapInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryUnmapInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryUnmapInfo>::value,
+                          "MemoryUnmapInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5Features ) == sizeof( VkPhysicalDeviceMaintenance5Features ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5Features>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5Features>::value,
+                          "PhysicalDeviceMaintenance5Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5Properties ) == sizeof( VkPhysicalDeviceMaintenance5Properties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5Properties>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5Properties>::value,
+                          "PhysicalDeviceMaintenance5Properties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingAreaInfo ) == sizeof( VkRenderingAreaInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingAreaInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingAreaInfo>::value,
+                          "RenderingAreaInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo ) == sizeof( VkDeviceImageSubresourceInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo>::value,
+                          "DeviceImageSubresourceInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource2 ) == sizeof( VkImageSubresource2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresource2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresource2>::value,
+                          "ImageSubresource2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout2 ) == sizeof( VkSubresourceLayout2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubresourceLayout2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubresourceLayout2>::value,
+                          "SubresourceLayout2 is not nothrow_move_constructible!" );
+
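// Illustrative sketch, not part of this patch: the nothrow-move checks above matter because wrapper
// structs are routinely stored in containers; a noexcept move constructor lets std::vector relocate
// elements on reallocation instead of falling back to copies. The type below is a hypothetical
// stand-in, not a real VULKAN_HPP_NAMESPACE type.
#include <type_traits>
#include <vector>

struct FakeCreateInfo  // stand-in for any *Info wrapper struct
{
  const void * pNext = nullptr;
  unsigned int flags = 0;
};
static_assert( std::is_nothrow_move_constructible<FakeCreateInfo>::value, "FakeCreateInfo is not nothrow_move_constructible!" );

int main()
{
  std::vector<FakeCreateInfo> infos;
  infos.reserve( 1 );
  infos.push_back( FakeCreateInfo{} );
  infos.push_back( FakeCreateInfo{} );  // reallocation here takes the noexcept move path
  return static_cast<int>( infos.size() ) - 2;  // returns 0
}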
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfo ) == sizeof( VkPipelineCreateFlags2CreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfo>::value,
+                          "PipelineCreateFlags2CreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfo ) == sizeof( VkBufferUsageFlags2CreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfo>::value,
+                          "BufferUsageFlags2CreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorProperties ) == sizeof( VkPhysicalDevicePushDescriptorProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorProperties>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorProperties>::value,
+                          "PhysicalDevicePushDescriptorProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingLocalReadFeatures ) ==
+                            sizeof( VkPhysicalDeviceDynamicRenderingLocalReadFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingLocalReadFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingLocalReadFeatures>::value,
+                          "PhysicalDeviceDynamicRenderingLocalReadFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo ) == sizeof( VkRenderingAttachmentLocationInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo>::value,
+                          "RenderingAttachmentLocationInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo ) == sizeof( VkRenderingInputAttachmentIndexInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo>::value,
+                          "RenderingInputAttachmentIndexInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6Features ) == sizeof( VkPhysicalDeviceMaintenance6Features ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6Features>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6Features>::value,
+                          "PhysicalDeviceMaintenance6Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6Properties ) == sizeof( VkPhysicalDeviceMaintenance6Properties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6Properties>::value,
+                          "struct wrapper is not a standard layout!"
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance6Properties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindMemoryStatus ) == sizeof( VkBindMemoryStatus ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindMemoryStatus is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo ) == sizeof( VkBindDescriptorSetsInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindDescriptorSetsInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushConstantsInfo ) == sizeof( VkPushConstantsInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PushConstantsInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo ) == sizeof( VkPushDescriptorSetInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PushDescriptorSetInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo ) == sizeof( VkPushDescriptorSetWithTemplateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PushDescriptorSetWithTemplateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeatures ) == + sizeof( VkPhysicalDevicePipelineProtectedAccessFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePipelineProtectedAccessFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeatures ) == sizeof( VkPhysicalDevicePipelineRobustnessFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePipelineRobustnessFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessProperties ) == sizeof( VkPhysicalDevicePipelineRobustnessProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePipelineRobustnessProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfo ) == sizeof( VkPipelineRobustnessCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineRobustnessCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeatures ) == sizeof( VkPhysicalDeviceHostImageCopyFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceHostImageCopyFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyProperties ) == sizeof( VkPhysicalDeviceHostImageCopyProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceHostImageCopyProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryToImageCopy ) == sizeof( VkMemoryToImageCopy ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryToImageCopy is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageToMemoryCopy ) == sizeof( VkImageToMemoryCopy ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageToMemoryCopy is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo ) == sizeof( VkCopyMemoryToImageInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyMemoryToImageInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo ) == sizeof( VkCopyImageToMemoryInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyImageToMemoryInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToImageInfo ) == sizeof( VkCopyImageToImageInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyImageToImageInfo is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo ) == sizeof( VkHostImageLayoutTransitionInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "HostImageLayoutTransitionInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySize ) == sizeof( VkSubresourceHostMemcpySize ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubresourceHostMemcpySize is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQuery ) == sizeof( VkHostImageCopyDevicePerformanceQuery ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "HostImageCopyDevicePerformanceQuery is not nothrow_move_constructible!" ); + //=== VK_KHR_surface === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "SurfaceKHR is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "SurfaceKHR is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), @@ -1806,6 +2164,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "SwapchainKHR is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "SwapchainKHR is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" ); @@ -1852,6 +2211,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DisplayKHR is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DisplayKHR is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), @@ -1861,6 +2221,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DisplayModeKHR is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DisplayModeKHR is not nothrow_move_constructible!" ); @@ -1961,6 +2322,8 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DebugReportCallbackEXT is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DebugReportCallbackEXT is not nothrow_move_constructible!" ); @@ -2000,15 +2363,17 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "DebugMarkerMarkerInfoEXT is not nothrow_move_constructible!" ); -#if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_queue === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionKHR ) == sizeof( VkVideoSessionKHR ), "handle and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "VideoSessionKHR is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "VideoSessionKHR is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR ) == sizeof( VkVideoSessionParametersKHR ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, + "VideoSessionParametersKHR is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "VideoSessionParametersKHR is not nothrow_move_constructible!" ); @@ -2114,9 +2479,7 @@ VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKH VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "VideoCodingControlInfoKHR is not nothrow_move_constructible!" ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_decode_queue === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR ) == sizeof( VkVideoDecodeCapabilitiesKHR ), @@ -2135,7 +2498,6 @@ VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR ) == VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "VideoDecodeInfoKHR is not nothrow_move_constructible!" ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ //=== VK_NV_dedicated_allocation === @@ -2188,9 +2550,11 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "CuModuleNVX is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "CuModuleNVX is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuFunctionNVX ) == sizeof( VkCuFunctionNVX ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "CuFunctionNVX is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "CuFunctionNVX is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX ) == sizeof( VkCuModuleCreateInfoNVX ), @@ -2199,6 +2563,13 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "CuModuleCreateInfoNVX is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuModuleTexturingModeCreateInfoNVX ) == sizeof( VkCuModuleTexturingModeCreateInfoNVX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CuModuleTexturingModeCreateInfoNVX is not nothrow_move_constructible!" ); + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX ) == sizeof( VkCuFunctionCreateInfoNVX ), "struct and wrapper have different size!" ); VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); @@ -2224,231 +2595,261 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "ImageViewAddressPropertiesNVX is not nothrow_move_constructible!" 
); -#if defined( VK_ENABLE_BETA_EXTENSIONS ) -//=== VK_EXT_video_encode_h264 === +//=== VK_KHR_video_encode_h264 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesKHR ) == sizeof( VkVideoEncodeH264CapabilitiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264CapabilitiesKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT ) == sizeof( VkVideoEncodeH264CapabilitiesEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264QualityLevelPropertiesKHR ) == sizeof( VkVideoEncodeH264QualityLevelPropertiesKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH264CapabilitiesEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264QualityLevelPropertiesKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT ) == - sizeof( VkVideoEncodeH264SessionParametersCreateInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoKHR ) == sizeof( VkVideoEncodeH264SessionCreateInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH264SessionParametersCreateInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264SessionCreateInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT ) == sizeof( VkVideoEncodeH264SessionParametersAddInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoKHR ) == + sizeof( VkVideoEncodeH264SessionParametersCreateInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH264SessionParametersAddInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264SessionParametersCreateInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264VclFrameInfoEXT ) == sizeof( VkVideoEncodeH264VclFrameInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR ) == sizeof( VkVideoEncodeH264SessionParametersAddInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH264VclFrameInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264SessionParametersAddInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT ) == sizeof( VkVideoEncodeH264ReferenceListsInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersGetInfoKHR ) == sizeof( VkVideoEncodeH264SessionParametersGetInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH264ReferenceListsInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264SessionParametersGetInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264EmitPictureParametersInfoEXT ) == - sizeof( VkVideoEncodeH264EmitPictureParametersInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersFeedbackInfoKHR ) == + sizeof( VkVideoEncodeH264SessionParametersFeedbackInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH264EmitPictureParametersInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264SessionParametersFeedbackInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264PictureInfoKHR ) == sizeof( VkVideoEncodeH264PictureInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264PictureInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT ) == sizeof( VkVideoEncodeH264DpbSlotInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoKHR ) == sizeof( VkVideoEncodeH264DpbSlotInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH264DpbSlotInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264DpbSlotInfoKHR is not nothrow_move_constructible!" 
); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT ) == sizeof( VkVideoEncodeH264NaluSliceInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR ) == sizeof( VkVideoEncodeH264NaluSliceInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH264NaluSliceInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264NaluSliceInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoEXT ) == sizeof( VkVideoEncodeH264ProfileInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoKHR ) == sizeof( VkVideoEncodeH264ProfileInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH264ProfileInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264ProfileInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT ) == sizeof( VkVideoEncodeH264RateControlInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoKHR ) == sizeof( VkVideoEncodeH264RateControlInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH264RateControlInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264RateControlInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT ) == sizeof( VkVideoEncodeH264RateControlLayerInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoKHR ) == sizeof( VkVideoEncodeH264RateControlLayerInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH264RateControlLayerInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264RateControlLayerInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT ) == sizeof( VkVideoEncodeH264QpEXT ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH264QpEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR ) == sizeof( VkVideoEncodeH264QpKHR ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264QpKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT ) == sizeof( VkVideoEncodeH264FrameSizeEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR ) == sizeof( VkVideoEncodeH264FrameSizeKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH264FrameSizeEXT is not nothrow_move_constructible!" ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264FrameSizeKHR is not nothrow_move_constructible!" ); -#if defined( VK_ENABLE_BETA_EXTENSIONS ) -//=== VK_EXT_video_encode_h265 === +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264GopRemainingFrameInfoKHR ) == sizeof( VkVideoEncodeH264GopRemainingFrameInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264GopRemainingFrameInfoKHR is not nothrow_move_constructible!" ); + +//=== VK_KHR_video_encode_h265 === -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT ) == sizeof( VkVideoEncodeH265CapabilitiesEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesKHR ) == sizeof( VkVideoEncodeH265CapabilitiesKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH265CapabilitiesEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265CapabilitiesKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT ) == - sizeof( VkVideoEncodeH265SessionParametersCreateInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoKHR ) == sizeof( VkVideoEncodeH265SessionCreateInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH265SessionParametersCreateInfoEXT is not nothrow_move_constructible!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265SessionCreateInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT ) == sizeof( VkVideoEncodeH265SessionParametersAddInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265QualityLevelPropertiesKHR ) == sizeof( VkVideoEncodeH265QualityLevelPropertiesKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH265SessionParametersAddInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265QualityLevelPropertiesKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265VclFrameInfoEXT ) == sizeof( VkVideoEncodeH265VclFrameInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoKHR ) == + sizeof( VkVideoEncodeH265SessionParametersCreateInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH265VclFrameInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265SessionParametersCreateInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265EmitPictureParametersInfoEXT ) == - sizeof( VkVideoEncodeH265EmitPictureParametersInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR ) == sizeof( VkVideoEncodeH265SessionParametersAddInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH265EmitPictureParametersInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265SessionParametersAddInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT ) == sizeof( VkVideoEncodeH265DpbSlotInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersGetInfoKHR ) == sizeof( VkVideoEncodeH265SessionParametersGetInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH265DpbSlotInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265SessionParametersGetInfoKHR is not nothrow_move_constructible!" 
); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT ) == sizeof( VkVideoEncodeH265NaluSliceSegmentInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersFeedbackInfoKHR ) == + sizeof( VkVideoEncodeH265SessionParametersFeedbackInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH265NaluSliceSegmentInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265SessionParametersFeedbackInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoEXT ) == sizeof( VkVideoEncodeH265ProfileInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265PictureInfoKHR ) == sizeof( VkVideoEncodeH265PictureInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH265ProfileInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265PictureInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT ) == sizeof( VkVideoEncodeH265ReferenceListsInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoKHR ) == sizeof( VkVideoEncodeH265DpbSlotInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265DpbSlotInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR ) == sizeof( VkVideoEncodeH265NaluSliceSegmentInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH265ReferenceListsInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265NaluSliceSegmentInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoKHR ) == sizeof( VkVideoEncodeH265ProfileInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265ProfileInfoKHR is not nothrow_move_constructible!" 
); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT ) == sizeof( VkVideoEncodeH265RateControlInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoKHR ) == sizeof( VkVideoEncodeH265RateControlInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH265RateControlInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265RateControlInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT ) == sizeof( VkVideoEncodeH265RateControlLayerInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoKHR ) == sizeof( VkVideoEncodeH265RateControlLayerInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH265RateControlLayerInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265RateControlLayerInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT ) == sizeof( VkVideoEncodeH265QpEXT ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH265QpEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR ) == sizeof( VkVideoEncodeH265QpKHR ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265QpKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT ) == sizeof( VkVideoEncodeH265FrameSizeEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR ) == sizeof( VkVideoEncodeH265FrameSizeKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoEncodeH265FrameSizeEXT is not nothrow_move_constructible!" ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#if defined( VK_ENABLE_BETA_EXTENSIONS ) -//=== VK_EXT_video_decode_h264 === +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265FrameSizeKHR is not nothrow_move_constructible!" 
); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoEXT ) == sizeof( VkVideoDecodeH264ProfileInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265GopRemainingFrameInfoKHR ) == sizeof( VkVideoEncodeH265GopRemainingFrameInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoDecodeH264ProfileInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265GopRemainingFrameInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesEXT ) == sizeof( VkVideoDecodeH264CapabilitiesEXT ), +//=== VK_KHR_video_decode_h264 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR ) == sizeof( VkVideoDecodeH264ProfileInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoDecodeH264CapabilitiesEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH264ProfileInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoEXT ) == - sizeof( VkVideoDecodeH264SessionParametersCreateInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesKHR ) == sizeof( VkVideoDecodeH264CapabilitiesKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoDecodeH264SessionParametersCreateInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH264CapabilitiesKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT ) == sizeof( VkVideoDecodeH264SessionParametersAddInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR ) == + sizeof( VkVideoDecodeH264SessionParametersCreateInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoDecodeH264SessionParametersAddInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH264SessionParametersCreateInfoKHR is not nothrow_move_constructible!" 
); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoEXT ) == sizeof( VkVideoDecodeH264PictureInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR ) == sizeof( VkVideoDecodeH264SessionParametersAddInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoDecodeH264PictureInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH264SessionParametersAddInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264MvcInfoEXT ) == sizeof( VkVideoDecodeH264MvcInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoKHR ) == sizeof( VkVideoDecodeH264PictureInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoDecodeH264MvcInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH264PictureInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoEXT ) == sizeof( VkVideoDecodeH264DpbSlotInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR ) == sizeof( VkVideoDecodeH264DpbSlotInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoDecodeH264DpbSlotInfoEXT is not nothrow_move_constructible!" ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH264DpbSlotInfoKHR is not nothrow_move_constructible!" ); //=== VK_AMD_texture_gather_bias_lod === @@ -2473,36 +2874,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "ShaderStatisticsInfoAMD is not nothrow_move_constructible!" ); -//=== VK_KHR_dynamic_rendering === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR ) == - sizeof( VkRenderingFragmentShadingRateAttachmentInfoKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "RenderingFragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT ) == - sizeof( VkRenderingFragmentDensityMapAttachmentInfoEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" 
); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "RenderingFragmentDensityMapAttachmentInfoEXT is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD ) == sizeof( VkAttachmentSampleCountInfoAMD ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "AttachmentSampleCountInfoAMD is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX ) == sizeof( VkMultiviewPerViewAttributesInfoNVX ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "MultiviewPerViewAttributesInfoNVX is not nothrow_move_constructible!" ); - #if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_stream_descriptor_surface === @@ -2604,30 +2975,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceASTCDecodeFeaturesEXT is not nothrow_move_constructible!" ); -//=== VK_EXT_pipeline_robustness === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT ) == - sizeof( VkPhysicalDevicePipelineRobustnessFeaturesEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDevicePipelineRobustnessFeaturesEXT is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT ) == - sizeof( VkPhysicalDevicePipelineRobustnessPropertiesEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDevicePipelineRobustnessPropertiesEXT is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT ) == sizeof( VkPipelineRobustnessCreateInfoEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PipelineRobustnessCreateInfoEXT is not nothrow_move_constructible!" ); - #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_memory_win32 === @@ -2728,15 +3075,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "SemaphoreGetFdInfoKHR is not nothrow_move_constructible!" ); -//=== VK_KHR_push_descriptor === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDevicePushDescriptorPropertiesKHR is not nothrow_move_constructible!" 
); - //=== VK_EXT_conditional_rendering === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), @@ -2857,6 +3195,12 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX ) == sizeof( VkMultiviewPerViewAttributesInfoNVX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MultiviewPerViewAttributesInfoNVX is not nothrow_move_constructible!" ); + //=== VK_NV_viewport_swizzle === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" ); @@ -2934,6 +3278,16 @@ VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XYColorEXT ) == sizeof( VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "XYColorEXT is not nothrow_move_constructible!" ); +//=== VK_IMG_relaxed_line_rasterization === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRelaxedLineRasterizationFeaturesIMG ) == + sizeof( VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRelaxedLineRasterizationFeaturesIMG is not nothrow_move_constructible!" ); + //=== VK_KHR_shared_presentable_image === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), @@ -3122,6 +3476,8 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DebugUtilsMessengerEXT is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DebugUtilsMessengerEXT is not nothrow_move_constructible!" ); @@ -3190,6 +3546,71 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderEnqueueFeaturesAMDX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX ) == sizeof( VkPhysicalDeviceShaderEnqueuePropertiesAMDX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderEnqueuePropertiesAMDX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX ) == sizeof( VkExecutionGraphPipelineScratchSizeAMDX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExecutionGraphPipelineScratchSizeAMDX is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX ) == sizeof( VkExecutionGraphPipelineCreateInfoAMDX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExecutionGraphPipelineCreateInfoAMDX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX ) == sizeof( VkDispatchGraphInfoAMDX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DispatchGraphInfoAMDX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX ) == sizeof( VkDispatchGraphCountInfoAMDX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DispatchGraphCountInfoAMDX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX ) == sizeof( VkPipelineShaderStageNodeCreateInfoAMDX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineShaderStageNodeCreateInfoAMDX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX ) == sizeof( VkDeviceOrHostAddressConstAMDX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceOrHostAddressConstAMDX is not nothrow_move_constructible!" ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +//=== VK_AMD_mixed_attachment_samples === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD ) == sizeof( VkAttachmentSampleCountInfoAMD ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AttachmentSampleCountInfoAMD is not nothrow_move_constructible!" ); + //=== VK_EXT_sample_locations === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" ); @@ -3366,6 +3787,8 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureKHR is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "AccelerationStructureKHR is not nothrow_move_constructible!" ); @@ -3433,21 +3856,79 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "AccelerationStructureBuildSizesInfoKHR is not nothrow_move_constructible!" 
); -//=== VK_NV_framebuffer_mixed_samples === +//=== VK_KHR_ray_tracing_pipeline === -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV ) == - sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR ) == sizeof( VkRayTracingShaderGroupCreateInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PipelineCoverageModulationStateCreateInfoNV is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RayTracingShaderGroupCreateInfoKHR is not nothrow_move_constructible!" ); -//=== VK_NV_shader_sm_builtins === +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR ) == sizeof( VkRayTracingPipelineCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RayTracingPipelineCreateInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR ) == + sizeof( VkPhysicalDeviceRayTracingPipelineFeaturesKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingPipelineFeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR ) == + sizeof( VkPhysicalDeviceRayTracingPipelinePropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingPipelinePropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR ) == sizeof( VkStridedDeviceAddressRegionKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "StridedDeviceAddressRegionKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR ) == sizeof( VkTraceRaysIndirectCommandKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "TraceRaysIndirectCommandKHR is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR ) == sizeof( VkRayTracingPipelineInterfaceCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RayTracingPipelineInterfaceCreateInfoKHR is not nothrow_move_constructible!" ); + +//=== VK_KHR_ray_query === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR ) == sizeof( VkPhysicalDeviceRayQueryFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayQueryFeaturesKHR is not nothrow_move_constructible!" ); + +//=== VK_NV_framebuffer_mixed_samples === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV ) == + sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineCoverageModulationStateCreateInfoNV is not nothrow_move_constructible!" ); + +//=== VK_NV_shader_sm_builtins === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "PhysicalDeviceShaderSMBuiltinsPropertiesNV is not nothrow_move_constructible!" ); @@ -3520,6 +4001,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ValidationCacheEXT is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ValidationCacheEXT is not nothrow_move_constructible!" ); @@ -3651,6 +4133,8 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureNV is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "AccelerationStructureNV is not nothrow_move_constructible!" ); @@ -3758,14 +4242,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PipelineCompilerControlCreateInfoAMD is not nothrow_move_constructible!" ); -//=== VK_EXT_calibrated_timestamps === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT ) == sizeof( VkCalibratedTimestampInfoEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "CalibratedTimestampInfoEXT is not nothrow_move_constructible!" ); - //=== VK_AMD_shader_core_properties === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ), @@ -3775,72 +4251,46 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceShaderCorePropertiesAMD is not nothrow_move_constructible!" 
); -#if defined( VK_ENABLE_BETA_EXTENSIONS ) -//=== VK_EXT_video_decode_h265 === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoEXT ) == sizeof( VkVideoDecodeH265ProfileInfoEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoDecodeH265ProfileInfoEXT is not nothrow_move_constructible!" ); +//=== VK_KHR_video_decode_h265 === -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesEXT ) == sizeof( VkVideoDecodeH265CapabilitiesEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR ) == sizeof( VkVideoDecodeH265ProfileInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoDecodeH265CapabilitiesEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH265ProfileInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoEXT ) == - sizeof( VkVideoDecodeH265SessionParametersCreateInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR ) == sizeof( VkVideoDecodeH265CapabilitiesKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoDecodeH265SessionParametersCreateInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH265CapabilitiesKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT ) == sizeof( VkVideoDecodeH265SessionParametersAddInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR ) == + sizeof( VkVideoDecodeH265SessionParametersCreateInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoDecodeH265SessionParametersAddInfoEXT is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoEXT ) == sizeof( VkVideoDecodeH265PictureInfoEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoDecodeH265PictureInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH265SessionParametersCreateInfoKHR is not nothrow_move_constructible!" 
); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoEXT ) == sizeof( VkVideoDecodeH265DpbSlotInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR ) == sizeof( VkVideoDecodeH265SessionParametersAddInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VideoDecodeH265DpbSlotInfoEXT is not nothrow_move_constructible!" ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -//=== VK_KHR_global_priority === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "DeviceQueueGlobalPriorityCreateInfoKHR is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH265SessionParametersAddInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR ) == - sizeof( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR ) == sizeof( VkVideoDecodeH265PictureInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceGlobalPriorityQueryFeaturesKHR is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH265PictureInfoKHR is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR ) == sizeof( VkQueueFamilyGlobalPriorityPropertiesKHR ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR ) == sizeof( VkVideoDecodeH265DpbSlotInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "QueueFamilyGlobalPriorityPropertiesKHR is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH265DpbSlotInfoKHR is not nothrow_move_constructible!" ); //=== VK_AMD_memory_overallocation_behavior === @@ -3861,29 +4311,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceVertexAttributeDivisorPropertiesEXT is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT ) == sizeof( VkVertexInputBindingDivisorDescriptionEXT ), - "struct and wrapper have different size!" 
); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VertexInputBindingDivisorDescriptionEXT is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT ) == - sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PipelineVertexInputDivisorStateCreateInfoEXT is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) == - sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceVertexAttributeDivisorFeaturesEXT is not nothrow_move_constructible!" ); - #if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_frame_token === @@ -3893,16 +4320,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceComputeShaderDerivativesFeaturesNV is not nothrow_move_constructible!" ); - //=== VK_NV_mesh_shader === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ), @@ -3965,6 +4382,18 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "CheckpointDataNV is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV ) == sizeof( VkQueueFamilyCheckpointProperties2NV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "QueueFamilyCheckpointProperties2NV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CheckpointData2NV ) == sizeof( VkCheckpointData2NV ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CheckpointData2NV is not nothrow_move_constructible!" ); + //=== VK_INTEL_shader_integer_functions2 === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) == @@ -4029,6 +4458,8 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PerformanceConfigurationINTEL is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "PerformanceConfigurationINTEL is not nothrow_move_constructible!" ); @@ -4102,6 +4533,14 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "RenderPassFragmentDensityMapCreateInfoEXT is not nothrow_move_constructible!" 
); +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT ) == + sizeof( VkRenderingFragmentDensityMapAttachmentInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderingFragmentDensityMapAttachmentInfoEXT is not nothrow_move_constructible!" ); + //=== VK_KHR_fragment_shading_rate === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR ) == sizeof( VkFragmentShadingRateAttachmentInfoKHR ), @@ -4142,6 +4581,14 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceFragmentShadingRateKHR is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR ) == + sizeof( VkRenderingFragmentShadingRateAttachmentInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderingFragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!" ); + //=== VK_AMD_shader_core_properties2 === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ), @@ -4170,6 +4617,15 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT is not nothrow_move_constructible!" ); +//=== VK_KHR_shader_quad_control === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderQuadControlFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderQuadControlFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderQuadControlFeaturesKHR is not nothrow_move_constructible!" ); + //=== VK_EXT_memory_budget === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT ) == sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ), @@ -4367,31 +4823,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "HeadlessSurfaceCreateInfoEXT is not nothrow_move_constructible!" ); -//=== VK_EXT_line_rasterization === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceLineRasterizationFeaturesEXT is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT ) == - sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceLineRasterizationPropertiesEXT is not nothrow_move_constructible!" 
); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT ) == - sizeof( VkPipelineRasterizationLineStateCreateInfoEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PipelineRasterizationLineStateCreateInfoEXT is not nothrow_move_constructible!" ); - //=== VK_EXT_shader_atomic_float === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT ), @@ -4401,15 +4832,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceShaderAtomicFloatFeaturesEXT is not nothrow_move_constructible!" ); -//=== VK_EXT_index_type_uint8 === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT ) == sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceIndexTypeUint8FeaturesEXT is not nothrow_move_constructible!" ); - //=== VK_EXT_extended_dynamic_state === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT ) == @@ -4423,6 +4845,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DeferredOperationKHR is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DeferredOperationKHR is not nothrow_move_constructible!" ); @@ -4474,6 +4897,28 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PipelineExecutableInternalRepresentationKHR is not nothrow_move_constructible!" ); +//=== VK_EXT_map_memory_placed === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedFeaturesEXT ) == sizeof( VkPhysicalDeviceMapMemoryPlacedFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMapMemoryPlacedFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedPropertiesEXT ) == sizeof( VkPhysicalDeviceMapMemoryPlacedPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMapMemoryPlacedPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryMapPlacedInfoEXT ) == sizeof( VkMemoryMapPlacedInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryMapPlacedInfoEXT is not nothrow_move_constructible!" 
); + //=== VK_EXT_shader_atomic_float2 === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT ) == @@ -4484,6 +4929,70 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceShaderAtomicFloat2FeaturesEXT is not nothrow_move_constructible!" ); +//=== VK_EXT_surface_maintenance1 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfacePresentModeEXT ) == sizeof( VkSurfacePresentModeEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SurfacePresentModeEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfacePresentScalingCapabilitiesEXT ) == sizeof( VkSurfacePresentScalingCapabilitiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SurfacePresentScalingCapabilitiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfacePresentModeCompatibilityEXT ) == sizeof( VkSurfacePresentModeCompatibilityEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SurfacePresentModeCompatibilityEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_swapchain_maintenance1 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT ) == + sizeof( VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceSwapchainMaintenance1FeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentFenceInfoEXT ) == sizeof( VkSwapchainPresentFenceInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SwapchainPresentFenceInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentModesCreateInfoEXT ) == sizeof( VkSwapchainPresentModesCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SwapchainPresentModesCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentModeInfoEXT ) == sizeof( VkSwapchainPresentModeInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SwapchainPresentModeInfoEXT is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentScalingCreateInfoEXT ) == sizeof( VkSwapchainPresentScalingCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SwapchainPresentScalingCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT ) == sizeof( VkReleaseSwapchainImagesInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ReleaseSwapchainImagesInfoEXT is not nothrow_move_constructible!" ); + //=== VK_NV_device_generated_commands === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) == @@ -4541,6 +5050,8 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsLayoutNV is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "IndirectCommandsLayoutNV is not nothrow_move_constructible!" ); @@ -4620,6 +5131,26 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "CommandBufferInheritanceRenderPassTransformInfoQCOM is not nothrow_move_constructible!" ); +//=== VK_EXT_depth_bias_control === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthBiasControlFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthBiasControlFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDepthBiasControlFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT ) == sizeof( VkDepthBiasInfoEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DepthBiasInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DepthBiasRepresentationInfoEXT ) == sizeof( VkDepthBiasRepresentationInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DepthBiasRepresentationInfoEXT is not nothrow_move_constructible!" ); + //=== VK_EXT_device_memory_report === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT ) == @@ -4691,6 +5222,29 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PipelineLibraryCreateInfoKHR is not nothrow_move_constructible!" ); +//=== VK_NV_present_barrier === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV ) == sizeof( VkPhysicalDevicePresentBarrierFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePresentBarrierFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV ) == sizeof( VkSurfaceCapabilitiesPresentBarrierNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SurfaceCapabilitiesPresentBarrierNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV ) == sizeof( VkSwapchainPresentBarrierCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SwapchainPresentBarrierCreateInfoNV is not nothrow_move_constructible!" ); + //=== VK_KHR_present_id === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentIdKHR ) == sizeof( VkPresentIdKHR ), "struct and wrapper have different size!" ); @@ -4704,7 +5258,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDevicePresentIdFeaturesKHR is not nothrow_move_constructible!" ); -#if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_encode_queue === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR ) == sizeof( VkVideoEncodeInfoKHR ), "struct and wrapper have different size!" ); @@ -4718,6 +5271,13 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "VideoEncodeCapabilitiesKHR is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolVideoEncodeFeedbackCreateInfoKHR ) == sizeof( VkQueryPoolVideoEncodeFeedbackCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "QueryPoolVideoEncodeFeedbackCreateInfoKHR is not nothrow_move_constructible!" ); + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR ) == sizeof( VkVideoEncodeUsageInfoKHR ), "struct and wrapper have different size!" ); VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); @@ -4736,7 +5296,42 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "VideoEncodeRateControlLayerInfoKHR is not nothrow_move_constructible!" ); -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR ) == + sizeof( VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVideoEncodeQualityLevelInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR ) == sizeof( VkVideoEncodeQualityLevelPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeQualityLevelPropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelInfoKHR ) == sizeof( VkVideoEncodeQualityLevelInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeQualityLevelInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR ) == sizeof( VkVideoEncodeSessionParametersGetInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeSessionParametersGetInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR ) == + sizeof( VkVideoEncodeSessionParametersFeedbackInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeSessionParametersFeedbackInfoKHR is not nothrow_move_constructible!" ); //=== VK_NV_device_diagnostics_config === @@ -4754,6 +5349,58 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "DeviceDiagnosticsConfigCreateInfoNV is not nothrow_move_constructible!" ); +#if defined( VK_ENABLE_BETA_EXTENSIONS ) +//=== VK_NV_cuda_kernel_launch === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaModuleNV ) == sizeof( VkCudaModuleNV ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "CudaModuleNV is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "CudaModuleNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaFunctionNV ) == sizeof( VkCudaFunctionNV ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "CudaFunctionNV is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CudaFunctionNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV ) == sizeof( VkCudaModuleCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CudaModuleCreateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV ) == sizeof( VkCudaFunctionCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CudaFunctionCreateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaLaunchInfoNV ) == sizeof( VkCudaLaunchInfoNV ), "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CudaLaunchInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchFeaturesNV ) == sizeof( VkPhysicalDeviceCudaKernelLaunchFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCudaKernelLaunchFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchPropertiesNV ) == sizeof( VkPhysicalDeviceCudaKernelLaunchPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCudaKernelLaunchPropertiesNV is not nothrow_move_constructible!" ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +//=== VK_NV_low_latency === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryLowLatencySupportNV ) == sizeof( VkQueryLowLatencySupportNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "QueryLowLatencySupportNV is not nothrow_move_constructible!" ); + #if defined( VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_objects === @@ -4830,61 +5477,144 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "QueueFamilyCheckpointProperties2NV is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CheckpointData2NV ) == sizeof( VkCheckpointData2NV ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "CheckpointData2NV is not nothrow_move_constructible!" ); - -//=== VK_EXT_graphics_pipeline_library === +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDescriptorBufferPropertiesEXT is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT ) == - sizeof( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT ) == + sizeof( VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT is not nothrow_move_constructible!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT ) == - sizeof( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferFeaturesEXT ) == sizeof( VkPhysicalDeviceDescriptorBufferFeaturesEXT ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDescriptorBufferFeaturesEXT is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT ) == sizeof( VkGraphicsPipelineLibraryCreateInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT ) == sizeof( VkDescriptorAddressInfoEXT ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "GraphicsPipelineLibraryCreateInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorAddressInfoEXT is not nothrow_move_constructible!" ); -//=== VK_AMD_shader_early_and_late_fragment_tests === +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT ) == sizeof( VkDescriptorBufferBindingInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorBufferBindingInfoEXT is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD ) == - sizeof( VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT ) == + sizeof( VkDescriptorBufferBindingPushDescriptorBufferHandleEXT ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorBufferBindingPushDescriptorBufferHandleEXT is not nothrow_move_constructible!" ); -//=== VK_KHR_fragment_shader_barycentric === +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorDataEXT ) == sizeof( VkDescriptorDataEXT ), "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorDataEXT is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR ) == - sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT ) == sizeof( VkDescriptorGetInfoEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DescriptorGetInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT ) == sizeof( VkBufferCaptureDescriptorDataInfoEXT ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT ) == sizeof( VkImageCaptureDescriptorDataInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT ) == sizeof( VkImageViewCaptureDescriptorDataInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageViewCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT ) == sizeof( VkSamplerCaptureDescriptorDataInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SamplerCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT ) == sizeof( VkOpaqueCaptureDescriptorDataCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OpaqueCaptureDescriptorDataCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT ) == + sizeof( VkAccelerationStructureCaptureDescriptorDataInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_graphics_pipeline_library === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT ) == + sizeof( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT ) == + sizeof( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT ) == sizeof( VkGraphicsPipelineLibraryCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GraphicsPipelineLibraryCreateInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_AMD_shader_early_and_late_fragment_tests === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD ) == + sizeof( VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD is not nothrow_move_constructible!" ); + +//=== VK_KHR_fragment_shader_barycentric === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR ) == + sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "PhysicalDeviceFragmentShaderBarycentricFeaturesKHR is not nothrow_move_constructible!" ); @@ -5074,17 +5804,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "ImageCompressionControlEXT is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT ) == sizeof( VkSubresourceLayout2EXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "SubresourceLayout2EXT is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource2EXT ) == sizeof( VkImageSubresource2EXT ), "struct and wrapper have different size!" 
); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "ImageSubresource2EXT is not nothrow_move_constructible!" ); - VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT ) == sizeof( VkImageCompressionPropertiesEXT ), "struct and wrapper have different size!" ); VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); @@ -5110,82 +5829,61 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDevice4444FormatsFeaturesEXT is not nothrow_move_constructible!" ); -//=== VK_EXT_rgba10x6_formats === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT ) == sizeof( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceRGBA10X6FormatsFeaturesEXT is not nothrow_move_constructible!" ); - -#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) -//=== VK_EXT_directfb_surface === +//=== VK_EXT_device_fault === -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT ) == sizeof( VkDirectFBSurfaceCreateInfoEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT ) == sizeof( VkPhysicalDeviceFaultFeaturesEXT ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "DirectFBSurfaceCreateInfoEXT is not nothrow_move_constructible!" ); -#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFaultFeaturesEXT is not nothrow_move_constructible!" ); -//=== VK_KHR_ray_tracing_pipeline === +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT ) == sizeof( VkDeviceFaultCountsEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceFaultCountsEXT is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR ) == sizeof( VkRayTracingShaderGroupCreateInfoKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "RayTracingShaderGroupCreateInfoKHR is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT ) == sizeof( VkDeviceFaultInfoEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceFaultInfoEXT is not nothrow_move_constructible!" 
); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR ) == sizeof( VkRayTracingPipelineCreateInfoKHR ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT ) == sizeof( VkDeviceFaultAddressInfoEXT ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "RayTracingPipelineCreateInfoKHR is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceFaultAddressInfoEXT is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR ) == - sizeof( VkPhysicalDeviceRayTracingPipelineFeaturesKHR ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT ) == sizeof( VkDeviceFaultVendorInfoEXT ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceRayTracingPipelineFeaturesKHR is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceFaultVendorInfoEXT is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR ) == - sizeof( VkPhysicalDeviceRayTracingPipelinePropertiesKHR ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT ) == sizeof( VkDeviceFaultVendorBinaryHeaderVersionOneEXT ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceRayTracingPipelinePropertiesKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR ) == sizeof( VkStridedDeviceAddressRegionKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "StridedDeviceAddressRegionKHR is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceFaultVendorBinaryHeaderVersionOneEXT is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR ) == sizeof( VkTraceRaysIndirectCommandKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "TraceRaysIndirectCommandKHR is not nothrow_move_constructible!" 
); +//=== VK_EXT_rgba10x6_formats === -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR ) == sizeof( VkRayTracingPipelineInterfaceCreateInfoKHR ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT ) == sizeof( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "RayTracingPipelineInterfaceCreateInfoKHR is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRGBA10X6FormatsFeaturesEXT is not nothrow_move_constructible!" ); -//=== VK_KHR_ray_query === +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) +//=== VK_EXT_directfb_surface === -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR ) == sizeof( VkPhysicalDeviceRayQueryFeaturesKHR ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT ) == sizeof( VkDirectFBSurfaceCreateInfoEXT ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceRayQueryFeaturesKHR is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DirectFBSurfaceCreateInfoEXT is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ //=== VK_EXT_vertex_input_dynamic_state === @@ -5218,6 +5916,23 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceDrmPropertiesEXT is not nothrow_move_constructible!" ); +//=== VK_EXT_device_address_binding_report === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT ) == + sizeof( VkPhysicalDeviceAddressBindingReportFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceAddressBindingReportFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT ) == sizeof( VkDeviceAddressBindingCallbackDataEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceAddressBindingCallbackDataEXT is not nothrow_move_constructible!" ); + //=== VK_EXT_depth_clip_control === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipControlFeaturesEXT ), @@ -5245,6 +5960,16 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT is not nothrow_move_constructible!" 
); +//=== VK_EXT_present_mode_fifo_latest_ready === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT ) == + sizeof( VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT is not nothrow_move_constructible!" ); + #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_memory === @@ -5292,6 +6017,8 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferCollectionFUCHSIA is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "BufferCollectionFUCHSIA is not nothrow_move_constructible!" ); @@ -5424,6 +6151,20 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDevicePipelinePropertiesFeaturesEXT is not nothrow_move_constructible!" ); +//=== VK_EXT_frame_boundary === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFrameBoundaryFeaturesEXT ) == sizeof( VkPhysicalDeviceFrameBoundaryFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceFrameBoundaryFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FrameBoundaryEXT ) == sizeof( VkFrameBoundaryEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "FrameBoundaryEXT is not nothrow_move_constructible!" ); + //=== VK_EXT_multisampled_render_to_single_sampled === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT ) == @@ -5559,6 +6300,154 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceImage2DViewOf3DFeaturesEXT is not nothrow_move_constructible!" ); +//=== VK_EXT_shader_tile_image === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderTileImageFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderTileImageFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT ) == sizeof( VkPhysicalDeviceShaderTileImagePropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderTileImagePropertiesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_opacity_micromap === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT ) == sizeof( VkMicromapBuildInfoEXT ), "struct and wrapper have different size!" 
);
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT>::value,
+                          "MicromapBuildInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapUsageEXT ) == sizeof( VkMicromapUsageEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapUsageEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapUsageEXT>::value,
+                          "MicromapUsageEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT ) == sizeof( VkMicromapCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT>::value,
+                          "MicromapCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapEXT ) == sizeof( VkMicromapEXT ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible<VULKAN_HPP_NAMESPACE::MicromapEXT>::value, "MicromapEXT is not copy_constructible!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapEXT>::value, "MicromapEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT ) == sizeof( VkPhysicalDeviceOpacityMicromapFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT>::value,
+                          "PhysicalDeviceOpacityMicromapFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT ) == sizeof( VkPhysicalDeviceOpacityMicromapPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT>::value,
+                          "PhysicalDeviceOpacityMicromapPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT ) == sizeof( VkMicromapVersionInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT>::value,
+                          "MicromapVersionInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT ) == sizeof( VkCopyMicromapToMemoryInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT>::value,
+                          "CopyMicromapToMemoryInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT ) == sizeof( VkCopyMemoryToMicromapInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT>::value,
+                          "CopyMemoryToMicromapInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT ) == sizeof( VkCopyMicromapInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT>::value,
+                          "CopyMicromapInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT ) == sizeof( VkMicromapBuildSizesInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT>::value,
+                          "MicromapBuildSizesInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT ) ==
+                            sizeof( VkAccelerationStructureTrianglesOpacityMicromapEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT>::value,
+                          "AccelerationStructureTrianglesOpacityMicromapEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapTriangleEXT ) == sizeof( VkMicromapTriangleEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapTriangleEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapTriangleEXT>::value,
+                          "MicromapTriangleEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+//=== VK_NV_displacement_micromap ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapFeaturesNV ) ==
+                            sizeof( VkPhysicalDeviceDisplacementMicromapFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapFeaturesNV>::value,
+                          "PhysicalDeviceDisplacementMicromapFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapPropertiesNV ) ==
+                            sizeof( VkPhysicalDeviceDisplacementMicromapPropertiesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapPropertiesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapPropertiesNV>::value,
+                          "PhysicalDeviceDisplacementMicromapPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesDisplacementMicromapNV ) ==
+                            sizeof( VkAccelerationStructureTrianglesDisplacementMicromapNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesDisplacementMicromapNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesDisplacementMicromapNV>::value,
+                          "AccelerationStructureTrianglesDisplacementMicromapNV is not nothrow_move_constructible!"
); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +//=== VK_HUAWEI_cluster_culling_shader === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI ) == + sizeof( VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceClusterCullingShaderFeaturesHUAWEI is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI ) == + sizeof( VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceClusterCullingShaderPropertiesHUAWEI is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI ) == + sizeof( VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI is not nothrow_move_constructible!" ); + //=== VK_EXT_border_color_swizzle === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT ) == @@ -5587,6 +6476,56 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT is not nothrow_move_constructible!" ); +//=== VK_ARM_shader_core_properties === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesARM ) == sizeof( VkPhysicalDeviceShaderCorePropertiesARM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderCorePropertiesARM is not nothrow_move_constructible!" ); + +//=== VK_ARM_scheduling_controls === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM ) == sizeof( VkDeviceQueueShaderCoreControlCreateInfoARM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceQueueShaderCoreControlCreateInfoARM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM ) == + sizeof( VkPhysicalDeviceSchedulingControlsFeaturesARM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceSchedulingControlsFeaturesARM is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsPropertiesARM ) == + sizeof( VkPhysicalDeviceSchedulingControlsPropertiesARM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceSchedulingControlsPropertiesARM is not nothrow_move_constructible!" ); + +//=== VK_EXT_image_sliced_view_of_3d === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT ) == + sizeof( VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageSlicedViewOf3DFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewSlicedCreateInfoEXT ) == sizeof( VkImageViewSlicedCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageViewSlicedCreateInfoEXT is not nothrow_move_constructible!" ); + //=== VK_VALVE_descriptor_set_host_mapping === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE ) == @@ -5611,15 +6550,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "DescriptorSetLayoutHostMappingInfoVALVE is not nothrow_move_constructible!" ); -//=== VK_EXT_depth_clamp_zero_one === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClampZeroOneFeaturesEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceDepthClampZeroOneFeaturesEXT is not nothrow_move_constructible!" ); - //=== VK_EXT_non_seamless_cube_map === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT ) == @@ -5630,6 +6560,41 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceNonSeamlessCubeMapFeaturesEXT is not nothrow_move_constructible!" ); +//=== VK_ARM_render_pass_striped === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedFeaturesARM ) == sizeof( VkPhysicalDeviceRenderPassStripedFeaturesARM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRenderPassStripedFeaturesARM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRenderPassStripedPropertiesARM ) == + sizeof( VkPhysicalDeviceRenderPassStripedPropertiesARM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRenderPassStripedPropertiesARM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassStripeBeginInfoARM ) == sizeof( VkRenderPassStripeBeginInfoARM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassStripeBeginInfoARM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM ) == sizeof( VkRenderPassStripeInfoARM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassStripeInfoARM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassStripeSubmitInfoARM ) == sizeof( VkRenderPassStripeSubmitInfoARM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderPassStripeSubmitInfoARM is not nothrow_move_constructible!" ); + //=== VK_QCOM_fragment_density_map_offset === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM ) == @@ -5655,31 +6620,150 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "SubpassFragmentDensityMapOffsetEndInfoQCOM is not nothrow_move_constructible!" ); -//=== VK_NV_linear_color_attachment === +//=== VK_NV_copy_memory_indirect === -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV ) == - sizeof( VkPhysicalDeviceLinearColorAttachmentFeaturesNV ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV ) == sizeof( VkCopyMemoryIndirectCommandNV ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyMemoryIndirectCommandNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV ) == sizeof( VkCopyMemoryToImageIndirectCommandNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceLinearColorAttachmentFeaturesNV is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyMemoryToImageIndirectCommandNV is not nothrow_move_constructible!" ); -//=== VK_EXT_image_compression_control_swapchain === +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV ) == sizeof( VkPhysicalDeviceCopyMemoryIndirectFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCopyMemoryIndirectFeaturesNV is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT ) == - sizeof( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV ) == + sizeof( VkPhysicalDeviceCopyMemoryIndirectPropertiesNV ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCopyMemoryIndirectPropertiesNV is not nothrow_move_constructible!" ); -//=== VK_QCOM_image_processing === +//=== VK_NV_memory_decompression === -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM ) == sizeof( VkImageViewSampleWeightCreateInfoQCOM ), +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV ) == sizeof( VkDecompressMemoryRegionNV ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DecompressMemoryRegionNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV ) == + sizeof( VkPhysicalDeviceMemoryDecompressionFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMemoryDecompressionFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV ) == + sizeof( VkPhysicalDeviceMemoryDecompressionPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMemoryDecompressionPropertiesNV is not nothrow_move_constructible!" ); + +//=== VK_NV_device_generated_commands_compute === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV ) == + sizeof( VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV ) == sizeof( VkComputePipelineIndirectBufferInfoNV ), + "struct and wrapper have different size!" 
);
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV>::value,
+                          "ComputePipelineIndirectBufferInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV ) == sizeof( VkPipelineIndirectDeviceAddressInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV>::value,
+                          "PipelineIndirectDeviceAddressInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV ) == sizeof( VkBindPipelineIndirectCommandNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV>::value,
+                          "BindPipelineIndirectCommandNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_ray_tracing_linear_swept_spheres ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV ) ==
+                            sizeof( VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV>::value,
+                          "PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryLinearSweptSpheresDataNV ) ==
+                            sizeof( VkAccelerationStructureGeometryLinearSweptSpheresDataNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryLinearSweptSpheresDataNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryLinearSweptSpheresDataNV>::value,
+                          "AccelerationStructureGeometryLinearSweptSpheresDataNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometrySpheresDataNV ) == sizeof( VkAccelerationStructureGeometrySpheresDataNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometrySpheresDataNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometrySpheresDataNV>::value,
+                          "AccelerationStructureGeometrySpheresDataNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_linear_color_attachment ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV ) ==
+                            sizeof( VkPhysicalDeviceLinearColorAttachmentFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV>::value,
+                          "PhysicalDeviceLinearColorAttachmentFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_KHR_shader_maximal_reconvergence ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR ) ==
+                            sizeof( VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR ),
+                          "struct and wrapper have different size!"
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR is not nothrow_move_constructible!" ); + +//=== VK_EXT_image_compression_control_swapchain === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT ) == + sizeof( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_QCOM_image_processing === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM ) == sizeof( VkImageViewSampleWeightCreateInfoQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ImageViewSampleWeightCreateInfoQCOM is not nothrow_move_constructible!" ); @@ -5699,6 +6783,63 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceImageProcessingPropertiesQCOM is not nothrow_move_constructible!" ); +//=== VK_EXT_nested_command_buffer === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferFeaturesEXT ) == + sizeof( VkPhysicalDeviceNestedCommandBufferFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceNestedCommandBufferFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferPropertiesEXT ) == + sizeof( VkPhysicalDeviceNestedCommandBufferPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceNestedCommandBufferPropertiesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_external_memory_acquire_unmodified === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryAcquireUnmodifiedEXT ) == sizeof( VkExternalMemoryAcquireUnmodifiedEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExternalMemoryAcquireUnmodifiedEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_extended_dynamic_state3 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT ) == + sizeof( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExtendedDynamicState3FeaturesEXT is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT ) == + sizeof( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExtendedDynamicState3PropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT ) == sizeof( VkColorBlendEquationEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ColorBlendEquationEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT ) == sizeof( VkColorBlendAdvancedEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ColorBlendAdvancedEXT is not nothrow_move_constructible!" ); + //=== VK_EXT_subpass_merge_feedback === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT ) == @@ -5741,6 +6882,20 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "RenderPassSubpassFeedbackCreateInfoEXT is not nothrow_move_constructible!" ); +//=== VK_LUNARG_direct_driver_loading === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG ) == sizeof( VkDirectDriverLoadingInfoLUNARG ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DirectDriverLoadingInfoLUNARG is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG ) == sizeof( VkDirectDriverLoadingListLUNARG ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DirectDriverLoadingListLUNARG is not nothrow_move_constructible!" ); + //=== VK_EXT_shader_module_identifier === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT ) == @@ -5783,6 +6938,59 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT is not nothrow_move_constructible!" ); +//=== VK_NV_optical_flow === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV ) == sizeof( VkPhysicalDeviceOpticalFlowFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceOpticalFlowFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV ) == sizeof( VkPhysicalDeviceOpticalFlowPropertiesNV ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceOpticalFlowPropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV ) == sizeof( VkOpticalFlowImageFormatInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OpticalFlowImageFormatInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV ) == sizeof( VkOpticalFlowImageFormatPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OpticalFlowImageFormatPropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV ) == sizeof( VkOpticalFlowSessionNV ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "OpticalFlowSessionNV is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OpticalFlowSessionNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV ) == sizeof( VkOpticalFlowSessionCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OpticalFlowSessionCreateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV ) == sizeof( VkOpticalFlowSessionCreatePrivateDataInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OpticalFlowSessionCreatePrivateDataInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV ) == sizeof( VkOpticalFlowExecuteInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OpticalFlowExecuteInfoNV is not nothrow_move_constructible!" ); + //=== VK_EXT_legacy_dithering === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT ) == sizeof( VkPhysicalDeviceLegacyDitheringFeaturesEXT ), @@ -5792,6 +7000,171 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceLegacyDitheringFeaturesEXT is not nothrow_move_constructible!" ); +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) +//=== VK_ANDROID_external_format_resolve === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolveFeaturesANDROID ) == + sizeof( VkPhysicalDeviceExternalFormatResolveFeaturesANDROID ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExternalFormatResolveFeaturesANDROID is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolvePropertiesANDROID ) == + sizeof( VkPhysicalDeviceExternalFormatResolvePropertiesANDROID ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExternalFormatResolvePropertiesANDROID is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatResolvePropertiesANDROID ) == + sizeof( VkAndroidHardwareBufferFormatResolvePropertiesANDROID ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AndroidHardwareBufferFormatResolvePropertiesANDROID is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +//=== VK_AMD_anti_lag === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAntiLagFeaturesAMD ) == sizeof( VkPhysicalDeviceAntiLagFeaturesAMD ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceAntiLagFeaturesAMD is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AntiLagDataAMD ) == sizeof( VkAntiLagDataAMD ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AntiLagDataAMD is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD ) == sizeof( VkAntiLagPresentationInfoAMD ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AntiLagPresentationInfoAMD is not nothrow_move_constructible!" ); + +//=== VK_KHR_ray_tracing_position_fetch === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR ) == + sizeof( VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingPositionFetchFeaturesKHR is not nothrow_move_constructible!" ); + +//=== VK_EXT_shader_object === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderEXT ) == sizeof( VkShaderEXT ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "ShaderEXT is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ShaderEXT is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderObjectFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderObjectFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT ) == sizeof( VkPhysicalDeviceShaderObjectPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderObjectPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT ) == sizeof( VkShaderCreateInfoEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ShaderCreateInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_KHR_pipeline_binary === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineBinaryFeaturesKHR ) == sizeof( VkPhysicalDevicePipelineBinaryFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePipelineBinaryFeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineBinaryPropertiesKHR ) == sizeof( VkPhysicalDevicePipelineBinaryPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePipelineBinaryPropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DevicePipelineBinaryInternalCacheControlKHR ) == + sizeof( VkDevicePipelineBinaryInternalCacheControlKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DevicePipelineBinaryInternalCacheControlKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR ) == sizeof( VkPipelineBinaryKHR ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "PipelineBinaryKHR is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineBinaryKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR ) == sizeof( VkPipelineBinaryKeyKHR ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineBinaryKeyKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR ) == sizeof( VkPipelineBinaryDataKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineBinaryDataKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR ) == sizeof( VkPipelineBinaryKeysAndDataKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineBinaryKeysAndDataKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR ) == sizeof( VkPipelineBinaryCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineBinaryCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryInfoKHR ) == sizeof( VkPipelineBinaryInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineBinaryInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR ) == sizeof( VkReleaseCapturedPipelineDataInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ReleaseCapturedPipelineDataInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR ) == sizeof( VkPipelineBinaryDataInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineBinaryDataInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR ) == sizeof( VkPipelineCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR ) == sizeof( VkPipelineBinaryHandlesInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineBinaryHandlesInfoKHR is not nothrow_move_constructible!" 
); + //=== VK_QCOM_tile_properties === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM ) == sizeof( VkPhysicalDeviceTilePropertiesFeaturesQCOM ), @@ -5821,6 +7194,82 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "AmigoProfilingSubmitInfoSEC is not nothrow_move_constructible!" ); +//=== VK_QCOM_multiview_per_view_viewports === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM ) == + sizeof( VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM is not nothrow_move_constructible!" ); + +//=== VK_NV_ray_tracing_invocation_reorder === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV ) == + sizeof( VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingInvocationReorderPropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV ) == + sizeof( VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingInvocationReorderFeaturesNV is not nothrow_move_constructible!" ); + +//=== VK_NV_cooperative_vector === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeVectorPropertiesNV ) == + sizeof( VkPhysicalDeviceCooperativeVectorPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeVectorPropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeVectorFeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeVectorFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeVectorFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CooperativeVectorPropertiesNV ) == sizeof( VkCooperativeVectorPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CooperativeVectorPropertiesNV is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ConvertCooperativeVectorMatrixInfoNV ) == sizeof( VkConvertCooperativeVectorMatrixInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ConvertCooperativeVectorMatrixInfoNV is not nothrow_move_constructible!" ); + +//=== VK_NV_extended_sparse_address_space === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV ) == + sizeof( VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpacePropertiesNV ) == + sizeof( VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExtendedSparseAddressSpacePropertiesNV is not nothrow_move_constructible!" ); + //=== VK_EXT_mutable_descriptor_type === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT ) == @@ -5844,4 +7293,1242 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "MutableDescriptorTypeCreateInfoEXT is not nothrow_move_constructible!" ); +//=== VK_EXT_legacy_vertex_attributes === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyVertexAttributesFeaturesEXT ) == + sizeof( VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLegacyVertexAttributesFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyVertexAttributesPropertiesEXT ) == + sizeof( VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLegacyVertexAttributesPropertiesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_layer_settings === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LayerSettingsCreateInfoEXT ) == sizeof( VkLayerSettingsCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "LayerSettingsCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LayerSettingEXT ) == sizeof( VkLayerSettingEXT ), "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "LayerSettingEXT is not nothrow_move_constructible!" ); + +//=== VK_ARM_shader_core_builtins === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM ) == + sizeof( VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderCoreBuiltinsFeaturesARM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM ) == + sizeof( VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderCoreBuiltinsPropertiesARM is not nothrow_move_constructible!" ); + +//=== VK_EXT_pipeline_library_group_handles === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT ) == + sizeof( VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_EXT_dynamic_rendering_unused_attachments === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT ) == + sizeof( VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_NV_low_latency2 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV ) == sizeof( VkLatencySleepModeInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "LatencySleepModeInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LatencySleepInfoNV ) == sizeof( VkLatencySleepInfoNV ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "LatencySleepInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV ) == sizeof( VkSetLatencyMarkerInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SetLatencyMarkerInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV ) == sizeof( VkGetLatencyMarkerInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GetLatencyMarkerInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV ) == sizeof( VkLatencyTimingsFrameReportNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "LatencyTimingsFrameReportNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LatencySubmissionPresentIdNV ) == sizeof( VkLatencySubmissionPresentIdNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "LatencySubmissionPresentIdNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainLatencyCreateInfoNV ) == sizeof( VkSwapchainLatencyCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SwapchainLatencyCreateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV ) == sizeof( VkOutOfBandQueueTypeInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "OutOfBandQueueTypeInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LatencySurfaceCapabilitiesNV ) == sizeof( VkLatencySurfaceCapabilitiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "LatencySurfaceCapabilitiesNV is not nothrow_move_constructible!" ); + +//=== VK_KHR_cooperative_matrix === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR ) == sizeof( VkCooperativeMatrixPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CooperativeMatrixPropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesKHR ) == sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeMatrixFeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesKHR ) == + sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeMatrixPropertiesKHR is not nothrow_move_constructible!" ); + +//=== VK_QCOM_multiview_per_view_render_areas === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM ) == + sizeof( VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM ) == + sizeof( VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM is not nothrow_move_constructible!" ); + +//=== VK_KHR_compute_shader_derivatives === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesKHR ) == + sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceComputeShaderDerivativesFeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesPropertiesKHR ) == + sizeof( VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceComputeShaderDerivativesPropertiesKHR is not nothrow_move_constructible!" ); + +//=== VK_KHR_video_decode_av1 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeAV1ProfileInfoKHR ) == sizeof( VkVideoDecodeAV1ProfileInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeAV1ProfileInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeAV1CapabilitiesKHR ) == sizeof( VkVideoDecodeAV1CapabilitiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeAV1CapabilitiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeAV1SessionParametersCreateInfoKHR ) == + sizeof( VkVideoDecodeAV1SessionParametersCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeAV1SessionParametersCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeAV1PictureInfoKHR ) == sizeof( VkVideoDecodeAV1PictureInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeAV1PictureInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeAV1DpbSlotInfoKHR ) == sizeof( VkVideoDecodeAV1DpbSlotInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeAV1DpbSlotInfoKHR is not nothrow_move_constructible!" ); + +//=== VK_KHR_video_encode_av1 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeAV1FeaturesKHR ) == sizeof( VkPhysicalDeviceVideoEncodeAV1FeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVideoEncodeAV1FeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilitiesKHR ) == sizeof( VkVideoEncodeAV1CapabilitiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1CapabilitiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QualityLevelPropertiesKHR ) == sizeof( VkVideoEncodeAV1QualityLevelPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1QualityLevelPropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1SessionCreateInfoKHR ) == sizeof( VkVideoEncodeAV1SessionCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1SessionCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1SessionParametersCreateInfoKHR ) == + sizeof( VkVideoEncodeAV1SessionParametersCreateInfoKHR ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1SessionParametersCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1PictureInfoKHR ) == sizeof( VkVideoEncodeAV1PictureInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1PictureInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1DpbSlotInfoKHR ) == sizeof( VkVideoEncodeAV1DpbSlotInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1DpbSlotInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1ProfileInfoKHR ) == sizeof( VkVideoEncodeAV1ProfileInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1ProfileInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR ) == sizeof( VkVideoEncodeAV1QIndexKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1QIndexKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR ) == sizeof( VkVideoEncodeAV1FrameSizeKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1FrameSizeKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1GopRemainingFrameInfoKHR ) == sizeof( VkVideoEncodeAV1GopRemainingFrameInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1GopRemainingFrameInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlInfoKHR ) == sizeof( VkVideoEncodeAV1RateControlInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1RateControlInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlLayerInfoKHR ) == sizeof( VkVideoEncodeAV1RateControlLayerInfoKHR ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1RateControlLayerInfoKHR is not nothrow_move_constructible!" ); + +//=== VK_KHR_video_maintenance1 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance1FeaturesKHR ) == sizeof( VkPhysicalDeviceVideoMaintenance1FeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVideoMaintenance1FeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoInlineQueryInfoKHR ) == sizeof( VkVideoInlineQueryInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoInlineQueryInfoKHR is not nothrow_move_constructible!" ); + +//=== VK_NV_per_stage_descriptor_set === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerStageDescriptorSetFeaturesNV ) == + sizeof( VkPhysicalDevicePerStageDescriptorSetFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePerStageDescriptorSetFeaturesNV is not nothrow_move_constructible!" ); + +//=== VK_QCOM_image_processing2 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2FeaturesQCOM ) == sizeof( VkPhysicalDeviceImageProcessing2FeaturesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageProcessing2FeaturesQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2PropertiesQCOM ) == + sizeof( VkPhysicalDeviceImageProcessing2PropertiesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageProcessing2PropertiesQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerBlockMatchWindowCreateInfoQCOM ) == sizeof( VkSamplerBlockMatchWindowCreateInfoQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SamplerBlockMatchWindowCreateInfoQCOM is not nothrow_move_constructible!" ); + +//=== VK_QCOM_filter_cubic_weights === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicWeightsFeaturesQCOM ) == sizeof( VkPhysicalDeviceCubicWeightsFeaturesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCubicWeightsFeaturesQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCubicWeightsCreateInfoQCOM ) == sizeof( VkSamplerCubicWeightsCreateInfoQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SamplerCubicWeightsCreateInfoQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BlitImageCubicWeightsInfoQCOM ) == sizeof( VkBlitImageCubicWeightsInfoQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BlitImageCubicWeightsInfoQCOM is not nothrow_move_constructible!" ); + +//=== VK_QCOM_ycbcr_degamma === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrDegammaFeaturesQCOM ) == sizeof( VkPhysicalDeviceYcbcrDegammaFeaturesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceYcbcrDegammaFeaturesQCOM is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM ) == + sizeof( VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM is not nothrow_move_constructible!" ); + +//=== VK_QCOM_filter_cubic_clamp === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicClampFeaturesQCOM ) == sizeof( VkPhysicalDeviceCubicClampFeaturesQCOM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCubicClampFeaturesQCOM is not nothrow_move_constructible!" ); + +//=== VK_EXT_attachment_feedback_loop_dynamic_state === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT ) == + sizeof( VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT is not nothrow_move_constructible!" ); + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) +//=== VK_QNX_external_memory_screen_buffer === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX ) == sizeof( VkScreenBufferPropertiesQNX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ScreenBufferPropertiesQNX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ScreenBufferFormatPropertiesQNX ) == sizeof( VkScreenBufferFormatPropertiesQNX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ScreenBufferFormatPropertiesQNX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportScreenBufferInfoQNX ) == sizeof( VkImportScreenBufferInfoQNX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImportScreenBufferInfoQNX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalFormatQNX ) == sizeof( VkExternalFormatQNX ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExternalFormatQNX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX ) == + sizeof( VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + +//=== VK_MSFT_layered_driver === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredDriverPropertiesMSFT ) == sizeof( VkPhysicalDeviceLayeredDriverPropertiesMSFT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLayeredDriverPropertiesMSFT is not nothrow_move_constructible!" ); + +//=== VK_KHR_calibrated_timestamps === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR ) == sizeof( VkCalibratedTimestampInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CalibratedTimestampInfoKHR is not nothrow_move_constructible!" ); + +//=== VK_KHR_maintenance6 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT ) == sizeof( VkSetDescriptorBufferOffsetsInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SetDescriptorBufferOffsetsInfoEXT is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT ) == + sizeof( VkBindDescriptorBufferEmbeddedSamplersInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindDescriptorBufferEmbeddedSamplersInfoEXT is not nothrow_move_constructible!" ); + +//=== VK_NV_descriptor_pool_overallocation === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV ) == + sizeof( VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDescriptorPoolOverallocationFeaturesNV is not nothrow_move_constructible!" ); + +//=== VK_NV_display_stereo === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoCreateInfoNV ) == sizeof( VkDisplaySurfaceStereoCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplaySurfaceStereoCreateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeStereoPropertiesNV ) == sizeof( VkDisplayModeStereoPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DisplayModeStereoPropertiesNV is not nothrow_move_constructible!" ); + +//=== VK_KHR_video_encode_quantization_map === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapCapabilitiesKHR ) == sizeof( VkVideoEncodeQuantizationMapCapabilitiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeQuantizationMapCapabilitiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoFormatQuantizationMapPropertiesKHR ) == sizeof( VkVideoFormatQuantizationMapPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoFormatQuantizationMapPropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapInfoKHR ) == sizeof( VkVideoEncodeQuantizationMapInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeQuantizationMapInfoKHR is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapSessionParametersCreateInfoKHR ) == + sizeof( VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeQuantizationMapSessionParametersCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR ) == + sizeof( VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264QuantizationMapCapabilitiesKHR ) == + sizeof( VkVideoEncodeH264QuantizationMapCapabilitiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH264QuantizationMapCapabilitiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265QuantizationMapCapabilitiesKHR ) == + sizeof( VkVideoEncodeH265QuantizationMapCapabilitiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeH265QuantizationMapCapabilitiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoFormatH265QuantizationMapPropertiesKHR ) == + sizeof( VkVideoFormatH265QuantizationMapPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoFormatH265QuantizationMapPropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QuantizationMapCapabilitiesKHR ) == + sizeof( VkVideoEncodeAV1QuantizationMapCapabilitiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1QuantizationMapCapabilitiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoFormatAV1QuantizationMapPropertiesKHR ) == sizeof( VkVideoFormatAV1QuantizationMapPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoFormatAV1QuantizationMapPropertiesKHR is not nothrow_move_constructible!" 
); + +//=== VK_NV_raw_access_chains === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRawAccessChainsFeaturesNV ) == sizeof( VkPhysicalDeviceRawAccessChainsFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRawAccessChainsFeaturesNV is not nothrow_move_constructible!" ); + +//=== VK_KHR_shader_relaxed_extended_instruction === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR ) == + sizeof( VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR is not nothrow_move_constructible!" ); + +//=== VK_NV_command_buffer_inheritance === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCommandBufferInheritanceFeaturesNV ) == + sizeof( VkPhysicalDeviceCommandBufferInheritanceFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCommandBufferInheritanceFeaturesNV is not nothrow_move_constructible!" ); + +//=== VK_KHR_maintenance7 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance7FeaturesKHR ) == sizeof( VkPhysicalDeviceMaintenance7FeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance7FeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance7PropertiesKHR ) == sizeof( VkPhysicalDeviceMaintenance7PropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance7PropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesListKHR ) == sizeof( VkPhysicalDeviceLayeredApiPropertiesListKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLayeredApiPropertiesListKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR ) == sizeof( VkPhysicalDeviceLayeredApiPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLayeredApiPropertiesKHR is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiVulkanPropertiesKHR ) == + sizeof( VkPhysicalDeviceLayeredApiVulkanPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLayeredApiVulkanPropertiesKHR is not nothrow_move_constructible!" ); + +//=== VK_NV_shader_atomic_float16_vector === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV ) == + sizeof( VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV is not nothrow_move_constructible!" ); + +//=== VK_EXT_shader_replicated_composites === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderReplicatedCompositesFeaturesEXT ) == + sizeof( VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderReplicatedCompositesFeaturesEXT is not nothrow_move_constructible!" ); + +//=== VK_NV_ray_tracing_validation === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingValidationFeaturesNV ) == + sizeof( VkPhysicalDeviceRayTracingValidationFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceRayTracingValidationFeaturesNV is not nothrow_move_constructible!" ); + +//=== VK_NV_cluster_acceleration_structure === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterAccelerationStructureFeaturesNV ) == + sizeof( VkPhysicalDeviceClusterAccelerationStructureFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceClusterAccelerationStructureFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterAccelerationStructurePropertiesNV ) == + sizeof( VkPhysicalDeviceClusterAccelerationStructurePropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceClusterAccelerationStructurePropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClustersBottomLevelInputNV ) == + sizeof( VkClusterAccelerationStructureClustersBottomLevelInputNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureClustersBottomLevelInputNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTriangleClusterInputNV ) == + sizeof( VkClusterAccelerationStructureTriangleClusterInputNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureTriangleClusterInputNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureMoveObjectsInputNV ) == + sizeof( VkClusterAccelerationStructureMoveObjectsInputNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureMoveObjectsInputNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpInputNV ) == sizeof( VkClusterAccelerationStructureOpInputNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureOpInputNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV ) == sizeof( VkClusterAccelerationStructureInputInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureInputInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureCommandsInfoNV ) == sizeof( VkClusterAccelerationStructureCommandsInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureCommandsInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV ) == sizeof( VkStridedDeviceAddressNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "StridedDeviceAddressNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV ) == + sizeof( VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureMoveObjectsInfoNV ) == + sizeof( VkClusterAccelerationStructureMoveObjectsInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureMoveObjectsInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureBuildClustersBottomLevelInfoNV ) == + sizeof( VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureBuildClustersBottomLevelInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureBuildTriangleClusterInfoNV ) == + sizeof( VkClusterAccelerationStructureBuildTriangleClusterInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureBuildTriangleClusterInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV ) == + sizeof( VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInstantiateClusterInfoNV ) == + sizeof( VkClusterAccelerationStructureInstantiateClusterInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ClusterAccelerationStructureInstantiateClusterInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineClusterAccelerationStructureCreateInfoNV ) == + sizeof( VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RayTracingPipelineClusterAccelerationStructureCreateInfoNV is not nothrow_move_constructible!" ); + +//=== VK_NV_partitioned_acceleration_structure === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePartitionedAccelerationStructureFeaturesNV ) == + sizeof( VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePartitionedAccelerationStructureFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePartitionedAccelerationStructurePropertiesNV ) == + sizeof( VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePartitionedAccelerationStructurePropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureFlagsNV ) == sizeof( VkPartitionedAccelerationStructureFlagsNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PartitionedAccelerationStructureFlagsNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureIndirectCommandNV ) == + sizeof( VkBuildPartitionedAccelerationStructureIndirectCommandNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BuildPartitionedAccelerationStructureIndirectCommandNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureWriteInstanceDataNV ) == + sizeof( VkPartitionedAccelerationStructureWriteInstanceDataNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PartitionedAccelerationStructureWriteInstanceDataNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureUpdateInstanceDataNV ) == + sizeof( VkPartitionedAccelerationStructureUpdateInstanceDataNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PartitionedAccelerationStructureUpdateInstanceDataNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureWritePartitionTranslationDataNV ) == + sizeof( VkPartitionedAccelerationStructureWritePartitionTranslationDataNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PartitionedAccelerationStructureWritePartitionTranslationDataNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetPartitionedAccelerationStructureNV ) == + sizeof( VkWriteDescriptorSetPartitionedAccelerationStructureNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "WriteDescriptorSetPartitionedAccelerationStructureNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV ) == + sizeof( VkPartitionedAccelerationStructureInstancesInputNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PartitionedAccelerationStructureInstancesInputNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BuildPartitionedAccelerationStructureInfoNV ) == + sizeof( VkBuildPartitionedAccelerationStructureInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BuildPartitionedAccelerationStructureInfoNV is not nothrow_move_constructible!" ); + +//=== VK_EXT_device_generated_commands === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT ) == + sizeof( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT ) == + sizeof( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT ) == sizeof( VkGeneratedCommandsMemoryRequirementsInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GeneratedCommandsMemoryRequirementsInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT ) == sizeof( VkIndirectExecutionSetCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectExecutionSetCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT ) == sizeof( VkIndirectExecutionSetInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectExecutionSetInfoEXT is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT ) == sizeof( VkIndirectExecutionSetPipelineInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectExecutionSetPipelineInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT ) == sizeof( VkIndirectExecutionSetShaderInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectExecutionSetShaderInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT ) == sizeof( VkGeneratedCommandsInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GeneratedCommandsInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT ) == sizeof( VkWriteIndirectExecutionSetPipelineEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "WriteIndirectExecutionSetPipelineEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT ) == sizeof( VkIndirectCommandsLayoutCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsLayoutCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenEXT ) == sizeof( VkIndirectCommandsLayoutTokenEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsLayoutTokenEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawIndirectCountIndirectCommandEXT ) == sizeof( VkDrawIndirectCountIndirectCommandEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DrawIndirectCountIndirectCommandEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsVertexBufferTokenEXT ) == sizeof( VkIndirectCommandsVertexBufferTokenEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsVertexBufferTokenEXT is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandEXT ) == sizeof( VkBindVertexBufferIndirectCommandEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindVertexBufferIndirectCommandEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsIndexBufferTokenEXT ) == sizeof( VkIndirectCommandsIndexBufferTokenEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsIndexBufferTokenEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandEXT ) == sizeof( VkBindIndexBufferIndirectCommandEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindIndexBufferIndirectCommandEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsPushConstantTokenEXT ) == sizeof( VkIndirectCommandsPushConstantTokenEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsPushConstantTokenEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsExecutionSetTokenEXT ) == sizeof( VkIndirectCommandsExecutionSetTokenEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsExecutionSetTokenEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenDataEXT ) == sizeof( VkIndirectCommandsTokenDataEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsTokenDataEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT ) == sizeof( VkIndirectCommandsLayoutEXT ), + "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, + "IndirectCommandsLayoutEXT is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsLayoutEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT ) == sizeof( VkIndirectExecutionSetEXT ), + "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, + "IndirectExecutionSetEXT is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectExecutionSetEXT is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT ) == sizeof( VkIndirectExecutionSetShaderLayoutInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectExecutionSetShaderLayoutInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsPipelineInfoEXT ) == sizeof( VkGeneratedCommandsPipelineInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GeneratedCommandsPipelineInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsShaderInfoEXT ) == sizeof( VkGeneratedCommandsShaderInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GeneratedCommandsShaderInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetShaderEXT ) == sizeof( VkWriteIndirectExecutionSetShaderEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "WriteIndirectExecutionSetShaderEXT is not nothrow_move_constructible!" ); + +//=== VK_KHR_maintenance8 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance8FeaturesKHR ) == sizeof( VkPhysicalDeviceMaintenance8FeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance8FeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryBarrierAccessFlags3KHR ) == sizeof( VkMemoryBarrierAccessFlags3KHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryBarrierAccessFlags3KHR is not nothrow_move_constructible!" ); + +//=== VK_MESA_image_alignment_control === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageAlignmentControlFeaturesMESA ) == + sizeof( VkPhysicalDeviceImageAlignmentControlFeaturesMESA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageAlignmentControlFeaturesMESA is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageAlignmentControlPropertiesMESA ) == + sizeof( VkPhysicalDeviceImageAlignmentControlPropertiesMESA ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageAlignmentControlPropertiesMESA is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageAlignmentControlCreateInfoMESA ) == sizeof( VkImageAlignmentControlCreateInfoMESA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageAlignmentControlCreateInfoMESA is not nothrow_move_constructible!" ); + +//=== VK_EXT_depth_clamp_control === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampControlFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClampControlFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDepthClampControlFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportDepthClampControlCreateInfoEXT ) == + sizeof( VkPipelineViewportDepthClampControlCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineViewportDepthClampControlCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DepthClampRangeEXT ) == sizeof( VkDepthClampRangeEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DepthClampRangeEXT is not nothrow_move_constructible!" ); + +//=== VK_KHR_video_maintenance2 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance2FeaturesKHR ) == sizeof( VkPhysicalDeviceVideoMaintenance2FeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVideoMaintenance2FeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264InlineSessionParametersInfoKHR ) == + sizeof( VkVideoDecodeH264InlineSessionParametersInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH264InlineSessionParametersInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265InlineSessionParametersInfoKHR ) == + sizeof( VkVideoDecodeH265InlineSessionParametersInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeH265InlineSessionParametersInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeAV1InlineSessionParametersInfoKHR ) == + sizeof( VkVideoDecodeAV1InlineSessionParametersInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoDecodeAV1InlineSessionParametersInfoKHR is not nothrow_move_constructible!" ); + +//=== VK_HUAWEI_hdr_vivid === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHdrVividFeaturesHUAWEI ) == sizeof( VkPhysicalDeviceHdrVividFeaturesHUAWEI ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceHdrVividFeaturesHUAWEI is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HdrVividDynamicMetadataHUAWEI ) == sizeof( VkHdrVividDynamicMetadataHUAWEI ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "HdrVividDynamicMetadataHUAWEI is not nothrow_move_constructible!" ); + +//=== VK_NV_cooperative_matrix2 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV ) == + sizeof( VkCooperativeMatrixFlexibleDimensionsPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CooperativeMatrixFlexibleDimensionsPropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2FeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrix2FeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeMatrix2FeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2PropertiesNV ) == + sizeof( VkPhysicalDeviceCooperativeMatrix2PropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeMatrix2PropertiesNV is not nothrow_move_constructible!" ); + +//=== VK_ARM_pipeline_opacity_micromap === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineOpacityMicromapFeaturesARM ) == + sizeof( VkPhysicalDevicePipelineOpacityMicromapFeaturesARM ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineOpacityMicromapFeaturesARM>::value, + "PhysicalDevicePipelineOpacityMicromapFeaturesARM is not nothrow_move_constructible!" ); + +#if defined( VK_USE_PLATFORM_METAL_EXT ) +//=== VK_EXT_external_memory_metal === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryMetalHandleInfoEXT ) == sizeof( VkImportMemoryMetalHandleInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryMetalHandleInfoEXT>::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryMetalHandleInfoEXT>::value, + "ImportMemoryMetalHandleInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryMetalHandlePropertiesEXT ) == sizeof( VkMemoryMetalHandlePropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryMetalHandlePropertiesEXT>::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryMetalHandlePropertiesEXT>::value, + "MemoryMetalHandlePropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetMetalHandleInfoEXT ) == sizeof( VkMemoryGetMetalHandleInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetMetalHandleInfoEXT>::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetMetalHandleInfoEXT>::value, + "MemoryGetMetalHandleInfoEXT is not nothrow_move_constructible!" ); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + +//=== VK_KHR_depth_clamp_zero_one === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesKHR ) == sizeof( VkPhysicalDeviceDepthClampZeroOneFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesKHR>::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesKHR>::value, + "PhysicalDeviceDepthClampZeroOneFeaturesKHR is not nothrow_move_constructible!" ); + +//=== VK_EXT_vertex_attribute_robustness === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeRobustnessFeaturesEXT ) == + sizeof( VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeRobustnessFeaturesEXT>::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeRobustnessFeaturesEXT>::value, + "PhysicalDeviceVertexAttributeRobustnessFeaturesEXT is not nothrow_move_constructible!" ); + #endif diff --git a/src/libraries/vulkanheaders/vulkan_structs.hpp b/src/libraries/vulkanheaders/vulkan_structs.hpp index 76e9b5a4d..bd9c0e9e1 100644 --- a/src/libraries/vulkanheaders/vulkan_structs.hpp +++ b/src/libraries/vulkanheaders/vulkan_structs.hpp @@ -1,4 +1,4 @@ -// Copyright 2015-2022 The Khronos Group Inc. +// Copyright 2015-2025 The Khronos Group Inc.
// // SPDX-License-Identifier: Apache-2.0 OR MIT // @@ -8,10 +8,14 @@ #ifndef VULKAN_STRUCTS_HPP #define VULKAN_STRUCTS_HPP +// include-what-you-use: make sure, vulkan.hpp is used by code-completers +// IWYU pragma: private; include "vulkan.hpp" + #include <cstring> // strcmp namespace VULKAN_HPP_NAMESPACE { + //=============== //=== STRUCTS === //=============== @@ -20,24 +24,24 @@ namespace VULKAN_HPP_NAMESPACE { using NativeType = VkAabbPositionsKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AabbPositionsKHR( float minX_ = {}, float minY_ = {}, float minZ_ = {}, float maxX_ = {}, float maxY_ = {}, float maxZ_ = {} ) VULKAN_HPP_NOEXCEPT - : minX( minX_ ) - , minY( minY_ ) - , minZ( minZ_ ) - , maxX( maxX_ ) - , maxY( maxY_ ) - , maxZ( maxZ_ ) + : minX{ minX_ } + , minY{ minY_ } + , minZ{ minZ_ } + , maxX{ maxX_ } + , maxY{ maxY_ } + , maxZ{ maxZ_ } { } VULKAN_HPP_CONSTEXPR AabbPositionsKHR( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; AabbPositionsKHR( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT : AabbPositionsKHR( *reinterpret_cast<AabbPositionsKHR const *>( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AabbPositionsKHR & operator=( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AabbPositionsKHR & operator=( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -45,7 +49,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinX( float minX_ ) VULKAN_HPP_NOEXCEPT { minX = minX_; @@ -81,7 +85,7 @@ namespace VULKAN_HPP_NAMESPACE maxZ = maxZ_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAabbPositionsKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -131,19 +135,20 @@ namespace VULKAN_HPP_NAMESPACE float maxY = {}; float maxZ = {}; }; + using AabbPositionsNV = AabbPositionsKHR; union DeviceOrHostAddressConstKHR { using NativeType = VkDeviceOrHostAddressConstKHR; -#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {} VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( const void * hostAddress_ ) : hostAddress( hostAddress_ ) {} -#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; @@ -155,7 +160,7 @@ namespace VULKAN_HPP_NAMESPACE hostAddress = hostAddress_; return *this; } -#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceOrHostAddressConstKHR const &() const { @@ -183,7 +188,7 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined(
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR( VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, @@ -193,14 +198,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , vertexFormat( vertexFormat_ ) - , vertexData( vertexData_ ) - , vertexStride( vertexStride_ ) - , maxVertex( maxVertex_ ) - , indexType( indexType_ ) - , indexData( indexData_ ) - , transformData( transformData_ ) + : pNext{ pNext_ } + , vertexFormat{ vertexFormat_ } + , vertexData{ vertexData_ } + , vertexStride{ vertexStride_ } + , maxVertex{ maxVertex_ } + , indexType{ indexType_ } + , indexData{ indexData_ } + , transformData{ transformData_ } { } @@ -211,9 +216,9 @@ namespace VULKAN_HPP_NAMESPACE : AccelerationStructureGeometryTrianglesDataKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureGeometryTrianglesDataKHR & operator=( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureGeometryTrianglesDataKHR & operator=( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -221,7 +226,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -235,14 +240,14 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & - setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT + setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT { vertexData = vertexData_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & - setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT + setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT { vertexStride = vertexStride_; return *this; @@ -261,19 +266,19 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & - setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT + setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT { indexData = indexData_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & - setTransformData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & transformData_ ) VULKAN_HPP_NOEXCEPT + setTransformData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & transformData_ ) VULKAN_HPP_NOEXCEPT { transformData = transformData_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureGeometryTrianglesDataKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -330,13 +335,13 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR 
StructureType structureType = StructureType::eAccelerationStructureGeometryAabbsDataKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , data( data_ ) - , stride( stride_ ) + : pNext{ pNext_ } + , data{ data_ } + , stride{ stride_ } { } @@ -346,9 +351,9 @@ namespace VULKAN_HPP_NAMESPACE : AccelerationStructureGeometryAabbsDataKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureGeometryAabbsDataKHR & operator=( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureGeometryAabbsDataKHR & operator=( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -356,7 +361,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -364,7 +369,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & - setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT + setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT { data = data_; return *this; @@ -375,7 +380,7 @@ namespace VULKAN_HPP_NAMESPACE stride = stride_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureGeometryAabbsDataKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -422,13 +427,13 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryInstancesDataKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , arrayOfPointers( arrayOfPointers_ ) - , data( data_ ) + : pNext{ pNext_ } + , arrayOfPointers{ arrayOfPointers_ } + , data{ data_ } { } @@ -439,9 +444,9 @@ namespace VULKAN_HPP_NAMESPACE : AccelerationStructureGeometryInstancesDataKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureGeometryInstancesDataKHR & operator=( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureGeometryInstancesDataKHR & operator=( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -449,7 +454,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & 
setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -457,19 +462,19 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & - setArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ ) VULKAN_HPP_NOEXCEPT + setArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ ) VULKAN_HPP_NOEXCEPT { arrayOfPointers = arrayOfPointers_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & - setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT + setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT { data = data_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureGeometryInstancesDataKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -512,7 +517,7 @@ namespace VULKAN_HPP_NAMESPACE union AccelerationStructureGeometryDataKHR { using NativeType = VkAccelerationStructureGeometryDataKHR; -#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles_ = {} ) : triangles( triangles_ ) @@ -525,30 +530,30 @@ namespace VULKAN_HPP_NAMESPACE : instances( instances_ ) { } -#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & - setTriangles( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const & triangles_ ) VULKAN_HPP_NOEXCEPT + setTriangles( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const & triangles_ ) VULKAN_HPP_NOEXCEPT { triangles = triangles_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & - setAabbs( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const & aabbs_ ) VULKAN_HPP_NOEXCEPT + setAabbs( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const & aabbs_ ) VULKAN_HPP_NOEXCEPT { aabbs = aabbs_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & - setInstances( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const & instances_ ) VULKAN_HPP_NOEXCEPT + setInstances( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const & instances_ ) VULKAN_HPP_NOEXCEPT { instances = instances_; return *this; } -#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureGeometryDataKHR const &() const { @@ -578,16 +583,16 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry_ = {}, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) 
- , geometryType( geometryType_ ) - , geometry( geometry_ ) - , flags( flags_ ) + : pNext{ pNext_ } + , geometryType{ geometryType_ } + , geometry{ geometry_ } + , flags{ flags_ } { } @@ -597,9 +602,9 @@ namespace VULKAN_HPP_NAMESPACE : AccelerationStructureGeometryKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureGeometryKHR & operator=( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureGeometryKHR & operator=( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -607,7 +612,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -621,7 +626,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & - setGeometry( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & geometry_ ) VULKAN_HPP_NOEXCEPT + setGeometry( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & geometry_ ) VULKAN_HPP_NOEXCEPT { geometry = geometry_; return *this; @@ -632,7 +637,7 @@ namespace VULKAN_HPP_NAMESPACE flags = flags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureGeometryKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -677,14 +682,14 @@ namespace VULKAN_HPP_NAMESPACE union DeviceOrHostAddressKHR { using NativeType = VkDeviceOrHostAddressKHR; -#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {} VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR( void * hostAddress_ ) : hostAddress( hostAddress_ ) {} -#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; @@ -696,7 +701,7 @@ namespace VULKAN_HPP_NAMESPACE hostAddress = hostAddress_; return *this; } -#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceOrHostAddressKHR const &() const { @@ -724,7 +729,7 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureBuildGeometryInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, @@ -736,16 +741,16 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}, const void * pNext_ = nullptr 
) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , type( type_ ) - , flags( flags_ ) - , mode( mode_ ) - , srcAccelerationStructure( srcAccelerationStructure_ ) - , dstAccelerationStructure( dstAccelerationStructure_ ) - , geometryCount( geometryCount_ ) - , pGeometries( pGeometries_ ) - , ppGeometries( ppGeometries_ ) - , scratchData( scratchData_ ) + : pNext{ pNext_ } + , type{ type_ } + , flags{ flags_ } + , mode{ mode_ } + , srcAccelerationStructure{ srcAccelerationStructure_ } + , dstAccelerationStructure{ dstAccelerationStructure_ } + , geometryCount{ geometryCount_ } + , pGeometries{ pGeometries_ } + , ppGeometries{ ppGeometries_ } + , scratchData{ scratchData_ } { } @@ -790,9 +795,9 @@ namespace VULKAN_HPP_NAMESPACE # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureBuildGeometryInfoKHR & operator=( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureBuildGeometryInfoKHR & operator=( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -800,7 +805,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -814,28 +819,28 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & - setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & - setMode( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + setMode( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & - setSrcAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + setSrcAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT { srcAccelerationStructure = srcAccelerationStructure_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & - setDstAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + setDstAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT { dstAccelerationStructure = dstAccelerationStructure_; return *this; @@ -848,7 +853,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & - setPGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries_ ) VULKAN_HPP_NOEXCEPT + setPGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * pGeometries_ ) VULKAN_HPP_NOEXCEPT { pGeometries = pGeometries_; return *this; @@ -865,7 +870,7 @@ namespace VULKAN_HPP_NAMESPACE # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & - setPpGeometries( const 
VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ ) VULKAN_HPP_NOEXCEPT + setPpGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ ) VULKAN_HPP_NOEXCEPT { ppGeometries = ppGeometries_; return *this; @@ -883,12 +888,12 @@ namespace VULKAN_HPP_NAMESPACE # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & - setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT + setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT { scratchData = scratchData_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureBuildGeometryInfoKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -947,15 +952,15 @@ namespace VULKAN_HPP_NAMESPACE { using NativeType = VkAccelerationStructureBuildRangeInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR( uint32_t primitiveCount_ = {}, uint32_t primitiveOffset_ = {}, uint32_t firstVertex_ = {}, uint32_t transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT - : primitiveCount( primitiveCount_ ) - , primitiveOffset( primitiveOffset_ ) - , firstVertex( firstVertex_ ) - , transformOffset( transformOffset_ ) + : primitiveCount{ primitiveCount_ } + , primitiveOffset{ primitiveOffset_ } + , firstVertex{ firstVertex_ } + , transformOffset{ transformOffset_ } { } @@ -965,9 +970,9 @@ namespace VULKAN_HPP_NAMESPACE : AccelerationStructureBuildRangeInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureBuildRangeInfoKHR & operator=( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureBuildRangeInfoKHR & operator=( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -975,7 +980,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setPrimitiveCount( uint32_t primitiveCount_ ) VULKAN_HPP_NOEXCEPT { primitiveCount = primitiveCount_; @@ -999,7 +1004,7 @@ namespace VULKAN_HPP_NAMESPACE transformOffset = transformOffset_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureBuildRangeInfoKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -1056,15 +1061,15 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureBuildSizesInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR( VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , accelerationStructureSize( accelerationStructureSize_ ) - , updateScratchSize( updateScratchSize_ ) - , buildScratchSize( 
buildScratchSize_ ) + : pNext{ pNext_ } + , accelerationStructureSize{ accelerationStructureSize_ } + , updateScratchSize{ updateScratchSize_ } + , buildScratchSize{ buildScratchSize_ } { } @@ -1074,9 +1079,9 @@ namespace VULKAN_HPP_NAMESPACE : AccelerationStructureBuildSizesInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureBuildSizesInfoKHR & operator=( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureBuildSizesInfoKHR & operator=( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -1084,35 +1089,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & - setAccelerationStructureSize( VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ ) VULKAN_HPP_NOEXCEPT - { - accelerationStructureSize = accelerationStructureSize_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & - setUpdateScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ ) VULKAN_HPP_NOEXCEPT - { - updateScratchSize = updateScratchSize_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & - setBuildScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ ) VULKAN_HPP_NOEXCEPT - { - buildScratchSize = buildScratchSize_; - return *this; - } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkAccelerationStructureBuildSizesInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -1172,6 +1148,120 @@ namespace VULKAN_HPP_NAMESPACE using Type = AccelerationStructureBuildSizesInfoKHR; }; + struct AccelerationStructureCaptureDescriptorDataInfoEXT + { + using NativeType = VkAccelerationStructureCaptureDescriptorDataInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCaptureDescriptorDataInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AccelerationStructureCaptureDescriptorDataInfoEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructureNV_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , accelerationStructure{ accelerationStructure_ } + , accelerationStructureNV{ accelerationStructureNV_ } + { + } + + VULKAN_HPP_CONSTEXPR + AccelerationStructureCaptureDescriptorDataInfoEXT( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureCaptureDescriptorDataInfoEXT( VkAccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureCaptureDescriptorDataInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureCaptureDescriptorDataInfoEXT & + operator=( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureCaptureDescriptorDataInfoEXT & operator=( VkAccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = 
*reinterpret_cast<AccelerationStructureCaptureDescriptorDataInfoEXT const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT & + setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT & + setAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructureNV_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureNV = accelerationStructureNV_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( this ); + } + + operator VkAccelerationStructureCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &, VULKAN_HPP_NAMESPACE::AccelerationStructureNV const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, accelerationStructure, accelerationStructureNV ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AccelerationStructureCaptureDescriptorDataInfoEXT const & ) const = default; +#else + bool operator==( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ) && + ( accelerationStructureNV == rhs.accelerationStructureNV ); +# endif + } + + bool operator!=( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCaptureDescriptorDataInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructureNV = {}; + }; + + template <> + struct CppType<StructureType, StructureType::eAccelerationStructureCaptureDescriptorDataInfoEXT> + { + using Type = AccelerationStructureCaptureDescriptorDataInfoEXT; + }; + struct AccelerationStructureCreateInfoKHR { using NativeType = VkAccelerationStructureCreateInfoKHR; @@ -1179,7 +1269,7 @@ static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, @@ -1188,13 +1278,13 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , createFlags( createFlags_ ) - , buffer( buffer_ ) - ,
offset( offset_ ) - , size( size_ ) - , type( type_ ) - , deviceAddress( deviceAddress_ ) + : pNext{ pNext_ } + , createFlags{ createFlags_ } + , buffer{ buffer_ } + , offset{ offset_ } + , size{ size_ } + , type{ type_ } + , deviceAddress{ deviceAddress_ } { } @@ -1204,9 +1294,9 @@ namespace VULKAN_HPP_NAMESPACE : AccelerationStructureCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureCreateInfoKHR & operator=( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureCreateInfoKHR & operator=( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -1214,7 +1304,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -1222,7 +1312,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & - setCreateFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ ) VULKAN_HPP_NOEXCEPT + setCreateFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ ) VULKAN_HPP_NOEXCEPT { createFlags = createFlags_; return *this; @@ -1257,7 +1347,7 @@ namespace VULKAN_HPP_NAMESPACE deviceAddress = deviceAddress_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -1331,7 +1421,7 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryTrianglesNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( VULKAN_HPP_NAMESPACE::Buffer vertexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {}, uint32_t vertexCount_ = {}, @@ -1344,18 +1434,18 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Buffer transformData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , vertexData( vertexData_ ) - , vertexOffset( vertexOffset_ ) - , vertexCount( vertexCount_ ) - , vertexStride( vertexStride_ ) - , vertexFormat( vertexFormat_ ) - , indexData( indexData_ ) - , indexOffset( indexOffset_ ) - , indexCount( indexCount_ ) - , indexType( indexType_ ) - , transformData( transformData_ ) - , transformOffset( transformOffset_ ) + : pNext{ pNext_ } + , vertexData{ vertexData_ } + , vertexOffset{ vertexOffset_ } + , vertexCount{ vertexCount_ } + , vertexStride{ vertexStride_ } + , vertexFormat{ vertexFormat_ } + , indexData{ indexData_ } + , indexOffset{ indexOffset_ } + , indexCount{ indexCount_ } + , indexType{ indexType_ } + , transformData{ transformData_ } + , transformOffset{ transformOffset_ } { } @@ -1364,9 +1454,9 @@ namespace VULKAN_HPP_NAMESPACE GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryTrianglesNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ GeometryTrianglesNV & operator=( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ 
GeometryTrianglesNV & operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -1374,7 +1464,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -1446,7 +1536,7 @@ namespace VULKAN_HPP_NAMESPACE transformOffset = transformOffset_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkGeometryTrianglesNV const &() const VULKAN_HPP_NOEXCEPT { @@ -1544,26 +1634,26 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryAabbNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR GeometryAABBNV( VULKAN_HPP_NAMESPACE::Buffer aabbData_ = {}, uint32_t numAABBs_ = {}, uint32_t stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , aabbData( aabbData_ ) - , numAABBs( numAABBs_ ) - , stride( stride_ ) - , offset( offset_ ) + : pNext{ pNext_ } + , aabbData{ aabbData_ } + , numAABBs{ numAABBs_ } + , stride{ stride_ } + , offset{ offset_ } { } VULKAN_HPP_CONSTEXPR GeometryAABBNV( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryAABBNV( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ GeometryAABBNV & operator=( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ GeometryAABBNV & operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -1571,7 +1661,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -1601,7 +1691,7 @@ namespace VULKAN_HPP_NAMESPACE offset = offset_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkGeometryAABBNV const &() const VULKAN_HPP_NOEXCEPT { @@ -1668,20 +1758,20 @@ namespace VULKAN_HPP_NAMESPACE { using NativeType = VkGeometryDataNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR GeometryDataNV( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ = {}, VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ = {} ) VULKAN_HPP_NOEXCEPT - : triangles( triangles_ ) - , aabbs( aabbs_ ) + : triangles{ triangles_ } + , aabbs{ aabbs_ } { } VULKAN_HPP_CONSTEXPR GeometryDataNV( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryDataNV( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ GeometryDataNV & operator=( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ GeometryDataNV & operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -1689,7 +1779,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if 
!defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 GeometryDataNV & setTriangles( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & triangles_ ) VULKAN_HPP_NOEXCEPT { triangles = triangles_; @@ -1701,7 +1791,7 @@ namespace VULKAN_HPP_NAMESPACE aabbs = aabbs_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkGeometryDataNV const &() const VULKAN_HPP_NOEXCEPT { @@ -1755,24 +1845,24 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR GeometryNV( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {}, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , geometryType( geometryType_ ) - , geometry( geometry_ ) - , flags( flags_ ) + : pNext{ pNext_ } + , geometryType{ geometryType_ } + , geometry{ geometry_ } + , flags{ flags_ } { } VULKAN_HPP_CONSTEXPR GeometryNV( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryNV( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ GeometryNV & operator=( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ GeometryNV & operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -1780,7 +1870,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 GeometryNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -1804,7 +1894,7 @@ namespace VULKAN_HPP_NAMESPACE flags = flags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkGeometryNV const &() const VULKAN_HPP_NOEXCEPT { @@ -1871,19 +1961,19 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = {}, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = {}, uint32_t instanceCount_ = {}, uint32_t geometryCount_ = {}, const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , type( type_ ) - , flags( flags_ ) - , instanceCount( instanceCount_ ) - , geometryCount( geometryCount_ ) - , pGeometries( pGeometries_ ) + : pNext{ pNext_ } + , type{ type_ } + , flags{ flags_ } + , instanceCount{ instanceCount_ } + , geometryCount{ geometryCount_ } + , pGeometries{ pGeometries_ } { } @@ -1909,9 +1999,9 @@ namespace VULKAN_HPP_NAMESPACE { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureInfoNV & operator=( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; 
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureInfoNV & operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -1919,7 +2009,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -1965,7 +2055,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureInfoNV const &() const VULKAN_HPP_NOEXCEPT { @@ -2037,13 +2127,13 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , compactedSize( compactedSize_ ) - , info( info_ ) + : pNext{ pNext_ } + , compactedSize{ compactedSize_ } + , info{ info_ } { } @@ -2053,9 +2143,9 @@ namespace VULKAN_HPP_NAMESPACE : AccelerationStructureCreateInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureCreateInfoNV & operator=( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureCreateInfoNV & operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -2063,7 +2153,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -2081,7 +2171,7 @@ namespace VULKAN_HPP_NAMESPACE info = info_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { @@ -2146,11 +2236,11 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureDeviceAddressInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , accelerationStructure( accelerationStructure_ ) + : pNext{ pNext_ } + , accelerationStructure{ accelerationStructure_ } { } @@ -2160,9 +2250,9 @@ namespace VULKAN_HPP_NAMESPACE : AccelerationStructureDeviceAddressInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureDeviceAddressInfoKHR & operator=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ 
AccelerationStructureDeviceAddressInfoKHR & operator=( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -2170,7 +2260,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -2178,12 +2268,12 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR & - setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureDeviceAddressInfoKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -2237,6 +2327,213 @@ namespace VULKAN_HPP_NAMESPACE using Type = AccelerationStructureDeviceAddressInfoKHR; }; + struct AccelerationStructureGeometryLinearSweptSpheresDataNV + { + using NativeType = VkAccelerationStructureGeometryLinearSweptSpheresDataNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryLinearSweptSpheresDataNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV( + VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, + VULKAN_HPP_NAMESPACE::Format radiusFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR radiusData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize radiusStride_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ = {}, + VULKAN_HPP_NAMESPACE::RayTracingLssIndexingModeNV indexingMode_ = VULKAN_HPP_NAMESPACE::RayTracingLssIndexingModeNV::eList, + VULKAN_HPP_NAMESPACE::RayTracingLssPrimitiveEndCapsModeNV endCapsMode_ = VULKAN_HPP_NAMESPACE::RayTracingLssPrimitiveEndCapsModeNV::eNone, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , vertexFormat{ vertexFormat_ } + , vertexData{ vertexData_ } + , vertexStride{ vertexStride_ } + , radiusFormat{ radiusFormat_ } + , radiusData{ radiusData_ } + , radiusStride{ radiusStride_ } + , indexType{ indexType_ } + , indexData{ indexData_ } + , indexStride{ indexStride_ } + , indexingMode{ indexingMode_ } + , endCapsMode{ endCapsMode_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureGeometryLinearSweptSpheresDataNV( AccelerationStructureGeometryLinearSweptSpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometryLinearSweptSpheresDataNV( VkAccelerationStructureGeometryLinearSweptSpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometryLinearSweptSpheresDataNV( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureGeometryLinearSweptSpheresDataNV & + operator=( 
AccelerationStructureGeometryLinearSweptSpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureGeometryLinearSweptSpheresDataNV & operator=( VkAccelerationStructureGeometryLinearSweptSpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & + setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT + { + vertexFormat = vertexFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & + setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT + { + vertexData = vertexData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & + setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexStride = vertexStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & + setRadiusFormat( VULKAN_HPP_NAMESPACE::Format radiusFormat_ ) VULKAN_HPP_NOEXCEPT + { + radiusFormat = radiusFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & + setRadiusData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & radiusData_ ) VULKAN_HPP_NOEXCEPT + { + radiusData = radiusData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & + setRadiusStride( VULKAN_HPP_NAMESPACE::DeviceSize radiusStride_ ) VULKAN_HPP_NOEXCEPT + { + radiusStride = radiusStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & + setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & + setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT + { + indexData = indexData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & + setIndexStride( VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ ) VULKAN_HPP_NOEXCEPT + { + indexStride = indexStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & + setIndexingMode( VULKAN_HPP_NAMESPACE::RayTracingLssIndexingModeNV indexingMode_ ) VULKAN_HPP_NOEXCEPT + { + indexingMode = indexingMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & + setEndCapsMode( VULKAN_HPP_NAMESPACE::RayTracingLssPrimitiveEndCapsModeNV endCapsMode_ ) VULKAN_HPP_NOEXCEPT + { + endCapsMode = endCapsMode_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureGeometryLinearSweptSpheresDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometryLinearSweptSpheresDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# 
if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + vertexFormat, + vertexData, + vertexStride, + radiusFormat, + radiusData, + radiusStride, + indexType, + indexData, + indexStride, + indexingMode, + endCapsMode ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryLinearSweptSpheresDataNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {}; + VULKAN_HPP_NAMESPACE::Format radiusFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR radiusData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize radiusStride = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize indexStride = {}; + VULKAN_HPP_NAMESPACE::RayTracingLssIndexingModeNV indexingMode = VULKAN_HPP_NAMESPACE::RayTracingLssIndexingModeNV::eList; + VULKAN_HPP_NAMESPACE::RayTracingLssPrimitiveEndCapsModeNV endCapsMode = VULKAN_HPP_NAMESPACE::RayTracingLssPrimitiveEndCapsModeNV::eNone; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureGeometryLinearSweptSpheresDataNV; + }; + struct AccelerationStructureGeometryMotionTrianglesDataNV { using NativeType = VkAccelerationStructureGeometryMotionTrianglesDataNV; @@ -2244,11 +2541,11 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , vertexData( vertexData_ ) + : pNext{ pNext_ } + , vertexData{ vertexData_ } { } @@ -2259,10 +2556,10 @@ namespace VULKAN_HPP_NAMESPACE : AccelerationStructureGeometryMotionTrianglesDataNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureGeometryMotionTrianglesDataNV & operator=( AccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureGeometryMotionTrianglesDataNV & operator=( VkAccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -2270,7 +2567,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -2278,12 +2575,12 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV & - setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT + setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT { vertexData = 
vertexData_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureGeometryMotionTrianglesDataNV const &() const VULKAN_HPP_NOEXCEPT { @@ -2319,19 +2616,183 @@ namespace VULKAN_HPP_NAMESPACE using Type = AccelerationStructureGeometryMotionTrianglesDataNV; }; + struct AccelerationStructureGeometrySpheresDataNV + { + using NativeType = VkAccelerationStructureGeometrySpheresDataNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometrySpheresDataNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV( VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, + VULKAN_HPP_NAMESPACE::Format radiusFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR radiusData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize radiusStride_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , vertexFormat{ vertexFormat_ } + , vertexData{ vertexData_ } + , vertexStride{ vertexStride_ } + , radiusFormat{ radiusFormat_ } + , radiusData{ radiusData_ } + , radiusStride{ radiusStride_ } + , indexType{ indexType_ } + , indexData{ indexData_ } + , indexStride{ indexStride_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV( AccelerationStructureGeometrySpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureGeometrySpheresDataNV( VkAccelerationStructureGeometrySpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureGeometrySpheresDataNV( *reinterpret_cast( &rhs ) ) + { + } + + AccelerationStructureGeometrySpheresDataNV & operator=( AccelerationStructureGeometrySpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureGeometrySpheresDataNV & operator=( VkAccelerationStructureGeometrySpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT + { + vertexFormat = vertexFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & + setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT + { + vertexData = vertexData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexStride = vertexStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setRadiusFormat( 
VULKAN_HPP_NAMESPACE::Format radiusFormat_ ) VULKAN_HPP_NOEXCEPT + { + radiusFormat = radiusFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & + setRadiusData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & radiusData_ ) VULKAN_HPP_NOEXCEPT + { + radiusData = radiusData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setRadiusStride( VULKAN_HPP_NAMESPACE::DeviceSize radiusStride_ ) VULKAN_HPP_NOEXCEPT + { + radiusStride = radiusStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & + setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT + { + indexData = indexData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setIndexStride( VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ ) VULKAN_HPP_NOEXCEPT + { + indexStride = indexStride_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureGeometrySpheresDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureGeometrySpheresDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, vertexFormat, vertexData, vertexStride, radiusFormat, radiusData, radiusStride, indexType, indexData, indexStride ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometrySpheresDataNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {}; + VULKAN_HPP_NAMESPACE::Format radiusFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR radiusData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize radiusStride = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {}; + VULKAN_HPP_NAMESPACE::DeviceSize indexStride = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureGeometrySpheresDataNV; + }; + struct TransformMatrixKHR { using NativeType = VkTransformMatrixKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( std::array, 3> const & matrix_ = {} ) VULKAN_HPP_NOEXCEPT : matrix( matrix_ ) {} +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( std::array, 3> const & matrix_ = {} ) VULKAN_HPP_NOEXCEPT : matrix{ matrix_ } {} VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; TransformMatrixKHR( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT : TransformMatrixKHR( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ TransformMatrixKHR & operator=( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = 
default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ TransformMatrixKHR & operator=( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -2339,13 +2800,13 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR & setMatrix( std::array, 3> matrix_ ) VULKAN_HPP_NOEXCEPT { matrix = matrix_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkTransformMatrixKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -2390,25 +2851,26 @@ namespace VULKAN_HPP_NAMESPACE public: VULKAN_HPP_NAMESPACE::ArrayWrapper2D matrix = {}; }; + using TransformMatrixNV = TransformMatrixKHR; struct AccelerationStructureInstanceKHR { using NativeType = VkAccelerationStructureInstanceKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR( VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform_ = {}, uint32_t instanceCustomIndex_ = {}, uint32_t mask_ = {}, uint32_t instanceShaderBindingTableRecordOffset_ = {}, VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT - : transform( transform_ ) - , instanceCustomIndex( instanceCustomIndex_ ) - , mask( mask_ ) - , instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ ) - , flags( flags_ ) - , accelerationStructureReference( accelerationStructureReference_ ) + : transform{ transform_ } + , instanceCustomIndex{ instanceCustomIndex_ } + , mask{ mask_ } + , instanceShaderBindingTableRecordOffset{ instanceShaderBindingTableRecordOffset_ } + , flags{ flags_ } + , accelerationStructureReference{ accelerationStructureReference_ } { } @@ -2418,9 +2880,9 @@ namespace VULKAN_HPP_NAMESPACE : AccelerationStructureInstanceKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureInstanceKHR & operator=( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureInstanceKHR & operator=( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -2428,7 +2890,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setTransform( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transform_ ) VULKAN_HPP_NOEXCEPT { transform = transform_; @@ -2448,7 +2910,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & - setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT + setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT { instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; return *this; @@ -2465,7 +2927,7 @@ namespace VULKAN_HPP_NAMESPACE accelerationStructureReference = accelerationStructureReference_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureInstanceKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -2516,19 +2978,20 @@ namespace VULKAN_HPP_NAMESPACE public: 
VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform = {}; - uint32_t instanceCustomIndex : 24; - uint32_t mask : 8; + uint32_t instanceCustomIndex : 24; + uint32_t mask : 8; uint32_t instanceShaderBindingTableRecordOffset : 24; - VkGeometryInstanceFlagsKHR flags : 8; + VkGeometryInstanceFlagsKHR flags : 8; uint64_t accelerationStructureReference = {}; }; + using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR; struct AccelerationStructureMatrixMotionInstanceNV { using NativeType = VkAccelerationStructureMatrixMotionInstanceNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV( VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT0_ = {}, VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT1_ = {}, uint32_t instanceCustomIndex_ = {}, @@ -2536,13 +2999,13 @@ namespace VULKAN_HPP_NAMESPACE uint32_t instanceShaderBindingTableRecordOffset_ = {}, VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT - : transformT0( transformT0_ ) - , transformT1( transformT1_ ) - , instanceCustomIndex( instanceCustomIndex_ ) - , mask( mask_ ) - , instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ ) - , flags( flags_ ) - , accelerationStructureReference( accelerationStructureReference_ ) + : transformT0{ transformT0_ } + , transformT1{ transformT1_ } + , instanceCustomIndex{ instanceCustomIndex_ } + , mask{ mask_ } + , instanceShaderBindingTableRecordOffset{ instanceShaderBindingTableRecordOffset_ } + , flags{ flags_ } + , accelerationStructureReference{ accelerationStructureReference_ } { } @@ -2553,9 +3016,9 @@ namespace VULKAN_HPP_NAMESPACE : AccelerationStructureMatrixMotionInstanceNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureMatrixMotionInstanceNV & operator=( AccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureMatrixMotionInstanceNV & operator=( VkAccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -2563,16 +3026,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & - setTransformT0( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformT0_ ) VULKAN_HPP_NOEXCEPT + setTransformT0( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformT0_ ) VULKAN_HPP_NOEXCEPT { transformT0 = transformT0_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & - setTransformT1( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformT1_ ) VULKAN_HPP_NOEXCEPT + setTransformT1( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transformT1_ ) VULKAN_HPP_NOEXCEPT { transformT1 = transformT1_; return *this; @@ -2591,7 +3054,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & - setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT + setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT { instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; 
return *this; @@ -2604,12 +3067,12 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & - setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT + setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT { accelerationStructureReference = accelerationStructureReference_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureMatrixMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT { @@ -2662,10 +3125,10 @@ namespace VULKAN_HPP_NAMESPACE public: VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT0 = {}; VULKAN_HPP_NAMESPACE::TransformMatrixKHR transformT1 = {}; - uint32_t instanceCustomIndex : 24; - uint32_t mask : 8; + uint32_t instanceCustomIndex : 24; + uint32_t mask : 8; uint32_t instanceShaderBindingTableRecordOffset : 24; - VkGeometryInstanceFlagsKHR flags : 8; + VkGeometryInstanceFlagsKHR flags : 8; uint64_t accelerationStructureReference = {}; }; @@ -2676,14 +3139,14 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject, VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , type( type_ ) - , accelerationStructure( accelerationStructure_ ) + : pNext{ pNext_ } + , type{ type_ } + , accelerationStructure{ accelerationStructure_ } { } @@ -2694,9 +3157,9 @@ namespace VULKAN_HPP_NAMESPACE : AccelerationStructureMemoryRequirementsInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureMemoryRequirementsInfoNV & operator=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureMemoryRequirementsInfoNV & operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -2704,7 +3167,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -2712,19 +3175,19 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & - setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT + setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & - setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT { accelerationStructure = 
accelerationStructure_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT { @@ -2789,13 +3252,13 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMotionInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AccelerationStructureMotionInfoNV( uint32_t maxInstances_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxInstances( maxInstances_ ) - , flags( flags_ ) + : pNext{ pNext_ } + , maxInstances{ maxInstances_ } + , flags{ flags_ } { } @@ -2805,9 +3268,9 @@ namespace VULKAN_HPP_NAMESPACE : AccelerationStructureMotionInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureMotionInfoNV & operator=( AccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureMotionInfoNV & operator=( VkAccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -2815,7 +3278,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -2829,12 +3292,12 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & - setFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + setFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureMotionInfoNV const &() const VULKAN_HPP_NOEXCEPT { @@ -2896,7 +3359,7 @@ namespace VULKAN_HPP_NAMESPACE { using NativeType = VkSRTDataNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SRTDataNV( float sx_ = {}, float a_ = {}, float b_ = {}, @@ -2913,31 +3376,31 @@ namespace VULKAN_HPP_NAMESPACE float tx_ = {}, float ty_ = {}, float tz_ = {} ) VULKAN_HPP_NOEXCEPT - : sx( sx_ ) - , a( a_ ) - , b( b_ ) - , pvx( pvx_ ) - , sy( sy_ ) - , c( c_ ) - , pvy( pvy_ ) - , sz( sz_ ) - , pvz( pvz_ ) - , qx( qx_ ) - , qy( qy_ ) - , qz( qz_ ) - , qw( qw_ ) - , tx( tx_ ) - , ty( ty_ ) - , tz( tz_ ) + : sx{ sx_ } + , a{ a_ } + , b{ b_ } + , pvx{ pvx_ } + , sy{ sy_ } + , c{ c_ } + , pvy{ pvy_ } + , sz{ sz_ } + , pvz{ pvz_ } + , qx{ qx_ } + , qy{ qy_ } + , qz{ qz_ } + , qw{ qw_ } + , tx{ tx_ } + , ty{ ty_ } + , tz{ tz_ } { } VULKAN_HPP_CONSTEXPR SRTDataNV( SRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; SRTDataNV( VkSRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : SRTDataNV( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SRTDataNV & operator=( SRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ SRTDataNV & operator=( VkSRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -2945,7 +3408,7 @@ 
namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSx( float sx_ ) VULKAN_HPP_NOEXCEPT { sx = sx_; @@ -3041,7 +3504,7 @@ namespace VULKAN_HPP_NAMESPACE tz = tz_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkSRTDataNV const &() const VULKAN_HPP_NOEXCEPT { @@ -3123,7 +3586,7 @@ namespace VULKAN_HPP_NAMESPACE { using NativeType = VkAccelerationStructureSRTMotionInstanceNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AccelerationStructureSRTMotionInstanceNV( VULKAN_HPP_NAMESPACE::SRTDataNV transformT0_ = {}, VULKAN_HPP_NAMESPACE::SRTDataNV transformT1_ = {}, uint32_t instanceCustomIndex_ = {}, @@ -3131,13 +3594,13 @@ namespace VULKAN_HPP_NAMESPACE uint32_t instanceShaderBindingTableRecordOffset_ = {}, VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT - : transformT0( transformT0_ ) - , transformT1( transformT1_ ) - , instanceCustomIndex( instanceCustomIndex_ ) - , mask( mask_ ) - , instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ ) - , flags( flags_ ) - , accelerationStructureReference( accelerationStructureReference_ ) + : transformT0{ transformT0_ } + , transformT1{ transformT1_ } + , instanceCustomIndex{ instanceCustomIndex_ } + , mask{ mask_ } + , instanceShaderBindingTableRecordOffset{ instanceShaderBindingTableRecordOffset_ } + , flags{ flags_ } + , accelerationStructureReference{ accelerationStructureReference_ } { } @@ -3147,9 +3610,9 @@ namespace VULKAN_HPP_NAMESPACE : AccelerationStructureSRTMotionInstanceNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureSRTMotionInstanceNV & operator=( AccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureSRTMotionInstanceNV & operator=( VkAccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -3157,16 +3620,16 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & - setTransformT0( VULKAN_HPP_NAMESPACE::SRTDataNV const & transformT0_ ) VULKAN_HPP_NOEXCEPT + setTransformT0( VULKAN_HPP_NAMESPACE::SRTDataNV const & transformT0_ ) VULKAN_HPP_NOEXCEPT { transformT0 = transformT0_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & - setTransformT1( VULKAN_HPP_NAMESPACE::SRTDataNV const & transformT1_ ) VULKAN_HPP_NOEXCEPT + setTransformT1( VULKAN_HPP_NAMESPACE::SRTDataNV const & transformT1_ ) VULKAN_HPP_NOEXCEPT { transformT1 = transformT1_; return *this; @@ -3185,7 +3648,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & - setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT + setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT { instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; return *this; @@ -3198,12 +3661,12 @@ 
namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & - setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT + setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT { accelerationStructureReference = accelerationStructureReference_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureSRTMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT { @@ -3256,17 +3719,17 @@ namespace VULKAN_HPP_NAMESPACE public: VULKAN_HPP_NAMESPACE::SRTDataNV transformT0 = {}; VULKAN_HPP_NAMESPACE::SRTDataNV transformT1 = {}; - uint32_t instanceCustomIndex : 24; - uint32_t mask : 8; + uint32_t instanceCustomIndex : 24; + uint32_t mask : 8; uint32_t instanceShaderBindingTableRecordOffset : 24; - VkGeometryInstanceFlagsKHR flags : 8; + VkGeometryInstanceFlagsKHR flags : 8; uint64_t accelerationStructureReference = {}; }; union AccelerationStructureMotionInstanceDataNV { using NativeType = VkAccelerationStructureMotionInstanceDataNV; -#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR staticInstance_ = {} ) : staticInstance( staticInstance_ ) @@ -3282,11 +3745,11 @@ namespace VULKAN_HPP_NAMESPACE : srtMotionInstance( srtMotionInstance_ ) { } -#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & - setStaticInstance( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const & staticInstance_ ) VULKAN_HPP_NOEXCEPT + setStaticInstance( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const & staticInstance_ ) VULKAN_HPP_NOEXCEPT { staticInstance = staticInstance_; return *this; @@ -3305,7 +3768,7 @@ namespace VULKAN_HPP_NAMESPACE srtMotionInstance = srtMotionInstance_; return *this; } -#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureMotionInstanceDataNV const &() const { @@ -3332,14 +3795,14 @@ namespace VULKAN_HPP_NAMESPACE { using NativeType = VkAccelerationStructureMotionInstanceNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV::eStatic, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV data_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , flags( flags_ ) - , data( data_ ) + : type{ type_ } + , flags{ flags_ } + , data{ data_ } { } @@ -3349,9 +3812,9 @@ namespace VULKAN_HPP_NAMESPACE : AccelerationStructureMotionInstanceNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureMotionInstanceNV & operator=( AccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureMotionInstanceNV & operator=( 
VkAccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
@@ -3359,28 +3822,28 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }
 
-#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
     VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV &
-      setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type_ ) VULKAN_HPP_NOEXCEPT
+      setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type_ ) VULKAN_HPP_NOEXCEPT
     {
       type = type_;
       return *this;
     }
 
     VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV &
-      setFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+      setFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
     {
       flags = flags_;
       return *this;
     }
 
     VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV &
-      setData( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV const & data_ ) VULKAN_HPP_NOEXCEPT
+      setData( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV const & data_ ) VULKAN_HPP_NOEXCEPT
     {
       data = data_;
       return *this;
     }
-#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+#endif /*VULKAN_HPP_NO_SETTERS*/
 
     operator VkAccelerationStructureMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT
     {
@@ -3412,6 +3875,643 @@ namespace VULKAN_HPP_NAMESPACE
     VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV data = {};
   };
 
+  struct MicromapUsageEXT
+  {
+    using NativeType = VkMicromapUsageEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MicromapUsageEXT( uint32_t count_ = {}, uint32_t subdivisionLevel_ = {}, uint32_t format_ = {} ) VULKAN_HPP_NOEXCEPT
+      : count{ count_ }
+      , subdivisionLevel{ subdivisionLevel_ }
+      , format{ format_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR MicromapUsageEXT( MicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MicromapUsageEXT( VkMicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MicromapUsageEXT( *reinterpret_cast<MicromapUsageEXT const *>( &rhs ) ) {}
+
+    MicromapUsageEXT & operator=( MicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    MicromapUsageEXT & operator=( VkMicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapUsageEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setCount( uint32_t count_ ) VULKAN_HPP_NOEXCEPT
+    {
+      count = count_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setSubdivisionLevel( uint32_t subdivisionLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subdivisionLevel = subdivisionLevel_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setFormat( uint32_t format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkMicromapUsageEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMicromapUsageEXT *>( this );
+    }
+
+    operator VkMicromapUsageEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMicromapUsageEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<uint32_t const &, uint32_t const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( count, subdivisionLevel, format );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MicromapUsageEXT const & ) const = default;
+#else
+    bool operator==( MicromapUsageEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( count == rhs.count ) && ( subdivisionLevel == rhs.subdivisionLevel ) && ( format == rhs.format );
+# endif
+    }
+
+    bool operator!=( MicromapUsageEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t count = {};
+    uint32_t subdivisionLevel = {};
+    uint32_t format = {};
+  };
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  struct AccelerationStructureTrianglesDisplacementMicromapNV
+  {
+    using NativeType = VkAccelerationStructureTrianglesDisplacementMicromapNV;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureTrianglesDisplacementMicromapNV;
+
+# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV(
+      VULKAN_HPP_NAMESPACE::Format displacementBiasAndScaleFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+      VULKAN_HPP_NAMESPACE::Format displacementVectorFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacementBiasAndScaleBuffer_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize displacementBiasAndScaleStride_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacementVectorBuffer_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize displacementVectorStride_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacedMicromapPrimitiveFlags_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize displacedMicromapPrimitiveFlagsStride_ = {},
+      VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16,
+      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ = {},
+      uint32_t baseTriangle_ = {},
+      uint32_t usageCountsCount_ = {},
+      const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ = {},
+      const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ = {},
+      VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {},
+      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , displacementBiasAndScaleFormat{ displacementBiasAndScaleFormat_ }
+      , displacementVectorFormat{ displacementVectorFormat_ }
+      , displacementBiasAndScaleBuffer{ displacementBiasAndScaleBuffer_ }
+      , displacementBiasAndScaleStride{ displacementBiasAndScaleStride_ }
+      , displacementVectorBuffer{ displacementVectorBuffer_ }
+      , displacementVectorStride{ displacementVectorStride_ }
+      , displacedMicromapPrimitiveFlags{ displacedMicromapPrimitiveFlags_ }
+      , displacedMicromapPrimitiveFlagsStride{ displacedMicromapPrimitiveFlagsStride_ }
+      , indexType{ indexType_ }
+      , indexBuffer{ indexBuffer_ }
+      , indexStride{ indexStride_ }
+      , baseTriangle{ baseTriangle_ }
+      , usageCountsCount{ usageCountsCount_ }
+      , pUsageCounts{ pUsageCounts_ }
+      , ppUsageCounts{ ppUsageCounts_ }
+      , micromap{ micromap_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14
+      AccelerationStructureTrianglesDisplacementMicromapNV( AccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AccelerationStructureTrianglesDisplacementMicromapNV( VkAccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AccelerationStructureTrianglesDisplacementMicromapNV( *reinterpret_cast<AccelerationStructureTrianglesDisplacementMicromapNV const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    AccelerationStructureTrianglesDisplacementMicromapNV(
+      VULKAN_HPP_NAMESPACE::Format displacementBiasAndScaleFormat_,
+      VULKAN_HPP_NAMESPACE::Format displacementVectorFormat_,
+      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacementBiasAndScaleBuffer_,
+      VULKAN_HPP_NAMESPACE::DeviceSize displacementBiasAndScaleStride_,
+      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacementVectorBuffer_,
+      VULKAN_HPP_NAMESPACE::DeviceSize displacementVectorStride_,
+      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacedMicromapPrimitiveFlags_,
+      VULKAN_HPP_NAMESPACE::DeviceSize displacedMicromapPrimitiveFlagsStride_,
+      VULKAN_HPP_NAMESPACE::IndexType indexType_,
+      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer_,
+      VULKAN_HPP_NAMESPACE::DeviceSize indexStride_,
+      uint32_t baseTriangle_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT> const & usageCounts_,
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const> const & pUsageCounts_ = {},
+      VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {},
+      void * pNext_ = nullptr )
+      : pNext( pNext_ )
+      , displacementBiasAndScaleFormat( displacementBiasAndScaleFormat_ )
+      , displacementVectorFormat( displacementVectorFormat_ )
+      , displacementBiasAndScaleBuffer( displacementBiasAndScaleBuffer_ )
+      , displacementBiasAndScaleStride( displacementBiasAndScaleStride_ )
+      , displacementVectorBuffer( displacementVectorBuffer_ )
+      , displacementVectorStride( displacementVectorStride_ )
+      , displacedMicromapPrimitiveFlags( displacedMicromapPrimitiveFlags_ )
+      , displacedMicromapPrimitiveFlagsStride( displacedMicromapPrimitiveFlagsStride_ )
+      , indexType( indexType_ )
+      , indexBuffer( indexBuffer_ )
+      , indexStride( indexStride_ )
+      , baseTriangle( baseTriangle_ )
+      , usageCountsCount( static_cast<uint32_t>( usageCounts_.size() ) )
+      , pUsageCounts( usageCounts_.data() )
+      , ppUsageCounts( pUsageCounts_.data() )
+      , micromap( micromap_ )
+    {
+# ifdef VULKAN_HPP_NO_EXCEPTIONS
+      VULKAN_HPP_ASSERT( usageCounts_.empty() || pUsageCounts_.empty() || ( usageCounts_.size() == pUsageCounts_.size() ) );
+# else
+      if ( !usageCounts_.empty() && !pUsageCounts_.empty() && ( usageCounts_.size() != pUsageCounts_.size() ) )
+      {
+        throw LogicError(
+          VULKAN_HPP_NAMESPACE_STRING
+          "::AccelerationStructureTrianglesDisplacementMicromapNV::AccelerationStructureTrianglesDisplacementMicromapNV: !usageCounts_.empty() && !pUsageCounts_.empty() && ( usageCounts_.size() != pUsageCounts_.size() )" );
+      }
+# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    AccelerationStructureTrianglesDisplacementMicromapNV &
+      operator=( AccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    AccelerationStructureTrianglesDisplacementMicromapNV & operator=( VkAccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesDisplacementMicromapNV const *>( &rhs );
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
+      setDisplacementBiasAndScaleFormat( VULKAN_HPP_NAMESPACE::Format displacementBiasAndScaleFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displacementBiasAndScaleFormat = displacementBiasAndScaleFormat_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
+      setDisplacementVectorFormat( VULKAN_HPP_NAMESPACE::Format displacementVectorFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displacementVectorFormat = displacementVectorFormat_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
+      setDisplacementBiasAndScaleBuffer( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & displacementBiasAndScaleBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displacementBiasAndScaleBuffer = displacementBiasAndScaleBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
+      setDisplacementBiasAndScaleStride( VULKAN_HPP_NAMESPACE::DeviceSize displacementBiasAndScaleStride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displacementBiasAndScaleStride = displacementBiasAndScaleStride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
+      setDisplacementVectorBuffer( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & displacementVectorBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displacementVectorBuffer = displacementVectorBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
+      setDisplacementVectorStride( VULKAN_HPP_NAMESPACE::DeviceSize displacementVectorStride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displacementVectorStride = displacementVectorStride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
+      setDisplacedMicromapPrimitiveFlags( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & displacedMicromapPrimitiveFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displacedMicromapPrimitiveFlags = displacedMicromapPrimitiveFlags_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
+      setDisplacedMicromapPrimitiveFlagsStride( VULKAN_HPP_NAMESPACE::DeviceSize displacedMicromapPrimitiveFlagsStride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displacedMicromapPrimitiveFlagsStride = displacedMicromapPrimitiveFlagsStride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
+      setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexType = indexType_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
+      setIndexBuffer( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexBuffer = indexBuffer_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
+      setIndexStride( VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexStride = indexStride_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setBaseTriangle( uint32_t baseTriangle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      baseTriangle = baseTriangle_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setUsageCountsCount( uint32_t usageCountsCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usageCountsCount = usageCountsCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
+      setPUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUsageCounts = pUsageCounts_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    AccelerationStructureTrianglesDisplacementMicromapNV &
+      setUsageCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT> const & usageCounts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usageCountsCount = static_cast<uint32_t>( usageCounts_.size() );
+      pUsageCounts = usageCounts_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
+      setPpUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppUsageCounts = ppUsageCounts_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    AccelerationStructureTrianglesDisplacementMicromapNV & setPUsageCounts(
+      VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const> const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usageCountsCount = static_cast<uint32_t>( pUsageCounts_.size() );
+      ppUsageCounts = pUsageCounts_.data();
+      return *this;
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV &
+      setMicromap( VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ ) VULKAN_HPP_NOEXCEPT
+    {
+      micromap = micromap_;
+      return *this;
+    }
+# endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkAccelerationStructureTrianglesDisplacementMicromapNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureTrianglesDisplacementMicromapNV *>( this );
+    }
+
+    operator VkAccelerationStructureTrianglesDisplacementMicromapNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureTrianglesDisplacementMicromapNV *>( this );
+    }
+
+# if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::Format const &,
+               VULKAN_HPP_NAMESPACE::Format const &,
+               VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &,
+               VULKAN_HPP_NAMESPACE::DeviceSize const &,
+               VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &,
+               VULKAN_HPP_NAMESPACE::DeviceSize const &,
+               VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &,
+               VULKAN_HPP_NAMESPACE::DeviceSize const &,
+               VULKAN_HPP_NAMESPACE::IndexType const &,
+               VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const &,
+               VULKAN_HPP_NAMESPACE::DeviceSize const &,
+               uint32_t const &,
+               uint32_t const &,
+               const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const &,
+               const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * const &,
+               VULKAN_HPP_NAMESPACE::MicromapEXT const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType,
+                       pNext,
+                       displacementBiasAndScaleFormat,
+                       displacementVectorFormat,
+                       displacementBiasAndScaleBuffer,
+                       displacementBiasAndScaleStride,
+                       displacementVectorBuffer,
+                       displacementVectorStride,
+                       displacedMicromapPrimitiveFlags,
+                       displacedMicromapPrimitiveFlagsStride,
+                       indexType,
+                       indexBuffer,
+                       indexStride,
+                       baseTriangle,
+                       usageCountsCount,
+                       pUsageCounts,
+                       ppUsageCounts,
+                       micromap );
+    }
+# endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureTrianglesDisplacementMicromapNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Format displacementBiasAndScaleFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::Format displacementVectorFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacementBiasAndScaleBuffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize displacementBiasAndScaleStride = {};
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacementVectorBuffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize displacementVectorStride = {};
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR displacedMicromapPrimitiveFlags = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize displacedMicromapPrimitiveFlagsStride = {};
+    VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
+    VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize indexStride = {};
+    uint32_t baseTriangle = {};
+    uint32_t usageCountsCount = {};
+    const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts = {};
+    const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts = {};
+    VULKAN_HPP_NAMESPACE::MicromapEXT micromap = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eAccelerationStructureTrianglesDisplacementMicromapNV>
+  {
+    using Type = AccelerationStructureTrianglesDisplacementMicromapNV;
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  
struct AccelerationStructureTrianglesOpacityMicromapEXT + { + using NativeType = VkAccelerationStructureTrianglesOpacityMicromapEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureTrianglesOpacityMicromapEXT( VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ = {}, + uint32_t baseTriangle_ = {}, + uint32_t usageCountsCount_ = {}, + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ = {}, + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ = {}, + VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , indexType{ indexType_ } + , indexBuffer{ indexBuffer_ } + , indexStride{ indexStride_ } + , baseTriangle{ baseTriangle_ } + , usageCountsCount{ usageCountsCount_ } + , pUsageCounts{ pUsageCounts_ } + , ppUsageCounts{ ppUsageCounts_ } + , micromap{ micromap_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 + AccelerationStructureTrianglesOpacityMicromapEXT( AccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AccelerationStructureTrianglesOpacityMicromapEXT( VkAccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : AccelerationStructureTrianglesOpacityMicromapEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureTrianglesOpacityMicromapEXT( + VULKAN_HPP_NAMESPACE::IndexType indexType_, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer_, + VULKAN_HPP_NAMESPACE::DeviceSize indexStride_, + uint32_t baseTriangle_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & usageCounts_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pUsageCounts_ = {}, + VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {}, + void * pNext_ = nullptr ) + : pNext( pNext_ ) + , indexType( indexType_ ) + , indexBuffer( indexBuffer_ ) + , indexStride( indexStride_ ) + , baseTriangle( baseTriangle_ ) + , usageCountsCount( static_cast( !usageCounts_.empty() ? 
usageCounts_.size() : pUsageCounts_.size() ) ) + , pUsageCounts( usageCounts_.data() ) + , ppUsageCounts( pUsageCounts_.data() ) + , micromap( micromap_ ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( ( !usageCounts_.empty() + !pUsageCounts_.empty() ) <= 1 ); +# else + if ( 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::AccelerationStructureTrianglesOpacityMicromapEXT::AccelerationStructureTrianglesOpacityMicromapEXT: 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + AccelerationStructureTrianglesOpacityMicromapEXT & operator=( AccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AccelerationStructureTrianglesOpacityMicromapEXT & operator=( VkAccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & + setIndexBuffer( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexBuffer_ ) VULKAN_HPP_NOEXCEPT + { + indexBuffer = indexBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & + setIndexStride( VULKAN_HPP_NAMESPACE::DeviceSize indexStride_ ) VULKAN_HPP_NOEXCEPT + { + indexStride = indexStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setBaseTriangle( uint32_t baseTriangle_ ) VULKAN_HPP_NOEXCEPT + { + baseTriangle = baseTriangle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setUsageCountsCount( uint32_t usageCountsCount_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = usageCountsCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & + setPUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + pUsageCounts = pUsageCounts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureTrianglesOpacityMicromapEXT & + setUsageCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & usageCounts_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = static_cast( usageCounts_.size() ); + pUsageCounts = usageCounts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & + setPpUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + ppUsageCounts = ppUsageCounts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + AccelerationStructureTrianglesOpacityMicromapEXT & setPUsageCounts( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = static_cast( pUsageCounts_.size() ); + ppUsageCounts 
= pUsageCounts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setMicromap( VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ ) VULKAN_HPP_NOEXCEPT + { + micromap = micromap_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAccelerationStructureTrianglesOpacityMicromapEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAccelerationStructureTrianglesOpacityMicromapEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, indexType, indexBuffer, indexStride, baseTriangle, usageCountsCount, pUsageCounts, ppUsageCounts, micromap ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize indexStride = {}; + uint32_t baseTriangle = {}; + uint32_t usageCountsCount = {}; + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts = {}; + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts = {}; + VULKAN_HPP_NAMESPACE::MicromapEXT micromap = {}; + }; + + template <> + struct CppType + { + using Type = AccelerationStructureTrianglesOpacityMicromapEXT; + }; + struct AccelerationStructureVersionInfoKHR { using NativeType = VkAccelerationStructureVersionInfoKHR; @@ -3419,10 +4519,10 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureVersionInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( const uint8_t * pVersionData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pVersionData( pVersionData_ ) + : pNext{ pNext_ } + , pVersionData{ pVersionData_ } { } @@ -3432,9 +4532,9 @@ namespace VULKAN_HPP_NAMESPACE : AccelerationStructureVersionInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AccelerationStructureVersionInfoKHR & operator=( AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureVersionInfoKHR & operator=( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -3442,7 +4542,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -3454,7 +4554,7 @@ namespace VULKAN_HPP_NAMESPACE pVersionData = pVersionData_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureVersionInfoKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -3481,7 +4581,7 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( 
AccelerationStructureVersionInfoKHR const & ) const = default; #else - bool operator==( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -3515,19 +4615,19 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireNextImageInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, uint64_t timeout_ = {}, VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::Fence fence_ = {}, uint32_t deviceMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , swapchain( swapchain_ ) - , timeout( timeout_ ) - , semaphore( semaphore_ ) - , fence( fence_ ) - , deviceMask( deviceMask_ ) + : pNext{ pNext_ } + , swapchain{ swapchain_ } + , timeout{ timeout_ } + , semaphore{ semaphore_ } + , fence{ fence_ } + , deviceMask{ deviceMask_ } { } @@ -3537,9 +4637,9 @@ namespace VULKAN_HPP_NAMESPACE : AcquireNextImageInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AcquireNextImageInfoKHR & operator=( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AcquireNextImageInfoKHR & operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -3547,7 +4647,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -3583,7 +4683,7 @@ namespace VULKAN_HPP_NAMESPACE deviceMask = deviceMask_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAcquireNextImageInfoKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -3655,13 +4755,13 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireProfilingLockInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ = {}, uint64_t timeout_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , timeout( timeout_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , timeout{ timeout_ } { } @@ -3671,9 +4771,9 @@ namespace VULKAN_HPP_NAMESPACE : AcquireProfilingLockInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AcquireProfilingLockInfoKHR & operator=( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AcquireProfilingLockInfoKHR & operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -3681,7 +4781,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) 
VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -3699,7 +4799,7 @@ namespace VULKAN_HPP_NAMESPACE timeout = timeout_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAcquireProfilingLockInfoKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -3754,23 +4854,43 @@ namespace VULKAN_HPP_NAMESPACE using Type = AcquireProfilingLockInfoKHR; }; + typedef void *( VKAPI_PTR * PFN_AllocationFunction )( void * pUserData, + size_t size, + size_t alignment, + VULKAN_HPP_NAMESPACE::SystemAllocationScope allocationScope ); + + typedef void *( VKAPI_PTR * PFN_ReallocationFunction )( + void * pUserData, void * pOriginal, size_t size, size_t alignment, VULKAN_HPP_NAMESPACE::SystemAllocationScope allocationScope ); + + typedef void( VKAPI_PTR * PFN_FreeFunction )( void * pUserData, void * pMemory ); + + typedef void( VKAPI_PTR * PFN_InternalAllocationNotification )( void * pUserData, + size_t size, + VULKAN_HPP_NAMESPACE::InternalAllocationType allocationType, + VULKAN_HPP_NAMESPACE::SystemAllocationScope allocationScope ); + + typedef void( VKAPI_PTR * PFN_InternalFreeNotification )( void * pUserData, + size_t size, + VULKAN_HPP_NAMESPACE::InternalAllocationType allocationType, + VULKAN_HPP_NAMESPACE::SystemAllocationScope allocationScope ); + struct AllocationCallbacks { using NativeType = VkAllocationCallbacks; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR AllocationCallbacks( void * pUserData_ = {}, - PFN_vkAllocationFunction pfnAllocation_ = {}, - PFN_vkReallocationFunction pfnReallocation_ = {}, - PFN_vkFreeFunction pfnFree_ = {}, - PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {}, - PFN_vkInternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT - : pUserData( pUserData_ ) - , pfnAllocation( pfnAllocation_ ) - , pfnReallocation( pfnReallocation_ ) - , pfnFree( pfnFree_ ) - , pfnInternalAllocation( pfnInternalAllocation_ ) - , pfnInternalFree( pfnInternalFree_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AllocationCallbacks( void * pUserData_ = {}, + VULKAN_HPP_NAMESPACE::PFN_AllocationFunction pfnAllocation_ = {}, + VULKAN_HPP_NAMESPACE::PFN_ReallocationFunction pfnReallocation_ = {}, + VULKAN_HPP_NAMESPACE::PFN_FreeFunction pfnFree_ = {}, + VULKAN_HPP_NAMESPACE::PFN_InternalAllocationNotification pfnInternalAllocation_ = {}, + VULKAN_HPP_NAMESPACE::PFN_InternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT + : pUserData{ pUserData_ } + , pfnAllocation{ pfnAllocation_ } + , pfnReallocation{ pfnReallocation_ } + , pfnFree{ pfnFree_ } + , pfnInternalAllocation{ pfnInternalAllocation_ } + , pfnInternalFree{ pfnInternalFree_ } { } @@ -3779,9 +4899,36 @@ namespace VULKAN_HPP_NAMESPACE AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT : AllocationCallbacks( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# if defined( __clang__ ) +# pragma clang diagnostic ignored "-Wunknown-warning-option" +# endif +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + VULKAN_HPP_DEPRECATED( "This constructor is deprecated. Use the one taking function pointer types from the vk-namespace instead." 
) + + AllocationCallbacks( void * pUserData_, + PFN_vkAllocationFunction pfnAllocation_, + PFN_vkReallocationFunction pfnReallocation_ = {}, + PFN_vkFreeFunction pfnFree_ = {}, + PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {}, + PFN_vkInternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT + : AllocationCallbacks( pUserData_, + reinterpret_cast( pfnAllocation_ ), + reinterpret_cast( pfnReallocation_ ), + reinterpret_cast( pfnFree_ ), + reinterpret_cast( pfnInternalAllocation_ ), + reinterpret_cast( pfnInternalFree_ ) ) + { + } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +# endif AllocationCallbacks & operator=( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AllocationCallbacks & operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -3789,43 +4936,82 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT { pUserData = pUserData_; return *this; } - VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnAllocation( VULKAN_HPP_NAMESPACE::PFN_AllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT { pfnAllocation = pfnAllocation_; return *this; } - VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnReallocation( VULKAN_HPP_NAMESPACE::PFN_ReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT { pfnReallocation = pfnReallocation_; return *this; } - VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnFree( PFN_vkFreeFunction pfnFree_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnFree( VULKAN_HPP_NAMESPACE::PFN_FreeFunction pfnFree_ ) VULKAN_HPP_NOEXCEPT { pfnFree = pfnFree_; return *this; } - VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & + setPfnInternalAllocation( VULKAN_HPP_NAMESPACE::PFN_InternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT { pfnInternalAllocation = pfnInternalAllocation_; return *this; } - VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnInternalFree( VULKAN_HPP_NAMESPACE::PFN_InternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT { pfnInternalFree = pfnInternalFree_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# if defined( __clang__ ) +# pragma clang diagnostic ignored "-Wunknown-warning-option" +# endif +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." 
) + + AllocationCallbacks & setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT + { + return setPfnAllocation( reinterpret_cast( pfnAllocation_ ) ); + } + + VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." ) + + AllocationCallbacks & setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT + { + return setPfnReallocation( reinterpret_cast( pfnReallocation_ ) ); + } + + VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." ) + + AllocationCallbacks & setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT + { + return setPfnInternalAllocation( reinterpret_cast( pfnInternalAllocation_ ) ); + } + + VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." ) + + AllocationCallbacks & setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT + { + return setPfnInternalFree( reinterpret_cast( pfnInternalFree_ ) ); + } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +# endif +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAllocationCallbacks const &() const VULKAN_HPP_NOEXCEPT { @@ -3842,11 +5028,11 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + VULKAN_HPP_NAMESPACE::PFN_AllocationFunction const &, + VULKAN_HPP_NAMESPACE::PFN_ReallocationFunction const &, + VULKAN_HPP_NAMESPACE::PFN_FreeFunction const &, + VULKAN_HPP_NAMESPACE::PFN_InternalAllocationNotification const &, + VULKAN_HPP_NAMESPACE::PFN_InternalFreeNotification const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -3870,12 +5056,12 @@ namespace VULKAN_HPP_NAMESPACE } public: - void * pUserData = {}; - PFN_vkAllocationFunction pfnAllocation = {}; - PFN_vkReallocationFunction pfnReallocation = {}; - PFN_vkFreeFunction pfnFree = {}; - PFN_vkInternalAllocationNotification pfnInternalAllocation = {}; - PFN_vkInternalFreeNotification pfnInternalFree = {}; + void * pUserData = {}; + VULKAN_HPP_NAMESPACE::PFN_AllocationFunction pfnAllocation = {}; + VULKAN_HPP_NAMESPACE::PFN_ReallocationFunction pfnReallocation = {}; + VULKAN_HPP_NAMESPACE::PFN_FreeFunction pfnFree = {}; + VULKAN_HPP_NAMESPACE::PFN_InternalAllocationNotification pfnInternalAllocation = {}; + VULKAN_HPP_NAMESPACE::PFN_InternalFreeNotification pfnInternalFree = {}; }; struct AmigoProfilingSubmitInfoSEC @@ -3885,12 +5071,12 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAmigoProfilingSubmitInfoSEC; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AmigoProfilingSubmitInfoSEC( uint64_t firstDrawTimestamp_ = {}, uint64_t swapBufferTimestamp_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , firstDrawTimestamp( firstDrawTimestamp_ ) - , swapBufferTimestamp( swapBufferTimestamp_ ) + : pNext{ pNext_ } + , firstDrawTimestamp{ firstDrawTimestamp_ } + , swapBufferTimestamp{ swapBufferTimestamp_ } { } @@ -3900,9 +5086,9 @@ namespace VULKAN_HPP_NAMESPACE : AmigoProfilingSubmitInfoSEC( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AmigoProfilingSubmitInfoSEC & operator=( AmigoProfilingSubmitInfoSEC const & rhs 
) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AmigoProfilingSubmitInfoSEC & operator=( VkAmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -3910,7 +5096,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -3928,7 +5114,7 @@ namespace VULKAN_HPP_NAMESPACE swapBufferTimestamp = swapBufferTimestamp_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAmigoProfilingSubmitInfoSEC const &() const VULKAN_HPP_NOEXCEPT { @@ -3988,24 +5174,24 @@ namespace VULKAN_HPP_NAMESPACE { using NativeType = VkComponentMapping; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ComponentMapping( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity ) VULKAN_HPP_NOEXCEPT - : r( r_ ) - , g( g_ ) - , b( b_ ) - , a( a_ ) + : r{ r_ } + , g{ g_ } + , b{ b_ } + , a{ a_ } { } VULKAN_HPP_CONSTEXPR ComponentMapping( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default; ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT : ComponentMapping( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ComponentMapping & operator=( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ComponentMapping & operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -4013,7 +5199,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setR( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ ) VULKAN_HPP_NOEXCEPT { r = r_; @@ -4037,7 +5223,7 @@ namespace VULKAN_HPP_NAMESPACE a = a_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkComponentMapping const &() const VULKAN_HPP_NOEXCEPT { @@ -4097,7 +5283,7 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatProperties2ANDROID; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatProperties2ANDROID( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint64_t externalFormat_ = {}, @@ -4108,15 +5294,15 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , format( format_ ) - , externalFormat( externalFormat_ ) - , formatFeatures( 
formatFeatures_ ) - , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ) - , suggestedYcbcrModel( suggestedYcbcrModel_ ) - , suggestedYcbcrRange( suggestedYcbcrRange_ ) - , suggestedXChromaOffset( suggestedXChromaOffset_ ) - , suggestedYChromaOffset( suggestedYChromaOffset_ ) + : pNext{ pNext_ } + , format{ format_ } + , externalFormat{ externalFormat_ } + , formatFeatures{ formatFeatures_ } + , samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ } + , suggestedYcbcrModel{ suggestedYcbcrModel_ } + , suggestedYcbcrRange{ suggestedYcbcrRange_ } + , suggestedXChromaOffset{ suggestedXChromaOffset_ } + , suggestedYChromaOffset{ suggestedYChromaOffset_ } { } @@ -4127,9 +5313,9 @@ namespace VULKAN_HPP_NAMESPACE : AndroidHardwareBufferFormatProperties2ANDROID( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AndroidHardwareBufferFormatProperties2ANDROID & operator=( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AndroidHardwareBufferFormatProperties2ANDROID & operator=( VkAndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -4226,7 +5412,7 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint64_t externalFormat_ = {}, @@ -4237,15 +5423,15 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , format( format_ ) - , externalFormat( externalFormat_ ) - , formatFeatures( formatFeatures_ ) - , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ) - , suggestedYcbcrModel( suggestedYcbcrModel_ ) - , suggestedYcbcrRange( suggestedYcbcrRange_ ) - , suggestedXChromaOffset( suggestedXChromaOffset_ ) - , suggestedYChromaOffset( suggestedYChromaOffset_ ) + : pNext{ pNext_ } + , format{ format_ } + , externalFormat{ externalFormat_ } + , formatFeatures{ formatFeatures_ } + , samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ } + , suggestedYcbcrModel{ suggestedYcbcrModel_ } + , suggestedYcbcrRange{ suggestedYcbcrRange_ } + , suggestedXChromaOffset{ suggestedXChromaOffset_ } + , suggestedYChromaOffset{ suggestedYChromaOffset_ } { } @@ -4255,9 +5441,9 @@ namespace VULKAN_HPP_NAMESPACE : AndroidHardwareBufferFormatPropertiesANDROID( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AndroidHardwareBufferFormatPropertiesANDROID & operator=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AndroidHardwareBufferFormatPropertiesANDROID & operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -4346,6 +5532,94 @@ namespace VULKAN_HPP_NAMESPACE }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct 
AndroidHardwareBufferFormatResolvePropertiesANDROID + { + using NativeType = VkAndroidHardwareBufferFormatResolvePropertiesANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatResolvePropertiesANDROID; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + AndroidHardwareBufferFormatResolvePropertiesANDROID( VULKAN_HPP_NAMESPACE::Format colorAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , colorAttachmentFormat{ colorAttachmentFormat_ } + { + } + + VULKAN_HPP_CONSTEXPR + AndroidHardwareBufferFormatResolvePropertiesANDROID( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AndroidHardwareBufferFormatResolvePropertiesANDROID( VkAndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : AndroidHardwareBufferFormatResolvePropertiesANDROID( *reinterpret_cast( &rhs ) ) + { + } + + AndroidHardwareBufferFormatResolvePropertiesANDROID & + operator=( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AndroidHardwareBufferFormatResolvePropertiesANDROID & operator=( VkAndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkAndroidHardwareBufferFormatResolvePropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferFormatResolvePropertiesANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, colorAttachmentFormat ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AndroidHardwareBufferFormatResolvePropertiesANDROID const & ) const = default; +# else + bool operator==( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorAttachmentFormat == rhs.colorAttachmentFormat ); +# endif + } + + bool operator!=( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatResolvePropertiesANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format colorAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + }; + + template <> + struct CppType + { + using Type = AndroidHardwareBufferFormatResolvePropertiesANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + #if defined( VK_USE_PLATFORM_ANDROID_KHR ) struct AndroidHardwareBufferPropertiesANDROID { @@ -4354,13 +5628,13 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferPropertiesANDROID; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , allocationSize( allocationSize_ ) - , memoryTypeBits( memoryTypeBits_ ) + : pNext{ pNext_ } + , allocationSize{ allocationSize_ } + , memoryTypeBits{ memoryTypeBits_ } { } @@ -4370,9 +5644,9 @@ namespace VULKAN_HPP_NAMESPACE : AndroidHardwareBufferPropertiesANDROID( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AndroidHardwareBufferPropertiesANDROID & operator=( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AndroidHardwareBufferPropertiesANDROID & operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -4442,10 +5716,10 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferUsageANDROID; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( uint64_t androidHardwareBufferUsage_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , androidHardwareBufferUsage( androidHardwareBufferUsage_ ) + : pNext{ pNext_ } + , androidHardwareBufferUsage{ androidHardwareBufferUsage_ } { } @@ -4455,9 +5729,9 @@ namespace VULKAN_HPP_NAMESPACE : AndroidHardwareBufferUsageANDROID( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AndroidHardwareBufferUsageANDROID & operator=( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AndroidHardwareBufferUsageANDROID & operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -4526,13 +5800,13 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidSurfaceCreateInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {}, struct ANativeWindow * window_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , window( window_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , window{ window_ } { } @@ -4542,9 +5816,9 @@ namespace VULKAN_HPP_NAMESPACE : AndroidSurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AndroidSurfaceCreateInfoKHR & operator=( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AndroidSurfaceCreateInfoKHR & operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -4552,7 +5826,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -4570,7 +5844,7 @@ namespace VULKAN_HPP_NAMESPACE window = window_; return *this; } 
-# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+# endif /*VULKAN_HPP_NO_SETTERS*/
 
     operator VkAndroidSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
     {
@@ -4629,6 +5903,230 @@ namespace VULKAN_HPP_NAMESPACE
   };
 #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
 
+  struct AntiLagPresentationInfoAMD
+  {
+    using NativeType = VkAntiLagPresentationInfoAMD;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAntiLagPresentationInfoAMD;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR AntiLagPresentationInfoAMD( VULKAN_HPP_NAMESPACE::AntiLagStageAMD stage_ = VULKAN_HPP_NAMESPACE::AntiLagStageAMD::eInput,
+                                                     uint64_t frameIndex_ = {},
+                                                     void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , stage{ stage_ }
+      , frameIndex{ frameIndex_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR AntiLagPresentationInfoAMD( AntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    AntiLagPresentationInfoAMD( VkAntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+      : AntiLagPresentationInfoAMD( *reinterpret_cast<AntiLagPresentationInfoAMD const *>( &rhs ) )
+    {
+    }
+
+    AntiLagPresentationInfoAMD & operator=( AntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    AntiLagPresentationInfoAMD & operator=( VkAntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 AntiLagPresentationInfoAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AntiLagPresentationInfoAMD & setStage( VULKAN_HPP_NAMESPACE::AntiLagStageAMD stage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stage = stage_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 AntiLagPresentationInfoAMD & setFrameIndex( uint64_t frameIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      frameIndex = frameIndex_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkAntiLagPresentationInfoAMD const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAntiLagPresentationInfoAMD *>( this );
+    }
+
+    operator VkAntiLagPresentationInfoAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAntiLagPresentationInfoAMD *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::AntiLagStageAMD const &, uint64_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, stage, frameIndex );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( AntiLagPresentationInfoAMD const & ) const = default;
+#else
+    bool operator==( AntiLagPresentationInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && ( frameIndex == rhs.frameIndex );
+# endif
+    }
+
+    bool operator!=( AntiLagPresentationInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAntiLagPresentationInfoAMD;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::AntiLagStageAMD stage = VULKAN_HPP_NAMESPACE::AntiLagStageAMD::eInput;
+    uint64_t frameIndex = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eAntiLagPresentationInfoAMD>
+  {
+    using Type = AntiLagPresentationInfoAMD;
+  };
+
+  struct AntiLagDataAMD
+  {
+    using NativeType = VkAntiLagDataAMD;
+
+    
static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAntiLagDataAMD; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AntiLagDataAMD( VULKAN_HPP_NAMESPACE::AntiLagModeAMD mode_ = VULKAN_HPP_NAMESPACE::AntiLagModeAMD::eDriverControl, + uint32_t maxFPS_ = {}, + const VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD * pPresentationInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , mode{ mode_ } + , maxFPS{ maxFPS_ } + , pPresentationInfo{ pPresentationInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR AntiLagDataAMD( AntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AntiLagDataAMD( VkAntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT : AntiLagDataAMD( *reinterpret_cast( &rhs ) ) {} + + AntiLagDataAMD & operator=( AntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + AntiLagDataAMD & operator=( VkAntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & setMode( VULKAN_HPP_NAMESPACE::AntiLagModeAMD mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & setMaxFPS( uint32_t maxFPS_ ) VULKAN_HPP_NOEXCEPT + { + maxFPS = maxFPS_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & + setPPresentationInfo( const VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD * pPresentationInfo_ ) VULKAN_HPP_NOEXCEPT + { + pPresentationInfo = pPresentationInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkAntiLagDataAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAntiLagDataAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, mode, maxFPS, pPresentationInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AntiLagDataAMD const & ) const = default; +#else + bool operator==( AntiLagDataAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == rhs.mode ) && ( maxFPS == rhs.maxFPS ) && + ( pPresentationInfo == rhs.pPresentationInfo ); +# endif + } + + bool operator!=( AntiLagDataAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAntiLagDataAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AntiLagModeAMD mode = VULKAN_HPP_NAMESPACE::AntiLagModeAMD::eDriverControl; + uint32_t maxFPS = {}; + const VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD * pPresentationInfo = {}; + }; + + template <> + struct CppType + { + using Type = AntiLagDataAMD; + }; + struct ApplicationInfo { using NativeType = VkApplicationInfo; @@ -4636,28 +6134,28 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eApplicationInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ApplicationInfo( const char * pApplicationName_ = {}, uint32_t applicationVersion_ = {}, const char * pEngineName_ = {}, uint32_t engineVersion_ = {}, uint32_t apiVersion_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pApplicationName( pApplicationName_ ) - , applicationVersion( applicationVersion_ ) - , pEngineName( pEngineName_ ) - , engineVersion( engineVersion_ ) - , apiVersion( apiVersion_ ) + : pNext{ pNext_ } + , pApplicationName{ pApplicationName_ } + , applicationVersion{ applicationVersion_ } + , pEngineName{ pEngineName_ } + , engineVersion{ engineVersion_ } + , apiVersion{ apiVersion_ } { } VULKAN_HPP_CONSTEXPR ApplicationInfo( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ApplicationInfo( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ApplicationInfo & operator=( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ApplicationInfo & operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -4665,7 +6163,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -4701,7 +6199,7 @@ namespace VULKAN_HPP_NAMESPACE apiVersion = apiVersion_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkApplicationInfo const &() const VULKAN_HPP_NOEXCEPT { @@ -4788,7 +6286,7 @@ namespace VULKAN_HPP_NAMESPACE { using NativeType = VkAttachmentDescription; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AttachmentDescription( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, @@ -4799,15 +6297,15 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , format( format_ ) - , samples( samples_ ) - , loadOp( loadOp_ ) - , storeOp( storeOp_ ) - , stencilLoadOp( stencilLoadOp_ ) - , stencilStoreOp( stencilStoreOp_ ) - , initialLayout( initialLayout_ ) - , finalLayout( finalLayout_ ) + : flags{ flags_ } + , format{ format_ } + , samples{ samples_ } + , loadOp{ loadOp_ } + , storeOp{ storeOp_ } + , stencilLoadOp{ stencilLoadOp_ } + , stencilStoreOp{ stencilStoreOp_ } + , initialLayout{ initialLayout_ } + , finalLayout{ finalLayout_ } { } @@ -4817,9 +6315,9 @@ namespace VULKAN_HPP_NAMESPACE : AttachmentDescription( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AttachmentDescription & operator=( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ 
AttachmentDescription & operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -4827,7 +6325,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; @@ -4881,7 +6379,7 @@ namespace VULKAN_HPP_NAMESPACE finalLayout = finalLayout_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAttachmentDescription const &() const VULKAN_HPP_NOEXCEPT { @@ -4952,7 +6450,7 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescription2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AttachmentDescription2( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, @@ -4963,16 +6461,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , format( format_ ) - , samples( samples_ ) - , loadOp( loadOp_ ) - , storeOp( storeOp_ ) - , stencilLoadOp( stencilLoadOp_ ) - , stencilStoreOp( stencilStoreOp_ ) - , initialLayout( initialLayout_ ) - , finalLayout( finalLayout_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , format{ format_ } + , samples{ samples_ } + , loadOp{ loadOp_ } + , storeOp{ storeOp_ } + , stencilLoadOp{ stencilLoadOp_ } + , stencilStoreOp{ stencilStoreOp_ } + , initialLayout{ initialLayout_ } + , finalLayout{ finalLayout_ } { } @@ -4982,9 +6480,9 @@ namespace VULKAN_HPP_NAMESPACE : AttachmentDescription2( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AttachmentDescription2 & operator=( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AttachmentDescription2 & operator=( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -4992,7 +6490,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -5052,7 +6550,7 @@ namespace VULKAN_HPP_NAMESPACE finalLayout = finalLayout_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAttachmentDescription2 const &() const VULKAN_HPP_NOEXCEPT { @@ -5125,6 +6623,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = AttachmentDescription2; }; + using AttachmentDescription2KHR = AttachmentDescription2; struct AttachmentDescriptionStencilLayout @@ -5134,14 +6633,14 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescriptionStencilLayout; -#if 
!defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stencilInitialLayout( stencilInitialLayout_ ) - , stencilFinalLayout( stencilFinalLayout_ ) + : pNext{ pNext_ } + , stencilInitialLayout{ stencilInitialLayout_ } + , stencilFinalLayout{ stencilFinalLayout_ } { } @@ -5151,9 +6650,9 @@ namespace VULKAN_HPP_NAMESPACE : AttachmentDescriptionStencilLayout( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AttachmentDescriptionStencilLayout & operator=( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AttachmentDescriptionStencilLayout & operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -5161,7 +6660,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -5169,19 +6668,19 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & - setStencilInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ ) VULKAN_HPP_NOEXCEPT + setStencilInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ ) VULKAN_HPP_NOEXCEPT { stencilInitialLayout = stencilInitialLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & - setStencilFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ ) VULKAN_HPP_NOEXCEPT + setStencilFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ ) VULKAN_HPP_NOEXCEPT { stencilFinalLayout = stencilFinalLayout_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAttachmentDescriptionStencilLayout const &() const VULKAN_HPP_NOEXCEPT { @@ -5237,17 +6736,18 @@ namespace VULKAN_HPP_NAMESPACE { using Type = AttachmentDescriptionStencilLayout; }; + using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout; struct AttachmentReference { using NativeType = VkAttachmentReference; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AttachmentReference( uint32_t attachment_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT - : attachment( attachment_ ) - , layout( layout_ ) + : attachment{ attachment_ } + , layout{ layout_ } { } @@ -5256,9 +6756,9 @@ namespace VULKAN_HPP_NAMESPACE AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT : AttachmentReference( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AttachmentReference & operator=( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AttachmentReference & operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -5266,7 +6766,7 @@ namespace VULKAN_HPP_NAMESPACE 
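// --- Editorial usage sketch (not part of the generated header) --------------
// A typical fill of the AttachmentDescription/AttachmentReference pair shown
// above for a single color attachment; the B8G8R8A8-sRGB format and the
// present-source final layout are assumptions standing in for a real
// swapchain configuration.
#include <vulkan/vulkan.hpp>

inline vk::SubpassDescription describeColorSubpass( vk::AttachmentDescription & colorAttachment,
                                                    vk::AttachmentReference &   colorRef )   // illustrative helper
{
  colorAttachment = vk::AttachmentDescription{}
                      .setFormat( vk::Format::eB8G8R8A8Srgb )
                      .setSamples( vk::SampleCountFlagBits::e1 )
                      .setLoadOp( vk::AttachmentLoadOp::eClear )
                      .setStoreOp( vk::AttachmentStoreOp::eStore )
                      .setStencilLoadOp( vk::AttachmentLoadOp::eDontCare )
                      .setStencilStoreOp( vk::AttachmentStoreOp::eDontCare )
                      .setInitialLayout( vk::ImageLayout::eUndefined )
                      .setFinalLayout( vk::ImageLayout::ePresentSrcKHR );

  colorRef = vk::AttachmentReference{ 0, vk::ImageLayout::eColorAttachmentOptimal };
  return vk::SubpassDescription{}.setColorAttachmentCount( 1 ).setPColorAttachments( &colorRef );
}
// ---------------------------------------------------------------------------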
return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AttachmentReference & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT { attachment = attachment_; @@ -5278,7 +6778,7 @@ namespace VULKAN_HPP_NAMESPACE layout = layout_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAttachmentReference const &() const VULKAN_HPP_NOEXCEPT { @@ -5332,15 +6832,15 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReference2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AttachmentReference2( uint32_t attachment_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , attachment( attachment_ ) - , layout( layout_ ) - , aspectMask( aspectMask_ ) + : pNext{ pNext_ } + , attachment{ attachment_ } + , layout{ layout_ } + , aspectMask{ aspectMask_ } { } @@ -5350,9 +6850,9 @@ namespace VULKAN_HPP_NAMESPACE : AttachmentReference2( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AttachmentReference2 & operator=( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AttachmentReference2 & operator=( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -5360,7 +6860,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -5384,7 +6884,7 @@ namespace VULKAN_HPP_NAMESPACE aspectMask = aspectMask_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAttachmentReference2 const &() const VULKAN_HPP_NOEXCEPT { @@ -5444,6 +6944,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = AttachmentReference2; }; + using AttachmentReference2KHR = AttachmentReference2; struct AttachmentReferenceStencilLayout @@ -5453,11 +6954,11 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReferenceStencilLayout; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stencilLayout( stencilLayout_ ) + : pNext{ pNext_ } + , stencilLayout{ stencilLayout_ } { } @@ -5467,9 +6968,9 @@ namespace VULKAN_HPP_NAMESPACE : AttachmentReferenceStencilLayout( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AttachmentReferenceStencilLayout & operator=( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AttachmentReferenceStencilLayout & operator=( VkAttachmentReferenceStencilLayout const & 
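// --- Editorial usage sketch (not part of the generated header) --------------
// AttachmentReferenceStencilLayout extends AttachmentReference2 through its
// pNext chain, and vk::StructureChain links the two automatically.  A hedged
// sketch of a depth/stencil reference with a separate stencil layout; the
// attachment index 1 and the concrete layouts are placeholder assumptions.
#include <vulkan/vulkan.hpp>

inline void describeDepthStencilReference()    // illustrative helper
{
  vk::StructureChain<vk::AttachmentReference2, vk::AttachmentReferenceStencilLayout> chain{
    vk::AttachmentReference2{ 1,
                              vk::ImageLayout::eDepthAttachmentOptimal,
                              vk::ImageAspectFlagBits::eDepth | vk::ImageAspectFlagBits::eStencil },
    vk::AttachmentReferenceStencilLayout{ vk::ImageLayout::eStencilReadOnlyOptimal }
  };
  const vk::AttachmentReference2 & depthRef = chain.get<vk::AttachmentReference2>();
  (void)depthRef;                              // would feed vk::SubpassDescription2::setPDepthStencilAttachment
}
// ---------------------------------------------------------------------------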
rhs ) VULKAN_HPP_NOEXCEPT { @@ -5477,7 +6978,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -5489,7 +6990,7 @@ namespace VULKAN_HPP_NAMESPACE stencilLayout = stencilLayout_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAttachmentReferenceStencilLayout const &() const VULKAN_HPP_NOEXCEPT { @@ -5542,6 +7043,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = AttachmentReferenceStencilLayout; }; + using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout; struct AttachmentSampleCountInfoAMD @@ -5551,16 +7053,16 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentSampleCountInfoAMD; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AttachmentSampleCountInfoAMD( uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , colorAttachmentCount( colorAttachmentCount_ ) - , pColorAttachmentSamples( pColorAttachmentSamples_ ) - , depthStencilAttachmentSamples( depthStencilAttachmentSamples_ ) + : pNext{ pNext_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachmentSamples{ pColorAttachmentSamples_ } + , depthStencilAttachmentSamples{ depthStencilAttachmentSamples_ } { } @@ -5583,9 +7085,9 @@ namespace VULKAN_HPP_NAMESPACE { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AttachmentSampleCountInfoAMD & operator=( AttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AttachmentSampleCountInfoAMD & operator=( VkAttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -5593,7 +7095,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -5607,7 +7109,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & - setPColorAttachmentSamples( const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT + setPColorAttachmentSamples( const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT { pColorAttachmentSamples = pColorAttachmentSamples_; return *this; @@ -5624,12 +7126,12 @@ namespace VULKAN_HPP_NAMESPACE # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & - setDepthStencilAttachmentSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT + setDepthStencilAttachmentSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT { depthStencilAttachmentSamples = 
depthStencilAttachmentSamples_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAttachmentSampleCountInfoAMD const &() const VULKAN_HPP_NOEXCEPT { @@ -5689,25 +7191,26 @@ namespace VULKAN_HPP_NAMESPACE { using Type = AttachmentSampleCountInfoAMD; }; + using AttachmentSampleCountInfoNV = AttachmentSampleCountInfoAMD; struct Extent2D { using NativeType = VkExtent2D; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR Extent2D( uint32_t width_ = {}, uint32_t height_ = {} ) VULKAN_HPP_NOEXCEPT - : width( width_ ) - , height( height_ ) + : width{ width_ } + , height{ height_ } { } VULKAN_HPP_CONSTEXPR Extent2D( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; Extent2D( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT : Extent2D( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ Extent2D & operator=( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ Extent2D & operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -5715,7 +7218,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 Extent2D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT { width = width_; @@ -5727,7 +7230,7 @@ namespace VULKAN_HPP_NAMESPACE height = height_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExtent2D const &() const VULKAN_HPP_NOEXCEPT { @@ -5778,19 +7281,19 @@ namespace VULKAN_HPP_NAMESPACE { using NativeType = VkSampleLocationEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SampleLocationEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT - : x( x_ ) - , y( y_ ) + : x{ x_ } + , y{ y_ } { } VULKAN_HPP_CONSTEXPR SampleLocationEXT( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT : SampleLocationEXT( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SampleLocationEXT & operator=( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ SampleLocationEXT & operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -5798,7 +7301,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT { x = x_; @@ -5810,7 +7313,7 @@ namespace VULKAN_HPP_NAMESPACE y = y_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkSampleLocationEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -5864,18 +7367,18 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSampleLocationsInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = 
VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {}, uint32_t sampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , sampleLocationsPerPixel( sampleLocationsPerPixel_ ) - , sampleLocationGridSize( sampleLocationGridSize_ ) - , sampleLocationsCount( sampleLocationsCount_ ) - , pSampleLocations( pSampleLocations_ ) + : pNext{ pNext_ } + , sampleLocationsPerPixel{ sampleLocationsPerPixel_ } + , sampleLocationGridSize{ sampleLocationGridSize_ } + , sampleLocationsCount{ sampleLocationsCount_ } + , pSampleLocations{ pSampleLocations_ } { } @@ -5899,9 +7402,9 @@ namespace VULKAN_HPP_NAMESPACE { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SampleLocationsInfoEXT & operator=( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ SampleLocationsInfoEXT & operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -5909,7 +7412,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -5917,14 +7420,14 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & - setSampleLocationsPerPixel( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT + setSampleLocationsPerPixel( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT { sampleLocationsPerPixel = sampleLocationsPerPixel_; return *this; } VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & - setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D const & sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT + setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D const & sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT { sampleLocationGridSize = sampleLocationGridSize_; return *this; @@ -5937,7 +7440,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & - setPSampleLocations( const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT + setPSampleLocations( const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT { pSampleLocations = pSampleLocations_; return *this; @@ -5952,7 +7455,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkSampleLocationsInfoEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -6020,11 +7523,11 @@ namespace VULKAN_HPP_NAMESPACE { using NativeType = VkAttachmentSampleLocationsEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( uint32_t attachmentIndex_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT - : attachmentIndex( attachmentIndex_ ) - , sampleLocationsInfo( sampleLocationsInfo_ ) + : attachmentIndex{ attachmentIndex_ } + , sampleLocationsInfo{ sampleLocationsInfo_ } { } @@ -6034,9 +7537,9 @@ namespace VULKAN_HPP_NAMESPACE : AttachmentSampleLocationsEXT( *reinterpret_cast( &rhs ) ) { } -#endif 
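// --- Editorial usage sketch (not part of the generated header) --------------
// The ArrayProxy-based setter above lets a container supply both the element
// count and the pointer in one call.  A hedged sketch for four programmable
// sample locations on a 1x1 grid; the caller's array must outlive the
// returned struct, which keeps pointing into it.
#include <vulkan/vulkan.hpp>
#include <array>

inline vk::SampleLocationsInfoEXT makeSampleLocationsInfo( std::array<vk::SampleLocationEXT, 4> const & locations )   // illustrative helper
{
  vk::SampleLocationsInfoEXT info{};
  info.setSampleLocationsPerPixel( vk::SampleCountFlagBits::e4 )
    .setSampleLocationGridSize( vk::Extent2D{ 1, 1 } )
    .setSampleLocations( locations );          // enhanced-mode setter: fills sampleLocationsCount and pSampleLocations
  return info;
}
// ---------------------------------------------------------------------------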
/*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ AttachmentSampleLocationsEXT & operator=( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AttachmentSampleLocationsEXT & operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -6044,7 +7547,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & setAttachmentIndex( uint32_t attachmentIndex_ ) VULKAN_HPP_NOEXCEPT { attachmentIndex = attachmentIndex_; @@ -6052,12 +7555,12 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & - setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT + setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT { sampleLocationsInfo = sampleLocationsInfo_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAttachmentSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -6108,20 +7611,20 @@ namespace VULKAN_HPP_NAMESPACE { using NativeType = VkBaseInStructure; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) BaseInStructure( VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo, const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : sType( sType_ ) - , pNext( pNext_ ) + : sType{ sType_ } + , pNext{ pNext_ } { } BaseInStructure( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default; BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT : BaseInStructure( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BaseInStructure & operator=( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BaseInStructure & operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -6129,13 +7632,13 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BaseInStructure & setPNext( const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBaseInStructure const &() const VULKAN_HPP_NOEXCEPT { @@ -6186,20 +7689,20 @@ namespace VULKAN_HPP_NAMESPACE { using NativeType = VkBaseOutStructure; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) BaseOutStructure( VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo, struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : sType( sType_ ) - , pNext( pNext_ ) + : sType{ sType_ } + , pNext{ pNext_ } { } BaseOutStructure( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default; BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT : BaseOutStructure( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BaseOutStructure & operator=( BaseOutStructure 
const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BaseOutStructure & operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -6207,13 +7710,13 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BaseOutStructure & setPNext( struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBaseOutStructure const &() const VULKAN_HPP_NOEXCEPT { @@ -6267,19 +7770,19 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindAccelerationStructureMemoryInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, uint32_t deviceIndexCount_ = {}, const uint32_t * pDeviceIndices_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , accelerationStructure( accelerationStructure_ ) - , memory( memory_ ) - , memoryOffset( memoryOffset_ ) - , deviceIndexCount( deviceIndexCount_ ) - , pDeviceIndices( pDeviceIndices_ ) + : pNext{ pNext_ } + , accelerationStructure{ accelerationStructure_ } + , memory{ memory_ } + , memoryOffset{ memoryOffset_ } + , deviceIndexCount{ deviceIndexCount_ } + , pDeviceIndices{ pDeviceIndices_ } { } @@ -6305,9 +7808,9 @@ namespace VULKAN_HPP_NAMESPACE { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BindAccelerationStructureMemoryInfoNV & operator=( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindAccelerationStructureMemoryInfoNV & operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -6315,7 +7818,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -6323,7 +7826,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & - setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return *this; @@ -6362,7 +7865,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindAccelerationStructureMemoryInfoNV const &() const VULKAN_HPP_NOEXCEPT { @@ -6434,13 +7937,13 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryDeviceGroupInfo; -#if !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {}, const uint32_t * pDeviceIndices_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceIndexCount( deviceIndexCount_ ) - , pDeviceIndices( pDeviceIndices_ ) + : pNext{ pNext_ } + , deviceIndexCount{ deviceIndexCount_ } + , pDeviceIndices{ pDeviceIndices_ } { } @@ -6457,9 +7960,9 @@ namespace VULKAN_HPP_NAMESPACE { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BindBufferMemoryDeviceGroupInfo & operator=( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindBufferMemoryDeviceGroupInfo & operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -6467,7 +7970,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -6495,7 +7998,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindBufferMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT { @@ -6549,6 +8052,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = BindBufferMemoryDeviceGroupInfo; }; + using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo; struct BindBufferMemoryInfo @@ -6558,15 +8062,15 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , buffer( buffer_ ) - , memory( memory_ ) - , memoryOffset( memoryOffset_ ) + : pNext{ pNext_ } + , buffer{ buffer_ } + , memory{ memory_ } + , memoryOffset{ memoryOffset_ } { } @@ -6576,9 +8080,9 @@ namespace VULKAN_HPP_NAMESPACE : BindBufferMemoryInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BindBufferMemoryInfo & operator=( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindBufferMemoryInfo & operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -6586,7 +8090,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -6610,7 +8114,7 @@ namespace VULKAN_HPP_NAMESPACE memoryOffset = memoryOffset_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindBufferMemoryInfo const &() const VULKAN_HPP_NOEXCEPT { @@ -6669,25 +8173,347 @@ namespace VULKAN_HPP_NAMESPACE { using Type = 
BindBufferMemoryInfo; }; + using BindBufferMemoryInfoKHR = BindBufferMemoryInfo; + struct BindDescriptorBufferEmbeddedSamplersInfoEXT + { + using NativeType = VkBindDescriptorBufferEmbeddedSamplersInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindDescriptorBufferEmbeddedSamplersInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindDescriptorBufferEmbeddedSamplersInfoEXT( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + uint32_t set_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stageFlags{ stageFlags_ } + , layout{ layout_ } + , set{ set_ } + { + } + + VULKAN_HPP_CONSTEXPR BindDescriptorBufferEmbeddedSamplersInfoEXT( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindDescriptorBufferEmbeddedSamplersInfoEXT( VkBindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : BindDescriptorBufferEmbeddedSamplersInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + BindDescriptorBufferEmbeddedSamplersInfoEXT & operator=( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindDescriptorBufferEmbeddedSamplersInfoEXT & operator=( VkBindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT & + setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT + { + stageFlags = stageFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT + { + set = set_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindDescriptorBufferEmbeddedSamplersInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindDescriptorBufferEmbeddedSamplersInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stageFlags, layout, set ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindDescriptorBufferEmbeddedSamplersInfoEXT const & ) const = default; +#else + bool operator==( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageFlags == rhs.stageFlags ) && ( layout == rhs.layout ) && ( set == rhs.set ); +# endif + } + + bool operator!=( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + 
return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindDescriptorBufferEmbeddedSamplersInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + uint32_t set = {}; + }; + + template <> + struct CppType + { + using Type = BindDescriptorBufferEmbeddedSamplersInfoEXT; + }; + + struct BindDescriptorSetsInfo + { + using NativeType = VkBindDescriptorSetsInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindDescriptorSetsInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindDescriptorSetsInfo( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + uint32_t firstSet_ = {}, + uint32_t descriptorSetCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets_ = {}, + uint32_t dynamicOffsetCount_ = {}, + const uint32_t * pDynamicOffsets_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stageFlags{ stageFlags_ } + , layout{ layout_ } + , firstSet{ firstSet_ } + , descriptorSetCount{ descriptorSetCount_ } + , pDescriptorSets{ pDescriptorSets_ } + , dynamicOffsetCount{ dynamicOffsetCount_ } + , pDynamicOffsets{ pDynamicOffsets_ } + { + } + + VULKAN_HPP_CONSTEXPR BindDescriptorSetsInfo( BindDescriptorSetsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindDescriptorSetsInfo( VkBindDescriptorSetsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BindDescriptorSetsInfo( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindDescriptorSetsInfo( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_, + uint32_t firstSet_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorSets_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicOffsets_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , stageFlags( stageFlags_ ) + , layout( layout_ ) + , firstSet( firstSet_ ) + , descriptorSetCount( static_cast( descriptorSets_.size() ) ) + , pDescriptorSets( descriptorSets_.data() ) + , dynamicOffsetCount( static_cast( dynamicOffsets_.size() ) ) + , pDynamicOffsets( dynamicOffsets_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + BindDescriptorSetsInfo & operator=( BindDescriptorSetsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindDescriptorSetsInfo & operator=( VkBindDescriptorSetsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT + { + stageFlags = stageFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setFirstSet( uint32_t firstSet_ ) VULKAN_HPP_NOEXCEPT + { + firstSet = firstSet_; + return *this; + } + + 
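// --- Editorial usage sketch (not part of the generated header) --------------
// BindDescriptorSetsInfo, being introduced in this hunk (VK_KHR_maintenance6,
// promoted to Vulkan 1.4), bundles the vkCmdBindDescriptorSets arguments into
// one structure.  A hedged sketch assuming valid handles and that the KHR
// entry point is available through the default dispatcher:
#include <vulkan/vulkan.hpp>

inline void bindOneSet( vk::CommandBuffer cmd, vk::PipelineLayout layout, vk::DescriptorSet set )   // illustrative helper
{
  vk::BindDescriptorSetsInfoKHR info{};
  info.setStageFlags( vk::ShaderStageFlagBits::eVertex | vk::ShaderStageFlagBits::eFragment )
    .setLayout( layout )
    .setFirstSet( 0 )
    .setDescriptorSets( set );                 // ArrayProxy setter: descriptorSetCount = 1, pDescriptorSets = &set
  cmd.bindDescriptorSets2KHR( info );          // requires VK_KHR_maintenance6 (or Vulkan 1.4) to be enabled
}
// ---------------------------------------------------------------------------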
VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = descriptorSetCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setPDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets_ ) VULKAN_HPP_NOEXCEPT + { + pDescriptorSets = pDescriptorSets_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindDescriptorSetsInfo & + setDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorSets_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetCount = static_cast( descriptorSets_.size() ); + pDescriptorSets = descriptorSets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setDynamicOffsetCount( uint32_t dynamicOffsetCount_ ) VULKAN_HPP_NOEXCEPT + { + dynamicOffsetCount = dynamicOffsetCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setPDynamicOffsets( const uint32_t * pDynamicOffsets_ ) VULKAN_HPP_NOEXCEPT + { + pDynamicOffsets = pDynamicOffsets_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + BindDescriptorSetsInfo & setDynamicOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicOffsets_ ) VULKAN_HPP_NOEXCEPT + { + dynamicOffsetCount = static_cast( dynamicOffsets_.size() ); + pDynamicOffsets = dynamicOffsets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindDescriptorSetsInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindDescriptorSetsInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stageFlags, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindDescriptorSetsInfo const & ) const = default; +#else + bool operator==( BindDescriptorSetsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageFlags == rhs.stageFlags ) && ( layout == rhs.layout ) && ( firstSet == rhs.firstSet ) && + ( descriptorSetCount == rhs.descriptorSetCount ) && ( pDescriptorSets == rhs.pDescriptorSets ) && + ( dynamicOffsetCount == rhs.dynamicOffsetCount ) && ( pDynamicOffsets == rhs.pDynamicOffsets ); +# endif + } + + bool operator!=( BindDescriptorSetsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindDescriptorSetsInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + uint32_t firstSet = {}; + uint32_t descriptorSetCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets = {}; + uint32_t dynamicOffsetCount = {}; + const uint32_t * pDynamicOffsets = {}; + }; + + template <> + struct CppType + { + using Type = BindDescriptorSetsInfo; + }; + + using BindDescriptorSetsInfoKHR = BindDescriptorSetsInfo; + struct Offset2D { using NativeType = VkOffset2D; -#if !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR Offset2D( int32_t x_ = {}, int32_t y_ = {} ) VULKAN_HPP_NOEXCEPT - : x( x_ ) - , y( y_ ) + : x{ x_ } + , y{ y_ } { } VULKAN_HPP_CONSTEXPR Offset2D( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; Offset2D( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT : Offset2D( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ Offset2D & operator=( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ Offset2D & operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -6695,7 +8521,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 Offset2D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT { x = x_; @@ -6707,7 +8533,7 @@ namespace VULKAN_HPP_NAMESPACE y = y_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkOffset2D const &() const VULKAN_HPP_NOEXCEPT { @@ -6758,19 +8584,19 @@ namespace VULKAN_HPP_NAMESPACE { using NativeType = VkRect2D; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR Rect2D( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {} ) VULKAN_HPP_NOEXCEPT - : offset( offset_ ) - , extent( extent_ ) + : offset{ offset_ } + , extent{ extent_ } { } VULKAN_HPP_CONSTEXPR Rect2D( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; Rect2D( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT : Rect2D( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ Rect2D & operator=( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ Rect2D & operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -6778,7 +8604,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 Rect2D & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT { offset = offset_; @@ -6790,7 +8616,7 @@ namespace VULKAN_HPP_NAMESPACE extent = extent_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkRect2D const &() const VULKAN_HPP_NOEXCEPT { @@ -6844,17 +8670,17 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryDeviceGroupInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {}, const uint32_t * pDeviceIndices_ = {}, uint32_t splitInstanceBindRegionCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceIndexCount( deviceIndexCount_ ) - , pDeviceIndices( pDeviceIndices_ ) - , splitInstanceBindRegionCount( splitInstanceBindRegionCount_ ) - , pSplitInstanceBindRegions( pSplitInstanceBindRegions_ ) + : pNext{ pNext_ } + , deviceIndexCount{ deviceIndexCount_ } + , pDeviceIndices{ pDeviceIndices_ } + 
, splitInstanceBindRegionCount{ splitInstanceBindRegionCount_ } + , pSplitInstanceBindRegions{ pSplitInstanceBindRegions_ } { } @@ -6877,9 +8703,9 @@ namespace VULKAN_HPP_NAMESPACE { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BindImageMemoryDeviceGroupInfo & operator=( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindImageMemoryDeviceGroupInfo & operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -6887,7 +8713,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -6923,7 +8749,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & - setPSplitInstanceBindRegions( const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT + setPSplitInstanceBindRegions( const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT { pSplitInstanceBindRegions = pSplitInstanceBindRegions_; return *this; @@ -6938,7 +8764,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindImageMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT { @@ -7000,6 +8826,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = BindImageMemoryDeviceGroupInfo; }; + using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo; struct BindImageMemoryInfo @@ -7009,15 +8836,15 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , image( image_ ) - , memory( memory_ ) - , memoryOffset( memoryOffset_ ) + : pNext{ pNext_ } + , image{ image_ } + , memory{ memory_ } + , memoryOffset{ memoryOffset_ } { } @@ -7026,9 +8853,9 @@ namespace VULKAN_HPP_NAMESPACE BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BindImageMemoryInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BindImageMemoryInfo & operator=( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindImageMemoryInfo & operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -7036,7 +8863,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -7060,7 +8887,7 @@ namespace VULKAN_HPP_NAMESPACE memoryOffset = memoryOffset_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindImageMemoryInfo const &() const 
VULKAN_HPP_NOEXCEPT { @@ -7119,6 +8946,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = BindImageMemoryInfo; }; + using BindImageMemoryInfoKHR = BindImageMemoryInfo; struct BindImageMemorySwapchainInfoKHR @@ -7128,13 +8956,13 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemorySwapchainInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, uint32_t imageIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , swapchain( swapchain_ ) - , imageIndex( imageIndex_ ) + : pNext{ pNext_ } + , swapchain{ swapchain_ } + , imageIndex{ imageIndex_ } { } @@ -7144,9 +8972,9 @@ namespace VULKAN_HPP_NAMESPACE : BindImageMemorySwapchainInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BindImageMemorySwapchainInfoKHR & operator=( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindImageMemorySwapchainInfoKHR & operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -7154,7 +8982,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -7172,7 +9000,7 @@ namespace VULKAN_HPP_NAMESPACE imageIndex = imageIndex_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindImageMemorySwapchainInfoKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -7234,11 +9062,11 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImagePlaneMemoryInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , planeAspect( planeAspect_ ) + : pNext{ pNext_ } + , planeAspect{ planeAspect_ } { } @@ -7248,9 +9076,9 @@ namespace VULKAN_HPP_NAMESPACE : BindImagePlaneMemoryInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BindImagePlaneMemoryInfo & operator=( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindImagePlaneMemoryInfo & operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -7258,7 +9086,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -7270,7 +9098,7 @@ namespace VULKAN_HPP_NAMESPACE planeAspect = planeAspect_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindImagePlaneMemoryInfo const &() const 
VULKAN_HPP_NOEXCEPT { @@ -7323,20 +9151,118 @@ namespace VULKAN_HPP_NAMESPACE { using Type = BindImagePlaneMemoryInfo; }; + using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo; + struct BindIndexBufferIndirectCommandEXT + { + using NativeType = VkBindIndexBufferIndirectCommandEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BindIndexBufferIndirectCommandEXT( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, + uint32_t size_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT + : bufferAddress{ bufferAddress_ } + , size{ size_ } + , indexType{ indexType_ } + { + } + + VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandEXT( BindIndexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindIndexBufferIndirectCommandEXT( VkBindIndexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : BindIndexBufferIndirectCommandEXT( *reinterpret_cast( &rhs ) ) + { + } + + BindIndexBufferIndirectCommandEXT & operator=( BindIndexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindIndexBufferIndirectCommandEXT & operator=( VkBindIndexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandEXT & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferAddress = bufferAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandEXT & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandEXT & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindIndexBufferIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindIndexBufferIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( bufferAddress, size, indexType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindIndexBufferIndirectCommandEXT const & ) const = default; +#else + bool operator==( BindIndexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( indexType == rhs.indexType ); +# endif + } + + bool operator!=( BindIndexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; + uint32_t size = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + }; + struct BindIndexBufferIndirectCommandNV { using NativeType = VkBindIndexBufferIndirectCommandNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) 
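// --- Editorial usage sketch (not part of the generated header) --------------
// BindIndexBufferIndirectCommandEXT, introduced above, is a plain token that
// the host writes into a device-generated-commands stream
// (VK_EXT_device_generated_commands); its layout matches the C struct, so a
// memcpy into mapped memory is sufficient.  The buffer address, size and
// index type below are placeholder assumptions.
#include <vulkan/vulkan.hpp>
#include <cstring>

inline void writeIndexBufferToken( void * mappedStream, vk::DeviceAddress indexBufferAddress, uint32_t indexBufferSizeBytes )   // illustrative helper
{
  vk::BindIndexBufferIndirectCommandEXT token{ indexBufferAddress, indexBufferSizeBytes, vk::IndexType::eUint32 };
  std::memcpy( mappedStream, &token, sizeof( token ) );
}
// ---------------------------------------------------------------------------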
VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT - : bufferAddress( bufferAddress_ ) - , size( size_ ) - , indexType( indexType_ ) + : bufferAddress{ bufferAddress_ } + , size{ size_ } + , indexType{ indexType_ } { } @@ -7346,9 +9272,9 @@ namespace VULKAN_HPP_NAMESPACE : BindIndexBufferIndirectCommandNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BindIndexBufferIndirectCommandNV & operator=( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindIndexBufferIndirectCommandNV & operator=( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -7356,7 +9282,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT { bufferAddress = bufferAddress_; @@ -7374,7 +9300,7 @@ namespace VULKAN_HPP_NAMESPACE indexType = indexType_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindIndexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { @@ -7422,12 +9348,185 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; }; + struct BindMemoryStatus + { + using NativeType = VkBindMemoryStatus; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindMemoryStatus; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindMemoryStatus( VULKAN_HPP_NAMESPACE::Result * pResult_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pResult{ pResult_ } + { + } + + VULKAN_HPP_CONSTEXPR BindMemoryStatus( BindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindMemoryStatus( VkBindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT : BindMemoryStatus( *reinterpret_cast( &rhs ) ) {} + + BindMemoryStatus & operator=( BindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindMemoryStatus & operator=( VkBindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindMemoryStatus & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindMemoryStatus & setPResult( VULKAN_HPP_NAMESPACE::Result * pResult_ ) VULKAN_HPP_NOEXCEPT + { + pResult = pResult_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindMemoryStatus const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindMemoryStatus &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pResult ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + 
auto operator<=>( BindMemoryStatus const & ) const = default; +#else + bool operator==( BindMemoryStatus const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pResult == rhs.pResult ); +# endif + } + + bool operator!=( BindMemoryStatus const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindMemoryStatus; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Result * pResult = {}; + }; + + template <> + struct CppType + { + using Type = BindMemoryStatus; + }; + + using BindMemoryStatusKHR = BindMemoryStatus; + + struct BindPipelineIndirectCommandNV + { + using NativeType = VkBindPipelineIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindPipelineIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress pipelineAddress_ = {} ) VULKAN_HPP_NOEXCEPT + : pipelineAddress{ pipelineAddress_ } + { + } + + VULKAN_HPP_CONSTEXPR BindPipelineIndirectCommandNV( BindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindPipelineIndirectCommandNV( VkBindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : BindPipelineIndirectCommandNV( *reinterpret_cast( &rhs ) ) + { + } + + BindPipelineIndirectCommandNV & operator=( BindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindPipelineIndirectCommandNV & operator=( VkBindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindPipelineIndirectCommandNV & setPipelineAddress( VULKAN_HPP_NAMESPACE::DeviceAddress pipelineAddress_ ) VULKAN_HPP_NOEXCEPT + { + pipelineAddress = pipelineAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindPipelineIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindPipelineIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( pipelineAddress ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindPipelineIndirectCommandNV const & ) const = default; +#else + bool operator==( BindPipelineIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( pipelineAddress == rhs.pipelineAddress ); +# endif + } + + bool operator!=( BindPipelineIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress pipelineAddress = {}; + }; + struct BindShaderGroupIndirectCommandNV { using NativeType = VkBindShaderGroupIndirectCommandNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( uint32_t groupIndex_ = {} ) VULKAN_HPP_NOEXCEPT : groupIndex( groupIndex_ ) {} +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
BindShaderGroupIndirectCommandNV( uint32_t groupIndex_ = {} ) VULKAN_HPP_NOEXCEPT : groupIndex{ groupIndex_ } {} VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -7435,9 +9534,9 @@ namespace VULKAN_HPP_NAMESPACE : BindShaderGroupIndirectCommandNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BindShaderGroupIndirectCommandNV & operator=( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindShaderGroupIndirectCommandNV & operator=( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -7445,13 +9544,13 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindShaderGroupIndirectCommandNV & setGroupIndex( uint32_t groupIndex_ ) VULKAN_HPP_NOEXCEPT { groupIndex = groupIndex_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindShaderGroupIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { @@ -7501,26 +9600,26 @@ namespace VULKAN_HPP_NAMESPACE { using NativeType = VkSparseMemoryBind; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SparseMemoryBind( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT - : resourceOffset( resourceOffset_ ) - , size( size_ ) - , memory( memory_ ) - , memoryOffset( memoryOffset_ ) - , flags( flags_ ) + : resourceOffset{ resourceOffset_ } + , size{ size_ } + , memory{ memory_ } + , memoryOffset{ memoryOffset_ } + , flags{ flags_ } { } VULKAN_HPP_CONSTEXPR SparseMemoryBind( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT : SparseMemoryBind( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SparseMemoryBind & operator=( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ SparseMemoryBind & operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -7528,7 +9627,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setResourceOffset( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ ) VULKAN_HPP_NOEXCEPT { resourceOffset = resourceOffset_; @@ -7558,7 +9657,7 @@ namespace VULKAN_HPP_NAMESPACE flags = flags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkSparseMemoryBind const &() const VULKAN_HPP_NOEXCEPT { @@ -7617,13 +9716,13 @@ namespace VULKAN_HPP_NAMESPACE { using NativeType = VkSparseBufferMemoryBindInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {} ) 
VULKAN_HPP_NOEXCEPT - : buffer( buffer_ ) - , bindCount( bindCount_ ) - , pBinds( pBinds_ ) + : buffer{ buffer_ } + , bindCount{ bindCount_ } + , pBinds{ pBinds_ } { } @@ -7641,9 +9740,9 @@ namespace VULKAN_HPP_NAMESPACE { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SparseBufferMemoryBindInfo & operator=( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ SparseBufferMemoryBindInfo & operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -7651,7 +9750,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT { buffer = buffer_; @@ -7679,7 +9778,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkSparseBufferMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT { @@ -7731,13 +9830,13 @@ namespace VULKAN_HPP_NAMESPACE { using NativeType = VkSparseImageOpaqueMemoryBindInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT - : image( image_ ) - , bindCount( bindCount_ ) - , pBinds( pBinds_ ) + : image{ image_ } + , bindCount{ bindCount_ } + , pBinds{ pBinds_ } { } @@ -7755,9 +9854,9 @@ namespace VULKAN_HPP_NAMESPACE { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SparseImageOpaqueMemoryBindInfo & operator=( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ SparseImageOpaqueMemoryBindInfo & operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -7765,7 +9864,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT { image = image_; @@ -7793,7 +9892,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkSparseImageOpaqueMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT { @@ -7845,21 +9944,21 @@ namespace VULKAN_HPP_NAMESPACE { using NativeType = VkImageSubresource; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageSubresource( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t arrayLayer_ = {} ) VULKAN_HPP_NOEXCEPT - : aspectMask( aspectMask_ ) - , mipLevel( mipLevel_ ) - , arrayLayer( arrayLayer_ ) + : aspectMask{ aspectMask_ } + , mipLevel{ mipLevel_ } + , arrayLayer{ arrayLayer_ } { } VULKAN_HPP_CONSTEXPR ImageSubresource( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageSubresource( VkImageSubresource const & rhs ) 
VULKAN_HPP_NOEXCEPT : ImageSubresource( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageSubresource & operator=( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageSubresource & operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -7867,7 +9966,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT { aspectMask = aspectMask_; @@ -7885,7 +9984,7 @@ namespace VULKAN_HPP_NAMESPACE arrayLayer = arrayLayer_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageSubresource const &() const VULKAN_HPP_NOEXCEPT { @@ -7937,11 +10036,11 @@ namespace VULKAN_HPP_NAMESPACE { using NativeType = VkOffset3D; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR Offset3D( int32_t x_ = {}, int32_t y_ = {}, int32_t z_ = {} ) VULKAN_HPP_NOEXCEPT - : x( x_ ) - , y( y_ ) - , z( z_ ) + : x{ x_ } + , y{ y_ } + , z{ z_ } { } @@ -7950,9 +10049,9 @@ namespace VULKAN_HPP_NAMESPACE Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT : Offset3D( *reinterpret_cast( &rhs ) ) {} explicit Offset3D( Offset2D const & offset2D, int32_t z_ = {} ) : x( offset2D.x ), y( offset2D.y ), z( z_ ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ Offset3D & operator=( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ Offset3D & operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -7960,7 +10059,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 Offset3D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT { x = x_; @@ -7978,7 +10077,7 @@ namespace VULKAN_HPP_NAMESPACE z = z_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkOffset3D const &() const VULKAN_HPP_NOEXCEPT { @@ -8030,11 +10129,11 @@ namespace VULKAN_HPP_NAMESPACE { using NativeType = VkExtent3D; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR Extent3D( uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT - : width( width_ ) - , height( height_ ) - , depth( depth_ ) + : width{ width_ } + , height{ height_ } + , depth{ depth_ } { } @@ -8043,9 +10142,9 @@ namespace VULKAN_HPP_NAMESPACE Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT : Extent3D( *reinterpret_cast( &rhs ) ) {} explicit Extent3D( Extent2D const & extent2D, uint32_t depth_ = {} ) : width( extent2D.width ), height( extent2D.height ), depth( depth_ ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ Extent3D & operator=( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ Extent3D & operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -8053,7 +10152,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 
Extent3D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT { width = width_; @@ -8071,7 +10170,7 @@ namespace VULKAN_HPP_NAMESPACE depth = depth_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExtent3D const &() const VULKAN_HPP_NOEXCEPT { @@ -8123,19 +10222,19 @@ namespace VULKAN_HPP_NAMESPACE { using NativeType = VkSparseImageMemoryBind; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT - : subresource( subresource_ ) - , offset( offset_ ) - , extent( extent_ ) - , memory( memory_ ) - , memoryOffset( memoryOffset_ ) - , flags( flags_ ) + : subresource{ subresource_ } + , offset{ offset_ } + , extent{ extent_ } + , memory{ memory_ } + , memoryOffset{ memoryOffset_ } + , flags{ flags_ } { } @@ -8145,9 +10244,9 @@ namespace VULKAN_HPP_NAMESPACE : SparseImageMemoryBind( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SparseImageMemoryBind & operator=( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ SparseImageMemoryBind & operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -8155,7 +10254,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & subresource_ ) VULKAN_HPP_NOEXCEPT { subresource = subresource_; @@ -8191,7 +10290,7 @@ namespace VULKAN_HPP_NAMESPACE flags = flags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkSparseImageMemoryBind const &() const VULKAN_HPP_NOEXCEPT { @@ -8252,13 +10351,13 @@ namespace VULKAN_HPP_NAMESPACE { using NativeType = VkSparseImageMemoryBindInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT - : image( image_ ) - , bindCount( bindCount_ ) - , pBinds( pBinds_ ) + : image{ image_ } + , bindCount{ bindCount_ } + , pBinds{ pBinds_ } { } @@ -8276,9 +10375,9 @@ namespace VULKAN_HPP_NAMESPACE { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SparseImageMemoryBindInfo & operator=( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ SparseImageMemoryBindInfo & operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -8286,7 +10385,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT { image = image_; 
@@ -8314,7 +10413,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkSparseImageMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT { @@ -8369,7 +10468,7 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindSparseInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindSparseInfo( uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, uint32_t bufferBindCount_ = {}, @@ -8381,17 +10480,17 @@ namespace VULKAN_HPP_NAMESPACE uint32_t signalSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , waitSemaphoreCount( waitSemaphoreCount_ ) - , pWaitSemaphores( pWaitSemaphores_ ) - , bufferBindCount( bufferBindCount_ ) - , pBufferBinds( pBufferBinds_ ) - , imageOpaqueBindCount( imageOpaqueBindCount_ ) - , pImageOpaqueBinds( pImageOpaqueBinds_ ) - , imageBindCount( imageBindCount_ ) - , pImageBinds( pImageBinds_ ) - , signalSemaphoreCount( signalSemaphoreCount_ ) - , pSignalSemaphores( pSignalSemaphores_ ) + : pNext{ pNext_ } + , waitSemaphoreCount{ waitSemaphoreCount_ } + , pWaitSemaphores{ pWaitSemaphores_ } + , bufferBindCount{ bufferBindCount_ } + , pBufferBinds{ pBufferBinds_ } + , imageOpaqueBindCount{ imageOpaqueBindCount_ } + , pImageOpaqueBinds{ pImageOpaqueBinds_ } + , imageBindCount{ imageBindCount_ } + , pImageBinds{ pImageBinds_ } + , signalSemaphoreCount{ signalSemaphoreCount_ } + , pSignalSemaphores{ pSignalSemaphores_ } { } @@ -8420,9 +10519,9 @@ namespace VULKAN_HPP_NAMESPACE { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BindSparseInfo & operator=( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindSparseInfo & operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -8430,7 +10529,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -8488,7 +10587,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & - setPImageOpaqueBinds( const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT + setPImageOpaqueBinds( const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT { pImageOpaqueBinds = pImageOpaqueBinds_; return *this; @@ -8547,7 +10646,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT { @@ -8636,17 +10735,113 @@ namespace VULKAN_HPP_NAMESPACE using Type = BindSparseInfo; }; + struct BindVertexBufferIndirectCommandEXT + { + using NativeType = VkBindVertexBufferIndirectCommandEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandEXT( 
VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, + uint32_t size_ = {}, + uint32_t stride_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferAddress{ bufferAddress_ } + , size{ size_ } + , stride{ stride_ } + { + } + + VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandEXT( BindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindVertexBufferIndirectCommandEXT( VkBindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : BindVertexBufferIndirectCommandEXT( *reinterpret_cast( &rhs ) ) + { + } + + BindVertexBufferIndirectCommandEXT & operator=( BindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BindVertexBufferIndirectCommandEXT & operator=( VkBindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandEXT & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferAddress = bufferAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandEXT & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandEXT & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBindVertexBufferIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindVertexBufferIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( bufferAddress, size, stride ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindVertexBufferIndirectCommandEXT const & ) const = default; +#else + bool operator==( BindVertexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( stride == rhs.stride ); +# endif + } + + bool operator!=( BindVertexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; + uint32_t size = {}; + uint32_t stride = {}; + }; + struct BindVertexBufferIndirectCommandNV { using NativeType = VkBindVertexBufferIndirectCommandNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, uint32_t stride_ = {} ) VULKAN_HPP_NOEXCEPT - : bufferAddress( bufferAddress_ ) - , size( size_ ) - , stride( stride_ ) + : bufferAddress{ bufferAddress_ } + , size{ size_ } + , stride{ stride_ } { } @@ -8656,9 +10851,9 @@ namespace VULKAN_HPP_NAMESPACE : BindVertexBufferIndirectCommandNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BindVertexBufferIndirectCommandNV & operator=( BindVertexBufferIndirectCommandNV const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindVertexBufferIndirectCommandNV & operator=( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -8666,7 +10861,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT { bufferAddress = bufferAddress_; @@ -8684,7 +10879,7 @@ namespace VULKAN_HPP_NAMESPACE stride = stride_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindVertexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { @@ -8732,7 +10927,6 @@ namespace VULKAN_HPP_NAMESPACE uint32_t stride = {}; }; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) struct BindVideoSessionMemoryInfoKHR { using NativeType = VkBindVideoSessionMemoryInfoKHR; @@ -8740,17 +10934,17 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindVideoSessionMemoryInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindVideoSessionMemoryInfoKHR( uint32_t memoryBindIndex_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memoryBindIndex( memoryBindIndex_ ) - , memory( memory_ ) - , memoryOffset( memoryOffset_ ) - , memorySize( memorySize_ ) + : pNext{ pNext_ } + , memoryBindIndex{ memoryBindIndex_ } + , memory{ memory_ } + , memoryOffset{ memoryOffset_ } + , memorySize{ memorySize_ } { } @@ -8760,9 +10954,9 @@ namespace VULKAN_HPP_NAMESPACE : BindVideoSessionMemoryInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BindVideoSessionMemoryInfoKHR & operator=( BindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindVideoSessionMemoryInfoKHR & operator=( VkBindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -8770,7 +10964,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -8800,7 +10994,7 @@ namespace VULKAN_HPP_NAMESPACE memorySize = memorySize_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindVideoSessionMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -8812,41 +11006,41 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif +# endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, memoryBindIndex, memory, memoryOffset, memorySize ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( 
BindVideoSessionMemoryInfoKHR const & ) const = default; -# else +#else bool operator==( BindVideoSessionMemoryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else +# else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryBindIndex == rhs.memoryBindIndex ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ) && ( memorySize == rhs.memorySize ); -# endif +# endif } bool operator!=( BindVideoSessionMemoryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindVideoSessionMemoryInfoKHR; @@ -8862,21 +11056,118 @@ namespace VULKAN_HPP_NAMESPACE { using Type = BindVideoSessionMemoryInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct BlitImageCubicWeightsInfoQCOM + { + using NativeType = VkBlitImageCubicWeightsInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageCubicWeightsInfoQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BlitImageCubicWeightsInfoQCOM( VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights_ = VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM::eCatmullRom, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cubicWeights{ cubicWeights_ } + { + } + + VULKAN_HPP_CONSTEXPR BlitImageCubicWeightsInfoQCOM( BlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BlitImageCubicWeightsInfoQCOM( VkBlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : BlitImageCubicWeightsInfoQCOM( *reinterpret_cast( &rhs ) ) + { + } + + BlitImageCubicWeightsInfoQCOM & operator=( BlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BlitImageCubicWeightsInfoQCOM & operator=( VkBlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BlitImageCubicWeightsInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BlitImageCubicWeightsInfoQCOM & setCubicWeights( VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights_ ) VULKAN_HPP_NOEXCEPT + { + cubicWeights = cubicWeights_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBlitImageCubicWeightsInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBlitImageCubicWeightsInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cubicWeights ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BlitImageCubicWeightsInfoQCOM const & ) const = default; +#else + bool operator==( BlitImageCubicWeightsInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cubicWeights == rhs.cubicWeights ); +# 
endif + } + + bool operator!=( BlitImageCubicWeightsInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBlitImageCubicWeightsInfoQCOM; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights = VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM::eCatmullRom; + }; + + template <> + struct CppType + { + using Type = BlitImageCubicWeightsInfoQCOM; + }; struct ImageSubresourceLayers { using NativeType = VkImageSubresourceLayers; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT - : aspectMask( aspectMask_ ) - , mipLevel( mipLevel_ ) - , baseArrayLayer( baseArrayLayer_ ) - , layerCount( layerCount_ ) + : aspectMask{ aspectMask_ } + , mipLevel{ mipLevel_ } + , baseArrayLayer{ baseArrayLayer_ } + , layerCount{ layerCount_ } { } @@ -8886,9 +11177,9 @@ namespace VULKAN_HPP_NAMESPACE : ImageSubresourceLayers( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageSubresourceLayers & operator=( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageSubresourceLayers & operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -8896,7 +11187,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT { aspectMask = aspectMask_; @@ -8920,7 +11211,7 @@ namespace VULKAN_HPP_NAMESPACE layerCount = layerCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageSubresourceLayers const &() const VULKAN_HPP_NOEXCEPT { @@ -8976,26 +11267,26 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageBlit2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 ImageBlit2( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, std::array const & srcOffsets_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, std::array const & dstOffsets_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcSubresource( srcSubresource_ ) - , srcOffsets( srcOffsets_ ) - , dstSubresource( dstSubresource_ ) - , dstOffsets( dstOffsets_ ) + : pNext{ pNext_ } + , srcSubresource{ srcSubresource_ } + , srcOffsets{ srcOffsets_ } + , dstSubresource{ dstSubresource_ } + , dstOffsets{ dstOffsets_ } { } VULKAN_HPP_CONSTEXPR_14 ImageBlit2( ImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageBlit2( VkImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageBlit2( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ ImageBlit2 & operator=( ImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageBlit2 & operator=( VkImageBlit2 const & rhs ) 
VULKAN_HPP_NOEXCEPT { @@ -9003,7 +11294,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -9033,7 +11324,7 @@ namespace VULKAN_HPP_NAMESPACE dstOffsets = dstOffsets_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageBlit2 const &() const VULKAN_HPP_NOEXCEPT { @@ -9095,6 +11386,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = ImageBlit2; }; + using ImageBlit2KHR = ImageBlit2; struct BlitImageInfo2 @@ -9104,7 +11396,7 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageInfo2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, @@ -9113,14 +11405,14 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions_ = {}, VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcImage( srcImage_ ) - , srcImageLayout( srcImageLayout_ ) - , dstImage( dstImage_ ) - , dstImageLayout( dstImageLayout_ ) - , regionCount( regionCount_ ) - , pRegions( pRegions_ ) - , filter( filter_ ) + : pNext{ pNext_ } + , srcImage{ srcImage_ } + , srcImageLayout{ srcImageLayout_ } + , dstImage{ dstImage_ } + , dstImageLayout{ dstImageLayout_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } + , filter{ filter_ } { } @@ -9147,9 +11439,9 @@ namespace VULKAN_HPP_NAMESPACE { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BlitImageInfo2 & operator=( BlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BlitImageInfo2 & operator=( VkBlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -9157,7 +11449,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -9214,7 +11506,7 @@ namespace VULKAN_HPP_NAMESPACE filter = filter_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBlitImageInfo2 const &() const VULKAN_HPP_NOEXCEPT { @@ -9283,8 +11575,105 @@ namespace VULKAN_HPP_NAMESPACE { using Type = BlitImageInfo2; }; + using BlitImageInfo2KHR = BlitImageInfo2; + struct BufferCaptureDescriptorDataInfoEXT + { + using NativeType = VkBufferCaptureDescriptorDataInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCaptureDescriptorDataInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferCaptureDescriptorDataInfoEXT( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , buffer{ 
buffer_ } + { + } + + VULKAN_HPP_CONSTEXPR BufferCaptureDescriptorDataInfoEXT( BufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferCaptureDescriptorDataInfoEXT( VkBufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferCaptureDescriptorDataInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + BufferCaptureDescriptorDataInfoEXT & operator=( BufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BufferCaptureDescriptorDataInfoEXT & operator=( VkBufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferCaptureDescriptorDataInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBufferCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, buffer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferCaptureDescriptorDataInfoEXT const & ) const = default; +#else + bool operator==( BufferCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); +# endif + } + + bool operator!=( BufferCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCaptureDescriptorDataInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; + + template <> + struct CppType + { + using Type = BufferCaptureDescriptorDataInfoEXT; + }; + #if defined( VK_USE_PLATFORM_FUCHSIA ) struct BufferCollectionBufferCreateInfoFUCHSIA { @@ -9293,13 +11682,13 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionBufferCreateInfoFUCHSIA; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, uint32_t index_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , collection( collection_ ) - , index( index_ ) + : pNext{ pNext_ } + , collection{ collection_ } + , index{ index_ } { } @@ -9309,9 +11698,9 @@ namespace VULKAN_HPP_NAMESPACE : BufferCollectionBufferCreateInfoFUCHSIA( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferCollectionBufferCreateInfoFUCHSIA & operator=( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferCollectionBufferCreateInfoFUCHSIA & operator=( VkBufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -9319,7 +11708,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -9327,7 +11716,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & - setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT + setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT { collection = collection_; return *this; @@ -9338,7 +11727,7 @@ namespace VULKAN_HPP_NAMESPACE index = index_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferCollectionBufferCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { @@ -9402,19 +11791,19 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionConstraintsInfoFUCHSIA; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferCollectionConstraintsInfoFUCHSIA( uint32_t minBufferCount_ = {}, uint32_t maxBufferCount_ = {}, uint32_t minBufferCountForCamping_ = {}, uint32_t minBufferCountForDedicatedSlack_ = {}, uint32_t minBufferCountForSharedSlack_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , minBufferCount( minBufferCount_ ) - , maxBufferCount( maxBufferCount_ ) - , minBufferCountForCamping( minBufferCountForCamping_ ) - , minBufferCountForDedicatedSlack( minBufferCountForDedicatedSlack_ ) - , minBufferCountForSharedSlack( minBufferCountForSharedSlack_ ) + : pNext{ pNext_ } + , minBufferCount{ minBufferCount_ } + , maxBufferCount{ maxBufferCount_ } + , minBufferCountForCamping{ minBufferCountForCamping_ } + , minBufferCountForDedicatedSlack{ minBufferCountForDedicatedSlack_ } + , minBufferCountForSharedSlack{ minBufferCountForSharedSlack_ } { } @@ -9424,9 +11813,9 @@ namespace VULKAN_HPP_NAMESPACE : BufferCollectionConstraintsInfoFUCHSIA( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferCollectionConstraintsInfoFUCHSIA & operator=( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferCollectionConstraintsInfoFUCHSIA & operator=( VkBufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -9434,7 +11823,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -9460,19 +11849,19 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & - setMinBufferCountForDedicatedSlack( uint32_t minBufferCountForDedicatedSlack_ ) VULKAN_HPP_NOEXCEPT + setMinBufferCountForDedicatedSlack( uint32_t minBufferCountForDedicatedSlack_ ) 
VULKAN_HPP_NOEXCEPT { minBufferCountForDedicatedSlack = minBufferCountForDedicatedSlack_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & - setMinBufferCountForSharedSlack( uint32_t minBufferCountForSharedSlack_ ) VULKAN_HPP_NOEXCEPT + setMinBufferCountForSharedSlack( uint32_t minBufferCountForSharedSlack_ ) VULKAN_HPP_NOEXCEPT { minBufferCountForSharedSlack = minBufferCountForSharedSlack_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferCollectionConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { @@ -9547,10 +11936,10 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionCreateInfoFUCHSIA; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferCollectionCreateInfoFUCHSIA( zx_handle_t collectionToken_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , collectionToken( collectionToken_ ) + : pNext{ pNext_ } + , collectionToken{ collectionToken_ } { } @@ -9560,9 +11949,9 @@ namespace VULKAN_HPP_NAMESPACE : BufferCollectionCreateInfoFUCHSIA( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferCollectionCreateInfoFUCHSIA & operator=( BufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferCollectionCreateInfoFUCHSIA & operator=( VkBufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -9570,7 +11959,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -9582,7 +11971,7 @@ namespace VULKAN_HPP_NAMESPACE collectionToken = collectionToken_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferCollectionCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { @@ -9651,13 +12040,13 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionImageCreateInfoFUCHSIA; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, uint32_t index_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , collection( collection_ ) - , index( index_ ) + : pNext{ pNext_ } + , collection{ collection_ } + , index{ index_ } { } @@ -9667,9 +12056,9 @@ namespace VULKAN_HPP_NAMESPACE : BufferCollectionImageCreateInfoFUCHSIA( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferCollectionImageCreateInfoFUCHSIA & operator=( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferCollectionImageCreateInfoFUCHSIA & operator=( VkBufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -9677,7 +12066,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if 
!defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -9685,7 +12074,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & - setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT + setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT { collection = collection_; return *this; @@ -9696,7 +12085,7 @@ namespace VULKAN_HPP_NAMESPACE index = index_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferCollectionImageCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { @@ -9760,10 +12149,10 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSysmemColorSpaceFUCHSIA; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA( uint32_t colorSpace_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , colorSpace( colorSpace_ ) + : pNext{ pNext_ } + , colorSpace{ colorSpace_ } { } @@ -9773,9 +12162,9 @@ namespace VULKAN_HPP_NAMESPACE : SysmemColorSpaceFUCHSIA( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ SysmemColorSpaceFUCHSIA & operator=( SysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ SysmemColorSpaceFUCHSIA & operator=( VkSysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -9783,7 +12172,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 SysmemColorSpaceFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -9795,7 +12184,7 @@ namespace VULKAN_HPP_NAMESPACE colorSpace = colorSpace_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ operator VkSysmemColorSpaceFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { @@ -9858,7 +12247,7 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionPropertiesFUCHSIA; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferCollectionPropertiesFUCHSIA( uint32_t memoryTypeBits_ = {}, uint32_t bufferCount_ = {}, @@ -9872,18 +12261,18 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memoryTypeBits( memoryTypeBits_ ) - , bufferCount( bufferCount_ ) - , createInfoIndex( createInfoIndex_ ) - , sysmemPixelFormat( sysmemPixelFormat_ ) - , formatFeatures( formatFeatures_ ) - , sysmemColorSpaceIndex( sysmemColorSpaceIndex_ ) - , samplerYcbcrConversionComponents( 
samplerYcbcrConversionComponents_ ) - , suggestedYcbcrModel( suggestedYcbcrModel_ ) - , suggestedYcbcrRange( suggestedYcbcrRange_ ) - , suggestedXChromaOffset( suggestedXChromaOffset_ ) - , suggestedYChromaOffset( suggestedYChromaOffset_ ) + : pNext{ pNext_ } + , memoryTypeBits{ memoryTypeBits_ } + , bufferCount{ bufferCount_ } + , createInfoIndex{ createInfoIndex_ } + , sysmemPixelFormat{ sysmemPixelFormat_ } + , formatFeatures{ formatFeatures_ } + , sysmemColorSpaceIndex{ sysmemColorSpaceIndex_ } + , samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ } + , suggestedYcbcrModel{ suggestedYcbcrModel_ } + , suggestedYcbcrRange{ suggestedYcbcrRange_ } + , suggestedXChromaOffset{ suggestedXChromaOffset_ } + , suggestedYChromaOffset{ suggestedYChromaOffset_ } { } @@ -9893,9 +12282,9 @@ namespace VULKAN_HPP_NAMESPACE : BufferCollectionPropertiesFUCHSIA( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferCollectionPropertiesFUCHSIA & operator=( BufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferCollectionPropertiesFUCHSIA & operator=( VkBufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -9903,87 +12292,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setMemoryTypeBits( uint32_t memoryTypeBits_ ) VULKAN_HPP_NOEXCEPT - { - memoryTypeBits = memoryTypeBits_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setBufferCount( uint32_t bufferCount_ ) VULKAN_HPP_NOEXCEPT - { - bufferCount = bufferCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setCreateInfoIndex( uint32_t createInfoIndex_ ) VULKAN_HPP_NOEXCEPT - { - createInfoIndex = createInfoIndex_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & setSysmemPixelFormat( uint64_t sysmemPixelFormat_ ) VULKAN_HPP_NOEXCEPT - { - sysmemPixelFormat = sysmemPixelFormat_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & - setFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ ) VULKAN_HPP_NOEXCEPT - { - formatFeatures = formatFeatures_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & - setSysmemColorSpaceIndex( VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA const & sysmemColorSpaceIndex_ ) VULKAN_HPP_NOEXCEPT - { - sysmemColorSpaceIndex = sysmemColorSpaceIndex_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & - setSamplerYcbcrConversionComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & samplerYcbcrConversionComponents_ ) VULKAN_HPP_NOEXCEPT - { - samplerYcbcrConversionComponents = samplerYcbcrConversionComponents_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & - setSuggestedYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ ) VULKAN_HPP_NOEXCEPT - { - suggestedYcbcrModel = suggestedYcbcrModel_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & - setSuggestedYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ ) VULKAN_HPP_NOEXCEPT - { - suggestedYcbcrRange = suggestedYcbcrRange_; - return 
*this; - } - - VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & - setSuggestedXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ ) VULKAN_HPP_NOEXCEPT - { - suggestedXChromaOffset = suggestedXChromaOffset_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 BufferCollectionPropertiesFUCHSIA & - setSuggestedYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ ) VULKAN_HPP_NOEXCEPT - { - suggestedYChromaOffset = suggestedYChromaOffset_; - return *this; - } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkBufferCollectionPropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -10082,7 +12390,7 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, @@ -10090,13 +12398,13 @@ namespace VULKAN_HPP_NAMESPACE uint32_t queueFamilyIndexCount_ = {}, const uint32_t * pQueueFamilyIndices_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , size( size_ ) - , usage( usage_ ) - , sharingMode( sharingMode_ ) - , queueFamilyIndexCount( queueFamilyIndexCount_ ) - , pQueueFamilyIndices( pQueueFamilyIndices_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , size{ size_ } + , usage{ usage_ } + , sharingMode{ sharingMode_ } + , queueFamilyIndexCount{ queueFamilyIndexCount_ } + , pQueueFamilyIndices{ pQueueFamilyIndices_ } { } @@ -10121,9 +12429,9 @@ namespace VULKAN_HPP_NAMESPACE { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferCreateInfo & operator=( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferCreateInfo & operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -10131,7 +12439,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -10182,7 +12490,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT { @@ -10257,15 +12565,15 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferConstraintsInfoFUCHSIA; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferConstraintsInfoFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCreateInfo createInfo_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ = {}, VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , createInfo( createInfo_ ) - , requiredFormatFeatures( requiredFormatFeatures_ ) - , 
bufferCollectionConstraints( bufferCollectionConstraints_ ) + : pNext{ pNext_ } + , createInfo{ createInfo_ } + , requiredFormatFeatures{ requiredFormatFeatures_ } + , bufferCollectionConstraints{ bufferCollectionConstraints_ } { } @@ -10275,9 +12583,9 @@ namespace VULKAN_HPP_NAMESPACE : BufferConstraintsInfoFUCHSIA( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferConstraintsInfoFUCHSIA & operator=( BufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferConstraintsInfoFUCHSIA & operator=( VkBufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -10285,7 +12593,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -10299,7 +12607,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & - setRequiredFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ ) VULKAN_HPP_NOEXCEPT + setRequiredFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ ) VULKAN_HPP_NOEXCEPT { requiredFormatFeatures = requiredFormatFeatures_; return *this; @@ -10311,7 +12619,7 @@ namespace VULKAN_HPP_NAMESPACE bufferCollectionConstraints = bufferCollectionConstraints_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { @@ -10377,22 +12685,22 @@ namespace VULKAN_HPP_NAMESPACE { using NativeType = VkBufferCopy; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferCopy( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT - : srcOffset( srcOffset_ ) - , dstOffset( dstOffset_ ) - , size( size_ ) + : srcOffset{ srcOffset_ } + , dstOffset{ dstOffset_ } + , size{ size_ } { } VULKAN_HPP_CONSTEXPR BufferCopy( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCopy( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferCopy & operator=( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferCopy & operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -10400,7 +12708,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT { srcOffset = srcOffset_; @@ -10418,7 +12726,7 @@ namespace VULKAN_HPP_NAMESPACE size = size_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferCopy const &() const VULKAN_HPP_NOEXCEPT { @@ -10473,24 +12781,24 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCopy2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS 
) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferCopy2( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcOffset( srcOffset_ ) - , dstOffset( dstOffset_ ) - , size( size_ ) + : pNext{ pNext_ } + , srcOffset{ srcOffset_ } + , dstOffset{ dstOffset_ } + , size{ size_ } { } VULKAN_HPP_CONSTEXPR BufferCopy2( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferCopy2( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCopy2( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferCopy2 & operator=( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferCopy2 & operator=( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -10498,7 +12806,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -10522,7 +12830,7 @@ namespace VULKAN_HPP_NAMESPACE size = size_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferCopy2 const &() const VULKAN_HPP_NOEXCEPT { @@ -10581,6 +12889,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = BufferCopy2; }; + using BufferCopy2KHR = BufferCopy2; struct BufferDeviceAddressCreateInfoEXT @@ -10590,11 +12899,11 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceAddress( deviceAddress_ ) + : pNext{ pNext_ } + , deviceAddress{ deviceAddress_ } { } @@ -10604,9 +12913,9 @@ namespace VULKAN_HPP_NAMESPACE : BufferDeviceAddressCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferDeviceAddressCreateInfoEXT & operator=( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferDeviceAddressCreateInfoEXT & operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -10614,7 +12923,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -10626,7 +12935,7 @@ namespace VULKAN_HPP_NAMESPACE deviceAddress = deviceAddress_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferDeviceAddressCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -10687,10 +12996,10 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if 
!defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , buffer( buffer_ ) + : pNext{ pNext_ } + , buffer{ buffer_ } { } @@ -10700,9 +13009,9 @@ namespace VULKAN_HPP_NAMESPACE : BufferDeviceAddressInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferDeviceAddressInfo & operator=( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferDeviceAddressInfo & operator=( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -10710,7 +13019,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -10722,7 +13031,7 @@ namespace VULKAN_HPP_NAMESPACE buffer = buffer_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferDeviceAddressInfo const &() const VULKAN_HPP_NOEXCEPT { @@ -10775,6 +13084,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = BufferDeviceAddressInfo; }; + using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo; using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo; @@ -10782,28 +13092,28 @@ namespace VULKAN_HPP_NAMESPACE { using NativeType = VkBufferImageCopy; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferImageCopy( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, uint32_t bufferRowLength_ = {}, uint32_t bufferImageHeight_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT - : bufferOffset( bufferOffset_ ) - , bufferRowLength( bufferRowLength_ ) - , bufferImageHeight( bufferImageHeight_ ) - , imageSubresource( imageSubresource_ ) - , imageOffset( imageOffset_ ) - , imageExtent( imageExtent_ ) + : bufferOffset{ bufferOffset_ } + , bufferRowLength{ bufferRowLength_ } + , bufferImageHeight{ bufferImageHeight_ } + , imageSubresource{ imageSubresource_ } + , imageOffset{ imageOffset_ } + , imageExtent{ imageExtent_ } { } VULKAN_HPP_CONSTEXPR BufferImageCopy( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : BufferImageCopy( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferImageCopy & operator=( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferImageCopy & operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -10811,7 +13121,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT { bufferOffset = bufferOffset_; @@ -10847,7 +13157,7 @@ namespace VULKAN_HPP_NAMESPACE imageExtent = imageExtent_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ 
operator VkBufferImageCopy const &() const VULKAN_HPP_NOEXCEPT { @@ -10911,7 +13221,7 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferImageCopy2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferImageCopy2( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, uint32_t bufferRowLength_ = {}, uint32_t bufferImageHeight_ = {}, @@ -10919,22 +13229,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , bufferOffset( bufferOffset_ ) - , bufferRowLength( bufferRowLength_ ) - , bufferImageHeight( bufferImageHeight_ ) - , imageSubresource( imageSubresource_ ) - , imageOffset( imageOffset_ ) - , imageExtent( imageExtent_ ) + : pNext{ pNext_ } + , bufferOffset{ bufferOffset_ } + , bufferRowLength{ bufferRowLength_ } + , bufferImageHeight{ bufferImageHeight_ } + , imageSubresource{ imageSubresource_ } + , imageOffset{ imageOffset_ } + , imageExtent{ imageExtent_ } { } VULKAN_HPP_CONSTEXPR BufferImageCopy2( BufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferImageCopy2( VkBufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : BufferImageCopy2( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferImageCopy2 & operator=( BufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferImageCopy2 & operator=( VkBufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -10942,7 +13252,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -10984,7 +13294,7 @@ namespace VULKAN_HPP_NAMESPACE imageExtent = imageExtent_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferImageCopy2 const &() const VULKAN_HPP_NOEXCEPT { @@ -11051,6 +13361,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = BufferImageCopy2; }; + using BufferImageCopy2KHR = BufferImageCopy2; struct BufferMemoryBarrier @@ -11060,7 +13371,7 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, uint32_t srcQueueFamilyIndex_ = {}, @@ -11069,14 +13380,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcAccessMask( srcAccessMask_ ) - , dstAccessMask( dstAccessMask_ ) - , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) - , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) - , buffer( buffer_ ) - , offset( offset_ ) - , size( size_ ) + : pNext{ pNext_ } + , srcAccessMask{ srcAccessMask_ } + , dstAccessMask{ dstAccessMask_ } + , 
srcQueueFamilyIndex{ srcQueueFamilyIndex_ } + , dstQueueFamilyIndex{ dstQueueFamilyIndex_ } + , buffer{ buffer_ } + , offset{ offset_ } + , size{ size_ } { } @@ -11085,9 +13396,9 @@ namespace VULKAN_HPP_NAMESPACE BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT : BufferMemoryBarrier( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferMemoryBarrier & operator=( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferMemoryBarrier & operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -11095,7 +13406,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -11143,7 +13454,7 @@ namespace VULKAN_HPP_NAMESPACE size = size_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT { @@ -11220,7 +13531,7 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, @@ -11231,16 +13542,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcStageMask( srcStageMask_ ) - , srcAccessMask( srcAccessMask_ ) - , dstStageMask( dstStageMask_ ) - , dstAccessMask( dstAccessMask_ ) - , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) - , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) - , buffer( buffer_ ) - , offset( offset_ ) - , size( size_ ) + : pNext{ pNext_ } + , srcStageMask{ srcStageMask_ } + , srcAccessMask{ srcAccessMask_ } + , dstStageMask{ dstStageMask_ } + , dstAccessMask{ dstAccessMask_ } + , srcQueueFamilyIndex{ srcQueueFamilyIndex_ } + , dstQueueFamilyIndex{ dstQueueFamilyIndex_ } + , buffer{ buffer_ } + , offset{ offset_ } + , size{ size_ } { } @@ -11250,9 +13561,9 @@ namespace VULKAN_HPP_NAMESPACE : BufferMemoryBarrier2( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferMemoryBarrier2 & operator=( BufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferMemoryBarrier2 & operator=( VkBufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -11260,7 +13571,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -11320,7 +13631,7 @@ namespace VULKAN_HPP_NAMESPACE size = size_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT { @@ -11393,6 +13704,7 @@ namespace 
VULKAN_HPP_NAMESPACE { using Type = BufferMemoryBarrier2; }; + using BufferMemoryBarrier2KHR = BufferMemoryBarrier2; struct BufferMemoryRequirementsInfo2 @@ -11402,10 +13714,10 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryRequirementsInfo2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , buffer( buffer_ ) + : pNext{ pNext_ } + , buffer{ buffer_ } { } @@ -11415,9 +13727,9 @@ namespace VULKAN_HPP_NAMESPACE : BufferMemoryRequirementsInfo2( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferMemoryRequirementsInfo2 & operator=( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferMemoryRequirementsInfo2 & operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -11425,7 +13737,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -11437,7 +13749,7 @@ namespace VULKAN_HPP_NAMESPACE buffer = buffer_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT { @@ -11490,6 +13802,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = BufferMemoryRequirementsInfo2; }; + using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2; struct BufferOpaqueCaptureAddressCreateInfo @@ -11499,10 +13812,10 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferOpaqueCaptureAddressCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( uint64_t opaqueCaptureAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , opaqueCaptureAddress( opaqueCaptureAddress_ ) + : pNext{ pNext_ } + , opaqueCaptureAddress{ opaqueCaptureAddress_ } { } @@ -11512,9 +13825,9 @@ namespace VULKAN_HPP_NAMESPACE : BufferOpaqueCaptureAddressCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferOpaqueCaptureAddressCreateInfo & operator=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferOpaqueCaptureAddressCreateInfo & operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -11522,7 +13835,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -11534,7 +13847,7 @@ namespace VULKAN_HPP_NAMESPACE opaqueCaptureAddress = opaqueCaptureAddress_; return *this; } -#endif 
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferOpaqueCaptureAddressCreateInfo const &() const VULKAN_HPP_NOEXCEPT { @@ -11587,8 +13900,107 @@ namespace VULKAN_HPP_NAMESPACE { using Type = BufferOpaqueCaptureAddressCreateInfo; }; + using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo; + struct BufferUsageFlags2CreateInfo + { + using NativeType = VkBufferUsageFlags2CreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferUsageFlags2CreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferUsageFlags2CreateInfo( VULKAN_HPP_NAMESPACE::BufferUsageFlags2 usage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , usage{ usage_ } + { + } + + VULKAN_HPP_CONSTEXPR BufferUsageFlags2CreateInfo( BufferUsageFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferUsageFlags2CreateInfo( VkBufferUsageFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferUsageFlags2CreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + BufferUsageFlags2CreateInfo & operator=( BufferUsageFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BufferUsageFlags2CreateInfo & operator=( VkBufferUsageFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferUsageFlags2CreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferUsageFlags2CreateInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags2 usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkBufferUsageFlags2CreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferUsageFlags2CreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, usage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferUsageFlags2CreateInfo const & ) const = default; +#else + bool operator==( BufferUsageFlags2CreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage ); +# endif + } + + bool operator!=( BufferUsageFlags2CreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferUsageFlags2CreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferUsageFlags2 usage = {}; + }; + + template <> + struct CppType + { + using Type = BufferUsageFlags2CreateInfo; + }; + + using BufferUsageFlags2CreateInfoKHR = BufferUsageFlags2CreateInfo; + struct BufferViewCreateInfo { using NativeType = VkBufferViewCreateInfo; @@ -11596,19 +14008,19 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eBufferViewCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , buffer( buffer_ ) - , format( format_ ) - , offset( offset_ ) - , range( range_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , buffer{ buffer_ } + , format{ format_ } + , offset{ offset_ } + , range{ range_ } { } @@ -11618,9 +14030,9 @@ namespace VULKAN_HPP_NAMESPACE : BufferViewCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ BufferViewCreateInfo & operator=( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferViewCreateInfo & operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -11628,7 +14040,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -11664,7 +14076,7 @@ namespace VULKAN_HPP_NAMESPACE range = range_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT { @@ -11729,2399 +14141,2497 @@ namespace VULKAN_HPP_NAMESPACE using Type = BufferViewCreateInfo; }; - struct CalibratedTimestampInfoEXT + struct StridedDeviceAddressNV { - using NativeType = VkCalibratedTimestampInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCalibratedTimestampInfoEXT; + using NativeType = VkStridedDeviceAddressNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , timeDomain( timeDomain_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR StridedDeviceAddressNV( VULKAN_HPP_NAMESPACE::DeviceAddress startAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize strideInBytes_ = {} ) VULKAN_HPP_NOEXCEPT + : startAddress{ startAddress_ } + , strideInBytes{ strideInBytes_ } { } - VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR StridedDeviceAddressNV( StridedDeviceAddressNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - CalibratedTimestampInfoEXT( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : CalibratedTimestampInfoEXT( *reinterpret_cast( &rhs ) ) + StridedDeviceAddressNV( VkStridedDeviceAddressNV const & rhs ) VULKAN_HPP_NOEXCEPT + : StridedDeviceAddressNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CalibratedTimestampInfoEXT & operator=( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + StridedDeviceAddressNV & operator=( 
StridedDeviceAddressNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - CalibratedTimestampInfoEXT & operator=( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + StridedDeviceAddressNV & operator=( VkStridedDeviceAddressNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressNV & setStartAddress( VULKAN_HPP_NAMESPACE::DeviceAddress startAddress_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + startAddress = startAddress_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoEXT & setTimeDomain( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressNV & setStrideInBytes( VULKAN_HPP_NAMESPACE::DeviceSize strideInBytes_ ) VULKAN_HPP_NOEXCEPT { - timeDomain = timeDomain_; + strideInBytes = strideInBytes_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkCalibratedTimestampInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkStridedDeviceAddressNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCalibratedTimestampInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkStridedDeviceAddressNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, timeDomain ); + return std::tie( startAddress, strideInBytes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CalibratedTimestampInfoEXT const & ) const = default; + auto operator<=>( StridedDeviceAddressNV const & ) const = default; #else - bool operator==( CalibratedTimestampInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( StridedDeviceAddressNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timeDomain == rhs.timeDomain ); + return ( startAddress == rhs.startAddress ) && ( strideInBytes == rhs.strideInBytes ); # endif } - bool operator!=( CalibratedTimestampInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( StridedDeviceAddressNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCalibratedTimestampInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice; + VULKAN_HPP_NAMESPACE::DeviceAddress startAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize strideInBytes = {}; }; - template <> - struct CppType + struct BuildPartitionedAccelerationStructureIndirectCommandNV { - using Type = CalibratedTimestampInfoEXT; - }; + using NativeType = VkBuildPartitionedAccelerationStructureIndirectCommandNV; - struct CheckpointData2NV - { - using NativeType = VkCheckpointData2NV; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
VULKAN_HPP_CONSTEXPR BuildPartitionedAccelerationStructureIndirectCommandNV( + VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureOpTypeNV opType_ = VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureOpTypeNV::eWriteInstance, + uint32_t argCount_ = {}, + VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV argData_ = {} ) VULKAN_HPP_NOEXCEPT + : opType{ opType_ } + , argCount{ argCount_ } + , argData{ argData_ } + { + } - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointData2NV; + VULKAN_HPP_CONSTEXPR BuildPartitionedAccelerationStructureIndirectCommandNV( BuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - CheckpointData2NV( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage_ = {}, void * pCheckpointMarker_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stage( stage_ ) - , pCheckpointMarker( pCheckpointMarker_ ) + BuildPartitionedAccelerationStructureIndirectCommandNV( VkBuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : BuildPartitionedAccelerationStructureIndirectCommandNV( *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR CheckpointData2NV( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + BuildPartitionedAccelerationStructureIndirectCommandNV & + operator=( BuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - CheckpointData2NV( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT : CheckpointData2NV( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + BuildPartitionedAccelerationStructureIndirectCommandNV & + operator=( VkBuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } - CheckpointData2NV & operator=( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureIndirectCommandNV & + setOpType( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureOpTypeNV opType_ ) VULKAN_HPP_NOEXCEPT + { + opType = opType_; + return *this; + } - CheckpointData2NV & operator=( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureIndirectCommandNV & setArgCount( uint32_t argCount_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + argCount = argCount_; return *this; } - operator VkCheckpointData2NV const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureIndirectCommandNV & + setArgData( VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV const & argData_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + argData = argData_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkCheckpointData2NV &() VULKAN_HPP_NOEXCEPT + operator VkBuildPartitionedAccelerationStructureIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkBuildPartitionedAccelerationStructureIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - 
std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, stage, pCheckpointMarker ); + return std::tie( opType, argCount, argData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CheckpointData2NV const & ) const = default; + auto operator<=>( BuildPartitionedAccelerationStructureIndirectCommandNV const & ) const = default; #else - bool operator==( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && ( pCheckpointMarker == rhs.pCheckpointMarker ); + return ( opType == rhs.opType ) && ( argCount == rhs.argCount ) && ( argData == rhs.argData ); # endif } - bool operator!=( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointData2NV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage = {}; - void * pCheckpointMarker = {}; - }; - - template <> - struct CppType - { - using Type = CheckpointData2NV; + VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureOpTypeNV opType = VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureOpTypeNV::eWriteInstance; + uint32_t argCount = {}; + VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV argData = {}; }; - struct CheckpointDataNV + struct PartitionedAccelerationStructureInstancesInputNV { - using NativeType = VkCheckpointDataNV; + using NativeType = VkPartitionedAccelerationStructureInstancesInputNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointDataNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePartitionedAccelerationStructureInstancesInputNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR CheckpointDataNV( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe, - void * pCheckpointMarker_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stage( stage_ ) - , pCheckpointMarker( pCheckpointMarker_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PartitionedAccelerationStructureInstancesInputNV( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, + uint32_t instanceCount_ = {}, + uint32_t maxInstancePerPartitionCount_ = {}, + uint32_t partitionCount_ = {}, + uint32_t maxInstanceInGlobalPartitionCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , instanceCount{ instanceCount_ } + , maxInstancePerPartitionCount{ maxInstancePerPartitionCount_ } + , partitionCount{ partitionCount_ } + , maxInstanceInGlobalPartitionCount{ maxInstanceInGlobalPartitionCount_ } { } - VULKAN_HPP_CONSTEXPR CheckpointDataNV( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + PartitionedAccelerationStructureInstancesInputNV( PartitionedAccelerationStructureInstancesInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - CheckpointDataNV( VkCheckpointDataNV const & 
rhs ) VULKAN_HPP_NOEXCEPT : CheckpointDataNV( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + PartitionedAccelerationStructureInstancesInputNV( VkPartitionedAccelerationStructureInstancesInputNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PartitionedAccelerationStructureInstancesInputNV( *reinterpret_cast( &rhs ) ) + { + } - CheckpointDataNV & operator=( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PartitionedAccelerationStructureInstancesInputNV & operator=( PartitionedAccelerationStructureInstancesInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - CheckpointDataNV & operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + PartitionedAccelerationStructureInstancesInputNV & operator=( VkPartitionedAccelerationStructureInstancesInputNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkCheckpointDataNV const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkCheckpointDataNV &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & + setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT + { + instanceCount = instanceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & + setMaxInstancePerPartitionCount( uint32_t maxInstancePerPartitionCount_ ) VULKAN_HPP_NOEXCEPT + { + maxInstancePerPartitionCount = maxInstancePerPartitionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & setPartitionCount( uint32_t partitionCount_ ) VULKAN_HPP_NOEXCEPT + { + partitionCount = partitionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & + setMaxInstanceInGlobalPartitionCount( uint32_t maxInstanceInGlobalPartitionCount_ ) VULKAN_HPP_NOEXCEPT + { + maxInstanceInGlobalPartitionCount = maxInstanceInGlobalPartitionCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPartitionedAccelerationStructureInstancesInputNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPartitionedAccelerationStructureInstancesInputNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, stage, pCheckpointMarker ); + return std::tie( sType, pNext, flags, instanceCount, maxInstancePerPartitionCount, partitionCount, maxInstanceInGlobalPartitionCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CheckpointDataNV const & ) const = default; + auto operator<=>( PartitionedAccelerationStructureInstancesInputNV const & ) const = default; #else - bool operator==( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( 
PartitionedAccelerationStructureInstancesInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && ( pCheckpointMarker == rhs.pCheckpointMarker ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( instanceCount == rhs.instanceCount ) && + ( maxInstancePerPartitionCount == rhs.maxInstancePerPartitionCount ) && ( partitionCount == rhs.partitionCount ) && + ( maxInstanceInGlobalPartitionCount == rhs.maxInstanceInGlobalPartitionCount ); # endif } - bool operator!=( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PartitionedAccelerationStructureInstancesInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointDataNV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe; - void * pCheckpointMarker = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePartitionedAccelerationStructureInstancesInputNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {}; + uint32_t instanceCount = {}; + uint32_t maxInstancePerPartitionCount = {}; + uint32_t partitionCount = {}; + uint32_t maxInstanceInGlobalPartitionCount = {}; }; template <> - struct CppType + struct CppType { - using Type = CheckpointDataNV; + using Type = PartitionedAccelerationStructureInstancesInputNV; }; - union ClearColorValue + struct BuildPartitionedAccelerationStructureInfoNV { - using NativeType = VkClearColorValue; -#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) - - VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array & float32_ = {} ) : float32( float32_ ) {} + using NativeType = VkBuildPartitionedAccelerationStructureInfoNV; - VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array & int32_ ) : int32( int32_ ) {} - - VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array & uint32_ ) : uint32( uint32_ ) {} -#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBuildPartitionedAccelerationStructureInfoNV; -#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setFloat32( std::array float32_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BuildPartitionedAccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV input_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress srcAccelerationStructureData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress dstAccelerationStructureData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress scratchData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress srcInfos_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress srcInfosCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , input{ input_ } + , srcAccelerationStructureData{ srcAccelerationStructureData_ } + , dstAccelerationStructureData{ dstAccelerationStructureData_ } + , scratchData{ scratchData_ } + , srcInfos{ srcInfos_ } + , srcInfosCount{ srcInfosCount_ } { - float32 = float32_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setInt32( std::array int32_ ) VULKAN_HPP_NOEXCEPT + 
VULKAN_HPP_CONSTEXPR BuildPartitionedAccelerationStructureInfoNV( BuildPartitionedAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BuildPartitionedAccelerationStructureInfoNV( VkBuildPartitionedAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : BuildPartitionedAccelerationStructureInfoNV( *reinterpret_cast( &rhs ) ) { - int32 = int32_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setUint32( std::array uint32_ ) VULKAN_HPP_NOEXCEPT + BuildPartitionedAccelerationStructureInfoNV & operator=( BuildPartitionedAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + BuildPartitionedAccelerationStructureInfoNV & operator=( VkBuildPartitionedAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - uint32 = uint32_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ - operator VkClearColorValue const &() const +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkClearColorValue &() + VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & + setInput( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV const & input_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + input = input_; + return *this; } - VULKAN_HPP_NAMESPACE::ArrayWrapper1D float32; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D int32; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D uint32; - }; - - struct ClearDepthStencilValue - { - using NativeType = VkClearDepthStencilValue; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( float depth_ = {}, uint32_t stencil_ = {} ) VULKAN_HPP_NOEXCEPT - : depth( depth_ ) - , stencil( stencil_ ) + VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & + setSrcAccelerationStructureData( VULKAN_HPP_NAMESPACE::DeviceAddress srcAccelerationStructureData_ ) VULKAN_HPP_NOEXCEPT { + srcAccelerationStructureData = srcAccelerationStructureData_; + return *this; } - VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT - : ClearDepthStencilValue( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & + setDstAccelerationStructureData( VULKAN_HPP_NAMESPACE::DeviceAddress dstAccelerationStructureData_ ) VULKAN_HPP_NOEXCEPT { + dstAccelerationStructureData = dstAccelerationStructureData_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - ClearDepthStencilValue & operator=( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ClearDepthStencilValue & operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & setScratchData( VULKAN_HPP_NAMESPACE::DeviceAddress scratchData_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + scratchData = scratchData_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & setDepth( float depth_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & setSrcInfos( 
VULKAN_HPP_NAMESPACE::DeviceAddress srcInfos_ ) VULKAN_HPP_NOEXCEPT { - depth = depth_; + srcInfos = srcInfos_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & setStencil( uint32_t stencil_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & + setSrcInfosCount( VULKAN_HPP_NAMESPACE::DeviceAddress srcInfosCount_ ) VULKAN_HPP_NOEXCEPT { - stencil = stencil_; + srcInfosCount = srcInfosCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkClearDepthStencilValue const &() const VULKAN_HPP_NOEXCEPT + operator VkBuildPartitionedAccelerationStructureInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkClearDepthStencilValue &() VULKAN_HPP_NOEXCEPT + operator VkBuildPartitionedAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( depth, stencil ); + return std::tie( sType, pNext, input, srcAccelerationStructureData, dstAccelerationStructureData, scratchData, srcInfos, srcInfosCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ClearDepthStencilValue const & ) const = default; + auto operator<=>( BuildPartitionedAccelerationStructureInfoNV const & ) const = default; #else - bool operator==( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BuildPartitionedAccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( depth == rhs.depth ) && ( stencil == rhs.stencil ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( input == rhs.input ) && + ( srcAccelerationStructureData == rhs.srcAccelerationStructureData ) && ( dstAccelerationStructureData == rhs.dstAccelerationStructureData ) && + ( scratchData == rhs.scratchData ) && ( srcInfos == rhs.srcInfos ) && ( srcInfosCount == rhs.srcInfosCount ); # endif } - bool operator!=( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BuildPartitionedAccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - float depth = {}; - uint32_t stencil = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBuildPartitionedAccelerationStructureInfoNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstancesInputNV input = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress srcAccelerationStructureData = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress dstAccelerationStructureData = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress scratchData = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress srcInfos = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress srcInfosCount = {}; }; - union ClearValue + template <> + struct CppType { - using NativeType = VkClearValue; -#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) - - VULKAN_HPP_CONSTEXPR_14 ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} ) : color( color_ ) {} - - VULKAN_HPP_CONSTEXPR_14 ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ ) : depthStencil( depthStencil_ ) {} -#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ - -#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) - 
VULKAN_HPP_CONSTEXPR_14 ClearValue & setColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & color_ ) VULKAN_HPP_NOEXCEPT - { - color = color_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ClearValue & setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & depthStencil_ ) VULKAN_HPP_NOEXCEPT - { - depthStencil = depthStencil_; - return *this; - } -#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ - - operator VkClearValue const &() const - { - return *reinterpret_cast( this ); - } - - operator VkClearValue &() - { - return *reinterpret_cast( this ); - } - -#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS - VULKAN_HPP_NAMESPACE::ClearColorValue color; - VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil; -#else - VkClearColorValue color; - VkClearDepthStencilValue depthStencil; -#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + using Type = BuildPartitionedAccelerationStructureInfoNV; }; - struct ClearAttachment + struct CalibratedTimestampInfoKHR { - using NativeType = VkClearAttachment; + using NativeType = VkCalibratedTimestampInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 ClearAttachment( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, - uint32_t colorAttachment_ = {}, - VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {} ) VULKAN_HPP_NOEXCEPT - : aspectMask( aspectMask_ ) - , colorAttachment( colorAttachment_ ) - , clearValue( clearValue_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCalibratedTimestampInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoKHR( VULKAN_HPP_NAMESPACE::TimeDomainKHR timeDomain_ = VULKAN_HPP_NAMESPACE::TimeDomainKHR::eDevice, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , timeDomain{ timeDomain_ } { } - VULKAN_HPP_CONSTEXPR_14 ClearAttachment( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT : ClearAttachment( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - ClearAttachment & operator=( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoKHR( CalibratedTimestampInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ClearAttachment & operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT + CalibratedTimestampInfoKHR( VkCalibratedTimestampInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CalibratedTimestampInfoKHR( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + CalibratedTimestampInfoKHR & operator=( CalibratedTimestampInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CalibratedTimestampInfoKHR & operator=( VkCalibratedTimestampInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - aspectMask = aspectMask_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setColorAttachment( uint32_t colorAttachment_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - colorAttachment = 
colorAttachment_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoKHR & setTimeDomain( VULKAN_HPP_NAMESPACE::TimeDomainKHR timeDomain_ ) VULKAN_HPP_NOEXCEPT { - clearValue = clearValue_; + timeDomain = timeDomain_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkClearAttachment const &() const VULKAN_HPP_NOEXCEPT + operator VkCalibratedTimestampInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkClearAttachment &() VULKAN_HPP_NOEXCEPT + operator VkCalibratedTimestampInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( aspectMask, colorAttachment, clearValue ); + return std::tie( sType, pNext, timeDomain ); } #endif - public: - VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; - uint32_t colorAttachment = {}; - VULKAN_HPP_NAMESPACE::ClearValue clearValue = {}; - }; - - struct ClearRect - { - using NativeType = VkClearRect; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ClearRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT - : rect( rect_ ) - , baseArrayLayer( baseArrayLayer_ ) - , layerCount( layerCount_ ) +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CalibratedTimestampInfoKHR const & ) const = default; +#else + bool operator==( CalibratedTimestampInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timeDomain == rhs.timeDomain ); +# endif } - VULKAN_HPP_CONSTEXPR ClearRect( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default; + bool operator!=( CalibratedTimestampInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif - ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT : ClearRect( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCalibratedTimestampInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::TimeDomainKHR timeDomain = VULKAN_HPP_NAMESPACE::TimeDomainKHR::eDevice; + }; - ClearRect & operator=( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default; + template <> + struct CppType + { + using Type = CalibratedTimestampInfoKHR; + }; - ClearRect & operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + using CalibratedTimestampInfoEXT = CalibratedTimestampInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ClearRect & setRect( VULKAN_HPP_NAMESPACE::Rect2D const & rect_ ) VULKAN_HPP_NOEXCEPT - { - rect = rect_; - return *this; - } + struct CheckpointData2NV + { + using NativeType = VkCheckpointData2NV; - VULKAN_HPP_CONSTEXPR_14 ClearRect & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eCheckpointData2NV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CheckpointData2NV( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage_ = {}, void * pCheckpointMarker_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stage{ stage_ } + , pCheckpointMarker{ pCheckpointMarker_ } { - baseArrayLayer = baseArrayLayer_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 ClearRect & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR CheckpointData2NV( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CheckpointData2NV( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT : CheckpointData2NV( *reinterpret_cast( &rhs ) ) {} + + CheckpointData2NV & operator=( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CheckpointData2NV & operator=( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT { - layerCount = layerCount_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkClearRect const &() const VULKAN_HPP_NOEXCEPT + operator VkCheckpointData2NV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkClearRect &() VULKAN_HPP_NOEXCEPT + operator VkCheckpointData2NV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( rect, baseArrayLayer, layerCount ); + return std::tie( sType, pNext, stage, pCheckpointMarker ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ClearRect const & ) const = default; + auto operator<=>( CheckpointData2NV const & ) const = default; #else - bool operator==( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( rect == rhs.rect ) && ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && ( pCheckpointMarker == rhs.pCheckpointMarker ); # endif } - bool operator!=( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::Rect2D rect = {}; - uint32_t baseArrayLayer = {}; - uint32_t layerCount = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointData2NV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage = {}; + void * pCheckpointMarker = {}; }; - struct CoarseSampleLocationNV + template <> + struct CppType { - using NativeType = VkCoarseSampleLocationNV; + using Type = CheckpointData2NV; + }; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( uint32_t pixelX_ = {}, uint32_t pixelY_ = {}, uint32_t sample_ = {} ) VULKAN_HPP_NOEXCEPT - : pixelX( pixelX_ ) - , pixelY( pixelY_ ) - , sample( sample_ ) - { - } + struct CheckpointDataNV + { + using NativeType = VkCheckpointDataNV; - VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static 
const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointDataNV; - CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT - : CoarseSampleLocationNV( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CheckpointDataNV( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe, + void * pCheckpointMarker_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stage{ stage_ } + , pCheckpointMarker{ pCheckpointMarker_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CoarseSampleLocationNV & operator=( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - CoarseSampleLocationNV & operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + VULKAN_HPP_CONSTEXPR CheckpointDataNV( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setPixelX( uint32_t pixelX_ ) VULKAN_HPP_NOEXCEPT - { - pixelX = pixelX_; - return *this; - } + CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : CheckpointDataNV( *reinterpret_cast( &rhs ) ) {} - VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setPixelY( uint32_t pixelY_ ) VULKAN_HPP_NOEXCEPT - { - pixelY = pixelY_; - return *this; - } + CheckpointDataNV & operator=( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setSample( uint32_t sample_ ) VULKAN_HPP_NOEXCEPT + CheckpointDataNV & operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT { - sample = sample_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkCoarseSampleLocationNV const &() const VULKAN_HPP_NOEXCEPT + operator VkCheckpointDataNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCoarseSampleLocationNV &() VULKAN_HPP_NOEXCEPT + operator VkCheckpointDataNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( pixelX, pixelY, sample ); + return std::tie( sType, pNext, stage, pCheckpointMarker ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CoarseSampleLocationNV const & ) const = default; + auto operator<=>( CheckpointDataNV const & ) const = default; #else - bool operator==( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( pixelX == rhs.pixelX ) && ( pixelY == rhs.pixelY ) && ( sample == rhs.sample ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && ( pCheckpointMarker == rhs.pCheckpointMarker ); # endif } - bool operator!=( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: 
- uint32_t pixelX = {}; - uint32_t pixelY = {}; - uint32_t sample = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointDataNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe; + void * pCheckpointMarker = {}; }; - struct CoarseSampleOrderCustomNV + template <> + struct CppType { - using NativeType = VkCoarseSampleOrderCustomNV; + using Type = CheckpointDataNV; + }; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations, - uint32_t sampleCount_ = {}, - uint32_t sampleLocationCount_ = {}, - const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT - : shadingRate( shadingRate_ ) - , sampleCount( sampleCount_ ) - , sampleLocationCount( sampleLocationCount_ ) - , pSampleLocations( pSampleLocations_ ) + union ClearColorValue + { + using NativeType = VkClearColorValue; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array & float32_ = {} ) : float32( float32_ ) {} + + VULKAN_HPP_CONSTEXPR ClearColorValue( float float32_0, float float32_1, float float32_2, float float32_3 ) + : float32{ { { float32_0, float32_1, float32_2, float32_3 } } } { } - VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array & int32_ ) : int32( int32_ ) {} - CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT - : CoarseSampleOrderCustomNV( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR ClearColorValue( int32_t int32_0, int32_t int32_1, int32_t int32_2, int32_t int32_3 ) + : int32{ { { int32_0, int32_1, int32_2, int32_3 } } } { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_, - uint32_t sampleCount_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sampleLocations_ ) - : shadingRate( shadingRate_ ) - , sampleCount( sampleCount_ ) - , sampleLocationCount( static_cast( sampleLocations_.size() ) ) - , pSampleLocations( sampleLocations_.data() ) + VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array & uint32_ ) : uint32( uint32_ ) {} + + VULKAN_HPP_CONSTEXPR ClearColorValue( uint32_t uint32_0, uint32_t uint32_1, uint32_t uint32_2, uint32_t uint32_3 ) + : uint32{ { { uint32_0, uint32_1, uint32_2, uint32_3 } } } { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - CoarseSampleOrderCustomNV & operator=( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - CoarseSampleOrderCustomNV & operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setFloat32( std::array float32_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + float32 = float32_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setShadingRate( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClearColorValue & 
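CheckpointData2NV and CheckpointDataNV are read-back structures: the implementation fills them when the application asks a queue for the diagnostic checkpoints recorded with vkCmdSetCheckpointNV, typically after a device-lost error. A sketch of the usual two-call enumeration follows, assuming VK_NV_device_diagnostic_checkpoints is enabled and the vulkan.hpp queue wrapper for vkGetQueueCheckpointData2NV is available; the helper name is illustrative.

#include <vulkan/vulkan.hpp>
#include <vector>

// Sketch: read back the checkpoint markers recorded with vkCmdSetCheckpointNV,
// e.g. while diagnosing a VK_ERROR_DEVICE_LOST. Assumes `queue` belongs to a device
// with VK_NV_device_diagnostic_checkpoints enabled.
std::vector<void *> collectCheckpointMarkers( vk::Queue queue )
{
  uint32_t count = 0;
  queue.getCheckpointData2NV( &count, nullptr );          // first call: size query

  std::vector<vk::CheckpointData2NV> checkpoints( count );
  queue.getCheckpointData2NV( &count, checkpoints.data() );  // second call: fill

  std::vector<void *> markers;
  markers.reserve( checkpoints.size() );
  for ( vk::CheckpointData2NV const & data : checkpoints )
  {
    // `stage` reports the pipeline stage that last processed the marker;
    // `pCheckpointMarker` is the opaque pointer passed to vkCmdSetCheckpointNV.
    markers.push_back( data.pCheckpointMarker );
  }
  return markers;
}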
setInt32( std::array int32_ ) VULKAN_HPP_NOEXCEPT { - shadingRate = shadingRate_; + int32 = int32_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setSampleCount( uint32_t sampleCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setUint32( std::array uint32_ ) VULKAN_HPP_NOEXCEPT { - sampleCount = sampleCount_; + uint32 = uint32_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setSampleLocationCount( uint32_t sampleLocationCount_ ) VULKAN_HPP_NOEXCEPT + operator VkClearColorValue const &() const { - sampleLocationCount = sampleLocationCount_; + return *reinterpret_cast( this ); + } + + operator VkClearColorValue &() + { + return *reinterpret_cast( this ); + } + + VULKAN_HPP_NAMESPACE::ArrayWrapper1D float32; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D int32; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D uint32; + }; + + struct ClearDepthStencilValue + { + using NativeType = VkClearDepthStencilValue; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( float depth_ = {}, uint32_t stencil_ = {} ) VULKAN_HPP_NOEXCEPT + : depth{ depth_ } + , stencil{ stencil_ } + { + } + + VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT + : ClearDepthStencilValue( *reinterpret_cast( &rhs ) ) + { + } + + ClearDepthStencilValue & operator=( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ClearDepthStencilValue & operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & - setPSampleLocations( const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & setDepth( float depth_ ) VULKAN_HPP_NOEXCEPT { - pSampleLocations = pSampleLocations_; + depth = depth_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - CoarseSampleOrderCustomNV & setSampleLocations( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & setStencil( uint32_t stencil_ ) VULKAN_HPP_NOEXCEPT { - sampleLocationCount = static_cast( sampleLocations_.size() ); - pSampleLocations = sampleLocations_.data(); + stencil = stencil_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkCoarseSampleOrderCustomNV const &() const VULKAN_HPP_NOEXCEPT + operator VkClearDepthStencilValue const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCoarseSampleOrderCustomNV &() VULKAN_HPP_NOEXCEPT + operator VkClearDepthStencilValue &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( shadingRate, sampleCount, sampleLocationCount, pSampleLocations ); + return std::tie( depth, stencil ); } #endif #if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CoarseSampleOrderCustomNV const & ) const = default; + auto operator<=>( ClearDepthStencilValue const & ) const = default; #else - bool operator==( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( shadingRate == rhs.shadingRate ) && ( sampleCount == rhs.sampleCount ) && ( sampleLocationCount == rhs.sampleLocationCount ) && - ( pSampleLocations == rhs.pSampleLocations ); + return ( depth == rhs.depth ) && ( stencil == rhs.stencil ); # endif } - bool operator!=( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations; - uint32_t sampleCount = {}; - uint32_t sampleLocationCount = {}; - const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations = {}; + float depth = {}; + uint32_t stencil = {}; }; - struct CommandBufferAllocateInfo + union ClearValue { - using NativeType = VkCommandBufferAllocateInfo; + using NativeType = VkClearValue; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferAllocateInfo; + VULKAN_HPP_CONSTEXPR_14 ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} ) : color( color_ ) {} -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {}, - VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary, - uint32_t commandBufferCount_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , commandPool( commandPool_ ) - , level( level_ ) - , commandBufferCount( commandBufferCount_ ) + VULKAN_HPP_CONSTEXPR_14 ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ ) : depthStencil( depthStencil_ ) {} +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClearValue & setColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & color_ ) VULKAN_HPP_NOEXCEPT { + color = color_; + return *this; } - VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 ClearValue & setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & depthStencil_ ) VULKAN_HPP_NOEXCEPT + { + depthStencil = depthStencil_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ - CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : CommandBufferAllocateInfo( *reinterpret_cast( &rhs ) ) + operator VkClearValue const &() const { + return *reinterpret_cast( this ); } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CommandBufferAllocateInfo & operator=( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + operator VkClearValue &() + { + return *reinterpret_cast( this ); + } - CommandBufferAllocateInfo & operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#ifdef 
VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::ClearColorValue color; + VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil; +#else + VkClearColorValue color; + VkClearDepthStencilValue depthStencil; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + struct ClearAttachment + { + using NativeType = VkClearAttachment; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 ClearAttachment( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + uint32_t colorAttachment_ = {}, + VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask{ aspectMask_ } + , colorAttachment{ colorAttachment_ } + , clearValue{ clearValue_ } { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClearAttachment( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT : ClearAttachment( *reinterpret_cast( &rhs ) ) {} + + ClearAttachment & operator=( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ClearAttachment & operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT { - commandPool = commandPool_; + aspectMask = aspectMask_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setLevel( VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setColorAttachment( uint32_t colorAttachment_ ) VULKAN_HPP_NOEXCEPT { - level = level_; + colorAttachment = colorAttachment_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT { - commandBufferCount = commandBufferCount_; + clearValue = clearValue_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkCommandBufferAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkClearAttachment const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT + operator VkClearAttachment &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, commandPool, level, commandBufferCount ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CommandBufferAllocateInfo const & ) const = default; -#else - bool operator==( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( 
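ClearColorValue, ClearDepthStencilValue and the ClearValue union map one-to-one onto their Vk counterparts, and ClearValue converts implicitly from either member type. The sketch below fills one clear value per attachment for a hypothetical render pass with one color and one depth/stencil attachment; the function name and attachment order are assumptions of the example, and the setClearValues call relies on the default enhanced mode.

#include <vulkan/vulkan.hpp>
#include <array>

// Sketch: one clear value per attachment of a hypothetical render pass whose
// attachment 0 is a color target and attachment 1 is a depth/stencil target.
// vk::ClearValue is a union, so each element carries either `color` or `depthStencil`.
vk::RenderPassBeginInfo makeRenderPassBeginInfo()
{
  // static so the pointer stored in the returned struct stays valid for the caller
  static std::array<vk::ClearValue, 2> clears = {
    vk::ClearValue( vk::ClearColorValue( 0.0f, 0.0f, 0.0f, 1.0f ) ),  // opaque black via the float32 member
    vk::ClearValue( vk::ClearDepthStencilValue( 1.0f, 0 ) )           // depth = 1.0, stencil = 0
  };

  vk::RenderPassBeginInfo beginInfo{};
  beginInfo.setClearValues( clears );  // enhanced-mode setter: fills clearValueCount and pClearValues
  return beginInfo;                    // renderPass, framebuffer and renderArea still to be set by the caller
}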
VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandPool == rhs.commandPool ) && ( level == rhs.level ) && - ( commandBufferCount == rhs.commandBufferCount ); -# endif - } - - bool operator!=( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); + return std::tie( aspectMask, colorAttachment, clearValue ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferAllocateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::CommandPool commandPool = {}; - VULKAN_HPP_NAMESPACE::CommandBufferLevel level = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary; - uint32_t commandBufferCount = {}; - }; - - template <> - struct CppType - { - using Type = CommandBufferAllocateInfo; + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t colorAttachment = {}; + VULKAN_HPP_NAMESPACE::ClearValue clearValue = {}; }; - struct CommandBufferInheritanceInfo + struct ClearRect { - using NativeType = VkCommandBufferInheritanceInfo; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceInfo; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, - uint32_t subpass_ = {}, - VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {}, - VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {}, - VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , renderPass( renderPass_ ) - , subpass( subpass_ ) - , framebuffer( framebuffer_ ) - , occlusionQueryEnable( occlusionQueryEnable_ ) - , queryFlags( queryFlags_ ) - , pipelineStatistics( pipelineStatistics_ ) - { - } - - VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : CommandBufferInheritanceInfo( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - CommandBufferInheritanceInfo & operator=( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + using NativeType = VkClearRect; - CommandBufferInheritanceInfo & operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClearRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT + : rect{ rect_ } + , baseArrayLayer{ baseArrayLayer_ } + , layerCount{ layerCount_ } { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + VULKAN_HPP_CONSTEXPR ClearRect( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT - { - renderPass = renderPass_; - return *this; - } + ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT : ClearRect( 
*reinterpret_cast( &rhs ) ) {} - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT - { - subpass = subpass_; - return *this; - } + ClearRect & operator=( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT + ClearRect & operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT { - framebuffer = framebuffer_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setOcclusionQueryEnable( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClearRect & setRect( VULKAN_HPP_NAMESPACE::Rect2D const & rect_ ) VULKAN_HPP_NOEXCEPT { - occlusionQueryEnable = occlusionQueryEnable_; + rect = rect_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setQueryFlags( VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClearRect & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT { - queryFlags = queryFlags_; + baseArrayLayer = baseArrayLayer_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & - setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClearRect & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT { - pipelineStatistics = pipelineStatistics_; + layerCount = layerCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkCommandBufferInheritanceInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkClearRect const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCommandBufferInheritanceInfo &() VULKAN_HPP_NOEXCEPT + operator VkClearRect &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, renderPass, subpass, framebuffer, occlusionQueryEnable, queryFlags, pipelineStatistics ); + return std::tie( rect, baseArrayLayer, layerCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CommandBufferInheritanceInfo const & ) const = default; + auto operator<=>( ClearRect const & ) const = default; #else - bool operator==( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && ( subpass == rhs.subpass ) && - ( framebuffer == rhs.framebuffer ) && ( occlusionQueryEnable == rhs.occlusionQueryEnable ) && ( queryFlags == rhs.queryFlags ) && - ( pipelineStatistics == rhs.pipelineStatistics ); + return ( rect == rhs.rect ) && ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount ); # endif } - bool operator!=( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ClearRect const & rhs ) const 
VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; - uint32_t subpass = {}; - VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {}; - VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable = {}; - VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags = {}; - VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {}; - }; - - template <> - struct CppType - { - using Type = CommandBufferInheritanceInfo; + VULKAN_HPP_NAMESPACE::Rect2D rect = {}; + uint32_t baseArrayLayer = {}; + uint32_t layerCount = {}; }; - struct CommandBufferBeginInfo + struct ClusterAccelerationStructureBuildClustersBottomLevelInfoNV { - using NativeType = VkCommandBufferBeginInfo; + using NativeType = VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferBeginInfo; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {}, - const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , pInheritanceInfo( pInheritanceInfo_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ClusterAccelerationStructureBuildClustersBottomLevelInfoNV( uint32_t clusterReferencesCount_ = {}, + uint32_t clusterReferencesStride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress clusterReferences_ = {} ) VULKAN_HPP_NOEXCEPT + : clusterReferencesCount{ clusterReferencesCount_ } + , clusterReferencesStride{ clusterReferencesStride_ } + , clusterReferences{ clusterReferences_ } { } - VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildClustersBottomLevelInfoNV( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; - CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : CommandBufferBeginInfo( *reinterpret_cast( &rhs ) ) + ClusterAccelerationStructureBuildClustersBottomLevelInfoNV( VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ClusterAccelerationStructureBuildClustersBottomLevelInfoNV( + *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CommandBufferBeginInfo & operator=( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ClusterAccelerationStructureBuildClustersBottomLevelInfoNV & + operator=( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - CommandBufferBeginInfo & operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ClusterAccelerationStructureBuildClustersBottomLevelInfoNV & + operator=( VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if 
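ClearAttachment selects what to clear inside an active render pass and ClearRect bounds the cleared region; together they feed vkCmdClearAttachments. A sketch using the chained setters defined above, assuming `cmd` is a command buffer currently recording inside a render pass whose subpass has at least one color attachment; the function name and extent parameter are illustrative.

#include <vulkan/vulkan.hpp>

// Sketch: clear color attachment 0 of the current subpass to transparent black,
// limited to the full framebuffer area and a single array layer.
void clearFirstColorAttachment( vk::CommandBuffer cmd, vk::Extent2D extent )
{
  vk::ClearAttachment attachment{};
  attachment.setAspectMask( vk::ImageAspectFlagBits::eColor )
            .setColorAttachment( 0 )                                          // index into the subpass' color attachments
            .setClearValue( vk::ClearColorValue( 0.0f, 0.0f, 0.0f, 0.0f ) );  // implicit ClearColorValue -> ClearValue

  vk::ClearRect rect{};
  rect.setRect( vk::Rect2D( vk::Offset2D( 0, 0 ), extent ) )
      .setBaseArrayLayer( 0 )
      .setLayerCount( 1 );

  cmd.clearAttachments( attachment, rect );  // enhanced-mode overload: single elements pass through ArrayProxy
}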
!defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildClustersBottomLevelInfoNV & + setClusterReferencesCount( uint32_t clusterReferencesCount_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + clusterReferencesCount = clusterReferencesCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildClustersBottomLevelInfoNV & + setClusterReferencesStride( uint32_t clusterReferencesStride_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + clusterReferencesStride = clusterReferencesStride_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & - setPInheritanceInfo( const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildClustersBottomLevelInfoNV & + setClusterReferences( VULKAN_HPP_NAMESPACE::DeviceAddress clusterReferences_ ) VULKAN_HPP_NOEXCEPT { - pInheritanceInfo = pInheritanceInfo_; + clusterReferences = clusterReferences_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT + operator VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, pInheritanceInfo ); + return std::tie( clusterReferencesCount, clusterReferencesStride, clusterReferences ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CommandBufferBeginInfo const & ) const = default; + auto operator<=>( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & ) const = default; #else - bool operator==( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pInheritanceInfo == rhs.pInheritanceInfo ); + return ( clusterReferencesCount == rhs.clusterReferencesCount ) && ( clusterReferencesStride == rhs.clusterReferencesStride ) && + ( clusterReferences == rhs.clusterReferences ); # endif } - bool operator!=( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferBeginInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags = {}; - const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo = {}; + uint32_t clusterReferencesCount = {}; + uint32_t clusterReferencesStride = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress 
clusterReferences = {}; }; - template <> - struct CppType + struct ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV { - using Type = CommandBufferBeginInfo; - }; + using NativeType = VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV; - struct CommandBufferInheritanceConditionalRenderingInfoEXT - { - using NativeType = VkCommandBufferInheritanceConditionalRenderingInfoEXT; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV( uint32_t geometryIndex_ = {}, + uint32_t reserved_ = {}, + uint32_t geometryFlags_ = {} ) VULKAN_HPP_NOEXCEPT + : geometryIndex{ geometryIndex_ } + , reserved{ reserved_ } + , geometryFlags{ geometryFlags_ } + { + } - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT; + VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , conditionalRenderingEnable( conditionalRenderingEnable_ ) + ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV( VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV( + *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR - CommandBufferInheritanceConditionalRenderingInfoEXT( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV & + operator=( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : CommandBufferInheritanceConditionalRenderingInfoEXT( *reinterpret_cast( &rhs ) ) + ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV & + operator=( VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - CommandBufferInheritanceConditionalRenderingInfoEXT & - operator=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV & setGeometryIndex( uint32_t geometryIndex_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + geometryIndex = geometryIndex_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV & setReserved( uint32_t reserved_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + reserved = reserved_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT & - setConditionalRenderingEnable( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV & setGeometryFlags( uint32_t geometryFlags_ ) VULKAN_HPP_NOEXCEPT { - conditionalRenderingEnable = conditionalRenderingEnable_; + geometryFlags = geometryFlags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, conditionalRenderingEnable ); + return std::tie( geometryIndex, reserved, geometryFlags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CommandBufferInheritanceConditionalRenderingInfoEXT const & ) const = default; + auto operator<=>( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & ) const = default; #else - bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conditionalRenderingEnable == rhs.conditionalRenderingEnable ); + return ( geometryIndex == rhs.geometryIndex ) && ( reserved == rhs.reserved ) && ( geometryFlags == rhs.geometryFlags ); # endif } - bool operator!=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable = {}; - }; - - template <> - struct CppType - { - using Type = CommandBufferInheritanceConditionalRenderingInfoEXT; + uint32_t geometryIndex : 24; + uint32_t reserved : 5; + uint32_t geometryFlags : 3; }; - struct CommandBufferInheritanceRenderPassTransformInfoQCOM + struct ClusterAccelerationStructureBuildTriangleClusterInfoNV { - using NativeType = VkCommandBufferInheritanceRenderPassTransformInfoQCOM; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM; + using NativeType = VkClusterAccelerationStructureBuildTriangleClusterInfoNV; -#if !defined( 
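ClusterAccelerationStructureBuildClustersBottomLevelInfoNV and the geometry index/flags helper above are plain argument records for VK_NV_cluster_acceleration_structure builds: the first points at an array of cluster references in device memory, the second packs a 24-bit geometry index, 5 reserved bits and 3 geometry flag bits into one dword. The sketch below fills both; the device address, count, stride and flag values are placeholders, since the rest of the cluster build flow lies outside this hunk.

#include <vulkan/vulkan.hpp>

// Sketch: fill the bottom-level argument record. The address and count would come
// from buffers prepared for the extension's cluster build command.
vk::ClusterAccelerationStructureBuildClustersBottomLevelInfoNV
  makeBottomLevelArgs( vk::DeviceAddress clusterReferences, uint32_t clusterCount )
{
  return vk::ClusterAccelerationStructureBuildClustersBottomLevelInfoNV{}
    .setClusterReferencesCount( clusterCount )
    .setClusterReferencesStride( static_cast<uint32_t>( sizeof( vk::DeviceAddress ) ) )  // tightly packed 64-bit references
    .setClusterReferences( clusterReferences );                                          // device address of the reference array
}

// Sketch: pack a geometry index and its flags exactly as the bit-fields declare.
vk::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV makeGeometryIndexAndFlags( uint32_t geometryIndex )
{
  return vk::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV{}
    .setGeometryIndex( geometryIndex )  // truncated to 24 bits by the bit-field
    .setGeometryFlags( 0 );             // geometry flag bits as defined by the extension; none set here
}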
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM( - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, - VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , transform( transform_ ) - , renderArea( renderArea_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildTriangleClusterInfoNV( + uint32_t clusterID_ = {}, + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClusterFlagsNV clusterFlags_ = {}, + uint32_t triangleCount_ = {}, + uint32_t vertexCount_ = {}, + uint32_t positionTruncateBitCount_ = {}, + uint32_t indexType_ = {}, + uint32_t opacityMicromapIndexType_ = {}, + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags_ = {}, + uint16_t indexBufferStride_ = {}, + uint16_t vertexBufferStride_ = {}, + uint16_t geometryIndexAndFlagsBufferStride_ = {}, + uint16_t opacityMicromapIndexBufferStride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress indexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress vertexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress geometryIndexAndFlagsBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapArray_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapIndexBuffer_ = {} ) VULKAN_HPP_NOEXCEPT + : clusterID{ clusterID_ } + , clusterFlags{ clusterFlags_ } + , triangleCount{ triangleCount_ } + , vertexCount{ vertexCount_ } + , positionTruncateBitCount{ positionTruncateBitCount_ } + , indexType{ indexType_ } + , opacityMicromapIndexType{ opacityMicromapIndexType_ } + , baseGeometryIndexAndGeometryFlags{ baseGeometryIndexAndGeometryFlags_ } + , indexBufferStride{ indexBufferStride_ } + , vertexBufferStride{ vertexBufferStride_ } + , geometryIndexAndFlagsBufferStride{ geometryIndexAndFlagsBufferStride_ } + , opacityMicromapIndexBufferStride{ opacityMicromapIndexBufferStride_ } + , indexBuffer{ indexBuffer_ } + , vertexBuffer{ vertexBuffer_ } + , geometryIndexAndFlagsBuffer{ geometryIndexAndFlagsBuffer_ } + , opacityMicromapArray{ opacityMicromapArray_ } + , opacityMicromapIndexBuffer{ opacityMicromapIndexBuffer_ } { } - VULKAN_HPP_CONSTEXPR - CommandBufferInheritanceRenderPassTransformInfoQCOM( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildTriangleClusterInfoNV( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; - CommandBufferInheritanceRenderPassTransformInfoQCOM( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT - : CommandBufferInheritanceRenderPassTransformInfoQCOM( *reinterpret_cast( &rhs ) ) + ClusterAccelerationStructureBuildTriangleClusterInfoNV( VkClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ClusterAccelerationStructureBuildTriangleClusterInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CommandBufferInheritanceRenderPassTransformInfoQCOM & - operator=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ClusterAccelerationStructureBuildTriangleClusterInfoNV & + operator=( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + ClusterAccelerationStructureBuildTriangleClusterInfoNV & + operator=( VkClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setClusterID( uint32_t clusterID_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + clusterID = clusterID_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & - setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & + setClusterFlags( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClusterFlagsNV clusterFlags_ ) VULKAN_HPP_NOEXCEPT { - transform = transform_; + clusterFlags = clusterFlags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & - setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setTriangleCount( uint32_t triangleCount_ ) VULKAN_HPP_NOEXCEPT { - renderArea = renderArea_; + triangleCount = triangleCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, transform, renderArea ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CommandBufferInheritanceRenderPassTransformInfoQCOM const & ) const = default; -#else - bool operator==( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ) && ( renderArea == rhs.renderArea ); -# endif - } - bool operator!=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + vertexCount = vertexCount_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; - VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; - }; - - template <> - struct CppType - { - 
using Type = CommandBufferInheritanceRenderPassTransformInfoQCOM; - }; - struct CommandBufferInheritanceRenderingInfo - { - using NativeType = VkCommandBufferInheritanceRenderingInfo; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderingInfo; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - CommandBufferInheritanceRenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {}, - uint32_t viewMask_ = {}, - uint32_t colorAttachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {}, - VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , viewMask( viewMask_ ) - , colorAttachmentCount( colorAttachmentCount_ ) - , pColorAttachmentFormats( pColorAttachmentFormats_ ) - , depthAttachmentFormat( depthAttachmentFormat_ ) - , stencilAttachmentFormat( stencilAttachmentFormat_ ) - , rasterizationSamples( rasterizationSamples_ ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & + setPositionTruncateBitCount( uint32_t positionTruncateBitCount_ ) VULKAN_HPP_NOEXCEPT { + positionTruncateBitCount = positionTruncateBitCount_; + return *this; } - VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderingInfo( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - CommandBufferInheritanceRenderingInfo( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : CommandBufferInheritanceRenderingInfo( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setIndexType( uint32_t indexType_ ) VULKAN_HPP_NOEXCEPT { + indexType = indexType_; + return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - CommandBufferInheritanceRenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_, - uint32_t viewMask_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_, - VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , viewMask( viewMask_ ) - , colorAttachmentCount( static_cast( colorAttachmentFormats_.size() ) ) - , pColorAttachmentFormats( colorAttachmentFormats_.data() ) - , depthAttachmentFormat( depthAttachmentFormat_ ) - , stencilAttachmentFormat( stencilAttachmentFormat_ ) - , rasterizationSamples( rasterizationSamples_ ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & + setOpacityMicromapIndexType( uint32_t opacityMicromapIndexType_ ) VULKAN_HPP_NOEXCEPT { + opacityMicromapIndexType = opacityMicromapIndexType_; + return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - CommandBufferInheritanceRenderingInfo & operator=( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - 
CommandBufferInheritanceRenderingInfo & operator=( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setBaseGeometryIndexAndGeometryFlags( + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & baseGeometryIndexAndGeometryFlags_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + baseGeometryIndexAndGeometryFlags = baseGeometryIndexAndGeometryFlags_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setIndexBufferStride( uint16_t indexBufferStride_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + indexBufferStride = indexBufferStride_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setVertexBufferStride( uint16_t vertexBufferStride_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + vertexBufferStride = vertexBufferStride_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & + setGeometryIndexAndFlagsBufferStride( uint16_t geometryIndexAndFlagsBufferStride_ ) VULKAN_HPP_NOEXCEPT { - viewMask = viewMask_; + geometryIndexAndFlagsBufferStride = geometryIndexAndFlagsBufferStride_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & + setOpacityMicromapIndexBufferStride( uint16_t opacityMicromapIndexBufferStride_ ) VULKAN_HPP_NOEXCEPT { - colorAttachmentCount = colorAttachmentCount_; + opacityMicromapIndexBufferStride = opacityMicromapIndexBufferStride_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & - setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & + setIndexBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress indexBuffer_ ) VULKAN_HPP_NOEXCEPT { - pColorAttachmentFormats = pColorAttachmentFormats_; + indexBuffer = indexBuffer_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - CommandBufferInheritanceRenderingInfo & setColorAttachmentFormats( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & + setVertexBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress vertexBuffer_ ) VULKAN_HPP_NOEXCEPT { - colorAttachmentCount = static_cast( colorAttachmentFormats_.size() ); - pColorAttachmentFormats = colorAttachmentFormats_.data(); + vertexBuffer = vertexBuffer_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & - setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & + 
setGeometryIndexAndFlagsBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress geometryIndexAndFlagsBuffer_ ) VULKAN_HPP_NOEXCEPT { - depthAttachmentFormat = depthAttachmentFormat_; + geometryIndexAndFlagsBuffer = geometryIndexAndFlagsBuffer_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & - setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & + setOpacityMicromapArray( VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapArray_ ) VULKAN_HPP_NOEXCEPT { - stencilAttachmentFormat = stencilAttachmentFormat_; + opacityMicromapArray = opacityMicromapArray_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & - setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & + setOpacityMicromapIndexBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapIndexBuffer_ ) VULKAN_HPP_NOEXCEPT { - rasterizationSamples = rasterizationSamples_; + opacityMicromapIndexBuffer = opacityMicromapIndexBuffer_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkCommandBufferInheritanceRenderingInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkClusterAccelerationStructureBuildTriangleClusterInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCommandBufferInheritanceRenderingInfo &() VULKAN_HPP_NOEXCEPT + operator VkClusterAccelerationStructureBuildTriangleClusterInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const &, + uint16_t const &, + uint16_t const &, + uint16_t const &, + uint16_t const &, + VULKAN_HPP_NAMESPACE::DeviceAddress const &, + VULKAN_HPP_NAMESPACE::DeviceAddress const &, + VULKAN_HPP_NAMESPACE::DeviceAddress const &, + VULKAN_HPP_NAMESPACE::DeviceAddress const &, + VULKAN_HPP_NAMESPACE::DeviceAddress const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( - sType, pNext, flags, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat, rasterizationSamples ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CommandBufferInheritanceRenderingInfo const & ) const = default; -#else - bool operator==( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + return std::tie( clusterID, + clusterFlags, + triangleCount, + vertexCount, + positionTruncateBitCount, + indexType, + opacityMicromapIndexType, + baseGeometryIndexAndGeometryFlags, + indexBufferStride, + vertexBufferStride, + geometryIndexAndFlagsBufferStride, + opacityMicromapIndexBufferStride, + indexBuffer, + vertexBuffer, + geometryIndexAndFlagsBuffer, + opacityMicromapArray, + opacityMicromapIndexBuffer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & ) const = default; +#else + bool operator==( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( clusterID == rhs.clusterID ) && ( clusterFlags == rhs.clusterFlags ) && ( triangleCount == rhs.triangleCount ) && + ( vertexCount == rhs.vertexCount ) && ( positionTruncateBitCount == rhs.positionTruncateBitCount ) && ( indexType == rhs.indexType ) && + ( opacityMicromapIndexType == rhs.opacityMicromapIndexType ) && ( baseGeometryIndexAndGeometryFlags == rhs.baseGeometryIndexAndGeometryFlags ) && + ( indexBufferStride == rhs.indexBufferStride ) && ( vertexBufferStride == rhs.vertexBufferStride ) && + ( geometryIndexAndFlagsBufferStride == rhs.geometryIndexAndFlagsBufferStride ) && + ( opacityMicromapIndexBufferStride == rhs.opacityMicromapIndexBufferStride ) && ( indexBuffer == rhs.indexBuffer ) && + ( vertexBuffer == rhs.vertexBuffer ) && ( geometryIndexAndFlagsBuffer == rhs.geometryIndexAndFlagsBuffer ) && + ( opacityMicromapArray == rhs.opacityMicromapArray ) && ( opacityMicromapIndexBuffer == rhs.opacityMicromapIndexBuffer ); +# endif + } + + bool operator!=( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t clusterID = {}; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClusterFlagsNV clusterFlags = {}; + uint32_t triangleCount : 9; + uint32_t vertexCount : 9; + uint32_t positionTruncateBitCount : 6; + uint32_t indexType : 4; + uint32_t opacityMicromapIndexType : 4; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags = {}; + uint16_t indexBufferStride = {}; + uint16_t vertexBufferStride = {}; + uint16_t geometryIndexAndFlagsBufferStride = {}; + uint16_t opacityMicromapIndexBufferStride = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress indexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress vertexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress geometryIndexAndFlagsBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapArray = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapIndexBuffer = {}; + }; + + struct ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV + { + using NativeType = VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV( + uint32_t clusterID_ = {}, + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClusterFlagsNV clusterFlags_ = {}, + uint32_t triangleCount_ = {}, + uint32_t vertexCount_ = {}, + uint32_t positionTruncateBitCount_ = {}, + uint32_t indexType_ = {}, + uint32_t opacityMicromapIndexType_ = {}, + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags_ = {}, + uint16_t indexBufferStride_ = {}, + uint16_t vertexBufferStride_ = {}, + uint16_t geometryIndexAndFlagsBufferStride_ = {}, + uint16_t opacityMicromapIndexBufferStride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress indexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress vertexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress geometryIndexAndFlagsBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapArray_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapIndexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress instantiationBoundingBoxLimit_ = {} ) VULKAN_HPP_NOEXCEPT + : clusterID{ clusterID_ } + , 
clusterFlags{ clusterFlags_ } + , triangleCount{ triangleCount_ } + , vertexCount{ vertexCount_ } + , positionTruncateBitCount{ positionTruncateBitCount_ } + , indexType{ indexType_ } + , opacityMicromapIndexType{ opacityMicromapIndexType_ } + , baseGeometryIndexAndGeometryFlags{ baseGeometryIndexAndGeometryFlags_ } + , indexBufferStride{ indexBufferStride_ } + , vertexBufferStride{ vertexBufferStride_ } + , geometryIndexAndFlagsBufferStride{ geometryIndexAndFlagsBufferStride_ } + , opacityMicromapIndexBufferStride{ opacityMicromapIndexBufferStride_ } + , indexBuffer{ indexBuffer_ } + , vertexBuffer{ vertexBuffer_ } + , geometryIndexAndFlagsBuffer{ geometryIndexAndFlagsBuffer_ } + , opacityMicromapArray{ opacityMicromapArray_ } + , opacityMicromapIndexBuffer{ opacityMicromapIndexBuffer_ } + , instantiationBoundingBoxLimit{ instantiationBoundingBoxLimit_ } { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewMask == rhs.viewMask ) && - ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) && - ( depthAttachmentFormat == rhs.depthAttachmentFormat ) && ( stencilAttachmentFormat == rhs.stencilAttachmentFormat ) && - ( rasterizationSamples == rhs.rasterizationSamples ); -# endif } - bool operator!=( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV( + ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV( VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV( + *reinterpret_cast( &rhs ) ) { - return !operator==( rhs ); } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderingInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::RenderingFlags flags = {}; - uint32_t viewMask = {}; - uint32_t colorAttachmentCount = {}; - const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {}; - VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; - }; - - template <> - struct CppType - { - using Type = CommandBufferInheritanceRenderingInfo; - }; - using CommandBufferInheritanceRenderingInfoKHR = CommandBufferInheritanceRenderingInfo; - struct Viewport - { - using NativeType = VkViewport; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - Viewport( float x_ = {}, float y_ = {}, float width_ = {}, float height_ = {}, float minDepth_ = {}, float maxDepth_ = {} ) VULKAN_HPP_NOEXCEPT - : x( x_ ) - , y( y_ ) - , width( width_ ) - , height( height_ ) - , minDepth( minDepth_ ) - , maxDepth( maxDepth_ ) + explicit ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV( + ClusterAccelerationStructureBuildTriangleClusterInfoNV const & clusterAccelerationStructureBuildTriangleClusterInfoNV, + VULKAN_HPP_NAMESPACE::DeviceAddress instantiationBoundingBoxLimit_ = {} ) + : clusterID( 
clusterAccelerationStructureBuildTriangleClusterInfoNV.clusterID ) + , clusterFlags( clusterAccelerationStructureBuildTriangleClusterInfoNV.clusterFlags ) + , triangleCount( clusterAccelerationStructureBuildTriangleClusterInfoNV.triangleCount ) + , vertexCount( clusterAccelerationStructureBuildTriangleClusterInfoNV.vertexCount ) + , positionTruncateBitCount( clusterAccelerationStructureBuildTriangleClusterInfoNV.positionTruncateBitCount ) + , indexType( clusterAccelerationStructureBuildTriangleClusterInfoNV.indexType ) + , opacityMicromapIndexType( clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapIndexType ) + , baseGeometryIndexAndGeometryFlags( clusterAccelerationStructureBuildTriangleClusterInfoNV.baseGeometryIndexAndGeometryFlags ) + , indexBufferStride( clusterAccelerationStructureBuildTriangleClusterInfoNV.indexBufferStride ) + , vertexBufferStride( clusterAccelerationStructureBuildTriangleClusterInfoNV.vertexBufferStride ) + , geometryIndexAndFlagsBufferStride( clusterAccelerationStructureBuildTriangleClusterInfoNV.geometryIndexAndFlagsBufferStride ) + , opacityMicromapIndexBufferStride( clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapIndexBufferStride ) + , indexBuffer( clusterAccelerationStructureBuildTriangleClusterInfoNV.indexBuffer ) + , vertexBuffer( clusterAccelerationStructureBuildTriangleClusterInfoNV.vertexBuffer ) + , geometryIndexAndFlagsBuffer( clusterAccelerationStructureBuildTriangleClusterInfoNV.geometryIndexAndFlagsBuffer ) + , opacityMicromapArray( clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapArray ) + , opacityMicromapIndexBuffer( clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapIndexBuffer ) + , instantiationBoundingBoxLimit( instantiationBoundingBoxLimit_ ) { } - VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT : Viewport( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + operator=( ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - Viewport & operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT + ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + operator=( VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setClusterID( uint32_t clusterID_ ) VULKAN_HPP_NOEXCEPT { - x = x_; + clusterID = clusterID_; return *this; } - VULKAN_HPP_CONSTEXPR_14 Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setClusterFlags( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClusterFlagsNV clusterFlags_ ) VULKAN_HPP_NOEXCEPT { - y = y_; + clusterFlags = clusterFlags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setTriangleCount( uint32_t triangleCount_ ) VULKAN_HPP_NOEXCEPT { - width = width_; + triangleCount = triangleCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT { - height = height_; + vertexCount = vertexCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setPositionTruncateBitCount( uint32_t positionTruncateBitCount_ ) VULKAN_HPP_NOEXCEPT { - minDepth = minDepth_; + positionTruncateBitCount = positionTruncateBitCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setIndexType( uint32_t indexType_ ) VULKAN_HPP_NOEXCEPT { - maxDepth = maxDepth_; + indexType = indexType_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkViewport const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setOpacityMicromapIndexType( uint32_t opacityMicromapIndexType_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + opacityMicromapIndexType = opacityMicromapIndexType_; + return *this; } - operator VkViewport &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setBaseGeometryIndexAndGeometryFlags( + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & baseGeometryIndexAndGeometryFlags_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + baseGeometryIndexAndGeometryFlags = baseGeometryIndexAndGeometryFlags_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setIndexBufferStride( uint16_t indexBufferStride_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( x, y, width, height, minDepth, maxDepth ); + indexBufferStride = indexBufferStride_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( Viewport const & ) const = default; -#else - bool operator==( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setVertexBufferStride( uint16_t vertexBufferStride_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( x == rhs.x ) && ( y == rhs.y ) && ( width == rhs.width ) && ( height == rhs.height ) && ( minDepth == rhs.minDepth ) && - ( maxDepth == rhs.maxDepth ); -# endif + vertexBufferStride = vertexBufferStride_; + return *this; } - bool operator!=( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setGeometryIndexAndFlagsBufferStride( uint16_t geometryIndexAndFlagsBufferStride_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + geometryIndexAndFlagsBufferStride = geometryIndexAndFlagsBufferStride_; + return *this; } -#endif - - public: - float x = 
{}; - float y = {}; - float width = {}; - float height = {}; - float minDepth = {}; - float maxDepth = {}; - }; - - struct CommandBufferInheritanceViewportScissorInfoNV - { - using NativeType = VkCommandBufferInheritanceViewportScissorInfoNV; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_ = {}, - uint32_t viewportDepthCount_ = {}, - const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , viewportScissor2D( viewportScissor2D_ ) - , viewportDepthCount( viewportDepthCount_ ) - , pViewportDepths( pViewportDepths_ ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setOpacityMicromapIndexBufferStride( uint16_t opacityMicromapIndexBufferStride_ ) VULKAN_HPP_NOEXCEPT { + opacityMicromapIndexBufferStride = opacityMicromapIndexBufferStride_; + return *this; } - VULKAN_HPP_CONSTEXPR - CommandBufferInheritanceViewportScissorInfoNV( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - CommandBufferInheritanceViewportScissorInfoNV( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : CommandBufferInheritanceViewportScissorInfoNV( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setIndexBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress indexBuffer_ ) VULKAN_HPP_NOEXCEPT { + indexBuffer = indexBuffer_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CommandBufferInheritanceViewportScissorInfoNV & operator=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - CommandBufferInheritanceViewportScissorInfoNV & operator=( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setVertexBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress vertexBuffer_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + vertexBuffer = vertexBuffer_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setGeometryIndexAndFlagsBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress geometryIndexAndFlagsBuffer_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + geometryIndexAndFlagsBuffer = geometryIndexAndFlagsBuffer_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & - setViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setOpacityMicromapArray( VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapArray_ ) VULKAN_HPP_NOEXCEPT { - viewportScissor2D = viewportScissor2D_; + opacityMicromapArray = opacityMicromapArray_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setViewportDepthCount( uint32_t viewportDepthCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setOpacityMicromapIndexBuffer( VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapIndexBuffer_ ) VULKAN_HPP_NOEXCEPT { - viewportDepthCount = viewportDepthCount_; + opacityMicromapIndexBuffer = opacityMicromapIndexBuffer_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & - setPViewportDepths( const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & + setInstantiationBoundingBoxLimit( VULKAN_HPP_NAMESPACE::DeviceAddress instantiationBoundingBoxLimit_ ) VULKAN_HPP_NOEXCEPT { - pViewportDepths = pViewportDepths_; + instantiationBoundingBoxLimit = instantiationBoundingBoxLimit_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkCommandBufferInheritanceViewportScissorInfoNV const &() const VULKAN_HPP_NOEXCEPT + operator VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCommandBufferInheritanceViewportScissorInfoNV &() VULKAN_HPP_NOEXCEPT + operator VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const &, + uint16_t const &, + uint16_t const &, + uint16_t const &, + uint16_t const &, + VULKAN_HPP_NAMESPACE::DeviceAddress const &, + VULKAN_HPP_NAMESPACE::DeviceAddress const &, + VULKAN_HPP_NAMESPACE::DeviceAddress const &, + VULKAN_HPP_NAMESPACE::DeviceAddress const &, + VULKAN_HPP_NAMESPACE::DeviceAddress const &, + VULKAN_HPP_NAMESPACE::DeviceAddress const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, viewportScissor2D, viewportDepthCount, pViewportDepths ); + return std::tie( clusterID, + clusterFlags, + triangleCount, + vertexCount, + positionTruncateBitCount, + indexType, + opacityMicromapIndexType, + baseGeometryIndexAndGeometryFlags, + indexBufferStride, + vertexBufferStride, + geometryIndexAndFlagsBufferStride, + opacityMicromapIndexBufferStride, + indexBuffer, + vertexBuffer, + geometryIndexAndFlagsBuffer, + opacityMicromapArray, + opacityMicromapIndexBuffer, + instantiationBoundingBoxLimit ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CommandBufferInheritanceViewportScissorInfoNV const & ) const = default; + auto operator<=>( ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & ) const = default; #else - bool operator==( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewportScissor2D == rhs.viewportScissor2D ) && - ( viewportDepthCount == rhs.viewportDepthCount ) && ( pViewportDepths == rhs.pViewportDepths ); + return ( clusterID == rhs.clusterID ) && ( clusterFlags == rhs.clusterFlags ) && ( triangleCount == 
rhs.triangleCount ) && + ( vertexCount == rhs.vertexCount ) && ( positionTruncateBitCount == rhs.positionTruncateBitCount ) && ( indexType == rhs.indexType ) && + ( opacityMicromapIndexType == rhs.opacityMicromapIndexType ) && ( baseGeometryIndexAndGeometryFlags == rhs.baseGeometryIndexAndGeometryFlags ) && + ( indexBufferStride == rhs.indexBufferStride ) && ( vertexBufferStride == rhs.vertexBufferStride ) && + ( geometryIndexAndFlagsBufferStride == rhs.geometryIndexAndFlagsBufferStride ) && + ( opacityMicromapIndexBufferStride == rhs.opacityMicromapIndexBufferStride ) && ( indexBuffer == rhs.indexBuffer ) && + ( vertexBuffer == rhs.vertexBuffer ) && ( geometryIndexAndFlagsBuffer == rhs.geometryIndexAndFlagsBuffer ) && + ( opacityMicromapArray == rhs.opacityMicromapArray ) && ( opacityMicromapIndexBuffer == rhs.opacityMicromapIndexBuffer ) && + ( instantiationBoundingBoxLimit == rhs.instantiationBoundingBoxLimit ); # endif } - bool operator!=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D = {}; - uint32_t viewportDepthCount = {}; - const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths = {}; - }; - - template <> - struct CppType - { - using Type = CommandBufferInheritanceViewportScissorInfoNV; + uint32_t clusterID = {}; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClusterFlagsNV clusterFlags = {}; + uint32_t triangleCount : 9; + uint32_t vertexCount : 9; + uint32_t positionTruncateBitCount : 6; + uint32_t indexType : 4; + uint32_t opacityMicromapIndexType : 4; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags = {}; + uint16_t indexBufferStride = {}; + uint16_t vertexBufferStride = {}; + uint16_t geometryIndexAndFlagsBufferStride = {}; + uint16_t opacityMicromapIndexBufferStride = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress indexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress vertexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress geometryIndexAndFlagsBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapArray = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress opacityMicromapIndexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress instantiationBoundingBoxLimit = {}; }; - struct CommandBufferSubmitInfo + struct ClusterAccelerationStructureClustersBottomLevelInputNV { - using NativeType = VkCommandBufferSubmitInfo; + using NativeType = VkClusterAccelerationStructureClustersBottomLevelInputNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferSubmitInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eClusterAccelerationStructureClustersBottomLevelInputNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ = {}, - uint32_t deviceMask_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , commandBuffer( commandBuffer_ ) - , deviceMask( deviceMask_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
ClusterAccelerationStructureClustersBottomLevelInputNV( uint32_t maxTotalClusterCount_ = {}, + uint32_t maxClusterCountPerAccelerationStructure_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxTotalClusterCount{ maxTotalClusterCount_ } + , maxClusterCountPerAccelerationStructure{ maxClusterCountPerAccelerationStructure_ } { } - VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureClustersBottomLevelInputNV( ClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; - CommandBufferSubmitInfo( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : CommandBufferSubmitInfo( *reinterpret_cast( &rhs ) ) + ClusterAccelerationStructureClustersBottomLevelInputNV( VkClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ClusterAccelerationStructureClustersBottomLevelInputNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CommandBufferSubmitInfo & operator=( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ClusterAccelerationStructureClustersBottomLevelInputNV & + operator=( ClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - CommandBufferSubmitInfo & operator=( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ClusterAccelerationStructureClustersBottomLevelInputNV & + operator=( VkClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureClustersBottomLevelInputNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setCommandBuffer( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureClustersBottomLevelInputNV & + setMaxTotalClusterCount( uint32_t maxTotalClusterCount_ ) VULKAN_HPP_NOEXCEPT { - commandBuffer = commandBuffer_; + maxTotalClusterCount = maxTotalClusterCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureClustersBottomLevelInputNV & + setMaxClusterCountPerAccelerationStructure( uint32_t maxClusterCountPerAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT { - deviceMask = deviceMask_; + maxClusterCountPerAccelerationStructure = maxClusterCountPerAccelerationStructure_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkCommandBufferSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkClusterAccelerationStructureClustersBottomLevelInputNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCommandBufferSubmitInfo &() VULKAN_HPP_NOEXCEPT + operator VkClusterAccelerationStructureClustersBottomLevelInputNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( 
this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, commandBuffer, deviceMask ); + return std::tie( sType, pNext, maxTotalClusterCount, maxClusterCountPerAccelerationStructure ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CommandBufferSubmitInfo const & ) const = default; + auto operator<=>( ClusterAccelerationStructureClustersBottomLevelInputNV const & ) const = default; #else - bool operator==( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandBuffer == rhs.commandBuffer ) && ( deviceMask == rhs.deviceMask ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTotalClusterCount == rhs.maxTotalClusterCount ) && + ( maxClusterCountPerAccelerationStructure == rhs.maxClusterCountPerAccelerationStructure ); # endif } - bool operator!=( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferSubmitInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer = {}; - uint32_t deviceMask = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eClusterAccelerationStructureClustersBottomLevelInputNV; + void * pNext = {}; + uint32_t maxTotalClusterCount = {}; + uint32_t maxClusterCountPerAccelerationStructure = {}; }; template <> - struct CppType + struct CppType { - using Type = CommandBufferSubmitInfo; + using Type = ClusterAccelerationStructureClustersBottomLevelInputNV; }; - using CommandBufferSubmitInfoKHR = CommandBufferSubmitInfo; - struct CommandPoolCreateInfo + struct ClusterAccelerationStructureTriangleClusterInputNV { - using NativeType = VkCommandPoolCreateInfo; + using NativeType = VkClusterAccelerationStructureTriangleClusterInputNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eClusterAccelerationStructureTriangleClusterInputNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {}, - uint32_t queueFamilyIndex_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , queueFamilyIndex( queueFamilyIndex_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ClusterAccelerationStructureTriangleClusterInputNV( VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint32_t maxGeometryIndexValue_ = {}, + uint32_t maxClusterUniqueGeometryCount_ = {}, + uint32_t maxClusterTriangleCount_ = {}, + uint32_t maxClusterVertexCount_ = {}, + uint32_t maxTotalTriangleCount_ = {}, + uint32_t maxTotalVertexCount_ = {}, + uint32_t minPositionTruncateBitCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ 
} + , vertexFormat{ vertexFormat_ } + , maxGeometryIndexValue{ maxGeometryIndexValue_ } + , maxClusterUniqueGeometryCount{ maxClusterUniqueGeometryCount_ } + , maxClusterTriangleCount{ maxClusterTriangleCount_ } + , maxClusterVertexCount{ maxClusterVertexCount_ } + , maxTotalTriangleCount{ maxTotalTriangleCount_ } + , maxTotalVertexCount{ maxTotalVertexCount_ } + , minPositionTruncateBitCount{ minPositionTruncateBitCount_ } { } - VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + ClusterAccelerationStructureTriangleClusterInputNV( ClusterAccelerationStructureTriangleClusterInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : CommandPoolCreateInfo( *reinterpret_cast( &rhs ) ) + ClusterAccelerationStructureTriangleClusterInputNV( VkClusterAccelerationStructureTriangleClusterInputNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ClusterAccelerationStructureTriangleClusterInputNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CommandPoolCreateInfo & operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ClusterAccelerationStructureTriangleClusterInputNV & + operator=( ClusterAccelerationStructureTriangleClusterInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - CommandPoolCreateInfo & operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ClusterAccelerationStructureTriangleClusterInputNV & operator=( VkClusterAccelerationStructureTriangleClusterInputNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & + setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + vertexFormat = vertexFormat_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxGeometryIndexValue( uint32_t maxGeometryIndexValue_ ) VULKAN_HPP_NOEXCEPT { - queueFamilyIndex = queueFamilyIndex_; + maxGeometryIndexValue = maxGeometryIndexValue_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkCommandPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & + setMaxClusterUniqueGeometryCount( uint32_t maxClusterUniqueGeometryCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + maxClusterUniqueGeometryCount = maxClusterUniqueGeometryCount_; + return *this; } - operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & + setMaxClusterTriangleCount( uint32_t maxClusterTriangleCount_ ) VULKAN_HPP_NOEXCEPT 
{ - return *reinterpret_cast( this ); + maxClusterTriangleCount = maxClusterTriangleCount_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxClusterVertexCount( uint32_t maxClusterVertexCount_ ) VULKAN_HPP_NOEXCEPT + { + maxClusterVertexCount = maxClusterVertexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxTotalTriangleCount( uint32_t maxTotalTriangleCount_ ) VULKAN_HPP_NOEXCEPT + { + maxTotalTriangleCount = maxTotalTriangleCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxTotalVertexCount( uint32_t maxTotalVertexCount_ ) VULKAN_HPP_NOEXCEPT + { + maxTotalVertexCount = maxTotalVertexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & + setMinPositionTruncateBitCount( uint32_t minPositionTruncateBitCount_ ) VULKAN_HPP_NOEXCEPT + { + minPositionTruncateBitCount = minPositionTruncateBitCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkClusterAccelerationStructureTriangleClusterInputNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureTriangleClusterInputNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, queueFamilyIndex ); + return std::tie( sType, + pNext, + vertexFormat, + maxGeometryIndexValue, + maxClusterUniqueGeometryCount, + maxClusterTriangleCount, + maxClusterVertexCount, + maxTotalTriangleCount, + maxTotalVertexCount, + minPositionTruncateBitCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CommandPoolCreateInfo const & ) const = default; + auto operator<=>( ClusterAccelerationStructureTriangleClusterInputNV const & ) const = default; #else - bool operator==( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ClusterAccelerationStructureTriangleClusterInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexFormat == rhs.vertexFormat ) && + ( maxGeometryIndexValue == rhs.maxGeometryIndexValue ) && ( maxClusterUniqueGeometryCount == rhs.maxClusterUniqueGeometryCount ) && + ( maxClusterTriangleCount == rhs.maxClusterTriangleCount ) && ( maxClusterVertexCount == rhs.maxClusterVertexCount ) && + ( maxTotalTriangleCount == rhs.maxTotalTriangleCount ) && ( maxTotalVertexCount == rhs.maxTotalVertexCount ) && + ( minPositionTruncateBitCount == rhs.minPositionTruncateBitCount ); # endif } - bool operator!=( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ClusterAccelerationStructureTriangleClusterInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {}; - uint32_t queueFamilyIndex = {}; + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eClusterAccelerationStructureTriangleClusterInputNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint32_t maxGeometryIndexValue = {}; + uint32_t maxClusterUniqueGeometryCount = {}; + uint32_t maxClusterTriangleCount = {}; + uint32_t maxClusterVertexCount = {}; + uint32_t maxTotalTriangleCount = {}; + uint32_t maxTotalVertexCount = {}; + uint32_t minPositionTruncateBitCount = {}; }; template <> - struct CppType + struct CppType { - using Type = CommandPoolCreateInfo; + using Type = ClusterAccelerationStructureTriangleClusterInputNV; }; - struct SpecializationMapEntry + struct ClusterAccelerationStructureMoveObjectsInputNV { - using NativeType = VkSpecializationMapEntry; + using NativeType = VkClusterAccelerationStructureMoveObjectsInputNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SpecializationMapEntry( uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {} ) VULKAN_HPP_NOEXCEPT - : constantID( constantID_ ) - , offset( offset_ ) - , size( size_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eClusterAccelerationStructureMoveObjectsInputNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureMoveObjectsInputNV( + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTypeNV type_ = VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTypeNV::eClustersBottomLevel, + VULKAN_HPP_NAMESPACE::Bool32 noMoveOverlap_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxMovedBytes_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + , noMoveOverlap{ noMoveOverlap_ } + , maxMovedBytes{ maxMovedBytes_ } { } - VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + ClusterAccelerationStructureMoveObjectsInputNV( ClusterAccelerationStructureMoveObjectsInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT - : SpecializationMapEntry( *reinterpret_cast( &rhs ) ) + ClusterAccelerationStructureMoveObjectsInputNV( VkClusterAccelerationStructureMoveObjectsInputNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ClusterAccelerationStructureMoveObjectsInputNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SpecializationMapEntry & operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ClusterAccelerationStructureMoveObjectsInputNV & operator=( ClusterAccelerationStructureMoveObjectsInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SpecializationMapEntry & operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT + ClusterAccelerationStructureMoveObjectsInputNV & operator=( VkClusterAccelerationStructureMoveObjectsInputNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setConstantID( uint32_t constantID_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV & setPNext( void * pNext_ ) 
VULKAN_HPP_NOEXCEPT { - constantID = constantID_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV & + setType( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTypeNV type_ ) VULKAN_HPP_NOEXCEPT { - offset = offset_; + type = type_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setSize( size_t size_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV & setNoMoveOverlap( VULKAN_HPP_NAMESPACE::Bool32 noMoveOverlap_ ) VULKAN_HPP_NOEXCEPT { - size = size_; + noMoveOverlap = noMoveOverlap_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkSpecializationMapEntry const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV & + setMaxMovedBytes( VULKAN_HPP_NAMESPACE::DeviceSize maxMovedBytes_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + maxMovedBytes = maxMovedBytes_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT + operator VkClusterAccelerationStructureMoveObjectsInputNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureMoveObjectsInputNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( constantID, offset, size ); + return std::tie( sType, pNext, type, noMoveOverlap, maxMovedBytes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SpecializationMapEntry const & ) const = default; + auto operator<=>( ClusterAccelerationStructureMoveObjectsInputNV const & ) const = default; #else - bool operator==( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ClusterAccelerationStructureMoveObjectsInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( constantID == rhs.constantID ) && ( offset == rhs.offset ) && ( size == rhs.size ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( noMoveOverlap == rhs.noMoveOverlap ) && + ( maxMovedBytes == rhs.maxMovedBytes ); # endif } - bool operator!=( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ClusterAccelerationStructureMoveObjectsInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t constantID = {}; - uint32_t offset = {}; - size_t size = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eClusterAccelerationStructureMoveObjectsInputNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTypeNV type = VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTypeNV::eClustersBottomLevel; + VULKAN_HPP_NAMESPACE::Bool32 noMoveOverlap = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxMovedBytes = {}; }; - struct SpecializationInfo + template <> + struct CppType { - using NativeType = VkSpecializationInfo; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SpecializationInfo( uint32_t mapEntryCount_ = {}, - const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ = {}, - size_t 
dataSize_ = {}, - const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT - : mapEntryCount( mapEntryCount_ ) - , pMapEntries( pMapEntries_ ) - , dataSize( dataSize_ ) - , pData( pData_ ) - { - } - - VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SpecializationInfo( *reinterpret_cast( &rhs ) ) {} - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - template - SpecializationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & mapEntries_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ = {} ) - : mapEntryCount( static_cast( mapEntries_.size() ) ) - , pMapEntries( mapEntries_.data() ) - , dataSize( data_.size() * sizeof( T ) ) - , pData( data_.data() ) - { - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - SpecializationInfo & operator=( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + using Type = ClusterAccelerationStructureMoveObjectsInputNV; + }; - SpecializationInfo & operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + union ClusterAccelerationStructureOpInputNV + { + using NativeType = VkClusterAccelerationStructureOpInputNV; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setMapEntryCount( uint32_t mapEntryCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 + ClusterAccelerationStructureOpInputNV( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClustersBottomLevelInputNV * pClustersBottomLevel_ = {} ) + : pClustersBottomLevel( pClustersBottomLevel_ ) { - mapEntryCount = mapEntryCount_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 + ClusterAccelerationStructureOpInputNV( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTriangleClusterInputNV * pTriangleClusters_ ) + : pTriangleClusters( pTriangleClusters_ ) { - pMapEntries = pMapEntries_; - return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SpecializationInfo & - setMapEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & mapEntries_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureMoveObjectsInputNV * pMoveObjects_ ) + : pMoveObjects( pMoveObjects_ ) { - mapEntryCount = static_cast( mapEntries_.size() ); - pMapEntries = mapEntries_.data(); - return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV & + setPClustersBottomLevel( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClustersBottomLevelInputNV * pClustersBottomLevel_ ) VULKAN_HPP_NOEXCEPT { - dataSize = dataSize_; + pClustersBottomLevel = pClustersBottomLevel_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV & + setPTriangleClusters( 
VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTriangleClusterInputNV * pTriangleClusters_ ) VULKAN_HPP_NOEXCEPT { - pData = pData_; + pTriangleClusters = pTriangleClusters_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - template - SpecializationInfo & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV & + setPMoveObjects( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureMoveObjectsInputNV * pMoveObjects_ ) VULKAN_HPP_NOEXCEPT { - dataSize = data_.size() * sizeof( T ); - pData = data_.data(); + pMoveObjects = pMoveObjects_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkSpecializationInfo const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT + operator VkClusterAccelerationStructureOpInputNV const &() const { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + operator VkClusterAccelerationStructureOpInputNV &() { - return std::tie( mapEntryCount, pMapEntries, dataSize, pData ); + return *reinterpret_cast( this ); } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SpecializationInfo const & ) const = default; +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureClustersBottomLevelInputNV * pClustersBottomLevel; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureTriangleClusterInputNV * pTriangleClusters; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureMoveObjectsInputNV * pMoveObjects; #else - bool operator==( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( mapEntryCount == rhs.mapEntryCount ) && ( pMapEntries == rhs.pMapEntries ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); -# endif - } - - bool operator!=( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - uint32_t mapEntryCount = {}; - const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries = {}; - size_t dataSize = {}; - const void * pData = {}; + VkClusterAccelerationStructureClustersBottomLevelInputNV * pClustersBottomLevel; + VkClusterAccelerationStructureTriangleClusterInputNV * pTriangleClusters; + VkClusterAccelerationStructureMoveObjectsInputNV * pMoveObjects; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; - struct PipelineShaderStageCreateInfo + struct ClusterAccelerationStructureInputInfoNV { - using NativeType = VkPipelineShaderStageCreateInfo; + using NativeType = VkClusterAccelerationStructureInputInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eClusterAccelerationStructureInputInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, - 
VULKAN_HPP_NAMESPACE::ShaderModule module_ = {}, - const char * pName_ = {}, - const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , stage( stage_ ) - , module( module_ ) - , pName( pName_ ) - , pSpecializationInfo( pSpecializationInfo_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV( + uint32_t maxAccelerationStructureCount_ = {}, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpTypeNV opType_ = VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpTypeNV::eMoveObjects, + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpModeNV opMode_ = VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpModeNV::eImplicitDestinations, + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpInputNV opInput_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxAccelerationStructureCount{ maxAccelerationStructureCount_ } + , flags{ flags_ } + , opType{ opType_ } + , opMode{ opMode_ } + , opInput{ opInput_ } { } - VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV( ClusterAccelerationStructureInputInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineShaderStageCreateInfo( *reinterpret_cast( &rhs ) ) + ClusterAccelerationStructureInputInfoNV( VkClusterAccelerationStructureInputInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ClusterAccelerationStructureInputInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineShaderStageCreateInfo & operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ClusterAccelerationStructureInputInfoNV & operator=( ClusterAccelerationStructureInputInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ClusterAccelerationStructureInputInfoNV & operator=( VkClusterAccelerationStructureInputInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & + setMaxAccelerationStructureCount( uint32_t maxAccelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + maxAccelerationStructureCount = maxAccelerationStructureCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
ClusterAccelerationStructureInputInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { - stage = stage_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setModule( VULKAN_HPP_NAMESPACE::ShaderModule module_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & + setOpType( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpTypeNV opType_ ) VULKAN_HPP_NOEXCEPT { - module = module_; + opType = opType_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & + setOpMode( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpModeNV opMode_ ) VULKAN_HPP_NOEXCEPT { - pName = pName_; + opMode = opMode_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & - setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & + setOpInput( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpInputNV const & opInput_ ) VULKAN_HPP_NOEXCEPT { - pSpecializationInfo = pSpecializationInfo_; + opInput = opInput_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineShaderStageCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkClusterAccelerationStructureInputInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkClusterAccelerationStructureInputInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -14129,278 +16639,248 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + void * const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR const &, + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpTypeNV const &, + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpModeNV const &, + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpInputNV const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, stage, module, pName, pSpecializationInfo ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - std::strong_ordering operator<=>( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT - { - if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) - return cmp; - if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) - return cmp; - if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) - return cmp; - if ( auto cmp = stage <=> rhs.stage; cmp != 0 ) - return cmp; - if ( auto cmp = module <=> rhs.module; cmp != 0 ) - return cmp; - if ( pName != rhs.pName ) - if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) - return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; - if ( auto cmp = pSpecializationInfo <=> rhs.pSpecializationInfo; cmp != 0 ) - return cmp; - - return std::strong_ordering::equivalent; + return std::tie( sType, pNext, maxAccelerationStructureCount, flags, opType, opMode, opInput ); } #endif - bool operator==( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( module == rhs.module ) && - ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ) && ( pSpecializationInfo == rhs.pSpecializationInfo ); - } - - bool operator!=( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex; - VULKAN_HPP_NAMESPACE::ShaderModule module = {}; - const char * pName = {}; - const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eClusterAccelerationStructureInputInfoNV; + void * pNext = {}; + uint32_t maxAccelerationStructureCount = {}; + VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpTypeNV opType = VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpTypeNV::eMoveObjects; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpModeNV opMode = VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpModeNV::eImplicitDestinations; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureOpInputNV opInput = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineShaderStageCreateInfo; + using Type = ClusterAccelerationStructureInputInfoNV; }; - struct ComputePipelineCreateInfo + struct StridedDeviceAddressRegionKHR { - using NativeType = VkComputePipelineCreateInfo; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineCreateInfo; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, - int32_t basePipelineIndex_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , stage( stage_ ) - , layout( layout_ ) - , basePipelineHandle( basePipelineHandle_ ) - , basePipelineIndex( basePipelineIndex_ ) - { - } - - VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + using NativeType = VkStridedDeviceAddressRegionKHR; - ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : ComputePipelineCreateInfo( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) 
VULKAN_HPP_NOEXCEPT + : deviceAddress{ deviceAddress_ } + , stride{ stride_ } + , size{ size_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - ComputePipelineCreateInfo & operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ComputePipelineCreateInfo & operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + StridedDeviceAddressRegionKHR( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : StridedDeviceAddressRegionKHR( *reinterpret_cast( &rhs ) ) { - pNext = pNext_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } + StridedDeviceAddressRegionKHR & operator=( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT + StridedDeviceAddressRegionKHR & operator=( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - stage = stage_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT { - layout = layout_; + deviceAddress = deviceAddress_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT { - basePipelineHandle = basePipelineHandle_; + stride = stride_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT { - basePipelineIndex = basePipelineIndex_; + size = size_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkComputePipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkStridedDeviceAddressRegionKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkStridedDeviceAddressRegionKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, stage, layout, basePipelineHandle, basePipelineIndex ); + return std::tie( deviceAddress, stride, size ); } #endif #if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ComputePipelineCreateInfo const & ) const = default; + auto operator<=>( StridedDeviceAddressRegionKHR const & ) const = default; #else - bool operator==( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( layout == rhs.layout ) && - ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex ); + return ( deviceAddress == rhs.deviceAddress ) && ( stride == rhs.stride ) && ( size == rhs.size ); # endif } - bool operator!=( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage = {}; - VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; - int32_t basePipelineIndex = {}; - }; - - template <> - struct CppType - { - using Type = ComputePipelineCreateInfo; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize stride = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; }; - struct ConditionalRenderingBeginInfoEXT + struct ClusterAccelerationStructureCommandsInfoNV { - using NativeType = VkConditionalRenderingBeginInfoEXT; + using NativeType = VkClusterAccelerationStructureCommandsInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConditionalRenderingBeginInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eClusterAccelerationStructureCommandsInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, - VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , buffer( buffer_ ) - , offset( offset_ ) - , flags( flags_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + ClusterAccelerationStructureCommandsInfoNV( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV input_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress dstImplicitData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress scratchData_ = {}, + VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR dstAddressesArray_ = {}, + VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR dstSizesArray_ = {}, + VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR srcInfosArray_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress srcInfosCount_ = {}, + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureAddressResolutionFlagsNV addressResolutionFlags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , input{ input_ } + , dstImplicitData{ dstImplicitData_ } + , scratchData{ scratchData_ } + , dstAddressesArray{ dstAddressesArray_ } + , dstSizesArray{ dstSizesArray_ 
} + , srcInfosArray{ srcInfosArray_ } + , srcInfosCount{ srcInfosCount_ } + , addressResolutionFlags{ addressResolutionFlags_ } { } - VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV( ClusterAccelerationStructureCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ConditionalRenderingBeginInfoEXT( *reinterpret_cast( &rhs ) ) + ClusterAccelerationStructureCommandsInfoNV( VkClusterAccelerationStructureCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ClusterAccelerationStructureCommandsInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ConditionalRenderingBeginInfoEXT & operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ClusterAccelerationStructureCommandsInfoNV & operator=( ClusterAccelerationStructureCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ConditionalRenderingBeginInfoEXT & operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ClusterAccelerationStructureCommandsInfoNV & operator=( VkClusterAccelerationStructureCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & + setInput( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV const & input_ ) VULKAN_HPP_NOEXCEPT { - buffer = buffer_; + input = input_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & + setDstImplicitData( VULKAN_HPP_NAMESPACE::DeviceAddress dstImplicitData_ ) VULKAN_HPP_NOEXCEPT { - offset = offset_; + dstImplicitData = dstImplicitData_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setScratchData( VULKAN_HPP_NAMESPACE::DeviceAddress scratchData_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + scratchData = scratchData_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkConditionalRenderingBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & + setDstAddressesArray( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const & dstAddressesArray_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + dstAddressesArray = dstAddressesArray_; + return *this; } - operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
ClusterAccelerationStructureCommandsInfoNV & + setDstSizesArray( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const & dstSizesArray_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + dstSizesArray = dstSizesArray_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & + setSrcInfosArray( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const & srcInfosArray_ ) VULKAN_HPP_NOEXCEPT + { + srcInfosArray = srcInfosArray_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & + setSrcInfosCount( VULKAN_HPP_NAMESPACE::DeviceAddress srcInfosCount_ ) VULKAN_HPP_NOEXCEPT + { + srcInfosCount = srcInfosCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & + setAddressResolutionFlags( VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureAddressResolutionFlagsNV addressResolutionFlags_ ) VULKAN_HPP_NOEXCEPT + { + addressResolutionFlags = addressResolutionFlags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkClusterAccelerationStructureCommandsInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureCommandsInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -14408,800 +16888,790 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + void * const &, + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV const &, + VULKAN_HPP_NAMESPACE::DeviceAddress const &, + VULKAN_HPP_NAMESPACE::DeviceAddress const &, + VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const &, + VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const &, + VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const &, + VULKAN_HPP_NAMESPACE::DeviceAddress const &, + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureAddressResolutionFlagsNV const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, buffer, offset, flags ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ConditionalRenderingBeginInfoEXT const & ) const = default; -#else - bool operator==( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( flags == rhs.flags ); -# endif - } - - bool operator!=( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); + return std::tie( + sType, pNext, input, dstImplicitData, scratchData, dstAddressesArray, dstSizesArray, srcInfosArray, srcInfosCount, addressResolutionFlags ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; - VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eClusterAccelerationStructureCommandsInfoNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureInputInfoNV input = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress dstImplicitData = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress scratchData = {}; + VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR dstAddressesArray = {}; + 
VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR dstSizesArray = {}; + VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR srcInfosArray = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress srcInfosCount = {}; + VULKAN_HPP_NAMESPACE::ClusterAccelerationStructureAddressResolutionFlagsNV addressResolutionFlags = {}; }; template <> - struct CppType + struct CppType { - using Type = ConditionalRenderingBeginInfoEXT; + using Type = ClusterAccelerationStructureCommandsInfoNV; }; - struct ConformanceVersion + struct ClusterAccelerationStructureInstantiateClusterInfoNV { - using NativeType = VkConformanceVersion; + using NativeType = VkClusterAccelerationStructureInstantiateClusterInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ConformanceVersion( uint8_t major_ = {}, uint8_t minor_ = {}, uint8_t subminor_ = {}, uint8_t patch_ = {} ) VULKAN_HPP_NOEXCEPT - : major( major_ ) - , minor( minor_ ) - , subminor( subminor_ ) - , patch( patch_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ClusterAccelerationStructureInstantiateClusterInfoNV( uint32_t clusterIdOffset_ = {}, + uint32_t geometryIndexOffset_ = {}, + uint32_t reserved_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress clusterTemplateAddress_ = {}, + VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV vertexBuffer_ = {} ) VULKAN_HPP_NOEXCEPT + : clusterIdOffset{ clusterIdOffset_ } + , geometryIndexOffset{ geometryIndexOffset_ } + , reserved{ reserved_ } + , clusterTemplateAddress{ clusterTemplateAddress_ } + , vertexBuffer{ vertexBuffer_ } { } - VULKAN_HPP_CONSTEXPR ConformanceVersion( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + ClusterAccelerationStructureInstantiateClusterInfoNV( ClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT : ConformanceVersion( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + ClusterAccelerationStructureInstantiateClusterInfoNV( VkClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ClusterAccelerationStructureInstantiateClusterInfoNV( *reinterpret_cast( &rhs ) ) + { + } - ConformanceVersion & operator=( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ClusterAccelerationStructureInstantiateClusterInfoNV & + operator=( ClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ConformanceVersion & operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT + ClusterAccelerationStructureInstantiateClusterInfoNV & operator=( VkClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setMajor( uint8_t major_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV & setClusterIdOffset( uint32_t clusterIdOffset_ ) VULKAN_HPP_NOEXCEPT { - major = major_; + clusterIdOffset = clusterIdOffset_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setMinor( uint8_t minor_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
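// ---------------------------------------------------------------------------
// Editorial sketch, not part of the generated header or this patch: one way
// the new ClusterAccelerationStructureInputInfoNV / ...CommandsInfoNV structs
// above might be filled through their fluent setters. All device addresses,
// strides and counts are hypothetical placeholders, `vk` assumes the default
// VULKAN_HPP_NAMESPACE alias, and real code would also fill `opInput` to match
// the chosen op type.
#include <vulkan/vulkan.hpp>

inline vk::ClusterAccelerationStructureCommandsInfoNV
  makeClusterMoveCommandsSketch( vk::DeviceAddress dstImplicitData,
                                 vk::DeviceAddress scratchData,
                                 vk::DeviceAddress srcInfos,
                                 vk::DeviceSize    srcInfoStride,
                                 uint32_t          objectCount )
{
  // Describe the operation: move up to objectCount cluster acceleration
  // structures, letting the implementation place the results.
  vk::ClusterAccelerationStructureInputInfoNV input{};
  input.setMaxAccelerationStructureCount( objectCount )
      .setOpType( vk::ClusterAccelerationStructureOpTypeNV::eMoveObjects )
      .setOpMode( vk::ClusterAccelerationStructureOpModeNV::eImplicitDestinations );

  // srcInfosArray uses the StridedDeviceAddressRegionKHR struct also added in
  // this change: base address, stride between elements, total size in bytes.
  vk::StridedDeviceAddressRegionKHR srcInfosArray{ srcInfos, srcInfoStride, objectCount * srcInfoStride };

  return vk::ClusterAccelerationStructureCommandsInfoNV{}
      .setInput( input )
      .setDstImplicitData( dstImplicitData )
      .setScratchData( scratchData )
      .setSrcInfosArray( srcInfosArray );
}
// ---------------------------------------------------------------------------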
ClusterAccelerationStructureInstantiateClusterInfoNV & setGeometryIndexOffset( uint32_t geometryIndexOffset_ ) VULKAN_HPP_NOEXCEPT { - minor = minor_; + geometryIndexOffset = geometryIndexOffset_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setSubminor( uint8_t subminor_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV & setReserved( uint32_t reserved_ ) VULKAN_HPP_NOEXCEPT { - subminor = subminor_; + reserved = reserved_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setPatch( uint8_t patch_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV & + setClusterTemplateAddress( VULKAN_HPP_NAMESPACE::DeviceAddress clusterTemplateAddress_ ) VULKAN_HPP_NOEXCEPT { - patch = patch_; + clusterTemplateAddress = clusterTemplateAddress_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkConformanceVersion const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV & + setVertexBuffer( VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV const & vertexBuffer_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + vertexBuffer = vertexBuffer_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT + operator VkClusterAccelerationStructureInstantiateClusterInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkClusterAccelerationStructureInstantiateClusterInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( major, minor, subminor, patch ); + return std::tie( clusterIdOffset, geometryIndexOffset, reserved, clusterTemplateAddress, vertexBuffer ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ConformanceVersion const & ) const = default; + auto operator<=>( ClusterAccelerationStructureInstantiateClusterInfoNV const & ) const = default; #else - bool operator==( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( major == rhs.major ) && ( minor == rhs.minor ) && ( subminor == rhs.subminor ) && ( patch == rhs.patch ); + return ( clusterIdOffset == rhs.clusterIdOffset ) && ( geometryIndexOffset == rhs.geometryIndexOffset ) && ( reserved == rhs.reserved ) && + ( clusterTemplateAddress == rhs.clusterTemplateAddress ) && ( vertexBuffer == rhs.vertexBuffer ); # endif } - bool operator!=( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint8_t major = {}; - uint8_t minor = {}; - uint8_t subminor = {}; - uint8_t patch = {}; + uint32_t clusterIdOffset = {}; + uint32_t geometryIndexOffset : 24; + uint32_t reserved : 8; + VULKAN_HPP_NAMESPACE::DeviceAddress clusterTemplateAddress = {}; + VULKAN_HPP_NAMESPACE::StridedDeviceAddressNV vertexBuffer = {}; }; - using ConformanceVersionKHR = ConformanceVersion; - struct CooperativeMatrixPropertiesNV + struct 
ClusterAccelerationStructureMoveObjectsInfoNV { - using NativeType = VkCooperativeMatrixPropertiesNV; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesNV; + using NativeType = VkClusterAccelerationStructureMoveObjectsInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( uint32_t MSize_ = {}, - uint32_t NSize_ = {}, - uint32_t KSize_ = {}, - VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, - VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, - VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, - VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, - VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , MSize( MSize_ ) - , NSize( NSize_ ) - , KSize( KSize_ ) - , AType( AType_ ) - , BType( BType_ ) - , CType( CType_ ) - , DType( DType_ ) - , scope( scope_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureMoveObjectsInfoNV( VULKAN_HPP_NAMESPACE::DeviceAddress srcAccelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT + : srcAccelerationStructure{ srcAccelerationStructure_ } { } - VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + ClusterAccelerationStructureMoveObjectsInfoNV( ClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : CooperativeMatrixPropertiesNV( *reinterpret_cast( &rhs ) ) + ClusterAccelerationStructureMoveObjectsInfoNV( VkClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ClusterAccelerationStructureMoveObjectsInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CooperativeMatrixPropertiesNV & operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ClusterAccelerationStructureMoveObjectsInfoNV & operator=( ClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - CooperativeMatrixPropertiesNV & operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + ClusterAccelerationStructureMoveObjectsInfoNV & operator=( VkClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInfoNV & + setSrcAccelerationStructure( VULKAN_HPP_NAMESPACE::DeviceAddress srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + srcAccelerationStructure = srcAccelerationStructure_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setMSize( uint32_t MSize_ ) VULKAN_HPP_NOEXCEPT + 
operator VkClusterAccelerationStructureMoveObjectsInfoNV const &() const VULKAN_HPP_NOEXCEPT { - MSize = MSize_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setNSize( uint32_t NSize_ ) VULKAN_HPP_NOEXCEPT + operator VkClusterAccelerationStructureMoveObjectsInfoNV &() VULKAN_HPP_NOEXCEPT { - NSize = NSize_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setKSize( uint32_t KSize_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - KSize = KSize_; - return *this; + return std::tie( srcAccelerationStructure ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ClusterAccelerationStructureMoveObjectsInfoNV const & ) const = default; +#else + bool operator==( ClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( srcAccelerationStructure == rhs.srcAccelerationStructure ); +# endif } - VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setAType( VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( ClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress srcAccelerationStructure = {}; + }; + + struct CoarseSampleLocationNV + { + using NativeType = VkCoarseSampleLocationNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( uint32_t pixelX_ = {}, uint32_t pixelY_ = {}, uint32_t sample_ = {} ) VULKAN_HPP_NOEXCEPT + : pixelX{ pixelX_ } + , pixelY{ pixelY_ } + , sample{ sample_ } + { + } + + VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CoarseSampleLocationNV( *reinterpret_cast( &rhs ) ) { - AType = AType_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setBType( VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ ) VULKAN_HPP_NOEXCEPT + CoarseSampleLocationNV & operator=( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CoarseSampleLocationNV & operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT { - BType = BType_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setCType( VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setPixelX( uint32_t pixelX_ ) VULKAN_HPP_NOEXCEPT { - CType = CType_; + pixelX = pixelX_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setDType( VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setPixelY( uint32_t pixelY_ ) VULKAN_HPP_NOEXCEPT { - DType = DType_; + pixelY = pixelY_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & setScope( VULKAN_HPP_NAMESPACE::ScopeNV scope_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
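// ---------------------------------------------------------------------------
// Editorial sketch, not part of the generated header or this patch: building
// the new per-object record structs defined above. geometryIndexOffset is a
// 24-bit bitfield, so the value must fit in 24 bits. Addresses and the
// vertex-buffer region are placeholders; `vk` assumes the default
// VULKAN_HPP_NAMESPACE alias.
#include <vulkan/vulkan.hpp>

inline vk::ClusterAccelerationStructureInstantiateClusterInfoNV
  makeInstantiateClusterRecordSketch( vk::DeviceAddress          clusterTemplate,
                                      vk::StridedDeviceAddressNV vertexBuffer,
                                      uint32_t                   clusterIdOffset,
                                      uint32_t                   geometryIndexOffset )
{
  return vk::ClusterAccelerationStructureInstantiateClusterInfoNV{}
      .setClusterIdOffset( clusterIdOffset )
      .setGeometryIndexOffset( geometryIndexOffset )  // packed into a 24-bit field
      .setClusterTemplateAddress( clusterTemplate )
      .setVertexBuffer( vertexBuffer );
}

inline vk::ClusterAccelerationStructureMoveObjectsInfoNV
  makeMoveObjectsRecordSketch( vk::DeviceAddress srcAccelerationStructure )
{
  return vk::ClusterAccelerationStructureMoveObjectsInfoNV{}.setSrcAccelerationStructure( srcAccelerationStructure );
}
// ---------------------------------------------------------------------------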
CoarseSampleLocationNV & setSample( uint32_t sample_ ) VULKAN_HPP_NOEXCEPT { - scope = scope_; + sample = sample_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + operator VkCoarseSampleLocationNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT + operator VkCoarseSampleLocationNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, MSize, NSize, KSize, AType, BType, CType, DType, scope ); + return std::tie( pixelX, pixelY, sample ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CooperativeMatrixPropertiesNV const & ) const = default; + auto operator<=>( CoarseSampleLocationNV const & ) const = default; #else - bool operator==( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MSize == rhs.MSize ) && ( NSize == rhs.NSize ) && ( KSize == rhs.KSize ) && - ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) && ( DType == rhs.DType ) && ( scope == rhs.scope ); + return ( pixelX == rhs.pixelX ) && ( pixelY == rhs.pixelY ) && ( sample == rhs.sample ); # endif } - bool operator!=( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV; - void * pNext = {}; - uint32_t MSize = {}; - uint32_t NSize = {}; - uint32_t KSize = {}; - VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; - VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; - VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; - VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; - VULKAN_HPP_NAMESPACE::ScopeNV scope = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice; + uint32_t pixelX = {}; + uint32_t pixelY = {}; + uint32_t sample = {}; }; - template <> - struct CppType + struct CoarseSampleOrderCustomNV { - using Type = CooperativeMatrixPropertiesNV; - }; + using NativeType = VkCoarseSampleOrderCustomNV; - struct CopyAccelerationStructureInfoKHR - { - using NativeType = VkCopyAccelerationStructureInfoKHR; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations, + uint32_t sampleCount_ = {}, + uint32_t sampleLocationCount_ = {}, + const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT + : shadingRate{ shadingRate_ } + , sampleCount{ sampleCount_ } + , sampleLocationCount{ sampleLocationCount_ } + , 
pSampleLocations{ pSampleLocations_ } + { + } - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureInfoKHR; + VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, - VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , src( src_ ) - , dst( dst_ ) - , mode( mode_ ) + CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CoarseSampleOrderCustomNV( *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - CopyAccelerationStructureInfoKHR( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : CopyAccelerationStructureInfoKHR( *reinterpret_cast( &rhs ) ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_, + uint32_t sampleCount_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sampleLocations_ ) + : shadingRate( shadingRate_ ) + , sampleCount( sampleCount_ ) + , sampleLocationCount( static_cast( sampleLocations_.size() ) ) + , pSampleLocations( sampleLocations_.data() ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - CopyAccelerationStructureInfoKHR & operator=( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CoarseSampleOrderCustomNV & operator=( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - CopyAccelerationStructureInfoKHR & operator=( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + CoarseSampleOrderCustomNV & operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setShadingRate( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + shadingRate = shadingRate_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setSampleCount( uint32_t sampleCount_ ) VULKAN_HPP_NOEXCEPT { - src = src_; + sampleCount = sampleCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setSampleLocationCount( uint32_t sampleLocationCount_ ) VULKAN_HPP_NOEXCEPT { - dst = dst_; + sampleLocationCount = sampleLocationCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 
CopyAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & + setPSampleLocations( const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ ) VULKAN_HPP_NOEXCEPT { - mode = mode_; + pSampleLocations = pSampleLocations_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkCopyAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CoarseSampleOrderCustomNV & setSampleLocations( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + sampleLocationCount = static_cast( sampleLocations_.size() ); + pSampleLocations = sampleLocations_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkCopyAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkCoarseSampleOrderCustomNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkCoarseSampleOrderCustomNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, src, dst, mode ); + return std::tie( shadingRate, sampleCount, sampleLocationCount, pSampleLocations ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CopyAccelerationStructureInfoKHR const & ) const = default; + auto operator<=>( CoarseSampleOrderCustomNV const & ) const = default; #else - bool operator==( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( src == rhs.src ) && ( dst == rhs.dst ) && ( mode == rhs.mode ); + return ( shadingRate == rhs.shadingRate ) && ( sampleCount == rhs.sampleCount ) && ( sampleLocationCount == rhs.sampleLocationCount ) && + ( pSampleLocations == rhs.pSampleLocations ); # endif } - bool operator!=( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {}; - VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations; + uint32_t sampleCount = {}; + uint32_t sampleLocationCount = {}; + const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations = {}; }; - template <> - struct CppType + struct ColorBlendAdvancedEXT { - using Type = CopyAccelerationStructureInfoKHR; - }; + using NativeType = VkColorBlendAdvancedEXT; - struct CopyAccelerationStructureToMemoryInfoKHR - { - using NativeType = VkCopyAccelerationStructureToMemoryInfoKHR; +#if !defined( 
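// ---------------------------------------------------------------------------
// Editorial sketch, not part of the generated header or this patch: using the
// ArrayProxyNoTemporaries convenience setter of CoarseSampleOrderCustomNV
// shown above (available when enhanced mode is enabled). The proxy stores a
// pointer, so the locations array must outlive the returned struct, hence the
// caller-owned reference parameter. `vk` assumes the default
// VULKAN_HPP_NAMESPACE alias; the array size of 4 is arbitrary.
#include <array>
#include <vulkan/vulkan.hpp>

inline vk::CoarseSampleOrderCustomNV
  makeCoarseSampleOrderSketch( vk::ShadingRatePaletteEntryNV                     shadingRate,
                               uint32_t                                          sampleCount,
                               std::array<vk::CoarseSampleLocationNV, 4> const & locations )
{
  return vk::CoarseSampleOrderCustomNV{}
      .setShadingRate( shadingRate )
      .setSampleCount( sampleCount )
      .setSampleLocations( locations );  // also updates sampleLocationCount from locations.size()
}
// ---------------------------------------------------------------------------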
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ColorBlendAdvancedEXT( VULKAN_HPP_NAMESPACE::BlendOp advancedBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, + VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, + VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated, + VULKAN_HPP_NAMESPACE::Bool32 clampResults_ = {} ) VULKAN_HPP_NOEXCEPT + : advancedBlendOp{ advancedBlendOp_ } + , srcPremultiplied{ srcPremultiplied_ } + , dstPremultiplied{ dstPremultiplied_ } + , blendOverlap{ blendOverlap_ } + , clampResults{ clampResults_ } + { + } - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR; + VULKAN_HPP_CONSTEXPR ColorBlendAdvancedEXT( ColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR( - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, - VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , src( src_ ) - , dst( dst_ ) - , mode( mode_ ) + ColorBlendAdvancedEXT( VkColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ColorBlendAdvancedEXT( *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ColorBlendAdvancedEXT & operator=( ColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - CopyAccelerationStructureToMemoryInfoKHR( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : CopyAccelerationStructureToMemoryInfoKHR( *reinterpret_cast( &rhs ) ) + ColorBlendAdvancedEXT & operator=( VkColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - CopyAccelerationStructureToMemoryInfoKHR & operator=( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - CopyAccelerationStructureToMemoryInfoKHR & operator=( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setAdvancedBlendOp( VULKAN_HPP_NAMESPACE::BlendOp advancedBlendOp_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + advancedBlendOp = advancedBlendOp_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + srcPremultiplied = srcPremultiplied_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) 
VULKAN_HPP_NOEXCEPT { - src = src_; + dstPremultiplied = dstPremultiplied_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT { - dst = dst_; + blendOverlap = blendOverlap_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & - setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setClampResults( VULKAN_HPP_NAMESPACE::Bool32 clampResults_ ) VULKAN_HPP_NOEXCEPT { - mode = mode_; + clampResults = clampResults_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkCopyAccelerationStructureToMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkColorBlendAdvancedEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCopyAccelerationStructureToMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkColorBlendAdvancedEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, src, dst, mode ); + return std::tie( advancedBlendOp, srcPremultiplied, dstPremultiplied, blendOverlap, clampResults ); } #endif - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {}; - VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; - }; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ColorBlendAdvancedEXT const & ) const = default; +#else + bool operator==( ColorBlendAdvancedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( advancedBlendOp == rhs.advancedBlendOp ) && ( srcPremultiplied == rhs.srcPremultiplied ) && ( dstPremultiplied == rhs.dstPremultiplied ) && + ( blendOverlap == rhs.blendOverlap ) && ( clampResults == rhs.clampResults ); +# endif + } - template <> - struct CppType - { - using Type = CopyAccelerationStructureToMemoryInfoKHR; + bool operator!=( ColorBlendAdvancedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::BlendOp advancedBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; + VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {}; + VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {}; + VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated; + VULKAN_HPP_NAMESPACE::Bool32 clampResults = {}; }; - struct CopyBufferInfo2 + struct ColorBlendEquationEXT { - using NativeType = VkCopyBufferInfo2; + using NativeType = VkColorBlendEquationEXT; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferInfo2; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR 
CopyBufferInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, - uint32_t regionCount_ = {}, - const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcBuffer( srcBuffer_ ) - , dstBuffer( dstBuffer_ ) - , regionCount( regionCount_ ) - , pRegions( pRegions_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ColorBlendEquationEXT( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, + VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd ) VULKAN_HPP_NOEXCEPT + : srcColorBlendFactor{ srcColorBlendFactor_ } + , dstColorBlendFactor{ dstColorBlendFactor_ } + , colorBlendOp{ colorBlendOp_ } + , srcAlphaBlendFactor{ srcAlphaBlendFactor_ } + , dstAlphaBlendFactor{ dstAlphaBlendFactor_ } + , alphaBlendOp{ alphaBlendOp_ } { } - VULKAN_HPP_CONSTEXPR CopyBufferInfo2( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - CopyBufferInfo2( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : CopyBufferInfo2( *reinterpret_cast( &rhs ) ) {} + VULKAN_HPP_CONSTEXPR ColorBlendEquationEXT( ColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - CopyBufferInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), srcBuffer( srcBuffer_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ) + ColorBlendEquationEXT( VkColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ColorBlendEquationEXT( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CopyBufferInfo2 & operator=( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ColorBlendEquationEXT & operator=( ColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - CopyBufferInfo2 & operator=( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + ColorBlendEquationEXT & operator=( VkColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + srcColorBlendFactor = srcColorBlendFactor_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor 
dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT { - srcBuffer = srcBuffer_; + dstColorBlendFactor = dstColorBlendFactor_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT { - dstBuffer = dstBuffer_; + colorBlendOp = colorBlendOp_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT { - regionCount = regionCount_; + srcAlphaBlendFactor = srcAlphaBlendFactor_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT { - pRegions = pRegions_; + dstAlphaBlendFactor = dstAlphaBlendFactor_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - CopyBufferInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT { - regionCount = static_cast( regions_.size() ); - pRegions = regions_.data(); + alphaBlendOp = alphaBlendOp_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkCopyBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT + operator VkColorBlendEquationEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCopyBufferInfo2 &() VULKAN_HPP_NOEXCEPT + operator VkColorBlendEquationEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, srcBuffer, dstBuffer, regionCount, pRegions ); + return std::tie( srcColorBlendFactor, dstColorBlendFactor, colorBlendOp, srcAlphaBlendFactor, dstAlphaBlendFactor, alphaBlendOp ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CopyBufferInfo2 const & ) const = default; + auto operator<=>( ColorBlendEquationEXT const & ) const = default; #else - bool operator==( CopyBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ColorBlendEquationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) && ( dstBuffer == rhs.dstBuffer ) && - ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); + return ( srcColorBlendFactor == rhs.srcColorBlendFactor ) && ( dstColorBlendFactor == rhs.dstColorBlendFactor ) && ( colorBlendOp == rhs.colorBlendOp ) && + ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) && ( alphaBlendOp == rhs.alphaBlendOp ); # endif } - bool operator!=( CopyBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( 
ColorBlendEquationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferInfo2; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; - VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {}; - uint32_t regionCount = {}; - const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions = {}; - }; - - template <> - struct CppType - { - using Type = CopyBufferInfo2; + VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; + VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; }; - using CopyBufferInfo2KHR = CopyBufferInfo2; - struct CopyBufferToImageInfo2 + struct CommandBufferAllocateInfo { - using NativeType = VkCopyBufferToImageInfo2; + using NativeType = VkCommandBufferAllocateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferToImageInfo2; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferAllocateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, - VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - uint32_t regionCount_ = {}, - const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcBuffer( srcBuffer_ ) - , dstImage( dstImage_ ) - , dstImageLayout( dstImageLayout_ ) - , regionCount( regionCount_ ) - , pRegions( pRegions_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {}, + VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary, + uint32_t commandBufferCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , commandPool{ commandPool_ } + , level{ level_ } + , commandBufferCount{ commandBufferCount_ } { } - VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2( CopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - CopyBufferToImageInfo2( VkCopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - : CopyBufferToImageInfo2( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - CopyBufferToImageInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, - VULKAN_HPP_NAMESPACE::Image dstImage_, - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , srcBuffer( srcBuffer_ ) - , dstImage( dstImage_ ) - , dstImageLayout( dstImageLayout_ ) - , regionCount( static_cast( regions_.size() ) ) - , pRegions( regions_.data() ) + 
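// ---------------------------------------------------------------------------
// Editorial sketch, not part of the generated header or this patch: the
// classic "source over" alpha blend expressed with the ColorBlendEquationEXT
// struct above (typically consumed by vkCmdSetColorBlendEquationEXT when
// extended dynamic state is in use -- usage context assumed, not shown here).
// `vk` assumes the default VULKAN_HPP_NAMESPACE alias.
#include <vulkan/vulkan.hpp>

inline vk::ColorBlendEquationEXT makeAlphaBlendEquationSketch()
{
  return vk::ColorBlendEquationEXT{}
      .setSrcColorBlendFactor( vk::BlendFactor::eSrcAlpha )
      .setDstColorBlendFactor( vk::BlendFactor::eOneMinusSrcAlpha )
      .setColorBlendOp( vk::BlendOp::eAdd )
      .setSrcAlphaBlendFactor( vk::BlendFactor::eOne )
      .setDstAlphaBlendFactor( vk::BlendFactor::eOneMinusSrcAlpha )
      .setAlphaBlendOp( vk::BlendOp::eAdd );
}
// ---------------------------------------------------------------------------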
CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferAllocateInfo( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CopyBufferToImageInfo2 & operator=( CopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CommandBufferAllocateInfo & operator=( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - CopyBufferToImageInfo2 & operator=( VkCopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + CommandBufferAllocateInfo & operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT - { - srcBuffer = srcBuffer_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT - { - dstImage = dstImage_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT - { - dstImageLayout = dstImageLayout_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ ) VULKAN_HPP_NOEXCEPT { - regionCount = regionCount_; + commandPool = commandPool_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setLevel( VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ ) VULKAN_HPP_NOEXCEPT { - pRegions = pRegions_; + level = level_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - CopyBufferToImageInfo2 & - setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT { - regionCount = static_cast( regions_.size() ); - pRegions = regions_.data(); + commandBufferCount = commandBufferCount_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkCopyBufferToImageInfo2 const &() const VULKAN_HPP_NOEXCEPT + operator VkCommandBufferAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCopyBufferToImageInfo2 &() VULKAN_HPP_NOEXCEPT + operator VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -15210,392 +17680,477 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::CommandPool const &, + 
VULKAN_HPP_NAMESPACE::CommandBufferLevel const &, + uint32_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions ); + return std::tie( sType, pNext, commandPool, level, commandBufferCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CopyBufferToImageInfo2 const & ) const = default; + auto operator<=>( CommandBufferAllocateInfo const & ) const = default; #else - bool operator==( CopyBufferToImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) && ( dstImage == rhs.dstImage ) && - ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandPool == rhs.commandPool ) && ( level == rhs.level ) && + ( commandBufferCount == rhs.commandBufferCount ); # endif } - bool operator!=( CopyBufferToImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferToImageInfo2; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; - VULKAN_HPP_NAMESPACE::Image dstImage = {}; - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - uint32_t regionCount = {}; - const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CommandPool commandPool = {}; + VULKAN_HPP_NAMESPACE::CommandBufferLevel level = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary; + uint32_t commandBufferCount = {}; }; template <> - struct CppType + struct CppType { - using Type = CopyBufferToImageInfo2; + using Type = CommandBufferAllocateInfo; }; - using CopyBufferToImageInfo2KHR = CopyBufferToImageInfo2; - struct CopyCommandTransformInfoQCOM + struct CommandBufferInheritanceInfo { - using NativeType = VkCopyCommandTransformInfoQCOM; + using NativeType = VkCommandBufferInheritanceInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyCommandTransformInfoQCOM; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - CopyCommandTransformInfoQCOM( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , transform( transform_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t subpass_ = {}, + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {}, + VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {}, + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = 
{}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , renderPass{ renderPass_ } + , subpass{ subpass_ } + , framebuffer{ framebuffer_ } + , occlusionQueryEnable{ occlusionQueryEnable_ } + , queryFlags{ queryFlags_ } + , pipelineStatistics{ pipelineStatistics_ } { } - VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - CopyCommandTransformInfoQCOM( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT - : CopyCommandTransformInfoQCOM( *reinterpret_cast( &rhs ) ) + CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CopyCommandTransformInfoQCOM & operator=( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CommandBufferInheritanceInfo & operator=( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - CopyCommandTransformInfoQCOM & operator=( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceInfo & operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT { - transform = transform_; + renderPass = renderPass_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkCopyCommandTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + subpass = subpass_; + return *this; } - operator VkCopyCommandTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + framebuffer = framebuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setOcclusionQueryEnable( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT + { + occlusionQueryEnable = occlusionQueryEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setQueryFlags( VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ ) VULKAN_HPP_NOEXCEPT + { + queryFlags = queryFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & + setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT + { + pipelineStatistics = pipelineStatistics_; + return *this; + } +#endif 
/*VULKAN_HPP_NO_SETTERS*/ + + operator VkCommandBufferInheritanceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCommandBufferInheritanceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, transform ); + return std::tie( sType, pNext, renderPass, subpass, framebuffer, occlusionQueryEnable, queryFlags, pipelineStatistics ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CopyCommandTransformInfoQCOM const & ) const = default; + auto operator<=>( CommandBufferInheritanceInfo const & ) const = default; #else - bool operator==( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && ( subpass == rhs.subpass ) && + ( framebuffer == rhs.framebuffer ) && ( occlusionQueryEnable == rhs.occlusionQueryEnable ) && ( queryFlags == rhs.queryFlags ) && + ( pipelineStatistics == rhs.pipelineStatistics ); # endif } - bool operator!=( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyCommandTransformInfoQCOM; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + uint32_t subpass = {}; + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable = {}; + VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags = {}; + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {}; }; template <> - struct CppType + struct CppType { - using Type = CopyCommandTransformInfoQCOM; + using Type = CommandBufferInheritanceInfo; }; - struct CopyDescriptorSet + struct CommandBufferBeginInfo { - using NativeType = VkCopyDescriptorSet; + using NativeType = VkCommandBufferBeginInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyDescriptorSet; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferBeginInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR CopyDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, - uint32_t srcBinding_ = {}, - uint32_t srcArrayElement_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, - uint32_t dstBinding_ = {}, - uint32_t dstArrayElement_ = {}, - uint32_t descriptorCount_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcSet( srcSet_ ) - , srcBinding( srcBinding_ ) - , srcArrayElement( srcArrayElement_ ) - , dstSet( dstSet_ ) - , dstBinding( dstBinding_ ) - , dstArrayElement( dstArrayElement_ ) - 
, descriptorCount( descriptorCount_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {}, + const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , pInheritanceInfo{ pInheritanceInfo_ } { } - VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT : CopyDescriptorSet( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferBeginInfo( *reinterpret_cast( &rhs ) ) + { + } - CopyDescriptorSet & operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CommandBufferBeginInfo & operator=( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT + CommandBufferBeginInfo & operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ ) VULKAN_HPP_NOEXCEPT { - srcSet = srcSet_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcBinding( uint32_t srcBinding_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & + setPInheritanceInfo( const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT { - srcBinding = srcBinding_; + pInheritanceInfo = pInheritanceInfo_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcArrayElement( uint32_t srcArrayElement_ ) VULKAN_HPP_NOEXCEPT + operator VkCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT { - srcArrayElement = srcArrayElement_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT + operator VkCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT { - dstSet = dstSet_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - dstBinding = dstBinding_; + return std::tie( sType, pNext, flags, pInheritanceInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto 
operator<=>( CommandBufferBeginInfo const & ) const = default; +#else + bool operator==( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pInheritanceInfo == rhs.pInheritanceInfo ); +# endif + } + + bool operator!=( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferBeginInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags = {}; + const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo = {}; + }; + + template <> + struct CppType + { + using Type = CommandBufferBeginInfo; + }; + + struct CommandBufferInheritanceConditionalRenderingInfoEXT + { + using NativeType = VkCommandBufferInheritanceConditionalRenderingInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , conditionalRenderingEnable{ conditionalRenderingEnable_ } + { + } + + VULKAN_HPP_CONSTEXPR + CommandBufferInheritanceConditionalRenderingInfoEXT( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceConditionalRenderingInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + CommandBufferInheritanceConditionalRenderingInfoEXT & + operator=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - dstArrayElement = dstArrayElement_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT & + setConditionalRenderingEnable( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ ) VULKAN_HPP_NOEXCEPT { - descriptorCount = descriptorCount_; + conditionalRenderingEnable = conditionalRenderingEnable_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkCopyDescriptorSet const &() const VULKAN_HPP_NOEXCEPT + operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( 
this ); } - operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT + operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, srcSet, srcBinding, srcArrayElement, dstSet, dstBinding, dstArrayElement, descriptorCount ); + return std::tie( sType, pNext, conditionalRenderingEnable ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CopyDescriptorSet const & ) const = default; + auto operator<=>( CommandBufferInheritanceConditionalRenderingInfoEXT const & ) const = default; #else - bool operator==( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSet == rhs.srcSet ) && ( srcBinding == rhs.srcBinding ) && - ( srcArrayElement == rhs.srcArrayElement ) && ( dstSet == rhs.dstSet ) && ( dstBinding == rhs.dstBinding ) && - ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conditionalRenderingEnable == rhs.conditionalRenderingEnable ); # endif } - bool operator!=( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DescriptorSet srcSet = {}; - uint32_t srcBinding = {}; - uint32_t srcArrayElement = {}; - VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {}; - uint32_t dstBinding = {}; - uint32_t dstArrayElement = {}; - uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable = {}; }; template <> - struct CppType + struct CppType { - using Type = CopyDescriptorSet; + using Type = CommandBufferInheritanceConditionalRenderingInfoEXT; }; - struct ImageCopy2 + struct CommandBufferInheritanceRenderPassTransformInfoQCOM { - using NativeType = VkImageCopy2; + using NativeType = VkCommandBufferInheritanceRenderPassTransformInfoQCOM; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCopy2; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageCopy2( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcSubresource( srcSubresource_ ) - , srcOffset( srcOffset_ ) - , dstSubresource( dstSubresource_ ) - , dstOffset( dstOffset_ ) - 
, extent( extent_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM( + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , transform{ transform_ } + , renderArea{ renderArea_ } { } - VULKAN_HPP_CONSTEXPR ImageCopy2( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ImageCopy2( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCopy2( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - ImageCopy2 & operator=( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ImageCopy2 & operator=( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + VULKAN_HPP_CONSTEXPR + CommandBufferInheritanceRenderPassTransformInfoQCOM( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceRenderPassTransformInfoQCOM( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceRenderPassTransformInfoQCOM( *reinterpret_cast( &rhs ) ) { - pNext = pNext_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT - { - srcSubresource = srcSubresource_; - return *this; - } + CommandBufferInheritanceRenderPassTransformInfoQCOM & + operator=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { - srcOffset = srcOffset_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - dstSubresource = dstSubresource_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & + setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT { - dstOffset = dstOffset_; + transform = transform_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & + setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT { - extent = extent_; + renderArea = renderArea_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - 
operator VkImageCopy2 const &() const VULKAN_HPP_NOEXCEPT + operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageCopy2 &() VULKAN_HPP_NOEXCEPT + operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -15603,172 +18158,183 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + void * const &, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &, + VULKAN_HPP_NAMESPACE::Rect2D const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); + return std::tie( sType, pNext, transform, renderArea ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageCopy2 const & ) const = default; + auto operator<=>( CommandBufferInheritanceRenderPassTransformInfoQCOM const & ) const = default; #else - bool operator==( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && - ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ) && ( renderArea == rhs.renderArea ); # endif } - bool operator!=( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCopy2; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; - VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; }; template <> - struct CppType + struct CppType { - using Type = ImageCopy2; + using Type = CommandBufferInheritanceRenderPassTransformInfoQCOM; }; - using ImageCopy2KHR = ImageCopy2; - struct CopyImageInfo2 + struct CommandBufferInheritanceRenderingInfo { - using NativeType = VkCopyImageInfo2; + using NativeType = VkCommandBufferInheritanceRenderingInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageInfo2; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderingInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR CopyImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - 
VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - uint32_t regionCount_ = {}, - const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcImage( srcImage_ ) - , srcImageLayout( srcImageLayout_ ) - , dstImage( dstImage_ ) - , dstImageLayout( dstImageLayout_ ) - , regionCount( regionCount_ ) - , pRegions( pRegions_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CommandBufferInheritanceRenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {}, + uint32_t viewMask_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , viewMask{ viewMask_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachmentFormats{ pColorAttachmentFormats_ } + , depthAttachmentFormat{ depthAttachmentFormat_ } + , stencilAttachmentFormat{ stencilAttachmentFormat_ } + , rasterizationSamples{ rasterizationSamples_ } { } - VULKAN_HPP_CONSTEXPR CopyImageInfo2( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderingInfo( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - CopyImageInfo2( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : CopyImageInfo2( *reinterpret_cast( &rhs ) ) {} + CommandBufferInheritanceRenderingInfo( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferInheritanceRenderingInfo( *reinterpret_cast( &rhs ) ) + { + } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - CopyImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, - VULKAN_HPP_NAMESPACE::Image dstImage_, - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, - const void * pNext_ = nullptr ) + CommandBufferInheritanceRenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_, + uint32_t viewMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + const void * pNext_ = nullptr ) : pNext( pNext_ ) - , srcImage( srcImage_ ) - , srcImageLayout( srcImageLayout_ ) - , dstImage( dstImage_ ) - , dstImageLayout( dstImageLayout_ ) - , regionCount( static_cast( regions_.size() ) ) - , pRegions( regions_.data() ) + , flags( flags_ ) + , viewMask( viewMask_ ) + , colorAttachmentCount( static_cast( colorAttachmentFormats_.size() ) ) + , pColorAttachmentFormats( colorAttachmentFormats_.data() ) + , depthAttachmentFormat( depthAttachmentFormat_ ) + , stencilAttachmentFormat( stencilAttachmentFormat_ ) + , rasterizationSamples( rasterizationSamples_ ) { 
} # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CopyImageInfo2 & operator=( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CommandBufferInheritanceRenderingInfo & operator=( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - CopyImageInfo2 & operator=( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceRenderingInfo & operator=( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ ) VULKAN_HPP_NOEXCEPT { - srcImage = srcImage_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT { - srcImageLayout = srcImageLayout_; + viewMask = viewMask_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT { - dstImage = dstImage_; + colorAttachmentCount = colorAttachmentCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & + setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT { - dstImageLayout = dstImageLayout_; + pColorAttachmentFormats = pColorAttachmentFormats_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CommandBufferInheritanceRenderingInfo & setColorAttachmentFormats( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT { - regionCount = regionCount_; + colorAttachmentCount = static_cast( colorAttachmentFormats_.size() ); + pColorAttachmentFormats = colorAttachmentFormats_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & + setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + { + depthAttachmentFormat = depthAttachmentFormat_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & + setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT { 
- pRegions = pRegions_; + stencilAttachmentFormat = stencilAttachmentFormat_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - CopyImageInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & + setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT { - regionCount = static_cast( regions_.size() ); - pRegions = regions_.data(); + rasterizationSamples = rasterizationSamples_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkCopyImageInfo2 const &() const VULKAN_HPP_NOEXCEPT + operator VkCommandBufferInheritanceRenderingInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCopyImageInfo2 &() VULKAN_HPP_NOEXCEPT + operator VkCommandBufferInheritanceRenderingInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -15777,295 +18343,251 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple -# endif + uint32_t const &, + const VULKAN_HPP_NAMESPACE::Format * const &, + VULKAN_HPP_NAMESPACE::Format const &, + VULKAN_HPP_NAMESPACE::Format const &, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + return std::tie( + sType, pNext, flags, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat, rasterizationSamples ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CopyImageInfo2 const & ) const = default; + auto operator<=>( CommandBufferInheritanceRenderingInfo const & ) const = default; #else - bool operator==( CopyImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) && - ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewMask == rhs.viewMask ) && + ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) && + ( depthAttachmentFormat == rhs.depthAttachmentFormat ) && ( stencilAttachmentFormat == rhs.stencilAttachmentFormat ) && + ( rasterizationSamples == rhs.rasterizationSamples ); # endif } - bool operator!=( CopyImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageInfo2; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Image srcImage = {}; - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - VULKAN_HPP_NAMESPACE::Image dstImage = {}; - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = 
VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - uint32_t regionCount = {}; - const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderingInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderingFlags flags = {}; + uint32_t viewMask = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {}; + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; }; template <> - struct CppType + struct CppType { - using Type = CopyImageInfo2; + using Type = CommandBufferInheritanceRenderingInfo; }; - using CopyImageInfo2KHR = CopyImageInfo2; - - struct CopyImageToBufferInfo2 - { - using NativeType = VkCopyImageToBufferInfo2; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToBufferInfo2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, - uint32_t regionCount_ = {}, - const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcImage( srcImage_ ) - , srcImageLayout( srcImageLayout_ ) - , dstBuffer( dstBuffer_ ) - , regionCount( regionCount_ ) - , pRegions( pRegions_ ) - { - } + using CommandBufferInheritanceRenderingInfoKHR = CommandBufferInheritanceRenderingInfo; - VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2( CopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + struct Viewport + { + using NativeType = VkViewport; - CopyImageToBufferInfo2( VkCopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - : CopyImageToBufferInfo2( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + Viewport( float x_ = {}, float y_ = {}, float width_ = {}, float height_ = {}, float minDepth_ = {}, float maxDepth_ = {} ) VULKAN_HPP_NOEXCEPT + : x{ x_ } + , y{ y_ } + , width{ width_ } + , height{ height_ } + , minDepth{ minDepth_ } + , maxDepth{ maxDepth_ } { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - CopyImageToBufferInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , srcImage( srcImage_ ) - , srcImageLayout( srcImageLayout_ ) - , dstBuffer( dstBuffer_ ) - , regionCount( static_cast( regions_.size() ) ) - , pRegions( regions_.data() ) - { - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default; - CopyImageToBufferInfo2 & operator=( CopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT : Viewport( *reinterpret_cast( &rhs ) ) {} - CopyImageToBufferInfo2 & operator=( VkCopyImageToBufferInfo2 const & 
rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + Viewport & operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT { - srcImage = srcImage_; + x = x_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT { - srcImageLayout = srcImageLayout_; + y = y_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT { - dstBuffer = dstBuffer_; + width = width_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT { - regionCount = regionCount_; + height = height_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT { - pRegions = pRegions_; + minDepth = minDepth_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - CopyImageToBufferInfo2 & - setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT { - regionCount = static_cast( regions_.size() ); - pRegions = regions_.data(); + maxDepth = maxDepth_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkCopyImageToBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT + operator VkViewport const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCopyImageToBufferInfo2 &() VULKAN_HPP_NOEXCEPT + operator VkViewport &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); + return std::tie( x, y, width, height, minDepth, maxDepth ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CopyImageToBufferInfo2 const & ) const = default; + auto operator<=>( Viewport const & ) const = default; #else - bool operator==( CopyImageToBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == 
rhs.reflect();
 #  else
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) &&
-             ( dstBuffer == rhs.dstBuffer ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions );
+      return ( x == rhs.x ) && ( y == rhs.y ) && ( width == rhs.width ) && ( height == rhs.height ) && ( minDepth == rhs.minDepth ) &&
+             ( maxDepth == rhs.maxDepth );
 #  endif
     }

-    bool operator!=( CopyImageToBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToBufferInfo2;
-    const void * pNext = {};
-    VULKAN_HPP_NAMESPACE::Image srcImage = {};
-    VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
-    VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {};
-    uint32_t regionCount = {};
-    const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions = {};
-  };
-
-  template <>
-  struct CppType<StructureType, StructureType::eCopyImageToBufferInfo2>
-  {
-    using Type = CopyImageToBufferInfo2;
+    float x = {};
+    float y = {};
+    float width = {};
+    float height = {};
+    float minDepth = {};
+    float maxDepth = {};
   };

-  using CopyImageToBufferInfo2KHR = CopyImageToBufferInfo2;
-
-  struct CopyMemoryToAccelerationStructureInfoKHR
+  struct CommandBufferInheritanceViewportScissorInfoNV
   {
-    using NativeType = VkCopyMemoryToAccelerationStructureInfoKHR;
+    using NativeType = VkCommandBufferInheritanceViewportScissorInfoNV;

     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV;

-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR(
-      VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {},
-      VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {},
-      VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone,
-      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
-      : pNext( pNext_ )
-      , src( src_ )
-      , dst( dst_ )
-      , mode( mode_ )
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_ = {},
+                                                                        uint32_t viewportDepthCount_ = {},
+                                                                        const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ = {},
+                                                                        const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , viewportScissor2D{ viewportScissor2D_ }
+      , viewportDepthCount{ viewportDepthCount_ }
+      , pViewportDepths{ pViewportDepths_ }
     {
     }

-    VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR
+    CommandBufferInheritanceViewportScissorInfoNV( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-    CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-      : CopyMemoryToAccelerationStructureInfoKHR( *reinterpret_cast<CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs ) )
+    CommandBufferInheritanceViewportScissorInfoNV( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : CommandBufferInheritanceViewportScissorInfoNV(
*reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CopyMemoryToAccelerationStructureInfoKHR & operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CommandBufferInheritanceViewportScissorInfoNV & operator=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - CopyMemoryToAccelerationStructureInfoKHR & operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + CommandBufferInheritanceViewportScissorInfoNV & operator=( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & - setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & + setViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D_ ) VULKAN_HPP_NOEXCEPT { - src = src_; + viewportScissor2D = viewportScissor2D_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setViewportDepthCount( uint32_t viewportDepthCount_ ) VULKAN_HPP_NOEXCEPT { - dst = dst_; + viewportDepthCount = viewportDepthCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & - setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & + setPViewportDepths( const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ ) VULKAN_HPP_NOEXCEPT { - mode = mode_; + pViewportDepths = pViewportDepths_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkCopyMemoryToAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkCommandBufferInheritanceViewportScissorInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCopyMemoryToAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkCommandBufferInheritanceViewportScissorInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -16074,454 +18596,428 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::Bool32 const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::Viewport * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, src, dst, mode ); + return std::tie( sType, pNext, viewportScissor2D, viewportDepthCount, pViewportDepths ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandBufferInheritanceViewportScissorInfoNV const & ) const = default; +#else + bool operator==( 
CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewportScissor2D == rhs.viewportScissor2D ) && + ( viewportDepthCount == rhs.viewportDepthCount ) && ( pViewportDepths == rhs.pViewportDepths ); +# endif + } + + bool operator!=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {}; - VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 viewportScissor2D = {}; + uint32_t viewportDepthCount = {}; + const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths = {}; }; template <> - struct CppType + struct CppType { - using Type = CopyMemoryToAccelerationStructureInfoKHR; + using Type = CommandBufferInheritanceViewportScissorInfoNV; }; - struct CuFunctionCreateInfoNVX + struct CommandBufferSubmitInfo { - using NativeType = VkCuFunctionCreateInfoNVX; + using NativeType = VkCommandBufferSubmitInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuFunctionCreateInfoNVX; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferSubmitInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - CuFunctionCreateInfoNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ = {}, const char * pName_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , module( module_ ) - , pName( pName_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ = {}, + uint32_t deviceMask_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , commandBuffer{ commandBuffer_ } + , deviceMask{ deviceMask_ } { } - VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - CuFunctionCreateInfoNVX( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - : CuFunctionCreateInfoNVX( *reinterpret_cast( &rhs ) ) + CommandBufferSubmitInfo( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandBufferSubmitInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CuFunctionCreateInfoNVX & operator=( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CommandBufferSubmitInfo & operator=( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - CuFunctionCreateInfoNVX & operator=( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + CommandBufferSubmitInfo & operator=( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + 
*this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setModule( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setCommandBuffer( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ ) VULKAN_HPP_NOEXCEPT { - module = module_; + commandBuffer = commandBuffer_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT { - pName = pName_; + deviceMask = deviceMask_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkCuFunctionCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT + operator VkCommandBufferSubmitInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCuFunctionCreateInfoNVX &() VULKAN_HPP_NOEXCEPT + operator VkCommandBufferSubmitInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, module, pName ); + return std::tie( sType, pNext, commandBuffer, deviceMask ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - std::strong_ordering operator<=>( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT - { - if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) - return cmp; - if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) - return cmp; - if ( auto cmp = module <=> rhs.module; cmp != 0 ) - return cmp; - if ( pName != rhs.pName ) - if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) - return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; - - return std::strong_ordering::equivalent; - } -#endif - - bool operator==( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + auto operator<=>( CommandBufferSubmitInfo const & ) const = default; +#else + bool operator==( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( module == rhs.module ) && ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ); +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandBuffer == rhs.commandBuffer ) && ( deviceMask == rhs.deviceMask ); +# endif } - bool operator!=( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuFunctionCreateInfoNVX; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::CuModuleNVX module = {}; - const char * pName = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferSubmitInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer = {}; + uint32_t deviceMask = {}; }; template <> - struct CppType + struct CppType { - using Type = CuFunctionCreateInfoNVX; + using Type = CommandBufferSubmitInfo; }; - struct CuLaunchInfoNVX + using CommandBufferSubmitInfoKHR = CommandBufferSubmitInfo; + + struct CommandPoolCreateInfo { - using NativeType = VkCuLaunchInfoNVX; + using NativeType = VkCommandPoolCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuLaunchInfoNVX; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_ = {}, - uint32_t gridDimX_ = {}, - uint32_t gridDimY_ = {}, - uint32_t gridDimZ_ = {}, - uint32_t blockDimX_ = {}, - uint32_t blockDimY_ = {}, - uint32_t blockDimZ_ = {}, - uint32_t sharedMemBytes_ = {}, - size_t paramCount_ = {}, - const void * const * pParams_ = {}, - size_t extraCount_ = {}, - const void * const * pExtras_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , function( function_ ) - , gridDimX( gridDimX_ ) - , gridDimY( gridDimY_ ) - , gridDimZ( gridDimZ_ ) - , blockDimX( blockDimX_ ) - , blockDimY( blockDimY_ ) - , blockDimZ( blockDimZ_ ) - , sharedMemBytes( sharedMemBytes_ ) - , paramCount( paramCount_ ) - , pParams( pParams_ ) - , extraCount( extraCount_ ) - , pExtras( pExtras_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {}, + uint32_t queueFamilyIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , queueFamilyIndex{ queueFamilyIndex_ } { } - VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - CuLaunchInfoNVX( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT : CuLaunchInfoNVX( *reinterpret_cast( &rhs ) ) {} + VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if 
!defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - CuLaunchInfoNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_, - uint32_t gridDimX_, - uint32_t gridDimY_, - uint32_t gridDimZ_, - uint32_t blockDimX_, - uint32_t blockDimY_, - uint32_t blockDimZ_, - uint32_t sharedMemBytes_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & params_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & extras_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , function( function_ ) - , gridDimX( gridDimX_ ) - , gridDimY( gridDimY_ ) - , gridDimZ( gridDimZ_ ) - , blockDimX( blockDimX_ ) - , blockDimY( blockDimY_ ) - , blockDimZ( blockDimZ_ ) - , sharedMemBytes( sharedMemBytes_ ) - , paramCount( params_.size() ) - , pParams( params_.data() ) - , extraCount( extras_.size() ) - , pExtras( extras_.data() ) + CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CommandPoolCreateInfo( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CuLaunchInfoNVX & operator=( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CommandPoolCreateInfo & operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - CuLaunchInfoNVX & operator=( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + CommandPoolCreateInfo & operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setFunction( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - function = function_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimX( uint32_t gridDimX_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT { - gridDimX = gridDimX_; + queueFamilyIndex = queueFamilyIndex_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimY( uint32_t gridDimY_ ) VULKAN_HPP_NOEXCEPT + operator VkCommandPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - gridDimY = gridDimY_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimZ( uint32_t gridDimZ_ ) VULKAN_HPP_NOEXCEPT + operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT { - gridDimZ = gridDimZ_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimX( uint32_t blockDimX_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - blockDimX = blockDimX_; - return *this; + return std::tie( sType, pNext, flags, queueFamilyIndex ); } +#endif - VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimY( uint32_t blockDimY_ ) VULKAN_HPP_NOEXCEPT +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CommandPoolCreateInfo const & ) const = default; +#else + bool operator==( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - blockDimY = blockDimY_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex ); +# endif } - VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimZ( uint32_t blockDimZ_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - blockDimZ = blockDimZ_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setSharedMemBytes( uint32_t sharedMemBytes_ ) VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {}; + uint32_t queueFamilyIndex = {}; + }; + + template <> + struct CppType + { + using Type = CommandPoolCreateInfo; + }; + + struct SpecializationMapEntry + { + using NativeType = VkSpecializationMapEntry; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SpecializationMapEntry( uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {} ) VULKAN_HPP_NOEXCEPT + : constantID{ constantID_ } + , offset{ offset_ } + , size{ size_ } { - sharedMemBytes = sharedMemBytes_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setParamCount( size_t paramCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT + : SpecializationMapEntry( *reinterpret_cast( &rhs ) ) { - paramCount = paramCount_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPParams( const void * const * pParams_ ) VULKAN_HPP_NOEXCEPT + SpecializationMapEntry & operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SpecializationMapEntry & operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT { - pParams = pParams_; + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - CuLaunchInfoNVX & setParams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & params_ ) VULKAN_HPP_NOEXCEPT - { - paramCount = params_.size(); - pParams = params_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setExtraCount( size_t extraCount_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setConstantID( uint32_t constantID_ ) VULKAN_HPP_NOEXCEPT { - extraCount = extraCount_; + constantID = constantID_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPExtras( const void * const * pExtras_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT { - pExtras = pExtras_; + offset = offset_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - CuLaunchInfoNVX & setExtras( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & extras_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
SpecializationMapEntry & setSize( size_t size_ ) VULKAN_HPP_NOEXCEPT { - extraCount = extras_.size(); - pExtras = extras_.data(); + size = size_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkCuLaunchInfoNVX const &() const VULKAN_HPP_NOEXCEPT + operator VkSpecializationMapEntry const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCuLaunchInfoNVX &() VULKAN_HPP_NOEXCEPT + operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( - sType, pNext, function, gridDimX, gridDimY, gridDimZ, blockDimX, blockDimY, blockDimZ, sharedMemBytes, paramCount, pParams, extraCount, pExtras ); + return std::tie( constantID, offset, size ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CuLaunchInfoNVX const & ) const = default; + auto operator<=>( SpecializationMapEntry const & ) const = default; #else - bool operator==( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( function == rhs.function ) && ( gridDimX == rhs.gridDimX ) && ( gridDimY == rhs.gridDimY ) && - ( gridDimZ == rhs.gridDimZ ) && ( blockDimX == rhs.blockDimX ) && ( blockDimY == rhs.blockDimY ) && ( blockDimZ == rhs.blockDimZ ) && - ( sharedMemBytes == rhs.sharedMemBytes ) && ( paramCount == rhs.paramCount ) && ( pParams == rhs.pParams ) && ( extraCount == rhs.extraCount ) && - ( pExtras == rhs.pExtras ); + return ( constantID == rhs.constantID ) && ( offset == rhs.offset ) && ( size == rhs.size ); # endif } - bool operator!=( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuLaunchInfoNVX; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::CuFunctionNVX function = {}; - uint32_t gridDimX = {}; - uint32_t gridDimY = {}; - uint32_t gridDimZ = {}; - uint32_t blockDimX = {}; - uint32_t blockDimY = {}; - uint32_t blockDimZ = {}; - uint32_t sharedMemBytes = {}; - size_t paramCount = {}; - const void * const * pParams = {}; - size_t extraCount = {}; - const void * const * pExtras = {}; - }; - - template <> - struct CppType - { - using Type = CuLaunchInfoNVX; + uint32_t constantID = {}; + uint32_t offset = {}; + size_t size = {}; }; - struct CuModuleCreateInfoNVX + struct SpecializationInfo { - using NativeType = VkCuModuleCreateInfoNVX; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuModuleCreateInfoNVX; + using NativeType = VkSpecializationInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( size_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , dataSize( dataSize_ ) - , pData( pData_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SpecializationInfo( uint32_t mapEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ = {}, + size_t dataSize_ = {}, + const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT + : mapEntryCount{ mapEntryCount_ } + , pMapEntries{ pMapEntries_ } + , dataSize{ dataSize_ } + , pData{ pData_ } { } - VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - CuModuleCreateInfoNVX( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - : CuModuleCreateInfoNVX( *reinterpret_cast( &rhs ) ) - { - } + SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SpecializationInfo( *reinterpret_cast( &rhs ) ) {} # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template - CuModuleCreateInfoNVX( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_, const void * pNext_ = nullptr ) - : pNext( pNext_ ), dataSize( data_.size() * sizeof( T ) ), pData( data_.data() ) + SpecializationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & mapEntries_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ = {} ) + : mapEntryCount( static_cast( mapEntries_.size() ) ) + , pMapEntries( mapEntries_.data() ) + , dataSize( data_.size() * sizeof( T ) ) + , pData( data_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CuModuleCreateInfoNVX & operator=( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SpecializationInfo & operator=( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - CuModuleCreateInfoNVX & operator=( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + SpecializationInfo & operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setMapEntryCount( uint32_t mapEntryCount_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + mapEntryCount = mapEntryCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ ) VULKAN_HPP_NOEXCEPT + { + pMapEntries = pMapEntries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SpecializationInfo & + setMapEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & mapEntries_ ) VULKAN_HPP_NOEXCEPT + { + mapEntryCount = static_cast( mapEntries_.size() ); + pMapEntries = mapEntries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT { dataSize = dataSize_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT { pData = pData_; return *this; @@ -16529,419 +19025,428 @@ namespace VULKAN_HPP_NAMESPACE 
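  // ------------------------------------------------------------------------------------------------
  // Editorial sketch, not part of the generated header or of this patch: a hedged example of how the
  // SpecializationMapEntry / SpecializationInfo / PipelineShaderStageCreateInfo structs touched in
  // this hunk are typically wired together to specialize a compute shader constant.  The shader
  // module handle, the constant_id = 0 layout, and the helper name are assumptions for illustration,
  // and the generated setters are assumed to be enabled (VULKAN_HPP_NO_SETTERS not defined).  The
  // caller must keep `entry`, `specializationInfo`, and `workgroupSize` alive for as long as the
  // returned stage description is used, because only pointers to them are stored.
  inline VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo
    makeSpecializedComputeStage( VULKAN_HPP_NAMESPACE::ShaderModule             shaderModule,
                                 uint32_t const &                               workgroupSize,
                                 VULKAN_HPP_NAMESPACE::SpecializationMapEntry & entry,
                                 VULKAN_HPP_NAMESPACE::SpecializationInfo &     specializationInfo )
  {
    // Describe where constant_id = 0 lives inside the data blob handed to the driver.
    entry.setConstantID( 0 ).setOffset( 0 ).setSize( sizeof( uint32_t ) );

    // Point the SpecializationInfo at the single map entry and at the caller-owned value.
    specializationInfo.setMapEntryCount( 1 ).setPMapEntries( &entry ).setDataSize( sizeof( uint32_t ) ).setPData( &workgroupSize );

    // Fill the shader stage description; members that are not set keep their value-initialized defaults.
    VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage;
    stage.setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eCompute )
      .setModule( shaderModule )
      .setPName( "main" )
      .setPSpecializationInfo( &specializationInfo );
    return stage;
  }
  // ------------------------------------------------------------------------------------------------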
# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template - CuModuleCreateInfoNVX & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT + SpecializationInfo & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT { dataSize = data_.size() * sizeof( T ); pData = data_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkCuModuleCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT + operator VkSpecializationInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCuModuleCreateInfoNVX &() VULKAN_HPP_NOEXCEPT + operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, dataSize, pData ); + return std::tie( mapEntryCount, pMapEntries, dataSize, pData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CuModuleCreateInfoNVX const & ) const = default; + auto operator<=>( SpecializationInfo const & ) const = default; #else - bool operator==( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); + return ( mapEntryCount == rhs.mapEntryCount ) && ( pMapEntries == rhs.pMapEntries ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); # endif } - bool operator!=( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuModuleCreateInfoNVX; - const void * pNext = {}; - size_t dataSize = {}; - const void * pData = {}; - }; - - template <> - struct CppType - { - using Type = CuModuleCreateInfoNVX; + uint32_t mapEntryCount = {}; + const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries = {}; + size_t dataSize = {}; + const void * pData = {}; }; -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - struct D3D12FenceSubmitInfoKHR + struct PipelineShaderStageCreateInfo { - using NativeType = VkD3D12FenceSubmitInfoKHR; + using NativeType = VkPipelineShaderStageCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eD3D12FenceSubmitInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageCreateInfo; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = {}, - const uint64_t * pWaitSemaphoreValues_ = {}, - uint32_t signalSemaphoreValuesCount_ = {}, - const uint64_t * pSignalSemaphoreValues_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , waitSemaphoreValuesCount( waitSemaphoreValuesCount_ ) - , pWaitSemaphoreValues( pWaitSemaphoreValues_ ) - , signalSemaphoreValuesCount( signalSemaphoreValuesCount_ ) - , pSignalSemaphoreValues( 
pSignalSemaphoreValues_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, + VULKAN_HPP_NAMESPACE::ShaderModule module_ = {}, + const char * pName_ = {}, + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , stage{ stage_ } + , module{ module_ } + , pName{ pName_ } + , pSpecializationInfo{ pSpecializationInfo_ } { } - VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : D3D12FenceSubmitInfoKHR( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - D3D12FenceSubmitInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , waitSemaphoreValuesCount( static_cast( waitSemaphoreValues_.size() ) ) - , pWaitSemaphoreValues( waitSemaphoreValues_.data() ) - , signalSemaphoreValuesCount( static_cast( signalSemaphoreValues_.size() ) ) - , pSignalSemaphoreValues( signalSemaphoreValues_.data() ) + PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineShaderStageCreateInfo( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - D3D12FenceSubmitInfoKHR & operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineShaderStageCreateInfo & operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT - { - waitSemaphoreValuesCount = waitSemaphoreValuesCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - pWaitSemaphoreValues = pWaitSemaphoreValues_; + flags = flags_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - D3D12FenceSubmitInfoKHR & - 
setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT { - waitSemaphoreValuesCount = static_cast( waitSemaphoreValues_.size() ); - pWaitSemaphoreValues = waitSemaphoreValues_.data(); + stage = stage_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setModule( VULKAN_HPP_NAMESPACE::ShaderModule module_ ) VULKAN_HPP_NOEXCEPT { - signalSemaphoreValuesCount = signalSemaphoreValuesCount_; + module = module_; return *this; } - VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT { - pSignalSemaphoreValues = pSignalSemaphoreValues_; + pName = pName_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - D3D12FenceSubmitInfoKHR & - setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & + setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT { - signalSemaphoreValuesCount = static_cast( signalSemaphoreValues_.size() ); - pSignalSemaphoreValues = signalSemaphoreValues_.data(); + pSpecializationInfo = pSpecializationInfo_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkD3D12FenceSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineShaderStageCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags const &, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits const &, + VULKAN_HPP_NAMESPACE::ShaderModule const &, + const char * const &, + const VULKAN_HPP_NAMESPACE::SpecializationInfo * const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, waitSemaphoreValuesCount, pWaitSemaphoreValues, signalSemaphoreValuesCount, pSignalSemaphoreValues ); + return std::tie( sType, pNext, flags, stage, module, pName, pSpecializationInfo ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( D3D12FenceSubmitInfoKHR const & ) const = default; -# else - bool operator==( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == 
rhs.pNext ) && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount ) && - ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount ) && - ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues ); -# endif + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = stage <=> rhs.stage; cmp != 0 ) + return cmp; + if ( auto cmp = module <=> rhs.module; cmp != 0 ) + return cmp; + if ( pName != rhs.pName ) + if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = pSpecializationInfo <=> rhs.pSpecializationInfo; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; } +#endif - bool operator!=( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( module == rhs.module ) && + ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ) && ( pSpecializationInfo == rhs.pSpecializationInfo ); + } + + bool operator!=( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR; - const void * pNext = {}; - uint32_t waitSemaphoreValuesCount = {}; - const uint64_t * pWaitSemaphoreValues = {}; - uint32_t signalSemaphoreValuesCount = {}; - const uint64_t * pSignalSemaphoreValues = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex; + VULKAN_HPP_NAMESPACE::ShaderModule module = {}; + const char * pName = {}; + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo = {}; }; template <> - struct CppType + struct CppType { - using Type = D3D12FenceSubmitInfoKHR; + using Type = PipelineShaderStageCreateInfo; }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct DebugMarkerMarkerInfoEXT + struct ComputePipelineCreateInfo { - using NativeType = VkDebugMarkerMarkerInfoEXT; + using NativeType = VkComputePipelineCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerMarkerInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 - DebugMarkerMarkerInfoEXT( const char * pMarkerName_ = {}, std::array const & color_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pMarkerName( pMarkerName_ ) - , color( color_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + 
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , stage{ stage_ } + , layout{ layout_ } + , basePipelineHandle{ basePipelineHandle_ } + , basePipelineIndex{ basePipelineIndex_ } { } - VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : DebugMarkerMarkerInfoEXT( *reinterpret_cast( &rhs ) ) + ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ComputePipelineCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DebugMarkerMarkerInfoEXT & operator=( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ComputePipelineCreateInfo & operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DebugMarkerMarkerInfoEXT & operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ComputePipelineCreateInfo & operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setPMarkerName( const char * pMarkerName_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - pMarkerName = pMarkerName_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setColor( std::array color_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT { - color = color_; + stage = stage_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDebugMarkerMarkerInfoEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + layout = layout_; + return *this; } - operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + basePipelineHandle = basePipelineHandle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineIndex = basePipelineIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkComputePipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - 
std::tuple const &> + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pMarkerName, color ); + return std::tie( sType, pNext, flags, stage, layout, basePipelineHandle, basePipelineIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - std::partial_ordering operator<=>( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) - return cmp; - if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) - return cmp; - if ( pMarkerName != rhs.pMarkerName ) - if ( auto cmp = strcmp( pMarkerName, rhs.pMarkerName ); cmp != 0 ) - return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater; - if ( auto cmp = color <=> rhs.color; cmp != 0 ) - return cmp; - - return std::partial_ordering::equivalent; - } -#endif - - bool operator==( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + auto operator<=>( ComputePipelineCreateInfo const & ) const = default; +#else + bool operator==( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ( pMarkerName == rhs.pMarkerName ) || ( strcmp( pMarkerName, rhs.pMarkerName ) == 0 ) ) && - ( color == rhs.color ); +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( layout == rhs.layout ) && + ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex ); +# endif } - bool operator!=( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT; - const void * pNext = {}; - const char * pMarkerName = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D color = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; }; template <> - struct CppType + struct CppType { - using Type = DebugMarkerMarkerInfoEXT; + using Type = ComputePipelineCreateInfo; }; - struct DebugMarkerObjectNameInfoEXT + struct ComputePipelineIndirectBufferInfoNV { - using NativeType = VkDebugMarkerObjectNameInfoEXT; + using NativeType = VkComputePipelineIndirectBufferInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectNameInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineIndirectBufferInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - DebugMarkerObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, - uint64_t object_ = {}, - const char * pObjectName_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , objectType( objectType_ ) - , object( object_ ) - , pObjectName( pObjectName_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ComputePipelineIndirectBufferInfoNV( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress pipelineDeviceAddressCaptureReplay_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceAddress{ deviceAddress_ } + , size{ size_ } + , pipelineDeviceAddressCaptureReplay{ pipelineDeviceAddressCaptureReplay_ } { } - VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ComputePipelineIndirectBufferInfoNV( ComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : DebugMarkerObjectNameInfoEXT( *reinterpret_cast( &rhs ) ) + ComputePipelineIndirectBufferInfoNV( VkComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ComputePipelineIndirectBufferInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DebugMarkerObjectNameInfoEXT & operator=( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ComputePipelineIndirectBufferInfoNV & operator=( ComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DebugMarkerObjectNameInfoEXT & operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ComputePipelineIndirectBufferInfoNV & operator=( VkComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT { - objectType = objectType_; + deviceAddress = deviceAddress_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT { - object = object_; + size = size_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & + setPipelineDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::DeviceAddress pipelineDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT { - pObjectName = pObjectName_; + pipelineDeviceAddressCaptureReplay = pipelineDeviceAddressCaptureReplay_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDebugMarkerObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkComputePipelineIndirectBufferInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + 
return *reinterpret_cast( this ); } - operator VkDebugMarkerObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkComputePipelineIndirectBufferInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -16950,168 +19455,118 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::DeviceAddress const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::DeviceAddress const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, objectType, object, pObjectName ); + return std::tie( sType, pNext, deviceAddress, size, pipelineDeviceAddressCaptureReplay ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - std::strong_ordering operator<=>( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + auto operator<=>( ComputePipelineIndirectBufferInfoNV const & ) const = default; +#else + bool operator==( ComputePipelineIndirectBufferInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) - return cmp; - if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) - return cmp; - if ( auto cmp = objectType <=> rhs.objectType; cmp != 0 ) - return cmp; - if ( auto cmp = object <=> rhs.object; cmp != 0 ) - return cmp; - if ( pObjectName != rhs.pObjectName ) - if ( auto cmp = strcmp( pObjectName, rhs.pObjectName ); cmp != 0 ) - return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; - - return std::strong_ordering::equivalent; - } -#endif - - bool operator==( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( object == rhs.object ) && - ( ( pObjectName == rhs.pObjectName ) || ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) ); +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ) && ( size == rhs.size ) && + ( pipelineDeviceAddressCaptureReplay == rhs.pipelineDeviceAddressCaptureReplay ); +# endif } - bool operator!=( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ComputePipelineIndirectBufferInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; - uint64_t object = {}; - const char * pObjectName = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineIndirectBufferInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress pipelineDeviceAddressCaptureReplay = {}; }; template <> - struct CppType + struct CppType { - using Type = DebugMarkerObjectNameInfoEXT; + using Type = ComputePipelineIndirectBufferInfoNV; }; - struct DebugMarkerObjectTagInfoEXT + struct ConditionalRenderingBeginInfoEXT { - using NativeType = VkDebugMarkerObjectTagInfoEXT; + using NativeType = VkConditionalRenderingBeginInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectTagInfoEXT; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConditionalRenderingBeginInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, - uint64_t object_ = {}, - uint64_t tagName_ = {}, - size_t tagSize_ = {}, - const void * pTag_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , objectType( objectType_ ) - , object( object_ ) - , tagName( tagName_ ) - , tagSize( tagSize_ ) - , pTag( pTag_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , buffer{ buffer_ } + , offset{ offset_ } + , flags{ flags_ } { } - VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : DebugMarkerObjectTagInfoEXT( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - template - DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, - uint64_t object_, - uint64_t tagName_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof( T ) ), pTag( tag_.data() ) + ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ConditionalRenderingBeginInfoEXT( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DebugMarkerObjectTagInfoEXT & operator=( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ConditionalRenderingBeginInfoEXT & operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DebugMarkerObjectTagInfoEXT & operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ConditionalRenderingBeginInfoEXT & operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT - { - objectType = objectType_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT - { - object = object_; - return *this; - } - - 
VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT - { - tagName = tagName_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT { - tagSize = tagSize_; + buffer = buffer_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT { - pTag = pTag_; + offset = offset_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - template - DebugMarkerObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { - tagSize = tag_.size() * sizeof( T ); - pTag = tag_.data(); + flags = flags_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDebugMarkerObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkConditionalRenderingBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDebugMarkerObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -17120,592 +19575,659 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::Buffer const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, objectType, object, tagName, tagSize, pTag ); + return std::tie( sType, pNext, buffer, offset, flags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DebugMarkerObjectTagInfoEXT const & ) const = default; + auto operator<=>( ConditionalRenderingBeginInfoEXT const & ) const = default; #else - bool operator==( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( object == rhs.object ) && ( tagName == rhs.tagName ) && - ( tagSize == rhs.tagSize ) && ( pTag == rhs.pTag ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( flags == rhs.flags ); # endif } - bool operator!=( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; - uint64_t object = {}; - uint64_t tagName = {}; - 
size_t tagSize = {}; - const void * pTag = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {}; }; template <> - struct CppType + struct CppType { - using Type = DebugMarkerObjectTagInfoEXT; + using Type = ConditionalRenderingBeginInfoEXT; }; - struct DebugReportCallbackCreateInfoEXT + struct ConformanceVersion { - using NativeType = VkDebugReportCallbackCreateInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugReportCallbackCreateInfoEXT; + using NativeType = VkConformanceVersion; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {}, - PFN_vkDebugReportCallbackEXT pfnCallback_ = {}, - void * pUserData_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , pfnCallback( pfnCallback_ ) - , pUserData( pUserData_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ConformanceVersion( uint8_t major_ = {}, uint8_t minor_ = {}, uint8_t subminor_ = {}, uint8_t patch_ = {} ) VULKAN_HPP_NOEXCEPT + : major{ major_ } + , minor{ minor_ } + , subminor{ subminor_ } + , patch{ patch_ } { } - VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ConformanceVersion( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : DebugReportCallbackCreateInfoEXT( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT : ConformanceVersion( *reinterpret_cast( &rhs ) ) {} - DebugReportCallbackCreateInfoEXT & operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ConformanceVersion & operator=( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DebugReportCallbackCreateInfoEXT & operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ConformanceVersion & operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setMajor( uint8_t major_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + major = major_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setMinor( uint8_t minor_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + minor = minor_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
ConformanceVersion & setSubminor( uint8_t subminor_ ) VULKAN_HPP_NOEXCEPT { - pfnCallback = pfnCallback_; + subminor = subminor_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setPatch( uint8_t patch_ ) VULKAN_HPP_NOEXCEPT { - pUserData = pUserData_; + patch = patch_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDebugReportCallbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkConformanceVersion const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDebugReportCallbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, pfnCallback, pUserData ); + return std::tie( major, minor, subminor, patch ); } #endif - bool operator==( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ConformanceVersion const & ) const = default; +#else + bool operator==( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT { -#if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -#else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pfnCallback == rhs.pfnCallback ) && ( pUserData == rhs.pUserData ); -#endif +# else + return ( major == rhs.major ) && ( minor == rhs.minor ) && ( subminor == rhs.subminor ) && ( patch == rhs.patch ); +# endif } - bool operator!=( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags = {}; - PFN_vkDebugReportCallbackEXT pfnCallback = {}; - void * pUserData = {}; + uint8_t major = {}; + uint8_t minor = {}; + uint8_t subminor = {}; + uint8_t patch = {}; }; - template <> - struct CppType - { - using Type = DebugReportCallbackCreateInfoEXT; - }; + using ConformanceVersionKHR = ConformanceVersion; - struct DebugUtilsLabelEXT + struct ConvertCooperativeVectorMatrixInfoNV { - using NativeType = VkDebugUtilsLabelEXT; + using NativeType = VkConvertCooperativeVectorMatrixInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConvertCooperativeVectorMatrixInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 - DebugUtilsLabelEXT( const char * pLabelName_ = {}, std::array const & color_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pLabelName( pLabelName_ ) - , color( color_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV( + size_t srcSize_ = {}, + 
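
// === [Editor's note] Usage sketch — not part of the generated patch =========
// ConformanceVersion (aliased as ConformanceVersionKHR above) is reported by
// the driver, e.g. in VkPhysicalDeviceDriverProperties. A minimal query,
// assuming a valid `physicalDevice` on Vulkan 1.2+ (or VK_KHR_driver_properties).
#include <cstdio>
#include <vulkan/vulkan.h>

void printConformanceVersion( VkPhysicalDevice physicalDevice )
{
  VkPhysicalDeviceDriverProperties driverProps{};
  driverProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES;

  VkPhysicalDeviceProperties2 props2{};
  props2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
  props2.pNext = &driverProps;  // chain the driver-properties struct

  vkGetPhysicalDeviceProperties2( physicalDevice, &props2 );

  const VkConformanceVersion & cv = driverProps.conformanceVersion;
  std::printf( "CTS conformance version %u.%u.%u.%u\n",
               unsigned( cv.major ), unsigned( cv.minor ), unsigned( cv.subminor ), unsigned( cv.patch ) );
}
// =============================================================================
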
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR srcData_ = {}, + size_t * pDstSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dstData_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR srcComponentType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR dstComponentType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + uint32_t numRows_ = {}, + uint32_t numColumns_ = {}, + VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV srcLayout_ = VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV::eRowMajor, + size_t srcStride_ = {}, + VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV dstLayout_ = VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV::eRowMajor, + size_t dstStride_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcSize{ srcSize_ } + , srcData{ srcData_ } + , pDstSize{ pDstSize_ } + , dstData{ dstData_ } + , srcComponentType{ srcComponentType_ } + , dstComponentType{ dstComponentType_ } + , numRows{ numRows_ } + , numColumns{ numColumns_ } + , srcLayout{ srcLayout_ } + , srcStride{ srcStride_ } + , dstLayout{ dstLayout_ } + , dstStride{ dstStride_ } { } - VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV( ConvertCooperativeVectorMatrixInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DebugUtilsLabelEXT( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + ConvertCooperativeVectorMatrixInfoNV( VkConvertCooperativeVectorMatrixInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ConvertCooperativeVectorMatrixInfoNV( *reinterpret_cast( &rhs ) ) + { + } - DebugUtilsLabelEXT & operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ConvertCooperativeVectorMatrixInfoNV & operator=( ConvertCooperativeVectorMatrixInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DebugUtilsLabelEXT & operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ConvertCooperativeVectorMatrixInfoNV & operator=( VkConvertCooperativeVectorMatrixInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setPLabelName( const char * pLabelName_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setSrcSize( size_t srcSize_ ) VULKAN_HPP_NOEXCEPT { - pLabelName = pLabelName_; + srcSize = srcSize_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setColor( std::array color_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & + setSrcData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & srcData_ ) VULKAN_HPP_NOEXCEPT { - color = color_; + srcData = srcData_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDebugUtilsLabelEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & 
setPDstSize( size_t * pDstSize_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pDstSize = pDstSize_; + return *this; } - operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & + setDstData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dstData_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + dstData = dstData_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple const &> -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & + setSrcComponentType( VULKAN_HPP_NAMESPACE::ComponentTypeKHR srcComponentType_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pLabelName, color ); + srcComponentType = srcComponentType_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - std::partial_ordering operator<=>( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & + setDstComponentType( VULKAN_HPP_NAMESPACE::ComponentTypeKHR dstComponentType_ ) VULKAN_HPP_NOEXCEPT { - if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) - return cmp; - if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) - return cmp; - if ( pLabelName != rhs.pLabelName ) - if ( auto cmp = strcmp( pLabelName, rhs.pLabelName ); cmp != 0 ) - return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater; - if ( auto cmp = color <=> rhs.color; cmp != 0 ) - return cmp; - - return std::partial_ordering::equivalent; + dstComponentType = dstComponentType_; + return *this; } -#endif - bool operator==( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setNumRows( uint32_t numRows_ ) VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ( pLabelName == rhs.pLabelName ) || ( strcmp( pLabelName, rhs.pLabelName ) == 0 ) ) && - ( color == rhs.color ); + numRows = numRows_; + return *this; } - bool operator!=( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setNumColumns( uint32_t numColumns_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + numColumns = numColumns_; + return *this; } - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT; - const void * pNext = {}; - const char * pLabelName = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D color = {}; - }; - - template <> - struct CppType - { - using Type = DebugUtilsLabelEXT; - }; - - struct DebugUtilsObjectNameInfoEXT - { - using NativeType = VkDebugUtilsObjectNameInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectNameInfoEXT; + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & + setSrcLayout( VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV srcLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcLayout = srcLayout_; + return *this; + } -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, - uint64_t objectHandle_ = {}, - const char * pObjectName_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , objectType( objectType_ ) - , objectHandle( objectHandle_ ) - , pObjectName( 
pObjectName_ ) + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setSrcStride( size_t srcStride_ ) VULKAN_HPP_NOEXCEPT { + srcStride = srcStride_; + return *this; } - VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & + setDstLayout( VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV dstLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstLayout = dstLayout_; + return *this; + } - DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : DebugUtilsObjectNameInfoEXT( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setDstStride( size_t dstStride_ ) VULKAN_HPP_NOEXCEPT { + dstStride = dstStride_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - DebugUtilsObjectNameInfoEXT & operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + operator VkConvertCooperativeVectorMatrixInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } - DebugUtilsObjectNameInfoEXT & operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkConvertCooperativeVectorMatrixInfoNV &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); - return *this; + return *reinterpret_cast( this ); } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return std::tie( sType, + pNext, + srcSize, + srcData, + pDstSize, + dstData, + srcComponentType, + dstComponentType, + numRows, + numColumns, + srcLayout, + srcStride, + dstLayout, + dstStride ); } +#endif - VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConvertCooperativeVectorMatrixInfoNV; + const void * pNext = {}; + size_t srcSize = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR srcData = {}; + size_t * pDstSize = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dstData = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR srcComponentType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR dstComponentType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + uint32_t numRows = {}; + uint32_t numColumns = {}; + VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV srcLayout = VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV::eRowMajor; + size_t srcStride = {}; + VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV dstLayout = VULKAN_HPP_NAMESPACE::CooperativeVectorMatrixLayoutNV::eRowMajor; + size_t dstStride = {}; + }; + + template <> + struct CppType + { + using Type = ConvertCooperativeVectorMatrixInfoNV; + }; + + struct CooperativeMatrixFlexibleDimensionsPropertiesNV + { + using NativeType = VkCooperativeMatrixFlexibleDimensionsPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
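
// === [Editor's note] Usage sketch — not part of the generated patch =========
// ConvertCooperativeVectorMatrixInfoNV describes a matrix re-layout for
// VK_NV_cooperative_vector; it is consumed by that extension's conversion
// commands (not part of this hunk). The sketch below only demonstrates the
// generated setter chaining; the sizes, strides, component types and the
// `srcPtr`/`dstPtr`/`pDstSize` arguments are placeholder assumptions, and a
// real caller would first query the required destination size.
#include <cstdint>
#include <vulkan/vulkan.hpp>

vk::ConvertCooperativeVectorMatrixInfoNV
  makeConvertInfo( const void * srcPtr, size_t srcSize, void * dstPtr, size_t * pDstSize )
{
  vk::ConvertCooperativeVectorMatrixInfoNV info{};
  info.setSrcSize( srcSize )
    .setSrcData( vk::DeviceOrHostAddressConstKHR{}.setHostAddress( srcPtr ) )
    .setPDstSize( pDstSize )
    .setDstData( vk::DeviceOrHostAddressKHR{}.setHostAddress( dstPtr ) )
    .setSrcComponentType( vk::ComponentTypeKHR::eFloat32 )
    .setDstComponentType( vk::ComponentTypeKHR::eFloat16 )
    .setNumRows( 16 )       // placeholder shape
    .setNumColumns( 64 )
    .setSrcLayout( vk::CooperativeVectorMatrixLayoutNV::eRowMajor )
    .setSrcStride( 64 * sizeof( float ) )
    .setDstLayout( vk::CooperativeVectorMatrixLayoutNV::eRowMajor )
    .setDstStride( 64 * sizeof( uint16_t ) );
  return info;
}
// =============================================================================
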
VULKAN_HPP_CONSTEXPR + CooperativeMatrixFlexibleDimensionsPropertiesNV( uint32_t MGranularity_ = {}, + uint32_t NGranularity_ = {}, + uint32_t KGranularity_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR ResultType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::Bool32 saturatingAccumulation_ = {}, + VULKAN_HPP_NAMESPACE::ScopeKHR scope_ = VULKAN_HPP_NAMESPACE::ScopeKHR::eDevice, + uint32_t workgroupInvocations_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , MGranularity{ MGranularity_ } + , NGranularity{ NGranularity_ } + , KGranularity{ KGranularity_ } + , AType{ AType_ } + , BType{ BType_ } + , CType{ CType_ } + , ResultType{ ResultType_ } + , saturatingAccumulation{ saturatingAccumulation_ } + , scope{ scope_ } + , workgroupInvocations{ workgroupInvocations_ } { - objectType = objectType_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + CooperativeMatrixFlexibleDimensionsPropertiesNV( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CooperativeMatrixFlexibleDimensionsPropertiesNV( VkCooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CooperativeMatrixFlexibleDimensionsPropertiesNV( *reinterpret_cast( &rhs ) ) { - objectHandle = objectHandle_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT + CooperativeMatrixFlexibleDimensionsPropertiesNV & operator=( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CooperativeMatrixFlexibleDimensionsPropertiesNV & operator=( VkCooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - pObjectName = pObjectName_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDebugUtilsObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkCooperativeMatrixFlexibleDimensionsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkCooperativeMatrixFlexibleDimensionsPropertiesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std:: - tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, objectType, objectHandle, pObjectName ); + return std::tie( + sType, pNext, MGranularity, NGranularity, KGranularity, AType, BType, CType, ResultType, saturatingAccumulation, scope, workgroupInvocations ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - std::strong_ordering operator<=>( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) - return cmp; - if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) - return cmp; - if ( auto cmp = objectType <=> rhs.objectType; cmp != 0 
) - return cmp; - if ( auto cmp = objectHandle <=> rhs.objectHandle; cmp != 0 ) - return cmp; - if ( pObjectName != rhs.pObjectName ) - if ( auto cmp = strcmp( pObjectName, rhs.pObjectName ); cmp != 0 ) - return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; - - return std::strong_ordering::equivalent; - } -#endif - - bool operator==( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + auto operator<=>( CooperativeMatrixFlexibleDimensionsPropertiesNV const & ) const = default; +#else + bool operator==( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) && - ( ( pObjectName == rhs.pObjectName ) || ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) ); +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MGranularity == rhs.MGranularity ) && ( NGranularity == rhs.NGranularity ) && + ( KGranularity == rhs.KGranularity ) && ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) && + ( ResultType == rhs.ResultType ) && ( saturatingAccumulation == rhs.saturatingAccumulation ) && ( scope == rhs.scope ) && + ( workgroupInvocations == rhs.workgroupInvocations ); +# endif } - bool operator!=( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; - uint64_t objectHandle = {}; - const char * pObjectName = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV; + void * pNext = {}; + uint32_t MGranularity = {}; + uint32_t NGranularity = {}; + uint32_t KGranularity = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR AType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR BType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR CType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR ResultType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::Bool32 saturatingAccumulation = {}; + VULKAN_HPP_NAMESPACE::ScopeKHR scope = VULKAN_HPP_NAMESPACE::ScopeKHR::eDevice; + uint32_t workgroupInvocations = {}; }; template <> - struct CppType + struct CppType { - using Type = DebugUtilsObjectNameInfoEXT; + using Type = CooperativeMatrixFlexibleDimensionsPropertiesNV; }; - struct DebugUtilsMessengerCallbackDataEXT + struct CooperativeMatrixPropertiesKHR { - using NativeType = VkDebugUtilsMessengerCallbackDataEXT; + using NativeType = VkCooperativeMatrixPropertiesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCallbackDataEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT 
flags_ = {}, - const char * pMessageIdName_ = {}, - int32_t messageIdNumber_ = {}, - const char * pMessage_ = {}, - uint32_t queueLabelCount_ = {}, - const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ = {}, - uint32_t cmdBufLabelCount_ = {}, - const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ = {}, - uint32_t objectCount_ = {}, - const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , pMessageIdName( pMessageIdName_ ) - , messageIdNumber( messageIdNumber_ ) - , pMessage( pMessage_ ) - , queueLabelCount( queueLabelCount_ ) - , pQueueLabels( pQueueLabels_ ) - , cmdBufLabelCount( cmdBufLabelCount_ ) - , pCmdBufLabels( pCmdBufLabels_ ) - , objectCount( objectCount_ ) - , pObjects( pObjects_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesKHR( uint32_t MSize_ = {}, + uint32_t NSize_ = {}, + uint32_t KSize_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR ResultType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::Bool32 saturatingAccumulation_ = {}, + VULKAN_HPP_NAMESPACE::ScopeKHR scope_ = VULKAN_HPP_NAMESPACE::ScopeKHR::eDevice, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , MSize{ MSize_ } + , NSize{ NSize_ } + , KSize{ KSize_ } + , AType{ AType_ } + , BType{ BType_ } + , CType{ CType_ } + , ResultType{ ResultType_ } + , saturatingAccumulation{ saturatingAccumulation_ } + , scope{ scope_ } { } - VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : DebugUtilsMessengerCallbackDataEXT( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesKHR( CooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DebugUtilsMessengerCallbackDataEXT( - VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_, - const char * pMessageIdName_, - int32_t messageIdNumber_, - const char * pMessage_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueLabels_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & cmdBufLabels_ = {}, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & objects_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , pMessageIdName( pMessageIdName_ ) - , messageIdNumber( messageIdNumber_ ) - , pMessage( pMessage_ ) - , queueLabelCount( static_cast( queueLabels_.size() ) ) - , pQueueLabels( queueLabels_.data() ) - , cmdBufLabelCount( static_cast( cmdBufLabels_.size() ) ) - , pCmdBufLabels( cmdBufLabels_.data() ) - , objectCount( static_cast( objects_.size() ) ) - , pObjects( objects_.data() ) + CooperativeMatrixPropertiesKHR( VkCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CooperativeMatrixPropertiesKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - 
DebugUtilsMessengerCallbackDataEXT & operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CooperativeMatrixPropertiesKHR & operator=( CooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DebugUtilsMessengerCallbackDataEXT & operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + CooperativeMatrixPropertiesKHR & operator=( VkCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + operator VkCooperativeMatrixPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & - setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + operator VkCooperativeMatrixPropertiesKHR &() VULKAN_HPP_NOEXCEPT { - flags = flags_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char * pMessageIdName_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - pMessageIdName = pMessageIdName_; - return *this; + return std::tie( sType, pNext, MSize, NSize, KSize, AType, BType, CType, ResultType, saturatingAccumulation, scope ); } +#endif - VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setMessageIdNumber( int32_t messageIdNumber_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CooperativeMatrixPropertiesKHR const & ) const = default; +#else + bool operator==( CooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - messageIdNumber = messageIdNumber_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MSize == rhs.MSize ) && ( NSize == rhs.NSize ) && ( KSize == rhs.KSize ) && + ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) && ( ResultType == rhs.ResultType ) && + ( saturatingAccumulation == rhs.saturatingAccumulation ) && ( scope == rhs.scope ); +# endif } - VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPMessage( const char * pMessage_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( CooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - pMessage = pMessage_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setQueueLabelCount( uint32_t queueLabelCount_ ) VULKAN_HPP_NOEXCEPT - { - queueLabelCount = queueLabelCount_; - return *this; - } + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesKHR; + void * pNext = {}; + uint32_t MSize = {}; + uint32_t NSize = {}; + uint32_t KSize = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR AType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR BType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR CType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + 
VULKAN_HPP_NAMESPACE::ComponentTypeKHR ResultType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::Bool32 saturatingAccumulation = {}; + VULKAN_HPP_NAMESPACE::ScopeKHR scope = VULKAN_HPP_NAMESPACE::ScopeKHR::eDevice; + }; - VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & - setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ ) VULKAN_HPP_NOEXCEPT - { - pQueueLabels = pQueueLabels_; - return *this; - } + template <> + struct CppType + { + using Type = CooperativeMatrixPropertiesKHR; + }; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DebugUtilsMessengerCallbackDataEXT & - setQueueLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueLabels_ ) VULKAN_HPP_NOEXCEPT - { - queueLabelCount = static_cast( queueLabels_.size() ); - pQueueLabels = queueLabels_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + struct CooperativeMatrixPropertiesNV + { + using NativeType = VkCooperativeMatrixPropertiesNV; - VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) VULKAN_HPP_NOEXCEPT - { - cmdBufLabelCount = cmdBufLabelCount_; - return *this; - } + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesNV; - VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & - setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( uint32_t MSize_ = {}, + uint32_t NSize_ = {}, + uint32_t KSize_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = {}, + VULKAN_HPP_NAMESPACE::ScopeNV scope_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , MSize{ MSize_ } + , NSize{ NSize_ } + , KSize{ KSize_ } + , AType{ AType_ } + , BType{ BType_ } + , CType{ CType_ } + , DType{ DType_ } + , scope{ scope_ } { - pCmdBufLabels = pCmdBufLabels_; - return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DebugUtilsMessengerCallbackDataEXT & - setCmdBufLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & cmdBufLabels_ ) VULKAN_HPP_NOEXCEPT - { - cmdBufLabelCount = static_cast( cmdBufLabels_.size() ); - pCmdBufLabels = cmdBufLabels_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT + CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CooperativeMatrixPropertiesNV( *reinterpret_cast( &rhs ) ) { - objectCount = objectCount_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & - setPObjects( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ ) VULKAN_HPP_NOEXCEPT - { - pObjects = pObjects_; - return *this; - } + CooperativeMatrixPropertiesNV & operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE 
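
// === [Editor's note] Usage sketch — not part of the generated patch =========
// CooperativeMatrixPropertiesKHR entries are enumerated per physical device
// through vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR
// (VK_KHR_cooperative_matrix) with the usual count/fill two-call idiom.
// `instance` and `physicalDevice` are placeholders, and the sketch assumes the
// extension is supported so the fetched entry point is non-null.
#include <vector>
#include <vulkan/vulkan.h>

std::vector<VkCooperativeMatrixPropertiesKHR>
  enumerateCooperativeMatrixShapes( VkInstance instance, VkPhysicalDevice physicalDevice )
{
  auto fp = reinterpret_cast<PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR>(
    vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR" ) );
  if ( !fp )
    return {};

  uint32_t count = 0;
  fp( physicalDevice, &count, nullptr );

  // These are out-structures carrying sType/pNext, so initialize them first.
  std::vector<VkCooperativeMatrixPropertiesKHR> props( count );
  for ( auto & p : props )
    p.sType = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_KHR;
  fp( physicalDevice, &count, props.data() );
  return props;
}
// =============================================================================
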
) - DebugUtilsMessengerCallbackDataEXT & - setObjects( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & objects_ ) VULKAN_HPP_NOEXCEPT + CooperativeMatrixPropertiesNV & operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - objectCount = static_cast( objects_.size() ); - pObjects = objects_.data(); + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDebugUtilsMessengerCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT + operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -17713,181 +20235,157 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + VULKAN_HPP_NAMESPACE::ComponentTypeNV const &, + VULKAN_HPP_NAMESPACE::ComponentTypeNV const &, + VULKAN_HPP_NAMESPACE::ComponentTypeNV const &, + VULKAN_HPP_NAMESPACE::ComponentTypeNV const &, + VULKAN_HPP_NAMESPACE::ScopeNV const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( - sType, pNext, flags, pMessageIdName, messageIdNumber, pMessage, queueLabelCount, pQueueLabels, cmdBufLabelCount, pCmdBufLabels, objectCount, pObjects ); + return std::tie( sType, pNext, MSize, NSize, KSize, AType, BType, CType, DType, scope ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - std::strong_ordering operator<=>( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) - return cmp; - if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) - return cmp; - if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) - return cmp; - if ( pMessageIdName != rhs.pMessageIdName ) - if ( auto cmp = strcmp( pMessageIdName, rhs.pMessageIdName ); cmp != 0 ) - return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; - if ( auto cmp = messageIdNumber <=> rhs.messageIdNumber; cmp != 0 ) - return cmp; - if ( pMessage != rhs.pMessage ) - if ( auto cmp = strcmp( pMessage, rhs.pMessage ); cmp != 0 ) - return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; - if ( auto cmp = queueLabelCount <=> rhs.queueLabelCount; cmp != 0 ) - return cmp; - if ( auto cmp = pQueueLabels <=> rhs.pQueueLabels; cmp != 0 ) - return cmp; - if ( auto cmp = cmdBufLabelCount <=> rhs.cmdBufLabelCount; cmp != 0 ) - return cmp; - if ( auto cmp = pCmdBufLabels <=> rhs.pCmdBufLabels; cmp != 0 ) - return cmp; - if ( auto cmp = objectCount <=> rhs.objectCount; cmp != 0 ) - return cmp; - if ( auto cmp = pObjects <=> rhs.pObjects; cmp != 0 ) - return cmp; - - return std::strong_ordering::equivalent; - } -#endif - - bool operator==( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + auto operator<=>( CooperativeMatrixPropertiesNV const & ) const = default; +#else + bool operator==( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && - ( ( pMessageIdName == rhs.pMessageIdName ) || ( strcmp( pMessageIdName, rhs.pMessageIdName ) == 0 ) ) && - ( messageIdNumber == rhs.messageIdNumber ) && ( ( pMessage == rhs.pMessage ) || ( strcmp( pMessage, rhs.pMessage ) == 0 ) ) && - ( queueLabelCount == rhs.queueLabelCount ) && ( pQueueLabels == rhs.pQueueLabels ) && ( cmdBufLabelCount == rhs.cmdBufLabelCount ) && - ( pCmdBufLabels == rhs.pCmdBufLabels ) && ( objectCount == rhs.objectCount ) && ( pObjects == rhs.pObjects ); +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MSize == rhs.MSize ) && ( NSize == rhs.NSize ) && ( KSize == rhs.KSize ) && + ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) && ( DType == rhs.DType ) && ( scope == rhs.scope ); +# endif } - bool operator!=( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags = {}; - const char * pMessageIdName = {}; - int32_t messageIdNumber = {}; - const char * pMessage = {}; - uint32_t queueLabelCount = {}; - const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels = {}; - uint32_t cmdBufLabelCount = {}; - const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels = {}; - uint32_t objectCount = {}; - const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV; + void * pNext = {}; + uint32_t MSize = {}; + uint32_t NSize = {}; + uint32_t KSize = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = {}; + VULKAN_HPP_NAMESPACE::ScopeNV scope = {}; }; template <> - struct CppType + struct CppType { - using Type = DebugUtilsMessengerCallbackDataEXT; + using Type = CooperativeMatrixPropertiesNV; }; - struct DebugUtilsMessengerCreateInfoEXT + struct CooperativeVectorPropertiesNV { - using NativeType = VkDebugUtilsMessengerCreateInfoEXT; + using NativeType = VkCooperativeVectorPropertiesNV; - static const bool allowDuplicate = true; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eDebugUtilsMessengerCreateInfoEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeVectorPropertiesNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {}, - VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {}, - PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, - void * pUserData_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , messageSeverity( messageSeverity_ ) - , messageType( messageType_ ) - , pfnUserCallback( pfnUserCallback_ ) - , pUserData( pUserData_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CooperativeVectorPropertiesNV( VULKAN_HPP_NAMESPACE::ComponentTypeKHR inputType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR inputInterpretation_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR matrixInterpretation_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR biasInterpretation_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR resultType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::Bool32 transpose_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , inputType{ inputType_ } + , inputInterpretation{ inputInterpretation_ } + , matrixInterpretation{ matrixInterpretation_ } + , biasInterpretation{ biasInterpretation_ } + , resultType{ resultType_ } + , transpose{ transpose_ } { } - VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR CooperativeVectorPropertiesNV( CooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : DebugUtilsMessengerCreateInfoEXT( *reinterpret_cast( &rhs ) ) + CooperativeVectorPropertiesNV( VkCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CooperativeVectorPropertiesNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DebugUtilsMessengerCreateInfoEXT & operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CooperativeVectorPropertiesNV & operator=( CooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DebugUtilsMessengerCreateInfoEXT & operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + CooperativeVectorPropertiesNV & operator=( VkCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - 
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setInputType( VULKAN_HPP_NAMESPACE::ComponentTypeKHR inputType_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + inputType = inputType_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & - setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & + setInputInterpretation( VULKAN_HPP_NAMESPACE::ComponentTypeKHR inputInterpretation_ ) VULKAN_HPP_NOEXCEPT { - messageSeverity = messageSeverity_; + inputInterpretation = inputInterpretation_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & - setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & + setMatrixInterpretation( VULKAN_HPP_NAMESPACE::ComponentTypeKHR matrixInterpretation_ ) VULKAN_HPP_NOEXCEPT { - messageType = messageType_; + matrixInterpretation = matrixInterpretation_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & + setBiasInterpretation( VULKAN_HPP_NAMESPACE::ComponentTypeKHR biasInterpretation_ ) VULKAN_HPP_NOEXCEPT { - pfnUserCallback = pfnUserCallback_; + biasInterpretation = biasInterpretation_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setResultType( VULKAN_HPP_NAMESPACE::ComponentTypeKHR resultType_ ) VULKAN_HPP_NOEXCEPT { - pUserData = pUserData_; + resultType = resultType_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDebugUtilsMessengerCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setTranspose( VULKAN_HPP_NAMESPACE::Bool32 transpose_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + transpose = transpose_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkCooperativeVectorPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkCooperativeVectorPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -17895,162 +20393,127 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + void * const &, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR const &, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR const &, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR const &, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR const &, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, messageSeverity, messageType, pfnUserCallback, pUserData ); + return std::tie( sType, pNext, inputType, inputInterpretation, matrixInterpretation, biasInterpretation, resultType, transpose ); } #endif - bool operator==( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CooperativeVectorPropertiesNV const & ) const = default; +#else + bool operator==( CooperativeVectorPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { -#if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -#else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( messageSeverity == rhs.messageSeverity ) && - ( messageType == rhs.messageType ) && ( pfnUserCallback == rhs.pfnUserCallback ) && ( pUserData == rhs.pUserData ); -#endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inputType == rhs.inputType ) && ( inputInterpretation == rhs.inputInterpretation ) && + ( matrixInterpretation == rhs.matrixInterpretation ) && ( biasInterpretation == rhs.biasInterpretation ) && ( resultType == rhs.resultType ) && + ( transpose == rhs.transpose ); +# endif } - bool operator!=( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CooperativeVectorPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {}; - VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity = {}; - VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {}; - PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback = {}; - void * pUserData = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeVectorPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR inputType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR inputInterpretation = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR matrixInterpretation = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR biasInterpretation = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR resultType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::Bool32 transpose = {}; }; template <> - struct CppType + struct CppType { - using Type = DebugUtilsMessengerCreateInfoEXT; + using Type = CooperativeVectorPropertiesNV; }; - struct DebugUtilsObjectTagInfoEXT + struct CopyAccelerationStructureInfoKHR { - using NativeType = VkDebugUtilsObjectTagInfoEXT; + using NativeType = VkCopyAccelerationStructureInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectTagInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, - uint64_t objectHandle_ = {}, - uint64_t tagName_ = {}, - size_t tagSize_ = {}, - const void * pTag_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , objectType( objectType_ ) - , objectHandle( objectHandle_ ) - , tagName( tagName_ ) - , tagSize( tagSize_ ) - , pTag( pTag_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
CopyAccelerationStructureInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , src{ src_ } + , dst{ dst_ } + , mode{ mode_ } { } - VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : DebugUtilsObjectTagInfoEXT( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - template - DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, - uint64_t objectHandle_, - uint64_t tagName_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , objectType( objectType_ ) - , objectHandle( objectHandle_ ) - , tagName( tagName_ ) - , tagSize( tag_.size() * sizeof( T ) ) - , pTag( tag_.data() ) + CopyAccelerationStructureInfoKHR( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyAccelerationStructureInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DebugUtilsObjectTagInfoEXT & operator=( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CopyAccelerationStructureInfoKHR & operator=( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DebugUtilsObjectTagInfoEXT & operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + CopyAccelerationStructureInfoKHR & operator=( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT - { - objectType = objectType_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT - { - objectHandle = objectHandle_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT - { - tagName = tagName_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT { - tagSize = tagSize_; + src = src_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setDst( 
VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT { - pTag = pTag_; + dst = dst_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - template - DebugUtilsObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT { - tagSize = tag_.size() * sizeof( T ); - pTag = tag_.data(); + mode = mode_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDebugUtilsObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkCopyAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkCopyAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -18059,429 +20522,410 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const &, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, objectType, objectHandle, tagName, tagSize, pTag ); + return std::tie( sType, pNext, src, dst, mode ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DebugUtilsObjectTagInfoEXT const & ) const = default; + auto operator<=>( CopyAccelerationStructureInfoKHR const & ) const = default; #else - bool operator==( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) && - ( tagName == rhs.tagName ) && ( tagSize == rhs.tagSize ) && ( pTag == rhs.pTag ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( src == rhs.src ) && ( dst == rhs.dst ) && ( mode == rhs.mode ); # endif } - bool operator!=( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; - uint64_t objectHandle = {}; - uint64_t tagName = {}; - size_t tagSize = {}; - const void * pTag = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; }; template <> - struct CppType + struct CppType { - using Type = DebugUtilsObjectTagInfoEXT; + using Type = CopyAccelerationStructureInfoKHR; }; - struct DedicatedAllocationBufferCreateInfoNV + struct 
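
// === [Editor's note] Usage sketch — not part of the generated patch =========
// CopyAccelerationStructureInfoKHR feeds vkCmdCopyAccelerationStructureKHR
// (VK_KHR_acceleration_structure), e.g. to compact a freshly built
// acceleration structure into a smaller one. `device`, `cmd`, `src` and `dst`
// are placeholders from a device with the extension enabled; `dst` is assumed
// to have been created with the compacted size obtained beforehand.
#include <vulkan/vulkan.h>

void recordCompactionCopy( VkDevice device, VkCommandBuffer cmd,
                           VkAccelerationStructureKHR src, VkAccelerationStructureKHR dst )
{
  auto copyAS = reinterpret_cast<PFN_vkCmdCopyAccelerationStructureKHR>(
    vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) );

  VkCopyAccelerationStructureInfoKHR info{};
  info.sType = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR;
  info.src   = src;
  info.dst   = dst;
  info.mode  = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR;

  copyAS( cmd, &info );
}
// =============================================================================
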
CopyAccelerationStructureToMemoryInfoKHR { - using NativeType = VkDedicatedAllocationBufferCreateInfoNV; + using NativeType = VkCopyAccelerationStructureToMemoryInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationBufferCreateInfoNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , dedicatedAllocation( dedicatedAllocation_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR( + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , src{ src_ } + , dst{ dst_ } + , mode{ mode_ } { } - VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : DedicatedAllocationBufferCreateInfoNV( *reinterpret_cast( &rhs ) ) + CopyAccelerationStructureToMemoryInfoKHR( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyAccelerationStructureToMemoryInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DedicatedAllocationBufferCreateInfoNV & operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CopyAccelerationStructureToMemoryInfoKHR & operator=( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DedicatedAllocationBufferCreateInfoNV & operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + CopyAccelerationStructureToMemoryInfoKHR & operator=( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & - setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT { - dedicatedAllocation = dedicatedAllocation_; + src = src_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator 
VkDedicatedAllocationBufferCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + dst = dst_; + return *this; } - operator VkDedicatedAllocationBufferCreateInfoNV &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + mode = mode_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + operator VkCopyAccelerationStructureToMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, dedicatedAllocation ); + return *reinterpret_cast( this ); } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DedicatedAllocationBufferCreateInfoNV const & ) const = default; -#else - bool operator==( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + operator VkCopyAccelerationStructureToMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation ); -# endif + return *reinterpret_cast( this ); } - bool operator!=( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + return std::tie( sType, pNext, src, dst, mode ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; }; template <> - struct CppType + struct CppType { - using Type = DedicatedAllocationBufferCreateInfoNV; + using Type = CopyAccelerationStructureToMemoryInfoKHR; }; - struct DedicatedAllocationImageCreateInfoNV + struct CopyBufferInfo2 { - using NativeType = VkDedicatedAllocationImageCreateInfoNV; + using NativeType = VkCopyBufferInfo2; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationImageCreateInfoNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferInfo2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , dedicatedAllocation( dedicatedAllocation_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyBufferInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, + 
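// --- Illustrative usage sketch (not part of the generated header or this patch) ---
// vk::CopyAccelerationStructureToMemoryInfoKHR, defined just above, serializes an
// acceleration structure to a caller-provided address. The names and the assumption
// that dstAddress points at a sufficiently large buffer are placeholders; the command
// is part of VK_KHR_acceleration_structure.
#include <vulkan/vulkan.hpp>

void recordSerialize( vk::CommandBuffer cmd, vk::AccelerationStructureKHR src, vk::DeviceAddress dstAddress )
{
  vk::DeviceOrHostAddressKHR dst{};
  dst.deviceAddress = dstAddress;  // device-side destination; hostAddress is the host-side alternative
  vk::CopyAccelerationStructureToMemoryInfoKHR info{};
  info.setSrc( src ).setDst( dst ).setMode( vk::CopyAccelerationStructureModeKHR::eSerialize );
  cmd.copyAccelerationStructureToMemoryKHR( info );
}
// --- end sketch ---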
VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcBuffer{ srcBuffer_ } + , dstBuffer{ dstBuffer_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } { } - VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR CopyBufferInfo2( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : DedicatedAllocationImageCreateInfoNV( *reinterpret_cast( &rhs ) ) + CopyBufferInfo2( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : CopyBufferInfo2( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), srcBuffer( srcBuffer_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - DedicatedAllocationImageCreateInfoNV & operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CopyBufferInfo2 & operator=( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DedicatedAllocationImageCreateInfoNV & operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + CopyBufferInfo2 & operator=( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & - setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT { - dedicatedAllocation = dedicatedAllocation_; + srcBuffer = srcBuffer_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDedicatedAllocationImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + dstBuffer = dstBuffer_; + return *this; } - operator VkDedicatedAllocationImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + regionCount = regionCount_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions_ ) 
VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, dedicatedAllocation ); + pRegions = pRegions_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DedicatedAllocationImageCreateInfoNV const & ) const = default; -#else - bool operator==( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation ); -# endif + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - bool operator!=( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {}; - }; - - template <> - struct CppType - { - using Type = DedicatedAllocationImageCreateInfoNV; - }; - - struct DedicatedAllocationMemoryAllocateInfoNV - { - using NativeType = VkDedicatedAllocationMemoryAllocateInfoNV; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::Image image_ = {}, - VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , image( image_ ) - , buffer( buffer_ ) - { - } - - VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : DedicatedAllocationMemoryAllocateInfoNV( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - DedicatedAllocationMemoryAllocateInfoNV & operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - DedicatedAllocationMemoryAllocateInfoNV & operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT - { - image = image_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT - { - buffer = buffer_; - return *this; - } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkDedicatedAllocationMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT + operator VkCopyBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this 
); } - operator VkDedicatedAllocationMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT + operator VkCopyBufferInfo2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, image, buffer ); + return std::tie( sType, pNext, srcBuffer, dstBuffer, regionCount, pRegions ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DedicatedAllocationMemoryAllocateInfoNV const & ) const = default; + auto operator<=>( CopyBufferInfo2 const & ) const = default; #else - bool operator==( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CopyBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) && ( dstBuffer == rhs.dstBuffer ) && + ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); # endif } - bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CopyBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; + VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {}; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions = {}; }; template <> - struct CppType + struct CppType { - using Type = DedicatedAllocationMemoryAllocateInfoNV; + using Type = CopyBufferInfo2; }; - struct MemoryBarrier2 + using CopyBufferInfo2KHR = CopyBufferInfo2; + + struct CopyBufferToImageInfo2 { - using NativeType = VkMemoryBarrier2; + using NativeType = VkCopyBufferToImageInfo2; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier2; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferToImageInfo2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MemoryBarrier2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcStageMask( srcStageMask_ ) - , srcAccessMask( srcAccessMask_ ) - , dstStageMask( dstStageMask_ ) - , dstAccessMask( dstAccessMask_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const 
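// --- Illustrative usage sketch (not part of the generated header or this patch) ---
// Uses the ArrayProxyNoTemporaries constructor of vk::CopyBufferInfo2 shown above to
// record a two-region buffer copy via the core Vulkan 1.3 entry point. The function
// name, offsets, and sizes are example values only.
#include <array>
#include <vulkan/vulkan.hpp>

void recordBufferCopies( vk::CommandBuffer cmd, vk::Buffer src, vk::Buffer dst )
{
  std::array<vk::BufferCopy2, 2> regions = {
    vk::BufferCopy2( 0, 0, 256 ),      // srcOffset, dstOffset, size
    vk::BufferCopy2( 1024, 512, 128 )
  };
  vk::CopyBufferInfo2 info( src, dst, regions );
  cmd.copyBuffer2( info );  // vkCmdCopyBuffer2
}
// --- end sketch ---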
VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcBuffer{ srcBuffer_ } + , dstImage{ dstImage_ } + , dstImageLayout{ dstImageLayout_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } { } - VULKAN_HPP_CONSTEXPR MemoryBarrier2( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2( CopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - MemoryBarrier2( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryBarrier2( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + CopyBufferToImageInfo2( VkCopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyBufferToImageInfo2( *reinterpret_cast( &rhs ) ) + { + } - MemoryBarrier2 & operator=( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferToImageInfo2( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , srcBuffer( srcBuffer_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - MemoryBarrier2 & operator=( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT + CopyBufferToImageInfo2 & operator=( CopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CopyBufferToImageInfo2 & operator=( VkCopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT { - srcStageMask = srcStageMask_; + srcBuffer = srcBuffer_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT { - srcAccessMask = srcAccessMask_; + dstImage = dstImage_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT { - dstStageMask = dstStageMask_; + dstImageLayout = dstImageLayout_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT { - dstAccessMask = 
dstAccessMask_; + regionCount = regionCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pRegions = pRegions_; + return *this; } - operator VkMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyBufferToImageInfo2 & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCopyBufferToImageInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyBufferToImageInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -18490,292 +20934,251 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::Buffer const &, + VULKAN_HPP_NAMESPACE::Image const &, + VULKAN_HPP_NAMESPACE::ImageLayout const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask ); + return std::tie( sType, pNext, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MemoryBarrier2 const & ) const = default; + auto operator<=>( CopyBufferToImageInfo2 const & ) const = default; #else - bool operator==( MemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CopyBufferToImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) && - ( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) && ( dstImage == rhs.dstImage ) && + ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); # endif } - bool operator!=( MemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CopyBufferToImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier2; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {}; - VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferToImageInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions = {}; }; template <> - struct CppType + struct CppType { - using Type = MemoryBarrier2; + 
using Type = CopyBufferToImageInfo2; }; - using MemoryBarrier2KHR = MemoryBarrier2; - struct ImageSubresourceRange - { - using NativeType = VkImageSubresourceRange; + using CopyBufferToImageInfo2KHR = CopyBufferToImageInfo2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageSubresourceRange( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, - uint32_t baseMipLevel_ = {}, - uint32_t levelCount_ = {}, - uint32_t baseArrayLayer_ = {}, - uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT - : aspectMask( aspectMask_ ) - , baseMipLevel( baseMipLevel_ ) - , levelCount( levelCount_ ) - , baseArrayLayer( baseArrayLayer_ ) - , layerCount( layerCount_ ) - { - } + struct CopyCommandTransformInfoQCOM + { + using NativeType = VkCopyCommandTransformInfoQCOM; - VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyCommandTransformInfoQCOM; - ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageSubresourceRange( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CopyCommandTransformInfoQCOM( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , transform{ transform_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - ImageSubresourceRange & operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImageSubresourceRange & operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + CopyCommandTransformInfoQCOM( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyCommandTransformInfoQCOM( *reinterpret_cast( &rhs ) ) { - aspectMask = aspectMask_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT - { - baseMipLevel = baseMipLevel_; - return *this; - } + CopyCommandTransformInfoQCOM & operator=( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT + CopyCommandTransformInfoQCOM & operator=( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { - levelCount = levelCount_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - baseArrayLayer = baseArrayLayer_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
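// --- Illustrative usage sketch (not part of the generated header or this patch) ---
// Records a tightly packed staging-buffer upload into a color image with
// vk::CopyBufferToImageInfo2 (core Vulkan 1.3). The subresource and extent values are
// example assumptions, and the image is assumed to already be in eTransferDstOptimal.
#include <cstdint>
#include <vulkan/vulkan.hpp>

void recordUpload( vk::CommandBuffer cmd, vk::Buffer staging, vk::Image image, uint32_t width, uint32_t height )
{
  vk::BufferImageCopy2 region{};
  region.setImageSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )  // color aspect, mip 0, layer 0, 1 layer
        .setImageExtent( { width, height, 1 } );                              // bufferOffset / rowLength stay 0 (tightly packed)
  vk::CopyBufferToImageInfo2 info( staging, image, vk::ImageLayout::eTransferDstOptimal, region );
  cmd.copyBufferToImage2( info );  // vkCmdCopyBufferToImage2
}
// --- end sketch ---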
CopyCommandTransformInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT { - layerCount = layerCount_; + transform = transform_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImageSubresourceRange const &() const VULKAN_HPP_NOEXCEPT + operator VkCopyCommandTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT + operator VkCopyCommandTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( aspectMask, baseMipLevel, levelCount, baseArrayLayer, layerCount ); + return std::tie( sType, pNext, transform ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageSubresourceRange const & ) const = default; + auto operator<=>( CopyCommandTransformInfoQCOM const & ) const = default; #else - bool operator==( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( aspectMask == rhs.aspectMask ) && ( baseMipLevel == rhs.baseMipLevel ) && ( levelCount == rhs.levelCount ) && - ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ); # endif } - bool operator!=( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; - uint32_t baseMipLevel = {}; - uint32_t levelCount = {}; - uint32_t baseArrayLayer = {}; - uint32_t layerCount = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyCommandTransformInfoQCOM; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; }; - struct ImageMemoryBarrier2 + template <> + struct CppType { - using NativeType = VkImageMemoryBarrier2; + using Type = CopyCommandTransformInfoQCOM; + }; + + struct CopyDescriptorSet + { + using NativeType = VkCopyDescriptorSet; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier2; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyDescriptorSet; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - uint32_t srcQueueFamilyIndex_ = {}, - uint32_t dstQueueFamilyIndex_ = {}, - VULKAN_HPP_NAMESPACE::Image image_ = {}, - 
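// --- Illustrative usage sketch (not part of the generated header or this patch) ---
// Assuming VK_QCOM_rotated_copy_commands is enabled, the QCOM transform struct above is
// chained into the pNext of a copy region (a VkBufferImageCopy2 in this sketch) to
// rotate the copy; the names and the 64x64 extent are placeholders.
#include <vulkan/vulkan.hpp>

void recordRotatedUpload( vk::CommandBuffer cmd, vk::Buffer src, vk::Image dst )
{
  vk::CopyCommandTransformInfoQCOM rotate{ vk::SurfaceTransformFlagBitsKHR::eRotate90 };
  vk::BufferImageCopy2 region{};
  region.setPNext( &rotate )  // rotate must stay alive until the copy is recorded
        .setImageSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
        .setImageExtent( { 64, 64, 1 } );
  cmd.copyBufferToImage2( vk::CopyBufferToImageInfo2( src, dst, vk::ImageLayout::eTransferDstOptimal, region ) );
}
// --- end sketch ---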
VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcStageMask( srcStageMask_ ) - , srcAccessMask( srcAccessMask_ ) - , dstStageMask( dstStageMask_ ) - , dstAccessMask( dstAccessMask_ ) - , oldLayout( oldLayout_ ) - , newLayout( newLayout_ ) - , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) - , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) - , image( image_ ) - , subresourceRange( subresourceRange_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, + uint32_t srcBinding_ = {}, + uint32_t srcArrayElement_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, + uint32_t dstBinding_ = {}, + uint32_t dstArrayElement_ = {}, + uint32_t descriptorCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcSet{ srcSet_ } + , srcBinding{ srcBinding_ } + , srcArrayElement{ srcArrayElement_ } + , dstSet{ dstSet_ } + , dstBinding{ dstBinding_ } + , dstArrayElement{ dstArrayElement_ } + , descriptorCount{ descriptorCount_ } { } - VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImageMemoryBarrier2( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageMemoryBarrier2( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT : CopyDescriptorSet( *reinterpret_cast( &rhs ) ) {} - ImageMemoryBarrier2 & operator=( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CopyDescriptorSet & operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImageMemoryBarrier2 & operator=( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT + CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT - { - srcStageMask = srcStageMask_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT - { - srcAccessMask = srcAccessMask_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT - { - dstStageMask = dstStageMask_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT { - dstAccessMask = dstAccessMask_; + srcSet = srcSet_; return *this; } - VULKAN_HPP_CONSTEXPR_14 
ImageMemoryBarrier2 & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcBinding( uint32_t srcBinding_ ) VULKAN_HPP_NOEXCEPT { - oldLayout = oldLayout_; + srcBinding = srcBinding_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcArrayElement( uint32_t srcArrayElement_ ) VULKAN_HPP_NOEXCEPT { - newLayout = newLayout_; + srcArrayElement = srcArrayElement_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT { - srcQueueFamilyIndex = srcQueueFamilyIndex_; + dstSet = dstSet_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT { - dstQueueFamilyIndex = dstQueueFamilyIndex_; + dstBinding = dstBinding_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT { - image = image_; + dstArrayElement = dstArrayElement_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & - setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT { - subresourceRange = subresourceRange_; + descriptorCount = descriptorCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImageMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT + operator VkCopyDescriptorSet const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT + operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -18784,225 +21187,140 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::DescriptorSet const &, + uint32_t const &, + uint32_t const &, + uint32_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - srcStageMask, - srcAccessMask, - dstStageMask, - dstAccessMask, - oldLayout, - newLayout, - srcQueueFamilyIndex, - dstQueueFamilyIndex, - image, - subresourceRange ); + return std::tie( sType, pNext, srcSet, srcBinding, srcArrayElement, dstSet, dstBinding, dstArrayElement, descriptorCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageMemoryBarrier2 const & ) const = default; + auto operator<=>( CopyDescriptorSet const & ) const = default; #else - bool operator==( ImageMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( 
srcAccessMask == rhs.srcAccessMask ) && - ( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask ) && ( oldLayout == rhs.oldLayout ) && - ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && - ( image == rhs.image ) && ( subresourceRange == rhs.subresourceRange ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSet == rhs.srcSet ) && ( srcBinding == rhs.srcBinding ) && + ( srcArrayElement == rhs.srcArrayElement ) && ( dstSet == rhs.dstSet ) && ( dstBinding == rhs.dstBinding ) && + ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount ); # endif } - bool operator!=( ImageMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier2; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {}; - VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {}; - VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - uint32_t srcQueueFamilyIndex = {}; - uint32_t dstQueueFamilyIndex = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; - VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet srcSet = {}; + uint32_t srcBinding = {}; + uint32_t srcArrayElement = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {}; + uint32_t dstBinding = {}; + uint32_t dstArrayElement = {}; + uint32_t descriptorCount = {}; }; template <> - struct CppType + struct CppType { - using Type = ImageMemoryBarrier2; + using Type = CopyDescriptorSet; }; - using ImageMemoryBarrier2KHR = ImageMemoryBarrier2; - struct DependencyInfo + struct ImageCopy2 { - using NativeType = VkDependencyInfo; + using NativeType = VkImageCopy2; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDependencyInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCopy2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DependencyInfo( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, - uint32_t memoryBarrierCount_ = {}, - const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers_ = {}, - uint32_t bufferMemoryBarrierCount_ = {}, - const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers_ = {}, - uint32_t imageMemoryBarrierCount_ = {}, - const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , dependencyFlags( dependencyFlags_ ) - , memoryBarrierCount( memoryBarrierCount_ ) - , pMemoryBarriers( pMemoryBarriers_ ) - , bufferMemoryBarrierCount( bufferMemoryBarrierCount_ ) - , pBufferMemoryBarriers( pBufferMemoryBarriers_ ) - , imageMemoryBarrierCount( imageMemoryBarrierCount_ ) - , pImageMemoryBarriers( pImageMemoryBarriers_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
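// --- Illustrative usage sketch (not part of the generated header or this patch) ---
// Copies four consecutive descriptors, starting at binding 0 / array element 0, from
// one descriptor set into another via vk::Device::updateDescriptorSets (zero writes,
// one copy). The function name and the count of 4 are example choices.
#include <vulkan/vulkan.hpp>

void migrateDescriptors( vk::Device device, vk::DescriptorSet from, vk::DescriptorSet to )
{
  vk::CopyDescriptorSet copy{};
  copy.setSrcSet( from ).setSrcBinding( 0 ).setSrcArrayElement( 0 )
      .setDstSet( to ).setDstBinding( 0 ).setDstArrayElement( 0 )
      .setDescriptorCount( 4 );
  device.updateDescriptorSets( nullptr, copy );  // vkUpdateDescriptorSets with no writes
}
// --- end sketch ---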
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCopy2( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcSubresource{ srcSubresource_ } + , srcOffset{ srcOffset_ } + , dstSubresource{ dstSubresource_ } + , dstOffset{ dstOffset_ } + , extent{ extent_ } { } - VULKAN_HPP_CONSTEXPR DependencyInfo( DependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageCopy2( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DependencyInfo( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DependencyInfo( *reinterpret_cast( &rhs ) ) {} + ImageCopy2( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCopy2( *reinterpret_cast( &rhs ) ) {} -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DependencyInfo( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & memoryBarriers_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferMemoryBarriers_ = {}, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageMemoryBarriers_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , dependencyFlags( dependencyFlags_ ) - , memoryBarrierCount( static_cast( memoryBarriers_.size() ) ) - , pMemoryBarriers( memoryBarriers_.data() ) - , bufferMemoryBarrierCount( static_cast( bufferMemoryBarriers_.size() ) ) - , pBufferMemoryBarriers( bufferMemoryBarriers_.data() ) - , imageMemoryBarrierCount( static_cast( imageMemoryBarriers_.size() ) ) - , pImageMemoryBarriers( imageMemoryBarriers_.data() ) - { - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - DependencyInfo & operator=( DependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageCopy2 & operator=( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DependencyInfo & operator=( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ImageCopy2 & operator=( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT - { - dependencyFlags = dependencyFlags_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setMemoryBarrierCount( uint32_t memoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT - { - memoryBarrierCount = memoryBarrierCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPMemoryBarriers( const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT - { - pMemoryBarriers = pMemoryBarriers_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DependencyInfo & - setMemoryBarriers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & memoryBarriers_ ) VULKAN_HPP_NOEXCEPT - { - memoryBarrierCount = 
static_cast( memoryBarriers_.size() ); - pMemoryBarriers = memoryBarriers_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setBufferMemoryBarrierCount( uint32_t bufferMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT - { - bufferMemoryBarrierCount = bufferMemoryBarrierCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 DependencyInfo & - setPBufferMemoryBarriers( const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT { - pBufferMemoryBarriers = pBufferMemoryBarriers_; + srcSubresource = srcSubresource_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DependencyInfo & setBufferMemoryBarriers( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT { - bufferMemoryBarrierCount = static_cast( bufferMemoryBarriers_.size() ); - pBufferMemoryBarriers = bufferMemoryBarriers_.data(); + srcOffset = srcOffset_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setImageMemoryBarrierCount( uint32_t imageMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT { - imageMemoryBarrierCount = imageMemoryBarrierCount_; + dstSubresource = dstSubresource_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DependencyInfo & - setPImageMemoryBarriers( const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT { - pImageMemoryBarriers = pImageMemoryBarriers_; + dstOffset = dstOffset_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DependencyInfo & setImageMemoryBarriers( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT { - imageMemoryBarrierCount = static_cast( imageMemoryBarriers_.size() ); - pImageMemoryBarriers = imageMemoryBarriers_.data(); + extent = extent_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDependencyInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkImageCopy2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDependencyInfo &() VULKAN_HPP_NOEXCEPT + operator VkImageCopy2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -19011,441 +21329,530 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, + VULKAN_HPP_NAMESPACE::Offset3D const &, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, + VULKAN_HPP_NAMESPACE::Offset3D const &, + VULKAN_HPP_NAMESPACE::Extent3D const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - dependencyFlags, - memoryBarrierCount, - pMemoryBarriers, 
- bufferMemoryBarrierCount, - pBufferMemoryBarriers, - imageMemoryBarrierCount, - pImageMemoryBarriers ); + return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DependencyInfo const & ) const = default; + auto operator<=>( ImageCopy2 const & ) const = default; #else - bool operator==( DependencyInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dependencyFlags == rhs.dependencyFlags ) && - ( memoryBarrierCount == rhs.memoryBarrierCount ) && ( pMemoryBarriers == rhs.pMemoryBarriers ) && - ( bufferMemoryBarrierCount == rhs.bufferMemoryBarrierCount ) && ( pBufferMemoryBarriers == rhs.pBufferMemoryBarriers ) && - ( imageMemoryBarrierCount == rhs.imageMemoryBarrierCount ) && ( pImageMemoryBarriers == rhs.pImageMemoryBarriers ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && + ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); # endif } - bool operator!=( DependencyInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDependencyInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; - uint32_t memoryBarrierCount = {}; - const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers = {}; - uint32_t bufferMemoryBarrierCount = {}; - const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers = {}; - uint32_t imageMemoryBarrierCount = {}; - const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCopy2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; }; template <> - struct CppType + struct CppType { - using Type = DependencyInfo; + using Type = ImageCopy2; }; - using DependencyInfoKHR = DependencyInfo; - struct DescriptorBufferInfo + using ImageCopy2KHR = ImageCopy2; + + struct CopyImageInfo2 { - using NativeType = VkDescriptorBufferInfo; + using NativeType = VkCopyImageInfo2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT - : buffer( buffer_ ) - , offset( offset_ ) - , range( range_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageInfo2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + 
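// --- Illustrative usage sketch (not part of the generated header or this patch) ---
// Builds a single vk::ImageCopy2 region: a 128x128 block of mip 0 / layer 0 of a color
// image, placed at offset (16,16) in the destination. All values are example
// assumptions and the helper name is made up.
#include <vulkan/vulkan.hpp>

vk::ImageCopy2 makeExampleRegion()
{
  return vk::ImageCopy2{}
    .setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
    .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
    .setSrcOffset( { 0, 0, 0 } )
    .setDstOffset( { 16, 16, 0 } )
    .setExtent( { 128, 128, 1 } );
}
// --- end sketch ---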
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcImage{ srcImage_ } + , srcImageLayout{ srcImageLayout_ } + , dstImage{ dstImage_ } + , dstImageLayout{ dstImageLayout_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } { } - VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR CopyImageInfo2( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : DescriptorBufferInfo( *reinterpret_cast( &rhs ) ) + CopyImageInfo2( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : CopyImageInfo2( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - DescriptorBufferInfo & operator=( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CopyImageInfo2 & operator=( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DescriptorBufferInfo & operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + CopyImageInfo2 & operator=( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - buffer = buffer_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT { - offset = offset_; + srcImage = srcImage_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT { - range = range_; + srcImageLayout = srcImageLayout_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDescriptorBufferInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + dstImage = dstImage_; + return *this; } - operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setDstImageLayout( 
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + dstImageLayout = dstImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageInfo2 & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCopyImageInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyImageInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( buffer, offset, range ); + return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DescriptorBufferInfo const & ) const = default; + auto operator<=>( CopyImageInfo2 const & ) const = default; #else - bool operator==( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CopyImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( range == rhs.range ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) && + ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); # endif } - bool operator!=( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CopyImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize range = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions = {}; }; - struct DescriptorImageInfo + template <> + struct CppType { - using NativeType = VkDescriptorImageInfo; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - DescriptorImageInfo( VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, - VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT - : sampler( sampler_ ) - , imageView( imageView_ ) - , imageLayout( imageLayout_ ) - { - } + using Type = 
CopyImageInfo2; + }; - VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + using CopyImageInfo2KHR = CopyImageInfo2; - DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorImageInfo( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + struct CopyImageToBufferInfo2 + { + using NativeType = VkCopyImageToBufferInfo2; - DescriptorImageInfo & operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToBufferInfo2; - DescriptorImageInfo & operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcImage{ srcImage_ } + , srcImageLayout{ srcImageLayout_ } + , dstBuffer{ dstBuffer_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT - { - sampler = sampler_; - return *this; - } + VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2( CopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT + CopyImageToBufferInfo2( VkCopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyImageToBufferInfo2( *reinterpret_cast( &rhs ) ) { - imageView = imageView_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToBufferInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstBuffer( dstBuffer_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) { - imageLayout = imageLayout_; - return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - operator VkDescriptorImageInfo const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } + CopyImageToBufferInfo2 & operator=( CopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT + CopyImageToBufferInfo2 & operator=( VkCopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + *this = *reinterpret_cast( &rhs ); + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const 
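// --- Illustrative usage sketch (not part of the generated header or this patch) ---
// Whole-subresource image-to-image copy through vk::CopyImageInfo2 and the core
// Vulkan 1.3 vkCmdCopyImage2 path. Both images are assumed to already be in the
// transfer layouts; the function name and parameters are placeholders.
#include <vulkan/vulkan.hpp>

void recordImageCopy( vk::CommandBuffer cmd, vk::Image src, vk::Image dst, vk::Extent3D extent )
{
  vk::ImageCopy2 region{};
  region.setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
        .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
        .setExtent( extent );
  cmd.copyImage2( vk::CopyImageInfo2( src, vk::ImageLayout::eTransferSrcOptimal,
                                      dst, vk::ImageLayout::eTransferDstOptimal, region ) );
}
// --- end sketch ---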
VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sampler, imageView, imageLayout ); + pNext = pNext_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DescriptorImageInfo const & ) const = default; -#else - bool operator==( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sampler == rhs.sampler ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout ); -# endif + srcImage = srcImage_; + return *this; } - bool operator!=( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + srcImageLayout = srcImageLayout_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::Sampler sampler = {}; - VULKAN_HPP_NAMESPACE::ImageView imageView = {}; - VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - }; - - struct DescriptorPoolSize - { - using NativeType = VkDescriptorPoolSize; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DescriptorPoolSize( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, - uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , descriptorCount( descriptorCount_ ) + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT { + dstBuffer = dstBuffer_; + return *this; } - VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorPoolSize( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - DescriptorPoolSize & operator=( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - DescriptorPoolSize & operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + regionCount = regionCount_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT { - type = type_; + pRegions = pRegions_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToBufferInfo2 & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT { - descriptorCount = descriptorCount_; + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - 
operator VkDescriptorPoolSize const &() const VULKAN_HPP_NOEXCEPT + operator VkCopyImageToBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT + operator VkCopyImageToBufferInfo2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( type, descriptorCount ); + return std::tie( sType, pNext, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DescriptorPoolSize const & ) const = default; + auto operator<=>( CopyImageToBufferInfo2 const & ) const = default; #else - bool operator==( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CopyImageToBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( type == rhs.type ) && ( descriptorCount == rhs.descriptorCount ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) && + ( dstBuffer == rhs.dstBuffer ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); # endif } - bool operator!=( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CopyImageToBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; - uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToBufferInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {}; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions = {}; }; - struct DescriptorPoolCreateInfo + template <> + struct CppType { - using NativeType = VkDescriptorPoolCreateInfo; + using Type = CopyImageToBufferInfo2; + }; + + using CopyImageToBufferInfo2KHR = CopyImageToBufferInfo2; + + struct CopyImageToImageInfo + { + using NativeType = VkCopyImageToImageInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToImageInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {}, - uint32_t maxSets_ = {}, - uint32_t poolSizeCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , maxSets( maxSets_ ) - , poolSizeCount( poolSizeCount_ ) - , pPoolSizes( pPoolSizes_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyImageToImageInfo( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = 
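  // ------------------------------------------------------------------------
  // Illustrative sketch: the fluent setters of CopyImageToBufferInfo2 declared
  // above, recorded with vk::CommandBuffer::copyImageToBuffer2
  // (vkCmdCopyImageToBuffer2). The handles, the extent and the single tightly
  // packed region are assumptions; assumes <vulkan/vulkan.hpp> is included as
  // in the sketch further up.
  inline void recordImageToBufferCopy( vk::CommandBuffer cmd, vk::Image src, vk::Buffer dst, vk::Extent3D extent )
  {
    vk::BufferImageCopy2 region{};
    region.bufferOffset      = 0;  // start of the destination buffer
    region.bufferRowLength   = 0;  // 0 = rows tightly packed
    region.bufferImageHeight = 0;  // 0 = slices tightly packed
    region.imageSubresource  = vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 );
    region.imageExtent       = extent;

    vk::CopyImageToBufferInfo2 info{};
    info.setSrcImage( src )
        .setSrcImageLayout( vk::ImageLayout::eTransferSrcOptimal )
        .setDstBuffer( dst )
        .setRegions( region );  // fills regionCount / pRegions

    cmd.copyImageToBuffer2( info );
  }
  // ------------------------------------------------------------------------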
VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , srcImage{ srcImage_ } + , srcImageLayout{ srcImageLayout_ } + , dstImage{ dstImage_ } + , dstImageLayout{ dstImageLayout_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } { } - VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR CopyImageToImageInfo( CopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : DescriptorPoolCreateInfo( *reinterpret_cast( &rhs ) ) + CopyImageToImageInfo( VkCopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyImageToImageInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_, - uint32_t maxSets_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & poolSizes_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( static_cast( poolSizes_.size() ) ), pPoolSizes( poolSizes_.data() ) + CopyImageToImageInfo( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_, + VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DescriptorPoolCreateInfo & operator=( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CopyImageToImageInfo & operator=( CopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DescriptorPoolCreateInfo & operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + CopyImageToImageInfo & operator=( VkCopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setMaxSets( uint32_t maxSets_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & 
setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT { - maxSets = maxSets_; + srcImage = srcImage_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPoolSizeCount( uint32_t poolSizeCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT { - poolSizeCount = poolSizeCount_; + srcImageLayout = srcImageLayout_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT { - pPoolSizes = pPoolSizes_; + dstImage = dstImage_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DescriptorPoolCreateInfo & - setPoolSizes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & poolSizes_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT { - poolSizeCount = static_cast( poolSizes_.size() ); - pPoolSizes = poolSizes_.data(); + dstImageLayout = dstImageLayout_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDescriptorPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + regionCount = regionCount_; + return *this; } - operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToImageInfo & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCopyImageToImageInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyImageToImageInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -19454,238 +21861,322 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + const VULKAN_HPP_NAMESPACE::ImageCopy2 * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, maxSets, poolSizeCount, pPoolSizes ); + return std::tie( sType, pNext, flags, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DescriptorPoolCreateInfo const & ) const = default; + auto operator<=>( CopyImageToImageInfo const & ) const = default; #else - bool operator==( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CopyImageToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( maxSets == rhs.maxSets ) && - ( poolSizeCount == rhs.poolSizeCount ) && ( pPoolSizes == 
rhs.pPoolSizes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( srcImage == rhs.srcImage ) && + ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && + ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); # endif } - bool operator!=( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CopyImageToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags = {}; - uint32_t maxSets = {}; - uint32_t poolSizeCount = {}; - const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToImageInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions = {}; }; template <> - struct CppType + struct CppType { - using Type = DescriptorPoolCreateInfo; + using Type = CopyImageToImageInfo; }; - struct DescriptorPoolInlineUniformBlockCreateInfo + using CopyImageToImageInfoEXT = CopyImageToImageInfo; + + struct ImageToMemoryCopy { - using NativeType = VkDescriptorPoolInlineUniformBlockCreateInfo; + using NativeType = VkImageToMemoryCopy; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageToMemoryCopy; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo( uint32_t maxInlineUniformBlockBindings_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageToMemoryCopy( void * pHostPointer_ = {}, + uint32_t memoryRowLength_ = {}, + uint32_t memoryImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pHostPointer{ pHostPointer_ } + , memoryRowLength{ memoryRowLength_ } + , memoryImageHeight{ memoryImageHeight_ } + , imageSubresource{ imageSubresource_ } + , imageOffset{ imageOffset_ } + , imageExtent{ imageExtent_ } { } - VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageToMemoryCopy( ImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DescriptorPoolInlineUniformBlockCreateInfo( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : DescriptorPoolInlineUniformBlockCreateInfo( *reinterpret_cast( &rhs ) ) + ImageToMemoryCopy( 
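  // ------------------------------------------------------------------------
  // Illustrative sketch: CopyImageToImageInfo drives the host image copy path
  // (VK_EXT_host_image_copy, promoted in Vulkan 1.4), so the copy is issued
  // from the CPU without a command buffer. Assumptions: a vk::Device wrapper
  // named copyImageToImage for vkCopyImageToImage(EXT) is available, both
  // images were created with host-transfer usage, and eGeneral is among the
  // layouts the implementation reports as host-copyable.
  inline void hostCopyImage( vk::Device device, vk::Image src, vk::Image dst, vk::Extent3D extent )
  {
    vk::ImageCopy2 region{};
    region.srcSubresource = vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 );
    region.dstSubresource = region.srcSubresource;
    region.extent         = extent;

    vk::CopyImageToImageInfo info{};
    info.srcImage       = src;
    info.srcImageLayout = vk::ImageLayout::eGeneral;
    info.dstImage       = dst;
    info.dstImageLayout = vk::ImageLayout::eGeneral;
    info.setRegions( region );

    device.copyImageToImage( info );  // throws vk::SystemError on failure in the default configuration
  }
  // ------------------------------------------------------------------------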
VkImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT : ImageToMemoryCopy( *reinterpret_cast( &rhs ) ) {} + + ImageToMemoryCopy & operator=( ImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageToMemoryCopy & operator=( VkImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DescriptorPoolInlineUniformBlockCreateInfo & operator=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } - DescriptorPoolInlineUniformBlockCreateInfo & operator=( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setPHostPointer( void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + pHostPointer = pHostPointer_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setMemoryRowLength( uint32_t memoryRowLength_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + memoryRowLength = memoryRowLength_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo & - setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setMemoryImageHeight( uint32_t memoryImageHeight_ ) VULKAN_HPP_NOEXCEPT { - maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_; + memoryImageHeight = memoryImageHeight_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDescriptorPoolInlineUniformBlockCreateInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & + setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + imageSubresource = imageSubresource_; + return *this; } - operator VkDescriptorPoolInlineUniformBlockCreateInfo &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + imageOffset = imageOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageToMemoryCopy const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageToMemoryCopy &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, maxInlineUniformBlockBindings ); + return std::tie( sType, pNext, pHostPointer, memoryRowLength, memoryImageHeight, imageSubresource, imageOffset, imageExtent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfo const & ) const = default; + auto 
operator<=>( ImageToMemoryCopy const & ) const = default; #else - bool operator==( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageToMemoryCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pHostPointer == rhs.pHostPointer ) && ( memoryRowLength == rhs.memoryRowLength ) && + ( memoryImageHeight == rhs.memoryImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && + ( imageExtent == rhs.imageExtent ); # endif } - bool operator!=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageToMemoryCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo; - const void * pNext = {}; - uint32_t maxInlineUniformBlockBindings = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageToMemoryCopy; + const void * pNext = {}; + void * pHostPointer = {}; + uint32_t memoryRowLength = {}; + uint32_t memoryImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; }; template <> - struct CppType + struct CppType { - using Type = DescriptorPoolInlineUniformBlockCreateInfo; + using Type = ImageToMemoryCopy; }; - using DescriptorPoolInlineUniformBlockCreateInfoEXT = DescriptorPoolInlineUniformBlockCreateInfo; - struct DescriptorSetAllocateInfo + using ImageToMemoryCopyEXT = ImageToMemoryCopy; + + struct CopyImageToMemoryInfo { - using NativeType = VkDescriptorSetAllocateInfo; + using NativeType = VkCopyImageToMemoryInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetAllocateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToMemoryInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {}, - uint32_t descriptorSetCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , descriptorPool( descriptorPool_ ) - , descriptorSetCount( descriptorSetCount_ ) - , pSetLayouts( pSetLayouts_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyImageToMemoryInfo( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageToMemoryCopy * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , srcImage{ srcImage_ } + , srcImageLayout{ srcImageLayout_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } { } - VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + 
VULKAN_HPP_CONSTEXPR CopyImageToMemoryInfo( CopyImageToMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : DescriptorSetAllocateInfo( *reinterpret_cast( &rhs ) ) + CopyImageToMemoryInfo( VkCopyImageToMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyImageToMemoryInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), descriptorPool( descriptorPool_ ), descriptorSetCount( static_cast( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() ) + CopyImageToMemoryInfo( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_, + VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DescriptorSetAllocateInfo & operator=( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CopyImageToMemoryInfo & operator=( CopyImageToMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DescriptorSetAllocateInfo & operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + CopyImageToMemoryInfo & operator=( VkCopyImageToMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ ) VULKAN_HPP_NOEXCEPT { - descriptorPool = descriptorPool_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT { - descriptorSetCount = descriptorSetCount_; + srcImage = srcImage_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT { - pSetLayouts = pSetLayouts_; + srcImageLayout = srcImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setPRegions( 
const VULKAN_HPP_NAMESPACE::ImageToMemoryCopy * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DescriptorSetAllocateInfo & - setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_ ) VULKAN_HPP_NOEXCEPT + CopyImageToMemoryInfo & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT { - descriptorSetCount = static_cast( setLayouts_.size() ); - pSetLayouts = setLayouts_.data(); + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDescriptorSetAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkCopyImageToMemoryInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT + operator VkCopyImageToMemoryInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -19694,515 +22185,476 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + const VULKAN_HPP_NAMESPACE::ImageToMemoryCopy * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, descriptorPool, descriptorSetCount, pSetLayouts ); + return std::tie( sType, pNext, flags, srcImage, srcImageLayout, regionCount, pRegions ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DescriptorSetAllocateInfo const & ) const = default; + auto operator<=>( CopyImageToMemoryInfo const & ) const = default; #else - bool operator==( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CopyImageToMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorPool == rhs.descriptorPool ) && ( descriptorSetCount == rhs.descriptorSetCount ) && - ( pSetLayouts == rhs.pSetLayouts ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( srcImage == rhs.srcImage ) && + ( srcImageLayout == rhs.srcImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); # endif } - bool operator!=( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CopyImageToMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool = {}; - uint32_t descriptorSetCount = {}; - const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToMemoryInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageToMemoryCopy * pRegions = {}; }; template <> - struct CppType + struct CppType { - using Type = DescriptorSetAllocateInfo; + using Type = CopyImageToMemoryInfo; }; - struct DescriptorSetBindingReferenceVALVE - { - using 
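  // ------------------------------------------------------------------------
  // Illustrative sketch: reading an image back into host memory with
  // ImageToMemoryCopy / CopyImageToMemoryInfo. pHostPointer receives the
  // texels directly; memoryRowLength / memoryImageHeight of 0 mean tightly
  // packed. Assumptions: a vk::Device wrapper named copyImageToMemory for
  // vkCopyImageToMemory(EXT), a 4-bytes-per-texel format, the eGeneral layout,
  // and that <vector> is included alongside <vulkan/vulkan.hpp>.
  inline std::vector<unsigned char> readbackImage( vk::Device device, vk::Image image, vk::Extent3D extent )
  {
    std::vector<unsigned char> pixels( size_t( extent.width ) * extent.height * extent.depth * 4 );

    vk::ImageToMemoryCopy region{};
    region.pHostPointer      = pixels.data();
    region.memoryRowLength   = 0;  // tightly packed rows
    region.memoryImageHeight = 0;  // tightly packed slices
    region.imageSubresource  = vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 );
    region.imageExtent       = extent;

    vk::CopyImageToMemoryInfo info{};
    info.srcImage       = image;
    info.srcImageLayout = vk::ImageLayout::eGeneral;
    info.setRegions( region );

    device.copyImageToMemory( info );
    return pixels;
  }
  // ------------------------------------------------------------------------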
NativeType = VkDescriptorSetBindingReferenceVALVE; + using CopyImageToMemoryInfoEXT = CopyImageToMemoryInfo; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetBindingReferenceVALVE; + struct CopyMemoryIndirectCommandNV + { + using NativeType = VkCopyMemoryIndirectCommandNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DescriptorSetBindingReferenceVALVE( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, - uint32_t binding_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , descriptorSetLayout( descriptorSetLayout_ ) - , binding( binding_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyMemoryIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : srcAddress{ srcAddress_ } + , dstAddress{ dstAddress_ } + , size{ size_ } { } - VULKAN_HPP_CONSTEXPR DescriptorSetBindingReferenceVALVE( DescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR CopyMemoryIndirectCommandNV( CopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DescriptorSetBindingReferenceVALVE( VkDescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT - : DescriptorSetBindingReferenceVALVE( *reinterpret_cast( &rhs ) ) + CopyMemoryIndirectCommandNV( VkCopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyMemoryIndirectCommandNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DescriptorSetBindingReferenceVALVE & operator=( DescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CopyMemoryIndirectCommandNV & operator=( CopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DescriptorSetBindingReferenceVALVE & operator=( VkDescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + CopyMemoryIndirectCommandNV & operator=( VkCopyMemoryIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandNV & setSrcAddress( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + srcAddress = srcAddress_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & - setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandNV & setDstAddress( VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ ) VULKAN_HPP_NOEXCEPT { - descriptorSetLayout = descriptorSetLayout_; + dstAddress = dstAddress_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandNV & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT { - binding = binding_; + size = size_; return *this; } -#endif 
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDescriptorSetBindingReferenceVALVE const &() const VULKAN_HPP_NOEXCEPT + operator VkCopyMemoryIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDescriptorSetBindingReferenceVALVE &() VULKAN_HPP_NOEXCEPT + operator VkCopyMemoryIndirectCommandNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, descriptorSetLayout, binding ); + return std::tie( srcAddress, dstAddress, size ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DescriptorSetBindingReferenceVALVE const & ) const = default; + auto operator<=>( CopyMemoryIndirectCommandNV const & ) const = default; #else - bool operator==( DescriptorSetBindingReferenceVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CopyMemoryIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetLayout == rhs.descriptorSetLayout ) && ( binding == rhs.binding ); + return ( srcAddress == rhs.srcAddress ) && ( dstAddress == rhs.dstAddress ) && ( size == rhs.size ); # endif } - bool operator!=( DescriptorSetBindingReferenceVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CopyMemoryIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetBindingReferenceVALVE; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {}; - uint32_t binding = {}; - }; - - template <> - struct CppType - { - using Type = DescriptorSetBindingReferenceVALVE; + VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; }; - struct DescriptorSetLayoutBinding + struct CopyMemoryToAccelerationStructureInfoKHR { - using NativeType = VkDescriptorSetLayoutBinding; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( uint32_t binding_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, - uint32_t descriptorCount_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, - const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ = {} ) VULKAN_HPP_NOEXCEPT - : binding( binding_ ) - , descriptorType( descriptorType_ ) - , descriptorCount( descriptorCount_ ) - , stageFlags( stageFlags_ ) - , pImmutableSamplers( pImmutableSamplers_ ) - { - } - - VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default; + using NativeType = VkCopyMemoryToAccelerationStructureInfoKHR; - DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT - : DescriptorSetLayoutBinding( *reinterpret_cast( &rhs ) ) - { - } + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - 
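  // ------------------------------------------------------------------------
  // Illustrative sketch: CopyMemoryIndirectCommandNV is the plain record
  // layout consumed by VK_NV_copy_memory_indirect; an array of these is
  // written into a device-addressable buffer and the copies are launched
  // indirectly. Assumptions: the mapped pointer and device addresses are
  // valid, the vk::CommandBuffer::copyMemoryIndirectNV wrapper takes
  // (address, count, stride), and <cstring> is included alongside
  // <vulkan/vulkan.hpp>.
  inline void recordIndirectCopy( vk::CommandBuffer cmd,
                                  void *            mappedCommandMemory,   // host-visible, device-addressable buffer
                                  vk::DeviceAddress commandBufferAddress,  // device address of that same buffer
                                  vk::DeviceAddress srcAddress,
                                  vk::DeviceAddress dstAddress,
                                  vk::DeviceSize    size )
  {
    vk::CopyMemoryIndirectCommandNV copy{};
    copy.srcAddress = srcAddress;
    copy.dstAddress = dstAddress;
    copy.size       = size;

    std::memcpy( mappedCommandMemory, &copy, sizeof( copy ) );  // a single command; real code would write an array

    cmd.copyMemoryIndirectNV( commandBufferAddress, 1, static_cast<uint32_t>( sizeof( vk::CopyMemoryIndirectCommandNV ) ) );
  }
  // ------------------------------------------------------------------------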
DescriptorSetLayoutBinding( uint32_t binding_, - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, - VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & immutableSamplers_ ) - : binding( binding_ ) - , descriptorType( descriptorType_ ) - , descriptorCount( static_cast( immutableSamplers_.size() ) ) - , stageFlags( stageFlags_ ) - , pImmutableSamplers( immutableSamplers_.data() ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR( + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , src{ src_ } + , dst{ dst_ } + , mode{ mode_ } { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DescriptorSetLayoutBinding & operator=( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DescriptorSetLayoutBinding & operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT + CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyMemoryToAccelerationStructureInfoKHR( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT - { - binding = binding_; - return *this; - } + CopyMemoryToAccelerationStructureInfoKHR & operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + CopyMemoryToAccelerationStructureInfoKHR & operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - descriptorType = descriptorType_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - descriptorCount = descriptorCount_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & + setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT { - stageFlags = stageFlags_; + src = src_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT { - pImmutableSamplers = 
pImmutableSamplers_; + dst = dst_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DescriptorSetLayoutBinding & - setImmutableSamplers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & immutableSamplers_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & + setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT { - descriptorCount = static_cast( immutableSamplers_.size() ); - pImmutableSamplers = immutableSamplers_.data(); + mode = mode_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDescriptorSetLayoutBinding const &() const VULKAN_HPP_NOEXCEPT + operator VkCopyMemoryToAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT + operator VkCopyMemoryToAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( binding, descriptorType, descriptorCount, stageFlags, pImmutableSamplers ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DescriptorSetLayoutBinding const & ) const = default; -#else - bool operator==( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( binding == rhs.binding ) && ( descriptorType == rhs.descriptorType ) && ( descriptorCount == rhs.descriptorCount ) && - ( stageFlags == rhs.stageFlags ) && ( pImmutableSamplers == rhs.pImmutableSamplers ); -# endif - } - - bool operator!=( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); + return std::tie( sType, pNext, src, dst, mode ); } #endif public: - uint32_t binding = {}; - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; - uint32_t descriptorCount = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; - const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone; }; - struct DescriptorSetLayoutBindingFlagsCreateInfo + template <> + struct CppType { - using NativeType = VkDescriptorSetLayoutBindingFlagsCreateInfo; + using Type = CopyMemoryToAccelerationStructureInfoKHR; + }; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; + struct CopyMemoryToImageIndirectCommandNV + { + using NativeType = VkCopyMemoryToImageIndirectCommandNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( uint32_t bindingCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ = {}, - const void * pNext_ = nullptr ) 
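  // ------------------------------------------------------------------------
  // Illustrative sketch: deserializing a previously serialized acceleration
  // structure with CopyMemoryToAccelerationStructureInfoKHR
  // (VK_KHR_acceleration_structure). src is a DeviceOrHostAddressConstKHR
  // union; on the command-buffer path it must hold the device address of the
  // buffer containing the serialized blob. The handles and address are
  // assumptions; assumes <vulkan/vulkan.hpp> is included.
  inline void recordDeserializeAccelerationStructure( vk::CommandBuffer            cmd,
                                                      vk::DeviceAddress            serializedData,
                                                      vk::AccelerationStructureKHR dst )
  {
    vk::CopyMemoryToAccelerationStructureInfoKHR info{};
    info.src.deviceAddress = serializedData;  // union member: device address of the serialized data
    info.dst               = dst;
    info.mode              = vk::CopyAccelerationStructureModeKHR::eDeserialize;

    cmd.copyMemoryToAccelerationStructureKHR( info );
  }
  // ------------------------------------------------------------------------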
VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , bindingCount( bindingCount_ ) - , pBindingFlags( pBindingFlags_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyMemoryToImageIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ = {}, + uint32_t bufferRowLength_ = {}, + uint32_t bufferImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcAddress{ srcAddress_ } + , bufferRowLength{ bufferRowLength_ } + , bufferImageHeight{ bufferImageHeight_ } + , imageSubresource{ imageSubresource_ } + , imageOffset{ imageOffset_ } + , imageExtent{ imageExtent_ } { } - VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR CopyMemoryToImageIndirectCommandNV( CopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : DescriptorSetLayoutBindingFlagsCreateInfo( *reinterpret_cast( &rhs ) ) + CopyMemoryToImageIndirectCommandNV( VkCopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyMemoryToImageIndirectCommandNV( *reinterpret_cast( &rhs ) ) { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DescriptorSetLayoutBindingFlagsCreateInfo( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bindingFlags_, const void * pNext_ = nullptr ) - : pNext( pNext_ ), bindingCount( static_cast( bindingFlags_.size() ) ), pBindingFlags( bindingFlags_.data() ) + CopyMemoryToImageIndirectCommandNV & operator=( CopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CopyMemoryToImageIndirectCommandNV & operator=( VkCopyMemoryToImageIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DescriptorSetLayoutBindingFlagsCreateInfo & operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setSrcAddress( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ ) VULKAN_HPP_NOEXCEPT + { + srcAddress = srcAddress_; + return *this; + } - DescriptorSetLayoutBindingFlagsCreateInfo & operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + bufferRowLength = bufferRowLength_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + bufferImageHeight = bufferImageHeight_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
CopyMemoryToImageIndirectCommandNV & + setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT { - bindingCount = bindingCount_; + imageSubresource = imageSubresource_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & - setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT { - pBindingFlags = pBindingFlags_; + imageOffset = imageOffset_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DescriptorSetLayoutBindingFlagsCreateInfo & setBindingFlags( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bindingFlags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandNV & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT { - bindingCount = static_cast( bindingFlags_.size() ); - pBindingFlags = bindingFlags_.data(); + imageExtent = imageExtent_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDescriptorSetLayoutBindingFlagsCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkCopyMemoryToImageIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDescriptorSetLayoutBindingFlagsCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkCopyMemoryToImageIndirectCommandNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std:: - tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, bindingCount, pBindingFlags ); + return std::tie( srcAddress, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfo const & ) const = default; + auto operator<=>( CopyMemoryToImageIndirectCommandNV const & ) const = default; #else - bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CopyMemoryToImageIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bindingCount == rhs.bindingCount ) && ( pBindingFlags == rhs.pBindingFlags ); + return ( srcAddress == rhs.srcAddress ) && ( bufferRowLength == rhs.bufferRowLength ) && ( bufferImageHeight == rhs.bufferImageHeight ) && + ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && ( imageExtent == rhs.imageExtent ); # endif } - bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CopyMemoryToImageIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; - const void * pNext = {}; - uint32_t bindingCount = {}; - const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags = {}; - }; - - template <> - struct CppType - { - 
using Type = DescriptorSetLayoutBindingFlagsCreateInfo; + VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress = {}; + uint32_t bufferRowLength = {}; + uint32_t bufferImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; }; - using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo; - struct DescriptorSetLayoutCreateInfo + struct MemoryToImageCopy { - using NativeType = VkDescriptorSetLayoutCreateInfo; + using NativeType = VkMemoryToImageCopy; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryToImageCopy; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {}, - uint32_t bindingCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , bindingCount( bindingCount_ ) - , pBindings( pBindings_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryToImageCopy( const void * pHostPointer_ = {}, + uint32_t memoryRowLength_ = {}, + uint32_t memoryImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pHostPointer{ pHostPointer_ } + , memoryRowLength{ memoryRowLength_ } + , memoryImageHeight{ memoryImageHeight_ } + , imageSubresource{ imageSubresource_ } + , imageOffset{ imageOffset_ } + , imageExtent{ imageExtent_ } { } - VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR MemoryToImageCopy( MemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : DescriptorSetLayoutCreateInfo( *reinterpret_cast( &rhs ) ) + MemoryToImageCopy( VkMemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryToImageCopy( *reinterpret_cast( &rhs ) ) {} + + MemoryToImageCopy & operator=( MemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryToImageCopy & operator=( VkMemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bindings_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), flags( flags_ ), bindingCount( static_cast( bindings_.size() ) ), pBindings( bindings_.data() ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { + pNext = pNext_; + return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DescriptorSetLayoutCreateInfo & operator=( 
DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - DescriptorSetLayoutCreateInfo & operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setPHostPointer( const void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + pHostPointer = pHostPointer_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setMemoryRowLength( uint32_t memoryRowLength_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + memoryRowLength = memoryRowLength_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setMemoryImageHeight( uint32_t memoryImageHeight_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + memoryImageHeight = memoryImageHeight_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & + setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT { - bindingCount = bindingCount_; + imageSubresource = imageSubresource_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & - setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT { - pBindings = pBindings_; + imageOffset = imageOffset_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DescriptorSetLayoutCreateInfo & - setBindings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bindings_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT { - bindingCount = static_cast( bindings_.size() ); - pBindings = bindings_.data(); + imageExtent = imageExtent_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDescriptorSetLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkMemoryToImageCopy const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkMemoryToImageCopy &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -20211,724 +22663,845 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + uint32_t const &, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, + VULKAN_HPP_NAMESPACE::Offset3D const &, + VULKAN_HPP_NAMESPACE::Extent3D const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, bindingCount, pBindings ); + return std::tie( sType, pNext, pHostPointer, memoryRowLength, memoryImageHeight, imageSubresource, imageOffset, imageExtent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DescriptorSetLayoutCreateInfo const & ) const = default; + auto operator<=>( MemoryToImageCopy const & ) const = default; #else - bool 
operator==( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryToImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( bindingCount == rhs.bindingCount ) && - ( pBindings == rhs.pBindings ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pHostPointer == rhs.pHostPointer ) && ( memoryRowLength == rhs.memoryRowLength ) && + ( memoryImageHeight == rhs.memoryImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && + ( imageExtent == rhs.imageExtent ); # endif } - bool operator!=( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryToImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {}; - uint32_t bindingCount = {}; - const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryToImageCopy; + const void * pNext = {}; + const void * pHostPointer = {}; + uint32_t memoryRowLength = {}; + uint32_t memoryImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; }; template <> - struct CppType + struct CppType { - using Type = DescriptorSetLayoutCreateInfo; + using Type = MemoryToImageCopy; }; - struct DescriptorSetLayoutHostMappingInfoVALVE + using MemoryToImageCopyEXT = MemoryToImageCopy; + + struct CopyMemoryToImageInfo { - using NativeType = VkDescriptorSetLayoutHostMappingInfoVALVE; + using NativeType = VkCopyMemoryToImageInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutHostMappingInfoVALVE; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToImageInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - DescriptorSetLayoutHostMappingInfoVALVE( size_t descriptorOffset_ = {}, uint32_t descriptorSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , descriptorOffset( descriptorOffset_ ) - , descriptorSize( descriptorSize_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyMemoryToImageInfo( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::MemoryToImageCopy * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , dstImage{ dstImage_ } + , dstImageLayout{ dstImageLayout_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } { } - VULKAN_HPP_CONSTEXPR DescriptorSetLayoutHostMappingInfoVALVE( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR CopyMemoryToImageInfo( CopyMemoryToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - 
DescriptorSetLayoutHostMappingInfoVALVE( VkDescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT - : DescriptorSetLayoutHostMappingInfoVALVE( *reinterpret_cast( &rhs ) ) + CopyMemoryToImageInfo( VkCopyMemoryToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyMemoryToImageInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DescriptorSetLayoutHostMappingInfoVALVE & operator=( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyMemoryToImageInfo( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - DescriptorSetLayoutHostMappingInfoVALVE & operator=( VkDescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + CopyMemoryToImageInfo & operator=( CopyMemoryToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CopyMemoryToImageInfo & operator=( VkCopyMemoryToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setDescriptorOffset( size_t descriptorOffset_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ ) VULKAN_HPP_NOEXCEPT { - descriptorOffset = descriptorOffset_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setDescriptorSize( uint32_t descriptorSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT { - descriptorSize = descriptorSize_; + dstImage = dstImage_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDescriptorSetLayoutHostMappingInfoVALVE const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + dstImageLayout = dstImageLayout_; + return *this; } - operator VkDescriptorSetLayoutHostMappingInfoVALVE &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setPRegions( const VULKAN_HPP_NAMESPACE::MemoryToImageCopy * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyMemoryToImageInfo & + setRegions( 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCopyMemoryToImageInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMemoryToImageInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, descriptorOffset, descriptorSize ); + return std::tie( sType, pNext, flags, dstImage, dstImageLayout, regionCount, pRegions ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DescriptorSetLayoutHostMappingInfoVALVE const & ) const = default; + auto operator<=>( CopyMemoryToImageInfo const & ) const = default; #else - bool operator==( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CopyMemoryToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorOffset == rhs.descriptorOffset ) && ( descriptorSize == rhs.descriptorSize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dstImage == rhs.dstImage ) && + ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); # endif } - bool operator!=( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CopyMemoryToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutHostMappingInfoVALVE; - void * pNext = {}; - size_t descriptorOffset = {}; - uint32_t descriptorSize = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToImageInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags = {}; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::MemoryToImageCopy * pRegions = {}; }; template <> - struct CppType + struct CppType { - using Type = DescriptorSetLayoutHostMappingInfoVALVE; + using Type = CopyMemoryToImageInfo; }; - struct DescriptorSetLayoutSupport + using CopyMemoryToImageInfoEXT = CopyMemoryToImageInfo; + + struct CopyMemoryToMicromapInfoEXT { - using NativeType = VkDescriptorSetLayoutSupport; + using NativeType = VkCopyMemoryToMicromapInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutSupport; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToMicromapInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( VULKAN_HPP_NAMESPACE::Bool32 supported_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , supported( supported_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT( 
VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, + VULKAN_HPP_NAMESPACE::MicromapEXT dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , src{ src_ } + , dst{ dst_ } + , mode{ mode_ } { } - VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT( CopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT - : DescriptorSetLayoutSupport( *reinterpret_cast( &rhs ) ) + CopyMemoryToMicromapInfoEXT( VkCopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyMemoryToMicromapInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DescriptorSetLayoutSupport & operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CopyMemoryToMicromapInfoEXT & operator=( CopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DescriptorSetLayoutSupport & operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + CopyMemoryToMicromapInfoEXT & operator=( VkCopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkDescriptorSetLayoutSupport const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + src = src_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setDst( VULKAN_HPP_NAMESPACE::MicromapEXT dst_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, supported ); + dst = dst_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DescriptorSetLayoutSupport const & ) const = default; -#else - bool operator==( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setMode( VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supported == rhs.supported ); -# endif + mode = mode_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - bool operator!=( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT + operator VkCopyMemoryToMicromapInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + return *reinterpret_cast( this ); + } + + operator VkCopyMemoryToMicromapInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# 
else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, src, dst, mode ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutSupport; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 supported = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToMicromapInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {}; + VULKAN_HPP_NAMESPACE::MicromapEXT dst = {}; + VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone; }; template <> - struct CppType + struct CppType { - using Type = DescriptorSetLayoutSupport; + using Type = CopyMemoryToMicromapInfoEXT; }; - using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport; - struct DescriptorSetVariableDescriptorCountAllocateInfo + struct CopyMicromapInfoEXT { - using NativeType = VkDescriptorSetVariableDescriptorCountAllocateInfo; + using NativeType = VkCopyMicromapInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMicromapInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( uint32_t descriptorSetCount_ = {}, - const uint32_t * pDescriptorCounts_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , descriptorSetCount( descriptorSetCount_ ) - , pDescriptorCounts( pDescriptorCounts_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyMicromapInfoEXT( VULKAN_HPP_NAMESPACE::MicromapEXT src_ = {}, + VULKAN_HPP_NAMESPACE::MicromapEXT dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , src{ src_ } + , dst{ dst_ } + , mode{ mode_ } { } - VULKAN_HPP_CONSTEXPR - DescriptorSetVariableDescriptorCountAllocateInfo( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : DescriptorSetVariableDescriptorCountAllocateInfo( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR CopyMicromapInfoEXT( CopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DescriptorSetVariableDescriptorCountAllocateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorCounts_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), descriptorSetCount( static_cast( descriptorCounts_.size() ) ), pDescriptorCounts( descriptorCounts_.data() ) + CopyMicromapInfoEXT( VkCopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : CopyMicromapInfoEXT( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DescriptorSetVariableDescriptorCountAllocateInfo & operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CopyMicromapInfoEXT & operator=( CopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - 
DescriptorSetVariableDescriptorCountAllocateInfo & operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + CopyMicromapInfoEXT & operator=( VkCopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setSrc( VULKAN_HPP_NAMESPACE::MicromapEXT src_ ) VULKAN_HPP_NOEXCEPT { - descriptorSetCount = descriptorSetCount_; + src = src_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setPDescriptorCounts( const uint32_t * pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setDst( VULKAN_HPP_NAMESPACE::MicromapEXT dst_ ) VULKAN_HPP_NOEXCEPT { - pDescriptorCounts = pDescriptorCounts_; + dst = dst_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DescriptorSetVariableDescriptorCountAllocateInfo & - setDescriptorCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorCounts_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setMode( VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT { - descriptorSetCount = static_cast( descriptorCounts_.size() ); - pDescriptorCounts = descriptorCounts_.data(); + mode = mode_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDescriptorSetVariableDescriptorCountAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkCopyMicromapInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDescriptorSetVariableDescriptorCountAllocateInfo &() VULKAN_HPP_NOEXCEPT + operator VkCopyMicromapInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, descriptorSetCount, pDescriptorCounts ); + return std::tie( sType, pNext, src, dst, mode ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfo const & ) const = default; + auto operator<=>( CopyMicromapInfoEXT const & ) const = default; #else - bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CopyMicromapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetCount == rhs.descriptorSetCount ) && - ( pDescriptorCounts == rhs.pDescriptorCounts ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( src == rhs.src ) && ( dst == rhs.dst ) && ( mode == 
rhs.mode ); # endif } - bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CopyMicromapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo; - const void * pNext = {}; - uint32_t descriptorSetCount = {}; - const uint32_t * pDescriptorCounts = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMicromapInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MicromapEXT src = {}; + VULKAN_HPP_NAMESPACE::MicromapEXT dst = {}; + VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone; }; template <> - struct CppType + struct CppType { - using Type = DescriptorSetVariableDescriptorCountAllocateInfo; + using Type = CopyMicromapInfoEXT; }; - using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo; - struct DescriptorSetVariableDescriptorCountLayoutSupport + struct CopyMicromapToMemoryInfoEXT { - using NativeType = VkDescriptorSetVariableDescriptorCountLayoutSupport; + using NativeType = VkCopyMicromapToMemoryInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMicromapToMemoryInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( uint32_t maxVariableDescriptorCount_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxVariableDescriptorCount( maxVariableDescriptorCount_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT( VULKAN_HPP_NAMESPACE::MicromapEXT src_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, + VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , src{ src_ } + , dst{ dst_ } + , mode{ mode_ } { } - VULKAN_HPP_CONSTEXPR - DescriptorSetVariableDescriptorCountLayoutSupport( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT( CopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT - : DescriptorSetVariableDescriptorCountLayoutSupport( *reinterpret_cast( &rhs ) ) + CopyMicromapToMemoryInfoEXT( VkCopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyMicromapToMemoryInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DescriptorSetVariableDescriptorCountLayoutSupport & - operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CopyMicromapToMemoryInfoEXT & operator=( CopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DescriptorSetVariableDescriptorCountLayoutSupport & operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + 
CopyMicromapToMemoryInfoEXT & operator=( VkCopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkDescriptorSetVariableDescriptorCountLayoutSupport const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkDescriptorSetVariableDescriptorCountLayoutSupport &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setSrc( VULKAN_HPP_NAMESPACE::MicromapEXT src_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + src = src_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, maxVariableDescriptorCount ); + dst = dst_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupport const & ) const = default; -#else - bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setMode( VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount ); -# endif + mode = mode_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT + operator VkCopyMicromapToMemoryInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + return *reinterpret_cast( this ); + } + + operator VkCopyMicromapToMemoryInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, src, dst, mode ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport; - void * pNext = {}; - uint32_t maxVariableDescriptorCount = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMicromapToMemoryInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MicromapEXT src = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {}; + VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone; }; template <> - struct CppType + struct CppType { - using Type = DescriptorSetVariableDescriptorCountLayoutSupport; + using Type = CopyMicromapToMemoryInfoEXT; }; - using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport; - struct DescriptorUpdateTemplateEntry + struct CuFunctionCreateInfoNVX { - using NativeType = VkDescriptorUpdateTemplateEntry; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( uint32_t 
dstBinding_ = {}, - uint32_t dstArrayElement_ = {}, - uint32_t descriptorCount_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, - size_t offset_ = {}, - size_t stride_ = {} ) VULKAN_HPP_NOEXCEPT - : dstBinding( dstBinding_ ) - , dstArrayElement( dstArrayElement_ ) - , descriptorCount( descriptorCount_ ) - , descriptorType( descriptorType_ ) - , offset( offset_ ) - , stride( stride_ ) - { - } + using NativeType = VkCuFunctionCreateInfoNVX; - VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuFunctionCreateInfoNVX; - DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT - : DescriptorUpdateTemplateEntry( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CuFunctionCreateInfoNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ = {}, const char * pName_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , module{ module_ } + , pName{ pName_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DescriptorUpdateTemplateEntry & operator=( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - DescriptorUpdateTemplateEntry & operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT + CuFunctionCreateInfoNVX( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : CuFunctionCreateInfoNVX( *reinterpret_cast( &rhs ) ) { - dstBinding = dstBinding_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT - { - dstArrayElement = dstArrayElement_; - return *this; - } + CuFunctionCreateInfoNVX & operator=( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT + CuFunctionCreateInfoNVX & operator=( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT { - descriptorCount = descriptorCount_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - descriptorType = descriptorType_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setOffset( size_t offset_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setModule( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ ) VULKAN_HPP_NOEXCEPT { - offset = offset_; + module = module_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setStride( size_t stride_ ) 
VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT { - stride = stride_; + pName = pName_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDescriptorUpdateTemplateEntry const &() const VULKAN_HPP_NOEXCEPT + operator VkCuFunctionCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDescriptorUpdateTemplateEntry &() VULKAN_HPP_NOEXCEPT + operator VkCuFunctionCreateInfoNVX &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( dstBinding, dstArrayElement, descriptorCount, descriptorType, offset, stride ); + return std::tie( sType, pNext, module, pName ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DescriptorUpdateTemplateEntry const & ) const = default; -#else - bool operator==( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + std::strong_ordering operator<=>( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( dstBinding == rhs.dstBinding ) && ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount ) && - ( descriptorType == rhs.descriptorType ) && ( offset == rhs.offset ) && ( stride == rhs.stride ); -# endif + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = module <=> rhs.module; cmp != 0 ) + return cmp; + if ( pName != rhs.pName ) + if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; } +#endif - bool operator!=( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( module == rhs.module ) && ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ); + } + + bool operator!=( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - uint32_t dstBinding = {}; - uint32_t dstArrayElement = {}; - uint32_t descriptorCount = {}; - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; - size_t offset = {}; - size_t stride = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuFunctionCreateInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CuModuleNVX module = {}; + const char * pName = {}; }; - using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry; - struct DescriptorUpdateTemplateCreateInfo + template <> + struct CppType { - using NativeType = VkDescriptorUpdateTemplateCreateInfo; + using Type = CuFunctionCreateInfoNVX; + }; + + struct CuLaunchInfoNVX + { + using NativeType = VkCuLaunchInfoNVX; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorUpdateTemplateCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuLaunchInfoNVX; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {}, - uint32_t descriptorUpdateEntryCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, - VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, - VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, - uint32_t set_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , descriptorUpdateEntryCount( descriptorUpdateEntryCount_ ) - , pDescriptorUpdateEntries( pDescriptorUpdateEntries_ ) - , templateType( templateType_ ) - , descriptorSetLayout( descriptorSetLayout_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , pipelineLayout( pipelineLayout_ ) - , set( set_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_ = {}, + uint32_t gridDimX_ = {}, + uint32_t gridDimY_ = {}, + uint32_t gridDimZ_ = {}, + uint32_t blockDimX_ = {}, + uint32_t blockDimY_ = {}, + uint32_t blockDimZ_ = {}, + uint32_t sharedMemBytes_ = {}, + size_t paramCount_ = {}, + const void * const * pParams_ = {}, + size_t extraCount_ = {}, + const void * const * pExtras_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , function{ function_ } + , gridDimX{ gridDimX_ } + , gridDimY{ gridDimY_ } + , gridDimZ{ gridDimZ_ } + , blockDimX{ blockDimX_ } + , blockDimY{ blockDimY_ } + , blockDimZ{ blockDimZ_ } + , sharedMemBytes{ sharedMemBytes_ } + , 
paramCount{ paramCount_ } + , pParams{ pParams_ } + , extraCount{ extraCount_ } + , pExtras{ pExtras_ } { } - VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : DescriptorUpdateTemplateCreateInfo( *reinterpret_cast( &rhs ) ) - { - } + CuLaunchInfoNVX( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT : CuLaunchInfoNVX( *reinterpret_cast( &rhs ) ) {} # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DescriptorUpdateTemplateCreateInfo( - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorUpdateEntries_, - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, - VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, - VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, - uint32_t set_ = {}, - const void * pNext_ = nullptr ) + CuLaunchInfoNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_, + uint32_t gridDimX_, + uint32_t gridDimY_, + uint32_t gridDimZ_, + uint32_t blockDimX_, + uint32_t blockDimY_, + uint32_t blockDimZ_, + uint32_t sharedMemBytes_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & params_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & extras_ = {}, + const void * pNext_ = nullptr ) : pNext( pNext_ ) - , flags( flags_ ) - , descriptorUpdateEntryCount( static_cast( descriptorUpdateEntries_.size() ) ) - , pDescriptorUpdateEntries( descriptorUpdateEntries_.data() ) - , templateType( templateType_ ) - , descriptorSetLayout( descriptorSetLayout_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , pipelineLayout( pipelineLayout_ ) - , set( set_ ) + , function( function_ ) + , gridDimX( gridDimX_ ) + , gridDimY( gridDimY_ ) + , gridDimZ( gridDimZ_ ) + , blockDimX( blockDimX_ ) + , blockDimY( blockDimY_ ) + , blockDimZ( blockDimZ_ ) + , sharedMemBytes( sharedMemBytes_ ) + , paramCount( params_.size() ) + , pParams( params_.data() ) + , extraCount( extras_.size() ) + , pExtras( extras_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DescriptorUpdateTemplateCreateInfo & operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CuLaunchInfoNVX & operator=( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DescriptorUpdateTemplateCreateInfo & operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + CuLaunchInfoNVX & operator=( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & - setFlags( 
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setFunction( VULKAN_HPP_NAMESPACE::CuFunctionNVX function_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + function = function_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimX( uint32_t gridDimX_ ) VULKAN_HPP_NOEXCEPT { - descriptorUpdateEntryCount = descriptorUpdateEntryCount_; + gridDimX = gridDimX_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & - setPDescriptorUpdateEntries( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimY( uint32_t gridDimY_ ) VULKAN_HPP_NOEXCEPT { - pDescriptorUpdateEntries = pDescriptorUpdateEntries_; + gridDimY = gridDimY_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntries( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorUpdateEntries_ ) - VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimZ( uint32_t gridDimZ_ ) VULKAN_HPP_NOEXCEPT { - descriptorUpdateEntryCount = static_cast( descriptorUpdateEntries_.size() ); - pDescriptorUpdateEntries = descriptorUpdateEntries_.data(); + gridDimZ = gridDimZ_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & - setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimX( uint32_t blockDimX_ ) VULKAN_HPP_NOEXCEPT { - templateType = templateType_; + blockDimX = blockDimX_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & - setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimY( uint32_t blockDimY_ ) VULKAN_HPP_NOEXCEPT { - descriptorSetLayout = descriptorSetLayout_; + blockDimY = blockDimY_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & - setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimZ( uint32_t blockDimZ_ ) VULKAN_HPP_NOEXCEPT { - pipelineBindPoint = pipelineBindPoint_; + blockDimZ = blockDimZ_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setSharedMemBytes( uint32_t sharedMemBytes_ ) VULKAN_HPP_NOEXCEPT { - pipelineLayout = pipelineLayout_; + sharedMemBytes = sharedMemBytes_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setParamCount( size_t paramCount_ ) VULKAN_HPP_NOEXCEPT { - set = set_; + paramCount = paramCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDescriptorUpdateTemplateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPParams( const void * const * pParams_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pParams = pParams_; + 
return *this; } - operator VkDescriptorUpdateTemplateCreateInfo &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CuLaunchInfoNVX & setParams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & params_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + paramCount = params_.size(); + pParams = params_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setExtraCount( size_t extraCount_ ) VULKAN_HPP_NOEXCEPT + { + extraCount = extraCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPExtras( const void * const * pExtras_ ) VULKAN_HPP_NOEXCEPT + { + pExtras = pExtras_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CuLaunchInfoNVX & setExtras( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & extras_ ) VULKAN_HPP_NOEXCEPT + { + extraCount = extras_.size(); + pExtras = extras_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCuLaunchInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCuLaunchInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -20937,1368 +23510,1139 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + size_t const &, + const void * const * const &, + size_t const &, + const void * const * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( - sType, pNext, flags, descriptorUpdateEntryCount, pDescriptorUpdateEntries, templateType, descriptorSetLayout, pipelineBindPoint, pipelineLayout, set ); + sType, pNext, function, gridDimX, gridDimY, gridDimZ, blockDimX, blockDimY, blockDimZ, sharedMemBytes, paramCount, pParams, extraCount, pExtras ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DescriptorUpdateTemplateCreateInfo const & ) const = default; + auto operator<=>( CuLaunchInfoNVX const & ) const = default; #else - bool operator==( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount ) && - ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries ) && ( templateType == rhs.templateType ) && - ( descriptorSetLayout == rhs.descriptorSetLayout ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipelineLayout == rhs.pipelineLayout ) && - ( set == rhs.set ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( function == rhs.function ) && ( gridDimX == rhs.gridDimX ) && ( gridDimY == rhs.gridDimY ) && + ( gridDimZ == rhs.gridDimZ ) && ( blockDimX == rhs.blockDimX ) && ( blockDimY == rhs.blockDimY ) && ( blockDimZ == rhs.blockDimZ ) && + ( sharedMemBytes == rhs.sharedMemBytes ) && ( paramCount == rhs.paramCount ) && ( pParams == rhs.pParams ) && ( extraCount == rhs.extraCount ) && + ( pExtras == rhs.pExtras ); # endif } - bool operator!=( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } 
#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags = {}; - uint32_t descriptorUpdateEntryCount = {}; - const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries = {}; - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet; - VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {}; - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; - VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {}; - uint32_t set = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuLaunchInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CuFunctionNVX function = {}; + uint32_t gridDimX = {}; + uint32_t gridDimY = {}; + uint32_t gridDimZ = {}; + uint32_t blockDimX = {}; + uint32_t blockDimY = {}; + uint32_t blockDimZ = {}; + uint32_t sharedMemBytes = {}; + size_t paramCount = {}; + const void * const * pParams = {}; + size_t extraCount = {}; + const void * const * pExtras = {}; }; template <> - struct CppType + struct CppType { - using Type = DescriptorUpdateTemplateCreateInfo; + using Type = CuLaunchInfoNVX; }; - using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo; - struct DeviceBufferMemoryRequirements + struct CuModuleCreateInfoNVX { - using NativeType = VkDeviceBufferMemoryRequirements; + using NativeType = VkCuModuleCreateInfoNVX; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceBufferMemoryRequirements; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuModuleCreateInfoNVX; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pCreateInfo( pCreateInfo_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( size_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dataSize{ dataSize_ } + , pData{ pData_ } { } - VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DeviceBufferMemoryRequirements( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT - : DeviceBufferMemoryRequirements( *reinterpret_cast( &rhs ) ) + CuModuleCreateInfoNVX( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : CuModuleCreateInfoNVX( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DeviceBufferMemoryRequirements & operator=( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + CuModuleCreateInfoNVX( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), dataSize( data_.size() * sizeof( T ) ), pData( data_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - DeviceBufferMemoryRequirements & 
operator=( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + CuModuleCreateInfoNVX & operator=( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CuModuleCreateInfoNVX & operator=( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements & setPCreateInfo( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT { - pCreateInfo = pCreateInfo_; + dataSize = dataSize_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDeviceBufferMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pData = pData_; + return *this; } - operator VkDeviceBufferMemoryRequirements &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + CuModuleCreateInfoNVX & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + dataSize = data_.size() * sizeof( T ); + pData = data_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkCuModuleCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCuModuleCreateInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pCreateInfo ); + return std::tie( sType, pNext, dataSize, pData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceBufferMemoryRequirements const & ) const = default; + auto operator<=>( CuModuleCreateInfoNVX const & ) const = default; #else - bool operator==( DeviceBufferMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); # endif } - bool operator!=( DeviceBufferMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceBufferMemoryRequirements; - const void * pNext = {}; - const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuModuleCreateInfoNVX; + const void * pNext = {}; + 
size_t dataSize = {}; + const void * pData = {}; }; template <> - struct CppType + struct CppType { - using Type = DeviceBufferMemoryRequirements; + using Type = CuModuleCreateInfoNVX; }; - using DeviceBufferMemoryRequirementsKHR = DeviceBufferMemoryRequirements; - struct DeviceQueueCreateInfo + struct CuModuleTexturingModeCreateInfoNVX { - using NativeType = VkDeviceQueueCreateInfo; + using NativeType = VkCuModuleTexturingModeCreateInfoNVX; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuModuleTexturingModeCreateInfoNVX; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, - uint32_t queueFamilyIndex_ = {}, - uint32_t queueCount_ = {}, - const float * pQueuePriorities_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , queueFamilyIndex( queueFamilyIndex_ ) - , queueCount( queueCount_ ) - , pQueuePriorities( pQueuePriorities_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CuModuleTexturingModeCreateInfoNVX( VULKAN_HPP_NAMESPACE::Bool32 use64bitTexturing_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , use64bitTexturing{ use64bitTexturing_ } { } - VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : DeviceQueueCreateInfo( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR CuModuleTexturingModeCreateInfoNVX( CuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_, - uint32_t queueFamilyIndex_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queuePriorities_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , queueFamilyIndex( queueFamilyIndex_ ) - , queueCount( static_cast( queuePriorities_.size() ) ) - , pQueuePriorities( queuePriorities_.data() ) + CuModuleTexturingModeCreateInfoNVX( VkCuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : CuModuleTexturingModeCreateInfoNVX( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DeviceQueueCreateInfo & operator=( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CuModuleTexturingModeCreateInfoNVX & operator=( CuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DeviceQueueCreateInfo & operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + CuModuleTexturingModeCreateInfoNVX & operator=( VkCuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CuModuleTexturingModeCreateInfoNVX & setPNext( const void * pNext_ ) 
VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT - { - queueFamilyIndex = queueFamilyIndex_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setQueueCount( uint32_t queueCount_ ) VULKAN_HPP_NOEXCEPT - { - queueCount = queueCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setPQueuePriorities( const float * pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT - { - pQueuePriorities = pQueuePriorities_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DeviceQueueCreateInfo & setQueuePriorities( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queuePriorities_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CuModuleTexturingModeCreateInfoNVX & setUse64bitTexturing( VULKAN_HPP_NAMESPACE::Bool32 use64bitTexturing_ ) VULKAN_HPP_NOEXCEPT { - queueCount = static_cast( queuePriorities_.size() ); - pQueuePriorities = queuePriorities_.data(); + use64bitTexturing = use64bitTexturing_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDeviceQueueCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkCuModuleTexturingModeCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkCuModuleTexturingModeCreateInfoNVX &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, queueFamilyIndex, queueCount, pQueuePriorities ); + return std::tie( sType, pNext, use64bitTexturing ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceQueueCreateInfo const & ) const = default; + auto operator<=>( CuModuleTexturingModeCreateInfoNVX const & ) const = default; #else - bool operator==( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CuModuleTexturingModeCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && - ( queueCount == rhs.queueCount ) && ( pQueuePriorities == rhs.pQueuePriorities ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( use64bitTexturing == rhs.use64bitTexturing ); # endif } - bool operator!=( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CuModuleTexturingModeCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {}; - uint32_t queueFamilyIndex = {}; - uint32_t queueCount = {}; - const float * pQueuePriorities = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuModuleTexturingModeCreateInfoNVX; + const 
void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 use64bitTexturing = {}; }; template <> - struct CppType + struct CppType { - using Type = DeviceQueueCreateInfo; + using Type = CuModuleTexturingModeCreateInfoNVX; }; - struct PhysicalDeviceFeatures +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct CudaFunctionCreateInfoNV { - using NativeType = VkPhysicalDeviceFeatures; + using NativeType = VkCudaFunctionCreateInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 
sparseResidency8Samples_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {} ) VULKAN_HPP_NOEXCEPT - : robustBufferAccess( robustBufferAccess_ ) - , fullDrawIndexUint32( fullDrawIndexUint32_ ) - , imageCubeArray( imageCubeArray_ ) - , independentBlend( independentBlend_ ) - , geometryShader( geometryShader_ ) - , tessellationShader( tessellationShader_ ) - , sampleRateShading( sampleRateShading_ ) - , dualSrcBlend( dualSrcBlend_ ) - , logicOp( logicOp_ ) - , multiDrawIndirect( multiDrawIndirect_ ) - , drawIndirectFirstInstance( drawIndirectFirstInstance_ ) - , depthClamp( depthClamp_ ) - , depthBiasClamp( depthBiasClamp_ ) - , fillModeNonSolid( fillModeNonSolid_ ) - , depthBounds( depthBounds_ ) - , wideLines( wideLines_ ) - , largePoints( largePoints_ ) - , alphaToOne( alphaToOne_ ) - , multiViewport( multiViewport_ ) - , samplerAnisotropy( samplerAnisotropy_ ) - , textureCompressionETC2( textureCompressionETC2_ ) - , textureCompressionASTC_LDR( textureCompressionASTC_LDR_ ) - , textureCompressionBC( textureCompressionBC_ ) - , occlusionQueryPrecise( occlusionQueryPrecise_ ) - , pipelineStatisticsQuery( pipelineStatisticsQuery_ ) - , vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ ) - , fragmentStoresAndAtomics( fragmentStoresAndAtomics_ ) - , shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ ) - , shaderImageGatherExtended( shaderImageGatherExtended_ ) - , shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ ) - , shaderStorageImageMultisample( shaderStorageImageMultisample_ ) - , shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ ) - , shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ ) - , shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ ) - , shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ ) - , shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ ) - , shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ ) - , shaderClipDistance( shaderClipDistance_ ) - , shaderCullDistance( shaderCullDistance_ ) - , shaderFloat64( shaderFloat64_ ) - , shaderInt64( shaderInt64_ ) - , shaderInt16( shaderInt16_ ) - , shaderResourceResidency( shaderResourceResidency_ ) - , shaderResourceMinLod( shaderResourceMinLod_ ) - , sparseBinding( sparseBinding_ ) - , sparseResidencyBuffer( sparseResidencyBuffer_ ) - , sparseResidencyImage2D( sparseResidencyImage2D_ ) - , sparseResidencyImage3D( sparseResidencyImage3D_ ) - , sparseResidency2Samples( sparseResidency2Samples_ ) - , sparseResidency4Samples( sparseResidency4Samples_ ) - , sparseResidency8Samples( sparseResidency8Samples_ ) - , sparseResidency16Samples( sparseResidency16Samples_ ) - , sparseResidencyAliased( sparseResidencyAliased_ ) - , variableMultisampleRate( variableMultisampleRate_ ) - , inheritedQueries( inheritedQueries_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCudaFunctionCreateInfoNV; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CudaFunctionCreateInfoNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module_ = {}, const char * pName_ = {}, const void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , module{ module_ } + , pName{ pName_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR CudaFunctionCreateInfoNV( CudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceFeatures( *reinterpret_cast( &rhs ) ) + CudaFunctionCreateInfoNV( VkCudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CudaFunctionCreateInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceFeatures & operator=( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CudaFunctionCreateInfoNV & operator=( CudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceFeatures & operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + CudaFunctionCreateInfoNV & operator=( VkCudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - robustBufferAccess = robustBufferAccess_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV & setModule( VULKAN_HPP_NAMESPACE::CudaModuleNV module_ ) VULKAN_HPP_NOEXCEPT { - fullDrawIndexUint32 = fullDrawIndexUint32_; + module = module_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setImageCubeArray( VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT { - imageCubeArray = imageCubeArray_; + pName = pName_; return *this; } +# endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setIndependentBlend( VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ ) VULKAN_HPP_NOEXCEPT + operator VkCudaFunctionCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - independentBlend = independentBlend_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ ) VULKAN_HPP_NOEXCEPT + operator VkCudaFunctionCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - geometryShader = geometryShader_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - tessellationShader = tessellationShader_; - return *this; + return std::tie( sType, pNext, module, pName ); } +# endif - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSampleRateShading( VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ ) VULKAN_HPP_NOEXCEPT +# if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( CudaFunctionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - sampleRateShading = sampleRateShading_; - return *this; - } + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = module <=> rhs.module; cmp != 0 ) + return cmp; + if ( pName != rhs.pName ) + if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDualSrcBlend( VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ ) VULKAN_HPP_NOEXCEPT - { - dualSrcBlend = dualSrcBlend_; - return *this; + return std::strong_ordering::equivalent; } +# endif - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setLogicOp( VULKAN_HPP_NAMESPACE::Bool32 logicOp_ ) VULKAN_HPP_NOEXCEPT + bool operator==( CudaFunctionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - logicOp = logicOp_; - return *this; + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( module == rhs.module ) && ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setMultiDrawIndirect( VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( CudaFunctionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - multiDrawIndirect = multiDrawIndirect_; - return *this; + return !operator==( rhs ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT - { - drawIndirectFirstInstance = drawIndirectFirstInstance_; - return *this; - } + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCudaFunctionCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CudaModuleNV module = {}; + const char * pName = {}; + }; - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthClamp( VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ ) VULKAN_HPP_NOEXCEPT - { - depthClamp = depthClamp_; - return *this; - } + template <> + struct CppType + { + using Type = CudaFunctionCreateInfoNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthBiasClamp( VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT - { - depthBiasClamp = depthBiasClamp_; - return *this; - } +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct CudaLaunchInfoNV + { + using NativeType = VkCudaLaunchInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCudaLaunchInfoNV; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CudaLaunchInfoNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function_ = {}, + uint32_t gridDimX_ = {}, + uint32_t gridDimY_ = {}, + uint32_t gridDimZ_ = {}, + uint32_t blockDimX_ = {}, + uint32_t blockDimY_ = {}, + uint32_t blockDimZ_ = {}, + uint32_t sharedMemBytes_ = {}, + size_t paramCount_ = {}, + const void * const * pParams_ = {}, + size_t extraCount_ = {}, + const void * const * pExtras_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , function{ function_ } + , gridDimX{ gridDimX_ } + , gridDimY{ gridDimY_ } + , gridDimZ{ gridDimZ_ } + , blockDimX{ blockDimX_ } + , blockDimY{ blockDimY_ } + , blockDimZ{ blockDimZ_ } + , sharedMemBytes{ sharedMemBytes_ } + , 
paramCount{ paramCount_ } + , pParams{ pParams_ } + , extraCount{ extraCount_ } + , pExtras{ pExtras_ } + { + } + + VULKAN_HPP_CONSTEXPR CudaLaunchInfoNV( CudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CudaLaunchInfoNV( VkCudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : CudaLaunchInfoNV( *reinterpret_cast( &rhs ) ) {} - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFillModeNonSolid( VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CudaLaunchInfoNV( VULKAN_HPP_NAMESPACE::CudaFunctionNV function_, + uint32_t gridDimX_, + uint32_t gridDimY_, + uint32_t gridDimZ_, + uint32_t blockDimX_, + uint32_t blockDimY_, + uint32_t blockDimZ_, + uint32_t sharedMemBytes_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & params_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & extras_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , function( function_ ) + , gridDimX( gridDimX_ ) + , gridDimY( gridDimY_ ) + , gridDimZ( gridDimZ_ ) + , blockDimX( blockDimX_ ) + , blockDimY( blockDimY_ ) + , blockDimZ( blockDimZ_ ) + , sharedMemBytes( sharedMemBytes_ ) + , paramCount( params_.size() ) + , pParams( params_.data() ) + , extraCount( extras_.size() ) + , pExtras( extras_.data() ) { - fillModeNonSolid = fillModeNonSolid_; - return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthBounds( VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ ) VULKAN_HPP_NOEXCEPT + CudaLaunchInfoNV & operator=( CudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + CudaLaunchInfoNV & operator=( VkCudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - depthBounds = depthBounds_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setWideLines( VULKAN_HPP_NAMESPACE::Bool32 wideLines_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - wideLines = wideLines_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setLargePoints( VULKAN_HPP_NAMESPACE::Bool32 largePoints_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setFunction( VULKAN_HPP_NAMESPACE::CudaFunctionNV function_ ) VULKAN_HPP_NOEXCEPT { - largePoints = largePoints_; + function = function_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setAlphaToOne( VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setGridDimX( uint32_t gridDimX_ ) VULKAN_HPP_NOEXCEPT { - alphaToOne = alphaToOne_; + gridDimX = gridDimX_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setMultiViewport( VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setGridDimY( uint32_t gridDimY_ ) VULKAN_HPP_NOEXCEPT { - multiViewport = multiViewport_; + gridDimY = gridDimY_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSamplerAnisotropy( VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setGridDimZ( uint32_t gridDimZ_ ) VULKAN_HPP_NOEXCEPT { - samplerAnisotropy = samplerAnisotropy_; + gridDimZ = gridDimZ_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & 
setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setBlockDimX( uint32_t blockDimX_ ) VULKAN_HPP_NOEXCEPT { - textureCompressionETC2 = textureCompressionETC2_; + blockDimX = blockDimX_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & - setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setBlockDimY( uint32_t blockDimY_ ) VULKAN_HPP_NOEXCEPT { - textureCompressionASTC_LDR = textureCompressionASTC_LDR_; + blockDimY = blockDimY_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setBlockDimZ( uint32_t blockDimZ_ ) VULKAN_HPP_NOEXCEPT { - textureCompressionBC = textureCompressionBC_; + blockDimZ = blockDimZ_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setSharedMemBytes( uint32_t sharedMemBytes_ ) VULKAN_HPP_NOEXCEPT { - occlusionQueryPrecise = occlusionQueryPrecise_; + sharedMemBytes = sharedMemBytes_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setParamCount( size_t paramCount_ ) VULKAN_HPP_NOEXCEPT { - pipelineStatisticsQuery = pipelineStatisticsQuery_; + paramCount = paramCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & - setVertexPipelineStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setPParams( const void * const * pParams_ ) VULKAN_HPP_NOEXCEPT { - vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_; + pParams = pParams_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CudaLaunchInfoNV & setParams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & params_ ) VULKAN_HPP_NOEXCEPT { - fragmentStoresAndAtomics = fragmentStoresAndAtomics_; + paramCount = params_.size(); + pParams = params_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & - setShaderTessellationAndGeometryPointSize( VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setExtraCount( size_t extraCount_ ) VULKAN_HPP_NOEXCEPT { - shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_; + extraCount = extraCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setPExtras( const void * const * pExtras_ ) VULKAN_HPP_NOEXCEPT { - shaderImageGatherExtended = shaderImageGatherExtended_; + pExtras = pExtras_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & - setShaderStorageImageExtendedFormats( VULKAN_HPP_NAMESPACE::Bool32 
shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CudaLaunchInfoNV & setExtras( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & extras_ ) VULKAN_HPP_NOEXCEPT { - shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_; + extraCount = extras_.size(); + pExtras = extras_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & - setShaderStorageImageMultisample( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT + operator VkCudaLaunchInfoNV const &() const VULKAN_HPP_NOEXCEPT { - shaderStorageImageMultisample = shaderStorageImageMultisample_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & - setShaderStorageImageReadWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT + operator VkCudaLaunchInfoNV &() VULKAN_HPP_NOEXCEPT { - shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & - setShaderStorageImageWriteWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT - { - shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & - setShaderUniformBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT - { - shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & - setShaderSampledImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT - { - shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & - setShaderStorageBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT - { - shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & - setShaderStorageImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT - { - shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT - { - shaderClipDistance = shaderClipDistance_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - shaderCullDistance = shaderCullDistance_; - return *this; + return std::tie( + sType, pNext, function, gridDimX, gridDimY, gridDimZ, blockDimX, blockDimY, blockDimZ, sharedMemBytes, paramCount, pParams, extraCount, pExtras ); } +# endif - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ ) VULKAN_HPP_NOEXCEPT 
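The hunk above adds the CudaLaunchInfoNV wrapper for VK_NV_cuda_kernel_launch, whose enhanced-mode setters fill paramCount/pParams and extraCount/pExtras from an ArrayProxyNoTemporaries. The following is a minimal usage sketch, not part of the generated header: it assumes VK_ENABLE_BETA_EXTENSIONS is defined before including vulkan.hpp, the default `vk` namespace alias, an already-created vk::CudaFunctionNV handle, and that this header version provides the matching cudaLaunchKernelNV command wrapper; the helper name and kernel arguments are hypothetical.

    #define VK_ENABLE_BETA_EXTENSIONS      // CudaLaunchInfoNV is only compiled behind this guard
    #include <vulkan/vulkan.hpp>
    #include <array>

    // Hypothetical helper: record a launch of `kernel` over a 16x1x1 grid of 64x1x1 blocks.
    // Kernel arguments are passed as an array of pointers, which is what pParams stores.
    void recordCudaLaunch( vk::CommandBuffer cmd, vk::CudaFunctionNV kernel,
                           std::array<const void *, 2> const & kernelArgs )
    {
      vk::CudaLaunchInfoNV launchInfo{};
      launchInfo.setFunction( kernel )
                .setGridDimX( 16 ).setGridDimY( 1 ).setGridDimZ( 1 )
                .setBlockDimX( 64 ).setBlockDimY( 1 ).setBlockDimZ( 1 )
                .setSharedMemBytes( 0 )
                .setParams( kernelArgs );   // enhanced-mode setter sets paramCount and pParams together
      cmd.cudaLaunchKernelNV( launchInfo ); // assumed command wrapper from the same extension
    }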
+# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CudaLaunchInfoNV const & ) const = default; +# else + bool operator==( CudaLaunchInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - shaderFloat64 = shaderFloat64_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( function == rhs.function ) && ( gridDimX == rhs.gridDimX ) && ( gridDimY == rhs.gridDimY ) && + ( gridDimZ == rhs.gridDimZ ) && ( blockDimX == rhs.blockDimX ) && ( blockDimY == rhs.blockDimY ) && ( blockDimZ == rhs.blockDimZ ) && + ( sharedMemBytes == rhs.sharedMemBytes ) && ( paramCount == rhs.paramCount ) && ( pParams == rhs.pParams ) && ( extraCount == rhs.extraCount ) && + ( pExtras == rhs.pExtras ); +# endif } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderInt64( VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( CudaLaunchInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - shaderInt64 = shaderInt64_; - return *this; + return !operator==( rhs ); } +# endif - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderInt16( VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ ) VULKAN_HPP_NOEXCEPT - { - shaderInt16 = shaderInt16_; - return *this; - } + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCudaLaunchInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CudaFunctionNV function = {}; + uint32_t gridDimX = {}; + uint32_t gridDimY = {}; + uint32_t gridDimZ = {}; + uint32_t blockDimX = {}; + uint32_t blockDimY = {}; + uint32_t blockDimZ = {}; + uint32_t sharedMemBytes = {}; + size_t paramCount = {}; + const void * const * pParams = {}; + size_t extraCount = {}; + const void * const * pExtras = {}; + }; - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT - { - shaderResourceResidency = shaderResourceResidency_; - return *this; - } + template <> + struct CppType + { + using Type = CudaLaunchInfoNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT - { - shaderResourceMinLod = shaderResourceMinLod_; - return *this; - } +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct CudaModuleCreateInfoNV + { + using NativeType = VkCudaModuleCreateInfoNV; - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseBinding( VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ ) VULKAN_HPP_NOEXCEPT - { - sparseBinding = sparseBinding_; - return *this; - } + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCudaModuleCreateInfoNV; - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CudaModuleCreateInfoNV( size_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dataSize{ dataSize_ } + , pData{ pData_ } { - sparseResidencyBuffer = sparseResidencyBuffer_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT - { - sparseResidencyImage2D = 
sparseResidencyImage2D_; - return *this; - } + VULKAN_HPP_CONSTEXPR CudaModuleCreateInfoNV( CudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT + CudaModuleCreateInfoNV( VkCudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CudaModuleCreateInfoNV( *reinterpret_cast( &rhs ) ) { - sparseResidencyImage3D = sparseResidencyImage3D_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + CudaModuleCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), dataSize( data_.size() * sizeof( T ) ), pData( data_.data() ) { - sparseResidency2Samples = sparseResidency2Samples_; - return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT - { - sparseResidency4Samples = sparseResidency4Samples_; - return *this; - } + CudaModuleCreateInfoNV & operator=( CudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT + CudaModuleCreateInfoNV & operator=( VkCudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - sparseResidency8Samples = sparseResidency8Samples_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - sparseResidency16Samples = sparseResidency16Samples_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT { - sparseResidencyAliased = sparseResidencyAliased_; + dataSize = dataSize_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT { - variableMultisampleRate = variableMultisampleRate_; + pData = pData_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setInheritedQueries( VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + CudaModuleCreateInfoNV & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT { - inheritedQueries = inheritedQueries_; + dataSize = data_.size() * sizeof( T ); + pData = data_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceFeatures const &() 
const VULKAN_HPP_NOEXCEPT + operator VkCudaModuleCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT + operator VkCudaModuleCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( robustBufferAccess, - fullDrawIndexUint32, - imageCubeArray, - independentBlend, - geometryShader, - tessellationShader, - sampleRateShading, - dualSrcBlend, - logicOp, - multiDrawIndirect, - drawIndirectFirstInstance, - depthClamp, - depthBiasClamp, - fillModeNonSolid, - depthBounds, - wideLines, - largePoints, - alphaToOne, - multiViewport, - samplerAnisotropy, - textureCompressionETC2, - textureCompressionASTC_LDR, - textureCompressionBC, - occlusionQueryPrecise, - pipelineStatisticsQuery, - vertexPipelineStoresAndAtomics, - fragmentStoresAndAtomics, - shaderTessellationAndGeometryPointSize, - shaderImageGatherExtended, - shaderStorageImageExtendedFormats, - shaderStorageImageMultisample, - shaderStorageImageReadWithoutFormat, - shaderStorageImageWriteWithoutFormat, - shaderUniformBufferArrayDynamicIndexing, - shaderSampledImageArrayDynamicIndexing, - shaderStorageBufferArrayDynamicIndexing, - shaderStorageImageArrayDynamicIndexing, - shaderClipDistance, - shaderCullDistance, - shaderFloat64, - shaderInt64, - shaderInt16, - shaderResourceResidency, - shaderResourceMinLod, - sparseBinding, - sparseResidencyBuffer, - sparseResidencyImage2D, - sparseResidencyImage3D, - sparseResidency2Samples, - sparseResidency4Samples, - sparseResidency8Samples, - sparseResidency16Samples, - sparseResidencyAliased, - variableMultisampleRate, - inheritedQueries ); + return std::tie( sType, pNext, dataSize, pData ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceFeatures const & ) const = default; -#else - bool operator==( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CudaModuleCreateInfoNV const & ) const = default; +# else + bool operator==( CudaModuleCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( robustBufferAccess == rhs.robustBufferAccess ) && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 ) && - ( imageCubeArray == rhs.imageCubeArray ) && ( independentBlend == rhs.independentBlend ) && ( geometryShader == rhs.geometryShader ) && - ( tessellationShader == rhs.tessellationShader ) && ( sampleRateShading == rhs.sampleRateShading ) && ( dualSrcBlend == rhs.dualSrcBlend ) && - ( logicOp == rhs.logicOp ) && ( multiDrawIndirect == rhs.multiDrawIndirect ) && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance ) && - ( depthClamp == rhs.depthClamp ) && ( depthBiasClamp == rhs.depthBiasClamp ) && ( fillModeNonSolid == rhs.fillModeNonSolid ) && - ( depthBounds == rhs.depthBounds ) && ( wideLines == rhs.wideLines ) && ( largePoints == rhs.largePoints ) && ( alphaToOne == rhs.alphaToOne ) && - ( multiViewport == rhs.multiViewport ) && ( samplerAnisotropy == rhs.samplerAnisotropy ) && - ( 
textureCompressionETC2 == rhs.textureCompressionETC2 ) && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR ) && - ( textureCompressionBC == rhs.textureCompressionBC ) && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise ) && - ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery ) && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics ) && - ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics ) && - ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize ) && - ( shaderImageGatherExtended == rhs.shaderImageGatherExtended ) && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats ) && - ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample ) && - ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat ) && - ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat ) && - ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing ) && - ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing ) && - ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing ) && - ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing ) && ( shaderClipDistance == rhs.shaderClipDistance ) && - ( shaderCullDistance == rhs.shaderCullDistance ) && ( shaderFloat64 == rhs.shaderFloat64 ) && ( shaderInt64 == rhs.shaderInt64 ) && - ( shaderInt16 == rhs.shaderInt16 ) && ( shaderResourceResidency == rhs.shaderResourceResidency ) && - ( shaderResourceMinLod == rhs.shaderResourceMinLod ) && ( sparseBinding == rhs.sparseBinding ) && - ( sparseResidencyBuffer == rhs.sparseResidencyBuffer ) && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D ) && - ( sparseResidencyImage3D == rhs.sparseResidencyImage3D ) && ( sparseResidency2Samples == rhs.sparseResidency2Samples ) && - ( sparseResidency4Samples == rhs.sparseResidency4Samples ) && ( sparseResidency8Samples == rhs.sparseResidency8Samples ) && - ( sparseResidency16Samples == rhs.sparseResidency16Samples ) && ( sparseResidencyAliased == rhs.sparseResidencyAliased ) && - ( variableMultisampleRate == rhs.variableMultisampleRate ) && ( inheritedQueries == rhs.inheritedQueries ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); +# endif } - bool operator!=( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CudaModuleCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray = {}; - VULKAN_HPP_NAMESPACE::Bool32 independentBlend = {}; - VULKAN_HPP_NAMESPACE::Bool32 geometryShader = {}; - VULKAN_HPP_NAMESPACE::Bool32 tessellationShader = {}; - VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading = {}; - VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend = {}; - VULKAN_HPP_NAMESPACE::Bool32 logicOp = {}; - VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect = {}; - VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance = {}; - VULKAN_HPP_NAMESPACE::Bool32 depthClamp = {}; - VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp = {}; - VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid = {}; - VULKAN_HPP_NAMESPACE::Bool32 depthBounds = {}; - VULKAN_HPP_NAMESPACE::Bool32 wideLines = {}; - 
VULKAN_HPP_NAMESPACE::Bool32 largePoints = {}; - VULKAN_HPP_NAMESPACE::Bool32 alphaToOne = {}; - VULKAN_HPP_NAMESPACE::Bool32 multiViewport = {}; - VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy = {}; - VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2 = {}; - VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR = {}; - VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC = {}; - VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise = {}; - VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery = {}; - VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInt64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInt16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseBinding = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {}; - VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {}; - VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCudaModuleCreateInfoNV; + const void * pNext = {}; + size_t dataSize = {}; + const void * pData = {}; }; - struct DeviceCreateInfo + template <> + struct CppType { - using NativeType = VkDeviceCreateInfo; + using Type = CudaModuleCreateInfoNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct D3D12FenceSubmitInfoKHR + { + using NativeType = VkD3D12FenceSubmitInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eD3D12FenceSubmitInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {}, - uint32_t queueCreateInfoCount_ = {}, - const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ = {}, - uint32_t enabledLayerCount_ = {}, - const char * const * ppEnabledLayerNames_ = {}, - uint32_t enabledExtensionCount_ = {}, - const char * const * 
ppEnabledExtensionNames_ = {}, - const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , queueCreateInfoCount( queueCreateInfoCount_ ) - , pQueueCreateInfos( pQueueCreateInfos_ ) - , enabledLayerCount( enabledLayerCount_ ) - , ppEnabledLayerNames( ppEnabledLayerNames_ ) - , enabledExtensionCount( enabledExtensionCount_ ) - , ppEnabledExtensionNames( ppEnabledExtensionNames_ ) - , pEnabledFeatures( pEnabledFeatures_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = {}, + const uint64_t * pWaitSemaphoreValues_ = {}, + uint32_t signalSemaphoreValuesCount_ = {}, + const uint64_t * pSignalSemaphoreValues_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , waitSemaphoreValuesCount{ waitSemaphoreValuesCount_ } + , pWaitSemaphoreValues{ pWaitSemaphoreValues_ } + , signalSemaphoreValuesCount{ signalSemaphoreValuesCount_ } + , pSignalSemaphoreValues{ pSignalSemaphoreValues_ } { } - VULKAN_HPP_CONSTEXPR DeviceCreateInfo( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceCreateInfo( *reinterpret_cast( &rhs ) ) {} + D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : D3D12FenceSubmitInfoKHR( *reinterpret_cast( &rhs ) ) + { + } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueCreateInfos_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledLayerNames_ = {}, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ = {}, - const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {}, - const void * pNext_ = nullptr ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + D3D12FenceSubmitInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ = {}, + const void * pNext_ = nullptr ) : pNext( pNext_ ) - , flags( flags_ ) - , queueCreateInfoCount( static_cast( queueCreateInfos_.size() ) ) - , pQueueCreateInfos( queueCreateInfos_.data() ) - , enabledLayerCount( static_cast( pEnabledLayerNames_.size() ) ) - , ppEnabledLayerNames( pEnabledLayerNames_.data() ) - , enabledExtensionCount( static_cast( pEnabledExtensionNames_.size() ) ) - , ppEnabledExtensionNames( pEnabledExtensionNames_.data() ) - , pEnabledFeatures( pEnabledFeatures_ ) + , waitSemaphoreValuesCount( static_cast( waitSemaphoreValues_.size() ) ) + , pWaitSemaphoreValues( waitSemaphoreValues_.data() ) + , signalSemaphoreValuesCount( static_cast( signalSemaphoreValues_.size() ) ) + , pSignalSemaphoreValues( signalSemaphoreValues_.data() ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - DeviceCreateInfo & operator=( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + D3D12FenceSubmitInfoKHR & operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DeviceCreateInfo & operator=( 
VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + waitSemaphoreValuesCount = waitSemaphoreValuesCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT { - queueCreateInfoCount = queueCreateInfoCount_; + pWaitSemaphoreValues = pWaitSemaphoreValues_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & - setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + D3D12FenceSubmitInfoKHR & + setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT { - pQueueCreateInfos = pQueueCreateInfos_; + waitSemaphoreValuesCount = static_cast( waitSemaphoreValues_.size() ); + pWaitSemaphoreValues = waitSemaphoreValues_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DeviceCreateInfo & setQueueCreateInfos( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueCreateInfos_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT { - queueCreateInfoCount = static_cast( queueCreateInfos_.size() ); - pQueueCreateInfos = queueCreateInfos_.data(); + signalSemaphoreValuesCount = signalSemaphoreValuesCount_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT { - enabledLayerCount = enabledLayerCount_; + pSignalSemaphoreValues = pSignalSemaphoreValues_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + D3D12FenceSubmitInfoKHR & + setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT { - ppEnabledLayerNames = ppEnabledLayerNames_; + signalSemaphoreValuesCount = static_cast( signalSemaphoreValues_.size() ); + pSignalSemaphoreValues = signalSemaphoreValues_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - 
DeviceCreateInfo & - setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + operator VkD3D12FenceSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - enabledLayerCount = static_cast( pEnabledLayerNames_.size() ); - ppEnabledLayerNames = pEnabledLayerNames_.data(); - return *this; + return *reinterpret_cast( this ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT + operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT { - enabledExtensionCount = enabledExtensionCount_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - ppEnabledExtensionNames = ppEnabledExtensionNames_; + return std::tie( sType, pNext, waitSemaphoreValuesCount, pWaitSemaphoreValues, signalSemaphoreValuesCount, pSignalSemaphoreValues ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( D3D12FenceSubmitInfoKHR const & ) const = default; +# else + bool operator==( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount ) && + ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount ) && + ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues ); +# endif + } + + bool operator!=( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR; + const void * pNext = {}; + uint32_t waitSemaphoreValuesCount = {}; + const uint64_t * pWaitSemaphoreValues = {}; + uint32_t signalSemaphoreValuesCount = {}; + const uint64_t * pSignalSemaphoreValues = {}; + }; + + template <> + struct CppType + { + using Type = D3D12FenceSubmitInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct DebugMarkerMarkerInfoEXT + { + using NativeType = VkDebugMarkerMarkerInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerMarkerInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + DebugMarkerMarkerInfoEXT( const char * pMarkerName_ = {}, std::array const & color_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pMarkerName{ pMarkerName_ } + , color{ color_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugMarkerMarkerInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + DebugMarkerMarkerInfoEXT & operator=( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DebugMarkerMarkerInfoEXT & operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = 
*reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DeviceCreateInfo & - setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - enabledExtensionCount = static_cast( pEnabledExtensionNames_.size() ); - ppEnabledExtensionNames = pEnabledExtensionNames_.data(); + pNext = pNext_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setPMarkerName( const char * pMarkerName_ ) VULKAN_HPP_NOEXCEPT { - pEnabledFeatures = pEnabledFeatures_; + pMarkerName = pMarkerName_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setColor( std::array color_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + color = color_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkDebugMarkerMarkerInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - flags, - queueCreateInfoCount, - pQueueCreateInfos, - enabledLayerCount, - ppEnabledLayerNames, - enabledExtensionCount, - ppEnabledExtensionNames, - pEnabledFeatures ); + return std::tie( sType, pNext, pMarkerName, color ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - std::strong_ordering operator<=>( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + std::partial_ordering operator<=>( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; - if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) - return cmp; - if ( auto cmp = queueCreateInfoCount <=> rhs.queueCreateInfoCount; cmp != 0 ) - return cmp; - if ( auto cmp = pQueueCreateInfos <=> rhs.pQueueCreateInfos; cmp != 0 ) - return cmp; - if ( auto cmp = enabledLayerCount <=> rhs.enabledLayerCount; cmp != 0 ) - return cmp; - for ( size_t i = 0; i < enabledLayerCount; ++i ) - { - if ( ppEnabledLayerNames[i] != rhs.ppEnabledLayerNames[i] ) - if ( auto cmp = strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ); cmp != 0 ) - return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater; - } - if ( auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0 ) - return cmp; - for ( size_t i = 0; i < enabledExtensionCount; ++i ) - { - if ( ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i] ) - if ( auto cmp = strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ); cmp != 0 ) - return cmp < 0 ? 
std::strong_ordering::less : std::strong_ordering::greater; - } - if ( auto cmp = pEnabledFeatures <=> rhs.pEnabledFeatures; cmp != 0 ) + if ( pMarkerName != rhs.pMarkerName ) + if ( auto cmp = strcmp( pMarkerName, rhs.pMarkerName ); cmp != 0 ) + return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater; + if ( auto cmp = color <=> rhs.color; cmp != 0 ) return cmp; - return std::strong_ordering::equivalent; + return std::partial_ordering::equivalent; } #endif - bool operator==( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueCreateInfoCount == rhs.queueCreateInfoCount ) && - ( pQueueCreateInfos == rhs.pQueueCreateInfos ) && ( enabledLayerCount == rhs.enabledLayerCount ) && - [this, rhs] - { - bool equal = true; - for ( size_t i = 0; equal && ( i < enabledLayerCount ); ++i ) - { - equal = ( ( ppEnabledLayerNames[i] == rhs.ppEnabledLayerNames[i] ) || ( strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ) == 0 ) ); - } - return equal; - }() && ( enabledExtensionCount == rhs.enabledExtensionCount ) && - [this, rhs] - { - bool equal = true; - for ( size_t i = 0; equal && ( i < enabledExtensionCount ); ++i ) - { - equal = ( ( ppEnabledExtensionNames[i] == rhs.ppEnabledExtensionNames[i] ) || - ( strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ) == 0 ) ); - } - return equal; - }() && ( pEnabledFeatures == rhs.pEnabledFeatures ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ( pMarkerName == rhs.pMarkerName ) || ( strcmp( pMarkerName, rhs.pMarkerName ) == 0 ) ) && + ( color == rhs.color ); } - bool operator!=( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags = {}; - uint32_t queueCreateInfoCount = {}; - const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos = {}; - uint32_t enabledLayerCount = {}; - const char * const * ppEnabledLayerNames = {}; - uint32_t enabledExtensionCount = {}; - const char * const * ppEnabledExtensionNames = {}; - const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT; + const void * pNext = {}; + const char * pMarkerName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D color = {}; }; template <> - struct CppType + struct CppType { - using Type = DeviceCreateInfo; + using Type = DebugMarkerMarkerInfoEXT; }; - struct DeviceDeviceMemoryReportCreateInfoEXT + struct DebugMarkerObjectNameInfoEXT { - using NativeType = VkDeviceDeviceMemoryReportCreateInfoEXT; + using NativeType = VkDebugMarkerObjectNameInfoEXT; - static const bool allowDuplicate = true; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectNameInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, - 
PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ = {}, - void * pUserData_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , pfnUserCallback( pfnUserCallback_ ) - , pUserData( pUserData_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DebugMarkerObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, + uint64_t object_ = {}, + const char * pObjectName_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , objectType{ objectType_ } + , object{ object_ } + , pObjectName{ pObjectName_ } { } - VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DeviceDeviceMemoryReportCreateInfoEXT( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : DeviceDeviceMemoryReportCreateInfoEXT( *reinterpret_cast( &rhs ) ) + DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugMarkerObjectNameInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DeviceDeviceMemoryReportCreateInfoEXT & operator=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DebugMarkerObjectNameInfoEXT & operator=( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DeviceDeviceMemoryReportCreateInfoEXT & operator=( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DebugMarkerObjectNameInfoEXT & operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + objectType = objectType_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & - setPfnUserCallback( PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT { - pfnUserCallback = pfnUserCallback_; + object = object_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT { - pUserData = pUserData_; + pObjectName = pObjectName_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator 
VkDeviceDeviceMemoryReportCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkDebugMarkerObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDeviceDeviceMemoryReportCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkDebugMarkerObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -22307,608 +24651,809 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT const &, + uint64_t const &, + const char * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, pfnUserCallback, pUserData ); + return std::tie( sType, pNext, objectType, object, pObjectName ); } #endif - bool operator==( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -#if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -#else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pfnUserCallback == rhs.pfnUserCallback ) && - ( pUserData == rhs.pUserData ); + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = objectType <=> rhs.objectType; cmp != 0 ) + return cmp; + if ( auto cmp = object <=> rhs.object; cmp != 0 ) + return cmp; + if ( pObjectName != rhs.pObjectName ) + if ( auto cmp = strcmp( pObjectName, rhs.pObjectName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } #endif + + bool operator==( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( object == rhs.object ) && + ( ( pObjectName == rhs.pObjectName ) || ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) ); } - bool operator!=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {}; - PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback = {}; - void * pUserData = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + uint64_t object = {}; + const char * pObjectName = {}; }; template <> - struct CppType + struct CppType { - using Type = DeviceDeviceMemoryReportCreateInfoEXT; + using Type = DebugMarkerObjectNameInfoEXT; }; - struct DeviceDiagnosticsConfigCreateInfoNV + struct DebugMarkerObjectTagInfoEXT { - using NativeType = VkDeviceDiagnosticsConfigCreateInfoNV; + using NativeType = VkDebugMarkerObjectTagInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
StructureType structureType = StructureType::eDebugMarkerObjectTagInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, + uint64_t object_ = {}, + uint64_t tagName_ = {}, + size_t tagSize_ = {}, + const void * pTag_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , objectType{ objectType_ } + , object{ object_ } + , tagName{ tagName_ } + , tagSize{ tagSize_ } + , pTag{ pTag_ } { } - VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DeviceDiagnosticsConfigCreateInfoNV( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : DeviceDiagnosticsConfigCreateInfoNV( *reinterpret_cast( &rhs ) ) + DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugMarkerObjectTagInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DeviceDiagnosticsConfigCreateInfoNV & operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, + uint64_t object_, + uint64_t tagName_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof( T ) ), pTag( tag_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - DeviceDiagnosticsConfigCreateInfoNV & operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + DebugMarkerObjectTagInfoEXT & operator=( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DebugMarkerObjectTagInfoEXT & operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + objectType = objectType_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDeviceDiagnosticsConfigCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + 
VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + object = object_; + return *this; } - operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + tagName = tagName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT + { + tagSize = tagSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT + { + pTag = pTag_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + DebugMarkerObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) VULKAN_HPP_NOEXCEPT + { + tagSize = tag_.size() * sizeof( T ); + pTag = tag_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDebugMarkerObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugMarkerObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags ); + return std::tie( sType, pNext, objectType, object, tagName, tagSize, pTag ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceDiagnosticsConfigCreateInfoNV const & ) const = default; + auto operator<=>( DebugMarkerObjectTagInfoEXT const & ) const = default; #else - bool operator==( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( object == rhs.object ) && ( tagName == rhs.tagName ) && + ( tagSize == rhs.tagSize ) && ( pTag == rhs.pTag ); # endif } - bool operator!=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + uint64_t object = {}; + uint64_t tagName = {}; + size_t tagSize = {}; + const void * pTag = {}; }; template <> - struct CppType + struct CppType { - using Type = DeviceDiagnosticsConfigCreateInfoNV; + using Type = DebugMarkerObjectTagInfoEXT; }; - struct DeviceEventInfoEXT + typedef VULKAN_HPP_NAMESPACE::Bool32( VKAPI_PTR * PFN_DebugReportCallbackEXT )( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, + uint64_t object, + size_t 
location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage, + void * pUserData ); + + struct DebugReportCallbackCreateInfoEXT { - using NativeType = VkDeviceEventInfoEXT; + using NativeType = VkDebugReportCallbackCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceEventInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugReportCallbackCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceEvent( deviceEvent_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::PFN_DebugReportCallbackEXT pfnCallback_ = {}, + void * pUserData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , pfnCallback{ pfnCallback_ } + , pUserData{ pUserData_ } { } - VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceEventInfoEXT( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugReportCallbackCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } - DeviceEventInfoEXT & operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# if defined( __clang__ ) +# pragma clang diagnostic ignored "-Wunknown-warning-option" +# endif +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + VULKAN_HPP_DEPRECATED( "This constructor is deprecated. Use the one taking function pointer types from the vk-namespace instead." 
) - DeviceEventInfoEXT & operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DebugReportCallbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_, + PFN_vkDebugReportCallbackEXT pfnCallback_, + void * pUserData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : DebugReportCallbackCreateInfoEXT( flags_, reinterpret_cast( pfnCallback_ ), pUserData_, pNext_ ) { - *this = *reinterpret_cast( &rhs ); + } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +# endif + + DebugReportCallbackCreateInfoEXT & operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DebugReportCallbackCreateInfoEXT & operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & setDeviceEvent( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { - deviceEvent = deviceEvent_; + flags = flags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDeviceEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & + setPfnCallback( VULKAN_HPP_NAMESPACE::PFN_DebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pfnCallback = pfnCallback_; + return *this; } - operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pUserData = pUserData_; + return *this; + } + +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# if defined( __clang__ ) +# pragma clang diagnostic ignored "-Wunknown-warning-option" +# endif +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." 
) + + DebugReportCallbackCreateInfoEXT & setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT + { + return setPfnCallback( reinterpret_cast( pfnCallback_ ) ); + } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +# endif +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDebugReportCallbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugReportCallbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, deviceEvent ); + return std::tie( sType, pNext, flags, pfnCallback, pUserData ); } #endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceEventInfoEXT const & ) const = default; -#else - bool operator==( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +#if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceEvent == rhs.deviceEvent ); -# endif +#else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pfnCallback == rhs.pfnCallback ) && ( pUserData == rhs.pUserData ); +#endif } - bool operator!=( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::PFN_DebugReportCallbackEXT pfnCallback = {}; + void * pUserData = {}; }; template <> - struct CppType + struct CppType { - using Type = DeviceEventInfoEXT; + using Type = DebugReportCallbackCreateInfoEXT; }; - struct DeviceGroupBindSparseInfo + struct DebugUtilsLabelEXT { - using NativeType = VkDeviceGroupBindSparseInfo; + using NativeType = VkDebugUtilsLabelEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupBindSparseInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = {}, uint32_t memoryDeviceIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , resourceDeviceIndex( resourceDeviceIndex_ ) - , memoryDeviceIndex( memoryDeviceIndex_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + DebugUtilsLabelEXT( const char * pLabelName_ = {}, std::array const & color_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pLabelName{ pLabelName_ } + , color{ color_ } { } - VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( DeviceGroupBindSparseInfo const 
& rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : DeviceGroupBindSparseInfo( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DebugUtilsLabelEXT( *reinterpret_cast( &rhs ) ) {} - DeviceGroupBindSparseInfo & operator=( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DebugUtilsLabelEXT & operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DeviceGroupBindSparseInfo & operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DebugUtilsLabelEXT & operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setPLabelName( const char * pLabelName_ ) VULKAN_HPP_NOEXCEPT { - resourceDeviceIndex = resourceDeviceIndex_; + pLabelName = pLabelName_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setColor( std::array color_ ) VULKAN_HPP_NOEXCEPT { - memoryDeviceIndex = memoryDeviceIndex_; + color = color_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDeviceGroupBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkDebugUtilsLabelEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT + operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, resourceDeviceIndex, memoryDeviceIndex ); + return std::tie( sType, pNext, pLabelName, color ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceGroupBindSparseInfo const & ) const = default; -#else - bool operator==( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + std::partial_ordering operator<=>( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( resourceDeviceIndex == rhs.resourceDeviceIndex ) && - ( memoryDeviceIndex == rhs.memoryDeviceIndex ); -# endif + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( pLabelName != rhs.pLabelName ) + 
if ( auto cmp = strcmp( pLabelName, rhs.pLabelName ); cmp != 0 ) + return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater; + if ( auto cmp = color <=> rhs.color; cmp != 0 ) + return cmp; + + return std::partial_ordering::equivalent; } +#endif - bool operator!=( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ( pLabelName == rhs.pLabelName ) || ( strcmp( pLabelName, rhs.pLabelName ) == 0 ) ) && + ( color == rhs.color ); + } + + bool operator!=( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo; - const void * pNext = {}; - uint32_t resourceDeviceIndex = {}; - uint32_t memoryDeviceIndex = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT; + const void * pNext = {}; + const char * pLabelName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D color = {}; }; template <> - struct CppType + struct CppType { - using Type = DeviceGroupBindSparseInfo; + using Type = DebugUtilsLabelEXT; }; - using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo; - struct DeviceGroupCommandBufferBeginInfo + struct DebugUtilsObjectNameInfoEXT { - using NativeType = VkDeviceGroupCommandBufferBeginInfo; + using NativeType = VkDebugUtilsObjectNameInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupCommandBufferBeginInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectNameInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceMask( deviceMask_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, + uint64_t objectHandle_ = {}, + const char * pObjectName_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , objectType{ objectType_ } + , objectHandle{ objectHandle_ } + , pObjectName{ pObjectName_ } { } - VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : DeviceGroupCommandBufferBeginInfo( *reinterpret_cast( &rhs ) ) + DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugUtilsObjectNameInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DeviceGroupCommandBufferBeginInfo & operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DebugUtilsObjectNameInfoEXT & operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DeviceGroupCommandBufferBeginInfo & operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + 
DebugUtilsObjectNameInfoEXT & operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT { - deviceMask = deviceMask_; + objectType = objectType_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDeviceGroupCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + objectHandle = objectHandle_; + return *this; } - operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pObjectName = pObjectName_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDebugUtilsObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std:: + tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, deviceMask ); + return std::tie( sType, pNext, objectType, objectHandle, pObjectName ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceGroupCommandBufferBeginInfo const & ) const = default; -#else - bool operator==( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + std::strong_ordering operator<=>( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask ); -# endif + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = objectType <=> rhs.objectType; cmp != 0 ) + return cmp; + if ( auto cmp = objectHandle <=> rhs.objectHandle; cmp != 0 ) + return cmp; + if ( pObjectName != rhs.pObjectName ) + if ( auto cmp = strcmp( pObjectName, rhs.pObjectName ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; } +#endif - bool operator!=( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) && + ( ( pObjectName == rhs.pObjectName ) || ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) ); + } + + bool operator!=( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo; - const void * pNext = {}; - uint32_t deviceMask = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; + uint64_t objectHandle = {}; + const char * pObjectName = {}; }; template <> - struct CppType + struct CppType { - using Type = DeviceGroupCommandBufferBeginInfo; + using Type = DebugUtilsObjectNameInfoEXT; }; - using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo; - struct DeviceGroupDeviceCreateInfo + struct DebugUtilsMessengerCallbackDataEXT { - using NativeType = VkDeviceGroupDeviceCreateInfo; + using NativeType = VkDebugUtilsMessengerCallbackDataEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupDeviceCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCallbackDataEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = {}, - const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , physicalDeviceCount( physicalDeviceCount_ ) - , pPhysicalDevices( pPhysicalDevices_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {}, + const char * pMessageIdName_ = {}, + int32_t messageIdNumber_ = {}, + const char * pMessage_ = {}, + uint32_t queueLabelCount_ = {}, + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ = {}, + uint32_t cmdBufLabelCount_ = {}, + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ = {}, + uint32_t objectCount_ = {}, + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , pMessageIdName{ pMessageIdName_ } + , messageIdNumber{ messageIdNumber_ } + , pMessage{ pMessage_ } + , queueLabelCount{ queueLabelCount_ } + , pQueueLabels{ pQueueLabels_ } + , cmdBufLabelCount{ cmdBufLabelCount_ } + , pCmdBufLabels{ pCmdBufLabels_ } + , objectCount{ objectCount_ } + , pObjects{ pObjects_ } { } - VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DeviceGroupDeviceCreateInfo( 
VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : DeviceGroupDeviceCreateInfo( *reinterpret_cast( &rhs ) ) + DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugUtilsMessengerCallbackDataEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DeviceGroupDeviceCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & physicalDevices_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), physicalDeviceCount( static_cast( physicalDevices_.size() ) ), pPhysicalDevices( physicalDevices_.data() ) + DebugUtilsMessengerCallbackDataEXT( + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_, + const char * pMessageIdName_, + int32_t messageIdNumber_, + const char * pMessage_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueLabels_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & cmdBufLabels_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & objects_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , pMessageIdName( pMessageIdName_ ) + , messageIdNumber( messageIdNumber_ ) + , pMessage( pMessage_ ) + , queueLabelCount( static_cast( queueLabels_.size() ) ) + , pQueueLabels( queueLabels_.data() ) + , cmdBufLabelCount( static_cast( cmdBufLabels_.size() ) ) + , pCmdBufLabels( cmdBufLabels_.data() ) + , objectCount( static_cast( objects_.size() ) ) + , pObjects( objects_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DeviceGroupDeviceCreateInfo & operator=( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DebugUtilsMessengerCallbackDataEXT & operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DeviceGroupDeviceCreateInfo & operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCallbackDataEXT & operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & + setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { - physicalDeviceCount = physicalDeviceCount_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & - setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char * pMessageIdName_ ) VULKAN_HPP_NOEXCEPT { - pPhysicalDevices = pPhysicalDevices_; + pMessageIdName = pMessageIdName_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DeviceGroupDeviceCreateInfo & setPhysicalDevices( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & physicalDevices_ ) VULKAN_HPP_NOEXCEPT + 
VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setMessageIdNumber( int32_t messageIdNumber_ ) VULKAN_HPP_NOEXCEPT { - physicalDeviceCount = static_cast( physicalDevices_.size() ); - pPhysicalDevices = physicalDevices_.data(); + messageIdNumber = messageIdNumber_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDeviceGroupDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPMessage( const char * pMessage_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pMessage = pMessage_; + return *this; } - operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setQueueLabelCount( uint32_t queueLabelCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + queueLabelCount = queueLabelCount_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & + setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, physicalDeviceCount, pPhysicalDevices ); + pQueueLabels = pQueueLabels_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceGroupDeviceCreateInfo const & ) const = default; -#else - bool operator==( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DebugUtilsMessengerCallbackDataEXT & + setQueueLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueLabels_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) && - ( pPhysicalDevices == rhs.pPhysicalDevices ); -# endif + queueLabelCount = static_cast( queueLabels_.size() ); + pQueueLabels = queueLabels_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - bool operator!=( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + cmdBufLabelCount = cmdBufLabelCount_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo; - const void * pNext = {}; - uint32_t physicalDeviceCount = {}; - const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices = {}; - }; - - template <> - struct CppType - { - using Type = DeviceGroupDeviceCreateInfo; - }; - using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo; - - struct DeviceGroupPresentCapabilitiesKHR - { - using NativeType = VkDeviceGroupPresentCapabilitiesKHR; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentCapabilitiesKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( std::array const & presentMask_ = {}, - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , presentMask( presentMask_ ) - , modes( 
modes_ ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & + setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT { + pCmdBufLabels = pCmdBufLabels_; + return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DebugUtilsMessengerCallbackDataEXT & + setCmdBufLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & cmdBufLabels_ ) VULKAN_HPP_NOEXCEPT + { + cmdBufLabelCount = static_cast( cmdBufLabels_.size() ); + pCmdBufLabels = cmdBufLabels_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : DeviceGroupPresentCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT { + objectCount = objectCount_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DeviceGroupPresentCapabilitiesKHR & operator=( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & + setPObjects( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ ) VULKAN_HPP_NOEXCEPT + { + pObjects = pObjects_; + return *this; + } - DeviceGroupPresentCapabilitiesKHR & operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DebugUtilsMessengerCallbackDataEXT & + setObjects( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & objects_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + objectCount = static_cast( objects_.size() ); + pObjects = objects_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDeviceGroupPresentCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkDebugUtilsMessengerCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -22916,135 +25461,232 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple const &, - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR const &> + const void * const &, + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT const &, + const char * const &, + int32_t const &, + const char * const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, presentMask, modes ); + return std::tie( + sType, pNext, flags, pMessageIdName, messageIdNumber, pMessage, queueLabelCount, pQueueLabels, cmdBufLabelCount, pCmdBufLabels, objectCount, pObjects ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceGroupPresentCapabilitiesKHR const & ) const = default; -#else - bool operator==( DeviceGroupPresentCapabilitiesKHR const & rhs ) 
const VULKAN_HPP_NOEXCEPT + std::strong_ordering operator<=>( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentMask == rhs.presentMask ) && ( modes == rhs.modes ); -# endif + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( pMessageIdName != rhs.pMessageIdName ) + if ( auto cmp = strcmp( pMessageIdName, rhs.pMessageIdName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = messageIdNumber <=> rhs.messageIdNumber; cmp != 0 ) + return cmp; + if ( pMessage != rhs.pMessage ) + if ( auto cmp = strcmp( pMessage, rhs.pMessage ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = queueLabelCount <=> rhs.queueLabelCount; cmp != 0 ) + return cmp; + if ( auto cmp = pQueueLabels <=> rhs.pQueueLabels; cmp != 0 ) + return cmp; + if ( auto cmp = cmdBufLabelCount <=> rhs.cmdBufLabelCount; cmp != 0 ) + return cmp; + if ( auto cmp = pCmdBufLabels <=> rhs.pCmdBufLabels; cmp != 0 ) + return cmp; + if ( auto cmp = objectCount <=> rhs.objectCount; cmp != 0 ) + return cmp; + if ( auto cmp = pObjects <=> rhs.pObjects; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( ( pMessageIdName == rhs.pMessageIdName ) || ( strcmp( pMessageIdName, rhs.pMessageIdName ) == 0 ) ) && + ( messageIdNumber == rhs.messageIdNumber ) && ( ( pMessage == rhs.pMessage ) || ( strcmp( pMessage, rhs.pMessage ) == 0 ) ) && + ( queueLabelCount == rhs.queueLabelCount ) && ( pQueueLabels == rhs.pQueueLabels ) && ( cmdBufLabelCount == rhs.cmdBufLabelCount ) && + ( pCmdBufLabels == rhs.pCmdBufLabels ) && ( objectCount == rhs.objectCount ) && ( pObjects == rhs.pObjects ); } - bool operator!=( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D presentMask = {}; - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags = {}; + const char * pMessageIdName = {}; + int32_t messageIdNumber = {}; + const char * pMessage = {}; + uint32_t queueLabelCount = {}; + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pQueueLabels = {}; + uint32_t cmdBufLabelCount = {}; + const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pCmdBufLabels = {}; + uint32_t objectCount = {}; + const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects = {}; }; template <> - struct CppType + struct CppType { - using Type = DeviceGroupPresentCapabilitiesKHR; + using Type = DebugUtilsMessengerCallbackDataEXT; }; - struct DeviceGroupPresentInfoKHR + typedef VULKAN_HPP_NAMESPACE::Bool32( VKAPI_PTR * 
PFN_DebugUtilsMessengerCallbackEXT )( + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData, + void * pUserData ); + + struct DebugUtilsMessengerCreateInfoEXT { - using NativeType = VkDeviceGroupPresentInfoKHR; + using NativeType = VkDebugUtilsMessengerCreateInfoEXT; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentInfoKHR; + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( - uint32_t swapchainCount_ = {}, - const uint32_t * pDeviceMasks_ = {}, - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , swapchainCount( swapchainCount_ ) - , pDeviceMasks( pDeviceMasks_ ) - , mode( mode_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {}, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {}, + VULKAN_HPP_NAMESPACE::PFN_DebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, + void * pUserData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , messageSeverity{ messageSeverity_ } + , messageType{ messageType_ } + , pfnUserCallback{ pfnUserCallback_ } + , pUserData{ pUserData_ } { } - VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : DeviceGroupPresentInfoKHR( *reinterpret_cast( &rhs ) ) + DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugUtilsMessengerCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DeviceGroupPresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceMasks_, - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), swapchainCount( static_cast( deviceMasks_.size() ) ), pDeviceMasks( deviceMasks_.data() ), mode( mode_ ) +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# if defined( __clang__ ) +# pragma clang diagnostic ignored "-Wunknown-warning-option" +# endif +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + VULKAN_HPP_DEPRECATED( "This constructor is deprecated. Use the one taking function pointer types from the vk-namespace instead." 
) + + DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_, + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_, + void * pUserData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : DebugUtilsMessengerCreateInfoEXT( flags_, + messageSeverity_, + messageType_, + reinterpret_cast( pfnUserCallback_ ), + pUserData_, + pNext_ ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +# endif - DeviceGroupPresentInfoKHR & operator=( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DebugUtilsMessengerCreateInfoEXT & operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DeviceGroupPresentInfoKHR & operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + DebugUtilsMessengerCreateInfoEXT & operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { - swapchainCount = swapchainCount_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t * pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & + setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT { - pDeviceMasks = pDeviceMasks_; + messageSeverity = messageSeverity_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DeviceGroupPresentInfoKHR & setDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceMasks_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & + setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT { - swapchainCount = static_cast( deviceMasks_.size() ); - pDeviceMasks = deviceMasks_.data(); + messageType = messageType_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & + setPfnUserCallback( VULKAN_HPP_NAMESPACE::PFN_DebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT { - mode = mode_; + pfnUserCallback = pfnUserCallback_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDeviceGroupPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPUserData( 
void * pUserData_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pUserData = pUserData_; + return *this; } - operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# if defined( __clang__ ) +# pragma clang diagnostic ignored "-Wunknown-warning-option" +# endif +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." ) + + DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return setPfnUserCallback( reinterpret_cast( pfnUserCallback_ ) ); + } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +# endif +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDebugUtilsMessengerCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -23053,680 +25695,712 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT const &, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT const &, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT const &, + VULKAN_HPP_NAMESPACE::PFN_DebugUtilsMessengerCallbackEXT const &, + void * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, swapchainCount, pDeviceMasks, mode ); + return std::tie( sType, pNext, flags, messageSeverity, messageType, pfnUserCallback, pUserData ); } #endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceGroupPresentInfoKHR const & ) const = default; -#else - bool operator==( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +#if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pDeviceMasks == rhs.pDeviceMasks ) && - ( mode == rhs.mode ); -# endif +#else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( messageSeverity == rhs.messageSeverity ) && + ( messageType == rhs.messageType ) && ( pfnUserCallback == rhs.pfnUserCallback ) && ( pUserData == rhs.pUserData ); +#endif } - bool operator!=( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR; - const void * pNext = {}; - uint32_t swapchainCount = {}; - const uint32_t * pDeviceMasks = {}; - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType 
= {}; + VULKAN_HPP_NAMESPACE::PFN_DebugUtilsMessengerCallbackEXT pfnUserCallback = {}; + void * pUserData = {}; }; template <> - struct CppType + struct CppType { - using Type = DeviceGroupPresentInfoKHR; + using Type = DebugUtilsMessengerCreateInfoEXT; }; - struct DeviceGroupRenderPassBeginInfo + struct DebugUtilsObjectTagInfoEXT { - using NativeType = VkDeviceGroupRenderPassBeginInfo; + using NativeType = VkDebugUtilsObjectTagInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupRenderPassBeginInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectTagInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = {}, - uint32_t deviceRenderAreaCount_ = {}, - const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceMask( deviceMask_ ) - , deviceRenderAreaCount( deviceRenderAreaCount_ ) - , pDeviceRenderAreas( pDeviceRenderAreas_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, + uint64_t objectHandle_ = {}, + uint64_t tagName_ = {}, + size_t tagSize_ = {}, + const void * pTag_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , objectType{ objectType_ } + , objectHandle{ objectHandle_ } + , tagName{ tagName_ } + , tagSize{ tagSize_ } + , pTag{ pTag_ } { } - VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : DeviceGroupRenderPassBeginInfo( *reinterpret_cast( &rhs ) ) + DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DebugUtilsObjectTagInfoEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceRenderAreas_, - const void * pNext_ = nullptr ) + template + DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, + uint64_t objectHandle_, + uint64_t tagName_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_, + const void * pNext_ = nullptr ) : pNext( pNext_ ) - , deviceMask( deviceMask_ ) - , deviceRenderAreaCount( static_cast( deviceRenderAreas_.size() ) ) - , pDeviceRenderAreas( deviceRenderAreas_.data() ) + , objectType( objectType_ ) + , objectHandle( objectHandle_ ) + , tagName( tagName_ ) + , tagSize( tag_.size() * sizeof( T ) ) + , pTag( tag_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DeviceGroupRenderPassBeginInfo & operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DebugUtilsObjectTagInfoEXT & operator=( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DeviceGroupRenderPassBeginInfo & operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + 
DebugUtilsObjectTagInfoEXT & operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT { - deviceMask = deviceMask_; + objectType = objectType_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT { - deviceRenderAreaCount = deviceRenderAreaCount_; + objectHandle = objectHandle_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & - setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT { - pDeviceRenderAreas = pDeviceRenderAreas_; + tagName = tagName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT + { + tagSize = tagSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT + { + pTag = pTag_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DeviceGroupRenderPassBeginInfo & - setDeviceRenderAreas( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT + template + DebugUtilsObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) VULKAN_HPP_NOEXCEPT { - deviceRenderAreaCount = static_cast( deviceRenderAreas_.size() ); - pDeviceRenderAreas = deviceRenderAreas_.data(); + tagSize = tag_.size() * sizeof( T ); + pTag = tag_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDeviceGroupRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkDebugUtilsObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT + operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std:: - tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, deviceMask, deviceRenderAreaCount, pDeviceRenderAreas ); + return std::tie( sType, pNext, objectType, objectHandle, tagName, tagSize, pTag ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceGroupRenderPassBeginInfo const & ) const = default; + auto operator<=>( DebugUtilsObjectTagInfoEXT const & ) const = default; 
#else - bool operator==( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask ) && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount ) && - ( pDeviceRenderAreas == rhs.pDeviceRenderAreas ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) && + ( tagName == rhs.tagName ) && ( tagSize == rhs.tagSize ) && ( pTag == rhs.pTag ); # endif } - bool operator!=( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo; - const void * pNext = {}; - uint32_t deviceMask = {}; - uint32_t deviceRenderAreaCount = {}; - const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; + uint64_t objectHandle = {}; + uint64_t tagName = {}; + size_t tagSize = {}; + const void * pTag = {}; }; template <> - struct CppType + struct CppType { - using Type = DeviceGroupRenderPassBeginInfo; + using Type = DebugUtilsObjectTagInfoEXT; }; - using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo; - struct DeviceGroupSubmitInfo + struct DecompressMemoryRegionNV { - using NativeType = VkDeviceGroupSubmitInfo; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSubmitInfo; + using NativeType = VkDecompressMemoryRegionNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = {}, - const uint32_t * pWaitSemaphoreDeviceIndices_ = {}, - uint32_t commandBufferCount_ = {}, - const uint32_t * pCommandBufferDeviceMasks_ = {}, - uint32_t signalSemaphoreCount_ = {}, - const uint32_t * pSignalSemaphoreDeviceIndices_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , waitSemaphoreCount( waitSemaphoreCount_ ) - , pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ ) - , commandBufferCount( commandBufferCount_ ) - , pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ ) - , signalSemaphoreCount( signalSemaphoreCount_ ) - , pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DecompressMemoryRegionNV( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize compressedSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize decompressedSize_ = {}, + VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethod_ = {} ) VULKAN_HPP_NOEXCEPT + : srcAddress{ srcAddress_ } + , dstAddress{ dstAddress_ } + , compressedSize{ compressedSize_ } + , decompressedSize{ decompressedSize_ } + , decompressionMethod{ decompressionMethod_ } { } - VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( DeviceGroupSubmitInfo const & rhs 
) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DecompressMemoryRegionNV( DecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : DeviceGroupSubmitInfo( *reinterpret_cast( &rhs ) ) + DecompressMemoryRegionNV( VkDecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DecompressMemoryRegionNV( *reinterpret_cast( &rhs ) ) { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DeviceGroupSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreDeviceIndices_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBufferDeviceMasks_ = {}, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreDeviceIndices_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , waitSemaphoreCount( static_cast( waitSemaphoreDeviceIndices_.size() ) ) - , pWaitSemaphoreDeviceIndices( waitSemaphoreDeviceIndices_.data() ) - , commandBufferCount( static_cast( commandBufferDeviceMasks_.size() ) ) - , pCommandBufferDeviceMasks( commandBufferDeviceMasks_.data() ) - , signalSemaphoreCount( static_cast( signalSemaphoreDeviceIndices_.size() ) ) - , pSignalSemaphoreDeviceIndices( signalSemaphoreDeviceIndices_.data() ) + DecompressMemoryRegionNV & operator=( DecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DecompressMemoryRegionNV & operator=( VkDecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - DeviceGroupSubmitInfo & operator=( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DeviceGroupSubmitInfo & operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setSrcAddress( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + srcAddress = srcAddress_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setDstAddress( VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + dstAddress = dstAddress_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setCompressedSize( VULKAN_HPP_NAMESPACE::DeviceSize compressedSize_ ) VULKAN_HPP_NOEXCEPT { - waitSemaphoreCount = waitSemaphoreCount_; + compressedSize = compressedSize_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPWaitSemaphoreDeviceIndices( const uint32_t * pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setDecompressedSize( VULKAN_HPP_NAMESPACE::DeviceSize decompressedSize_ ) VULKAN_HPP_NOEXCEPT { - pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_; + decompressedSize = decompressedSize_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DeviceGroupSubmitInfo & - setWaitSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
DecompressMemoryRegionNV & + setDecompressionMethod( VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethod_ ) VULKAN_HPP_NOEXCEPT { - waitSemaphoreCount = static_cast( waitSemaphoreDeviceIndices_.size() ); - pWaitSemaphoreDeviceIndices = waitSemaphoreDeviceIndices_.data(); + decompressionMethod = decompressionMethod_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT + operator VkDecompressMemoryRegionNV const &() const VULKAN_HPP_NOEXCEPT { - commandBufferCount = commandBufferCount_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPCommandBufferDeviceMasks( const uint32_t * pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT + operator VkDecompressMemoryRegionNV &() VULKAN_HPP_NOEXCEPT { - pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_; - return *this; + return *reinterpret_cast( this ); } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DeviceGroupSubmitInfo & - setCommandBufferDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - commandBufferCount = static_cast( commandBufferDeviceMasks_.size() ); - pCommandBufferDeviceMasks = commandBufferDeviceMasks_.data(); - return *this; + return std::tie( srcAddress, dstAddress, compressedSize, decompressedSize, decompressionMethod ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif - VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DecompressMemoryRegionNV const & ) const = default; +#else + bool operator==( DecompressMemoryRegionNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - signalSemaphoreCount = signalSemaphoreCount_; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( srcAddress == rhs.srcAddress ) && ( dstAddress == rhs.dstAddress ) && ( compressedSize == rhs.compressedSize ) && + ( decompressedSize == rhs.decompressedSize ) && ( decompressionMethod == rhs.decompressionMethod ); +# endif + } + + bool operator!=( DecompressMemoryRegionNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize compressedSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize decompressedSize = {}; + VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethod = {}; + }; + + struct DedicatedAllocationBufferCreateInfoNV + { + using NativeType = VkDedicatedAllocationBufferCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationBufferCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dedicatedAllocation{ dedicatedAllocation_ } + { + } + + 
VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DedicatedAllocationBufferCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + DedicatedAllocationBufferCreateInfoNV & operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DedicatedAllocationBufferCreateInfoNV & operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPSignalSemaphoreDeviceIndices( const uint32_t * pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_; + pNext = pNext_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DeviceGroupSubmitInfo & - setSignalSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & + setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT { - signalSemaphoreCount = static_cast( signalSemaphoreDeviceIndices_.size() ); - pSignalSemaphoreDeviceIndices = signalSemaphoreDeviceIndices_.data(); + dedicatedAllocation = dedicatedAllocation_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDeviceGroupSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkDedicatedAllocationBufferCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT + operator VkDedicatedAllocationBufferCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - waitSemaphoreCount, - pWaitSemaphoreDeviceIndices, - commandBufferCount, - pCommandBufferDeviceMasks, - signalSemaphoreCount, - pSignalSemaphoreDeviceIndices ); + return std::tie( sType, pNext, dedicatedAllocation ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceGroupSubmitInfo const & ) const = default; + auto operator<=>( DedicatedAllocationBufferCreateInfoNV const & ) const = default; #else - bool operator==( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && - ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices ) && ( commandBufferCount == rhs.commandBufferCount ) && - ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks ) && ( signalSemaphoreCount == 
rhs.signalSemaphoreCount ) && - ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation ); # endif } - bool operator!=( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo; - const void * pNext = {}; - uint32_t waitSemaphoreCount = {}; - const uint32_t * pWaitSemaphoreDeviceIndices = {}; - uint32_t commandBufferCount = {}; - const uint32_t * pCommandBufferDeviceMasks = {}; - uint32_t signalSemaphoreCount = {}; - const uint32_t * pSignalSemaphoreDeviceIndices = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {}; }; template <> - struct CppType + struct CppType { - using Type = DeviceGroupSubmitInfo; + using Type = DedicatedAllocationBufferCreateInfoNV; }; - using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo; - struct DeviceGroupSwapchainCreateInfoKHR + struct DedicatedAllocationImageCreateInfoNV { - using NativeType = VkDeviceGroupSwapchainCreateInfoKHR; + using NativeType = VkDedicatedAllocationImageCreateInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSwapchainCreateInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationImageCreateInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , modes( modes_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dedicatedAllocation{ dedicatedAllocation_ } { } - VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : DeviceGroupSwapchainCreateInfoKHR( *reinterpret_cast( &rhs ) ) + DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DedicatedAllocationImageCreateInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DeviceGroupSwapchainCreateInfoKHR & operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DedicatedAllocationImageCreateInfoNV & operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DeviceGroupSwapchainCreateInfoKHR & operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + DedicatedAllocationImageCreateInfoNV & operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { 
- *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & + setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT { - modes = modes_; + dedicatedAllocation = dedicatedAllocation_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDeviceGroupSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkDedicatedAllocationImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkDedicatedAllocationImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, modes ); + return std::tie( sType, pNext, dedicatedAllocation ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceGroupSwapchainCreateInfoKHR const & ) const = default; + auto operator<=>( DedicatedAllocationImageCreateInfoNV const & ) const = default; #else - bool operator==( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( modes == rhs.modes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation ); # endif } - bool operator!=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {}; }; template <> - struct CppType + struct CppType { - using Type = DeviceGroupSwapchainCreateInfoKHR; + using Type = DedicatedAllocationImageCreateInfoNV; }; - struct ImageCreateInfo + struct DedicatedAllocationMemoryAllocateInfoNV { - using NativeType = VkImageCreateInfo; + using NativeType = VkDedicatedAllocationMemoryAllocateInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eDedicatedAllocationMemoryAllocateInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, - VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, - uint32_t mipLevels_ = {}, - uint32_t arrayLayers_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, - VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, - uint32_t queueFamilyIndexCount_ = {}, - const uint32_t * pQueueFamilyIndices_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , imageType( imageType_ ) - , format( format_ ) - , extent( extent_ ) - , mipLevels( mipLevels_ ) - , arrayLayers( arrayLayers_ ) - , samples( samples_ ) - , tiling( tiling_ ) - , usage( usage_ ) - , sharingMode( sharingMode_ ) - , queueFamilyIndexCount( queueFamilyIndexCount_ ) - , pQueueFamilyIndices( pQueueFamilyIndices_ ) - , initialLayout( initialLayout_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , image{ image_ } + , buffer{ buffer_ } { } - VULKAN_HPP_CONSTEXPR ImageCreateInfo( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCreateInfo( *reinterpret_cast( &rhs ) ) {} + VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, - VULKAN_HPP_NAMESPACE::ImageType imageType_, - VULKAN_HPP_NAMESPACE::Format format_, - VULKAN_HPP_NAMESPACE::Extent3D extent_, - uint32_t mipLevels_, - uint32_t arrayLayers_, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_, - VULKAN_HPP_NAMESPACE::ImageTiling tiling_, - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, - VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_, - VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , imageType( imageType_ ) - , format( format_ ) - , extent( extent_ ) - , mipLevels( mipLevels_ ) - , arrayLayers( arrayLayers_ ) - , samples( samples_ ) - , tiling( tiling_ ) - , usage( usage_ ) - , sharingMode( sharingMode_ ) - , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) - , pQueueFamilyIndices( queueFamilyIndices_.data() ) - , initialLayout( initialLayout_ ) + DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DedicatedAllocationMemoryAllocateInfoNV( *reinterpret_cast( &rhs ) ) { } -# endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImageCreateInfo & operator=( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DedicatedAllocationMemoryAllocateInfoNV & operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImageCreateInfo & operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DedicatedAllocationMemoryAllocateInfoNV & operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + image = image_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setImageType( VULKAN_HPP_NAMESPACE::ImageType imageType_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT { - imageType = imageType_; + buffer = buffer_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + operator VkDedicatedAllocationMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - format = format_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + operator VkDedicatedAllocationMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT { - extent = extent_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setMipLevels( uint32_t mipLevels_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - mipLevels = mipLevels_; - return *this; + return std::tie( sType, pNext, image, buffer ); } +#endif - VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setArrayLayers( uint32_t arrayLayers_ ) VULKAN_HPP_NOEXCEPT - { - arrayLayers = arrayLayers_; - return *this; +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DedicatedAllocationMemoryAllocateInfoNV const & ) const = default; +#else + bool operator==( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer ); +# endif } - VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - samples = samples_; - return *this; + return !operator==( rhs ); } +#endif - 
VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; + + template <> + struct CppType + { + using Type = DedicatedAllocationMemoryAllocateInfoNV; + }; + + struct MemoryBarrier2 + { + using NativeType = VkMemoryBarrier2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryBarrier2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcStageMask{ srcStageMask_ } + , srcAccessMask{ srcAccessMask_ } + , dstStageMask{ dstStageMask_ } + , dstAccessMask{ dstAccessMask_ } { - tiling = tiling_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR MemoryBarrier2( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryBarrier2( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryBarrier2( *reinterpret_cast( &rhs ) ) {} + + MemoryBarrier2 & operator=( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryBarrier2 & operator=( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT { - usage = usage_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - sharingMode = sharingMode_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT { - queueFamilyIndexCount = queueFamilyIndexCount_; + srcStageMask = srcStageMask_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT { - pQueueFamilyIndices = pQueueFamilyIndices_; + srcAccessMask = srcAccessMask_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ImageCreateInfo & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT { - queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); - pQueueFamilyIndices = queueFamilyIndices_.data(); + dstStageMask = dstStageMask_; return *this; } -# endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT { - initialLayout = initialLayout_; + dstAccessMask = dstAccessMask_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -23735,768 +26409,768 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, + VULKAN_HPP_NAMESPACE::AccessFlags2 const &, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 const &, + VULKAN_HPP_NAMESPACE::AccessFlags2 const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - flags, - imageType, - format, - extent, - mipLevels, - arrayLayers, - samples, - tiling, - usage, - sharingMode, - queueFamilyIndexCount, - pQueueFamilyIndices, - initialLayout ); + return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageCreateInfo const & ) const = default; + auto operator<=>( MemoryBarrier2 const & ) const = default; #else - bool operator==( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( imageType == rhs.imageType ) && ( format == rhs.format ) && - ( extent == rhs.extent ) && ( mipLevels == rhs.mipLevels ) && ( arrayLayers == rhs.arrayLayers ) && ( samples == rhs.samples ) && - ( tiling == rhs.tiling ) && ( usage == rhs.usage ) && ( sharingMode == rhs.sharingMode ) && - ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) && - ( initialLayout == rhs.initialLayout ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) && + ( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask ); # endif } - bool operator!=( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::Extent3D extent = {}; - uint32_t mipLevels = {}; - uint32_t arrayLayers = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; - VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; - 
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; - VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; - uint32_t queueFamilyIndexCount = {}; - const uint32_t * pQueueFamilyIndices = {}; - VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {}; }; template <> - struct CppType + struct CppType { - using Type = ImageCreateInfo; + using Type = MemoryBarrier2; }; - struct DeviceImageMemoryRequirements + using MemoryBarrier2KHR = MemoryBarrier2; + + struct ImageSubresourceRange { - using NativeType = VkDeviceImageMemoryRequirements; + using NativeType = VkImageSubresourceRange; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceImageMemoryRequirements; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSubresourceRange( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + uint32_t baseMipLevel_ = {}, + uint32_t levelCount_ = {}, + uint32_t baseArrayLayer_ = {}, + uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask{ aspectMask_ } + , baseMipLevel{ baseMipLevel_ } + , levelCount{ levelCount_ } + , baseArrayLayer{ baseArrayLayer_ } + , layerCount{ layerCount_ } + { + } -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - DeviceImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ = {}, - VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pCreateInfo( pCreateInfo_ ) - , planeAspect( planeAspect_ ) + VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageSubresourceRange( *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR DeviceImageMemoryRequirements( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageSubresourceRange & operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DeviceImageMemoryRequirements( VkDeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT - : DeviceImageMemoryRequirements( *reinterpret_cast( &rhs ) ) + ImageSubresourceRange & operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DeviceImageMemoryRequirements & operator=( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + { + aspectMask = aspectMask_; + return *this; + } - DeviceImageMemoryRequirements & operator=( VkDeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & 
setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + baseMipLevel = baseMipLevel_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + levelCount = levelCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPCreateInfo( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT { - pCreateInfo = pCreateInfo_; + baseArrayLayer = baseArrayLayer_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT { - planeAspect = planeAspect_; + layerCount = layerCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDeviceImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT + operator VkImageSubresourceRange const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDeviceImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT + operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pCreateInfo, planeAspect ); + return std::tie( aspectMask, baseMipLevel, levelCount, baseArrayLayer, layerCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceImageMemoryRequirements const & ) const = default; + auto operator<=>( ImageSubresourceRange const & ) const = default; #else - bool operator==( DeviceImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ) && ( planeAspect == rhs.planeAspect ); + return ( aspectMask == rhs.aspectMask ) && ( baseMipLevel == rhs.baseMipLevel ) && ( levelCount == rhs.levelCount ) && + ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount ); # endif } - bool operator!=( DeviceImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceImageMemoryRequirements; - const void * pNext = {}; - const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo = {}; - VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; - }; - - template <> - struct CppType - { - using Type = DeviceImageMemoryRequirements; + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t baseMipLevel = {}; + uint32_t levelCount = {}; + uint32_t baseArrayLayer = {}; + uint32_t 
layerCount = {}; }; - using DeviceImageMemoryRequirementsKHR = DeviceImageMemoryRequirements; - struct DeviceMemoryOpaqueCaptureAddressInfo + struct ImageMemoryBarrier2 { - using NativeType = VkDeviceMemoryOpaqueCaptureAddressInfo; + using NativeType = VkImageMemoryBarrier2; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memory( memory_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t srcQueueFamilyIndex_ = {}, + uint32_t dstQueueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcStageMask{ srcStageMask_ } + , srcAccessMask{ srcAccessMask_ } + , dstStageMask{ dstStageMask_ } + , dstAccessMask{ dstAccessMask_ } + , oldLayout{ oldLayout_ } + , newLayout{ newLayout_ } + , srcQueueFamilyIndex{ srcQueueFamilyIndex_ } + , dstQueueFamilyIndex{ dstQueueFamilyIndex_ } + , image{ image_ } + , subresourceRange{ subresourceRange_ } { } - VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DeviceMemoryOpaqueCaptureAddressInfo( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : DeviceMemoryOpaqueCaptureAddressInfo( *reinterpret_cast( &rhs ) ) + ImageMemoryBarrier2( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageMemoryBarrier2( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DeviceMemoryOpaqueCaptureAddressInfo & operator=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageMemoryBarrier2 & operator=( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DeviceMemoryOpaqueCaptureAddressInfo & operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ImageMemoryBarrier2 & operator=( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = 
pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask_ ) VULKAN_HPP_NOEXCEPT { - memory = memory_; + srcStageMask = srcStageMask_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDeviceMemoryOpaqueCaptureAddressInfo const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + srcAccessMask = srcAccessMask_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, memory ); + dstStageMask = dstStageMask_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceMemoryOpaqueCaptureAddressInfo const & ) const = default; -#else - bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ); -# endif + dstAccessMask = dstAccessMask_; + return *this; } - bool operator!=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + oldLayout = oldLayout_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - }; - - template <> - struct CppType - { - using Type = DeviceMemoryOpaqueCaptureAddressInfo; - }; - using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo; - - struct DeviceMemoryOverallocationCreateInfoAMD - { - using NativeType = VkDeviceMemoryOverallocationCreateInfoAMD; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( - VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , overallocationBehavior( overallocationBehavior_ ) + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT { + newLayout = newLayout_; + return *this; } - VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; 
- - DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT - : DeviceMemoryOverallocationCreateInfoAMD( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - DeviceMemoryOverallocationCreateInfoAMD & operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DeviceMemoryOverallocationCreateInfoAMD & operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + dstQueueFamilyIndex = dstQueueFamilyIndex_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + image = image_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & - setOverallocationBehavior( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & + setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT { - overallocationBehavior = overallocationBehavior_; + subresourceRange = subresourceRange_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDeviceMemoryOverallocationCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT + operator VkImageMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT + operator VkImageMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, overallocationBehavior ); + return std::tie( sType, + pNext, + srcStageMask, + srcAccessMask, + dstStageMask, + dstAccessMask, + oldLayout, + newLayout, + srcQueueFamilyIndex, + dstQueueFamilyIndex, + image, + subresourceRange ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceMemoryOverallocationCreateInfoAMD const & ) const = default; + auto operator<=>( ImageMemoryBarrier2 const & ) const = default; #else - bool operator==( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( overallocationBehavior == rhs.overallocationBehavior ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) && + ( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask ) && ( oldLayout == rhs.oldLayout ) && + ( newLayout == 
rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && + ( image == rhs.image ) && ( subresourceRange == rhs.subresourceRange ); # endif } - bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 srcStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2 srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; }; template <> - struct CppType + struct CppType { - using Type = DeviceMemoryOverallocationCreateInfoAMD; + using Type = ImageMemoryBarrier2; }; - struct DeviceMemoryReportCallbackDataEXT + using ImageMemoryBarrier2KHR = ImageMemoryBarrier2; + + struct DependencyInfo { - using NativeType = VkDeviceMemoryReportCallbackDataEXT; + using NativeType = VkDependencyInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryReportCallbackDataEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDependencyInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( - VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type_ = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate, - uint64_t memoryObjectId_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, - VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, - uint64_t objectHandle_ = {}, - uint32_t heapIndex_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , type( type_ ) - , memoryObjectId( memoryObjectId_ ) - , size( size_ ) - , objectType( objectType_ ) - , objectHandle( objectHandle_ ) - , heapIndex( heapIndex_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DependencyInfo( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, + uint32_t memoryBarrierCount_ = {}, + const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers_ = {}, + uint32_t bufferMemoryBarrierCount_ = {}, + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers_ = {}, + uint32_t imageMemoryBarrierCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dependencyFlags{ dependencyFlags_ } + , memoryBarrierCount{ 
memoryBarrierCount_ } + , pMemoryBarriers{ pMemoryBarriers_ } + , bufferMemoryBarrierCount{ bufferMemoryBarrierCount_ } + , pBufferMemoryBarriers{ pBufferMemoryBarriers_ } + , imageMemoryBarrierCount{ imageMemoryBarrierCount_ } + , pImageMemoryBarriers{ pImageMemoryBarriers_ } { } - VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DependencyInfo( DependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DeviceMemoryReportCallbackDataEXT( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : DeviceMemoryReportCallbackDataEXT( *reinterpret_cast( &rhs ) ) + DependencyInfo( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DependencyInfo( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DependencyInfo( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & memoryBarriers_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferMemoryBarriers_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageMemoryBarriers_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , dependencyFlags( dependencyFlags_ ) + , memoryBarrierCount( static_cast( memoryBarriers_.size() ) ) + , pMemoryBarriers( memoryBarriers_.data() ) + , bufferMemoryBarrierCount( static_cast( bufferMemoryBarriers_.size() ) ) + , pBufferMemoryBarriers( bufferMemoryBarriers_.data() ) + , imageMemoryBarrierCount( static_cast( imageMemoryBarriers_.size() ) ) + , pImageMemoryBarriers( imageMemoryBarriers_.data() ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - DeviceMemoryReportCallbackDataEXT & operator=( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DependencyInfo & operator=( DependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DeviceMemoryReportCallbackDataEXT & operator=( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DependencyInfo & operator=( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkDeviceMemoryReportCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkDeviceMemoryReportCallbackDataEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + dependencyFlags = dependencyFlags_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setMemoryBarrierCount( uint32_t memoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, type, memoryObjectId, size, objectType, objectHandle, heapIndex ); + memoryBarrierCount = memoryBarrierCount_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceMemoryReportCallbackDataEXT const & ) const = default; -#else - bool operator==( 
DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPMemoryBarriers( const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( type == rhs.type ) && ( memoryObjectId == rhs.memoryObjectId ) && - ( size == rhs.size ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) && ( heapIndex == rhs.heapIndex ); -# endif + pMemoryBarriers = pMemoryBarriers_; + return *this; } - bool operator!=( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DependencyInfo & + setMemoryBarriers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & memoryBarriers_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + memoryBarrierCount = static_cast( memoryBarriers_.size() ); + pMemoryBarriers = memoryBarriers_.data(); + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryReportCallbackDataEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {}; - VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate; - uint64_t memoryObjectId = {}; - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; - VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; - uint64_t objectHandle = {}; - uint32_t heapIndex = {}; - }; - - template <> - struct CppType - { - using Type = DeviceMemoryReportCallbackDataEXT; - }; - - struct DevicePrivateDataCreateInfo - { - using NativeType = VkDevicePrivateDataCreateInfo; - - static const bool allowDuplicate = true; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePrivateDataCreateInfo; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo( uint32_t privateDataSlotRequestCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , privateDataSlotRequestCount( privateDataSlotRequestCount_ ) + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setBufferMemoryBarrierCount( uint32_t bufferMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT { + bufferMemoryBarrierCount = bufferMemoryBarrierCount_; + return *this; } - VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - DevicePrivateDataCreateInfo( VkDevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : DevicePrivateDataCreateInfo( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & + setPBufferMemoryBarriers( const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT { + pBufferMemoryBarriers = pBufferMemoryBarriers_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DevicePrivateDataCreateInfo & operator=( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DependencyInfo & setBufferMemoryBarriers( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT + { + bufferMemoryBarrierCount = static_cast( bufferMemoryBarriers_.size() ); + pBufferMemoryBarriers = bufferMemoryBarriers_.data(); + 
return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - DevicePrivateDataCreateInfo & operator=( VkDevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setImageMemoryBarrierCount( uint32_t imageMemoryBarrierCount_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + imageMemoryBarrierCount = imageMemoryBarrierCount_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DependencyInfo & + setPImageMemoryBarriers( const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + pImageMemoryBarriers = pImageMemoryBarriers_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo & setPrivateDataSlotRequestCount( uint32_t privateDataSlotRequestCount_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DependencyInfo & setImageMemoryBarriers( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT { - privateDataSlotRequestCount = privateDataSlotRequestCount_; + imageMemoryBarrierCount = static_cast( imageMemoryBarriers_.size() ); + pImageMemoryBarriers = imageMemoryBarriers_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDevicePrivateDataCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkDependencyInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDevicePrivateDataCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkDependencyInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, privateDataSlotRequestCount ); + return std::tie( sType, + pNext, + dependencyFlags, + memoryBarrierCount, + pMemoryBarriers, + bufferMemoryBarrierCount, + pBufferMemoryBarriers, + imageMemoryBarrierCount, + pImageMemoryBarriers ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DevicePrivateDataCreateInfo const & ) const = default; + auto operator<=>( DependencyInfo const & ) const = default; #else - bool operator==( DevicePrivateDataCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DependencyInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( privateDataSlotRequestCount == rhs.privateDataSlotRequestCount ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dependencyFlags == rhs.dependencyFlags ) && + ( memoryBarrierCount == rhs.memoryBarrierCount ) && ( pMemoryBarriers == rhs.pMemoryBarriers ) && + ( bufferMemoryBarrierCount == rhs.bufferMemoryBarrierCount ) && ( pBufferMemoryBarriers == rhs.pBufferMemoryBarriers ) && + ( imageMemoryBarrierCount == rhs.imageMemoryBarrierCount ) && ( pImageMemoryBarriers == rhs.pImageMemoryBarriers ); # endif } - bool operator!=( DevicePrivateDataCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DependencyInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: 
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDevicePrivateDataCreateInfo; - const void * pNext = {}; - uint32_t privateDataSlotRequestCount = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDependencyInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; + uint32_t memoryBarrierCount = {}; + const VULKAN_HPP_NAMESPACE::MemoryBarrier2 * pMemoryBarriers = {}; + uint32_t bufferMemoryBarrierCount = {}; + const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 * pBufferMemoryBarriers = {}; + uint32_t imageMemoryBarrierCount = {}; + const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers = {}; }; template <> - struct CppType + struct CppType { - using Type = DevicePrivateDataCreateInfo; + using Type = DependencyInfo; }; - using DevicePrivateDataCreateInfoEXT = DevicePrivateDataCreateInfo; - struct DeviceQueueGlobalPriorityCreateInfoKHR + using DependencyInfoKHR = DependencyInfo; + + struct DepthBiasInfoEXT { - using NativeType = VkDeviceQueueGlobalPriorityCreateInfoKHR; + using NativeType = VkDepthBiasInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDepthBiasInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - DeviceQueueGlobalPriorityCreateInfoKHR( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , globalPriority( globalPriority_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DepthBiasInfoEXT( float depthBiasConstantFactor_ = {}, + float depthBiasClamp_ = {}, + float depthBiasSlopeFactor_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , depthBiasConstantFactor{ depthBiasConstantFactor_ } + , depthBiasClamp{ depthBiasClamp_ } + , depthBiasSlopeFactor{ depthBiasSlopeFactor_ } { } - VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoKHR( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DepthBiasInfoEXT( DepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DepthBiasInfoEXT( VkDepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DepthBiasInfoEXT( *reinterpret_cast( &rhs ) ) {} + + DepthBiasInfoEXT & operator=( DepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DeviceQueueGlobalPriorityCreateInfoKHR( VkDeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : DeviceQueueGlobalPriorityCreateInfoKHR( *reinterpret_cast( &rhs ) ) + DepthBiasInfoEXT & operator=( VkDepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DeviceQueueGlobalPriorityCreateInfoKHR & operator=( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } - DeviceQueueGlobalPriorityCreateInfoKHR & operator=( VkDeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + 
VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + depthBiasConstantFactor = depthBiasConstantFactor_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + depthBiasClamp = depthBiasClamp_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfoKHR & - setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT { - globalPriority = globalPriority_; + depthBiasSlopeFactor = depthBiasSlopeFactor_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDeviceQueueGlobalPriorityCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkDepthBiasInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDeviceQueueGlobalPriorityCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkDepthBiasInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, globalPriority ); + return std::tie( sType, pNext, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceQueueGlobalPriorityCreateInfoKHR const & ) const = default; + auto operator<=>( DepthBiasInfoEXT const & ) const = default; #else - bool operator==( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DepthBiasInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriority == rhs.globalPriority ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor ) && + ( depthBiasClamp == rhs.depthBiasClamp ) && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor ); # endif } - bool operator!=( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DepthBiasInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDepthBiasInfoEXT; + const void * pNext = {}; + float depthBiasConstantFactor = {}; + float depthBiasClamp = {}; + float depthBiasSlopeFactor = {}; }; template <> - struct CppType + struct CppType { - using Type = DeviceQueueGlobalPriorityCreateInfoKHR; + using Type = DepthBiasInfoEXT; }; - using DeviceQueueGlobalPriorityCreateInfoEXT = DeviceQueueGlobalPriorityCreateInfoKHR; - struct DeviceQueueInfo2 + struct 
DepthBiasRepresentationInfoEXT { - using NativeType = VkDeviceQueueInfo2; + using NativeType = VkDepthBiasRepresentationInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueInfo2; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDepthBiasRepresentationInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, - uint32_t queueFamilyIndex_ = {}, - uint32_t queueIndex_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , queueFamilyIndex( queueFamilyIndex_ ) - , queueIndex( queueIndex_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DepthBiasRepresentationInfoEXT( VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT depthBiasRepresentation_ = + VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT::eLeastRepresentableValueFormat, + VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , depthBiasRepresentation{ depthBiasRepresentation_ } + , depthBiasExact{ depthBiasExact_ } { } - VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceQueueInfo2( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - DeviceQueueInfo2 & operator=( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DepthBiasRepresentationInfoEXT( DepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DeviceQueueInfo2 & operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + DepthBiasRepresentationInfoEXT( VkDepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DepthBiasRepresentationInfoEXT( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + DepthBiasRepresentationInfoEXT & operator=( DepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DepthBiasRepresentationInfoEXT & operator=( VkDepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DepthBiasRepresentationInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DepthBiasRepresentationInfoEXT & + setDepthBiasRepresentation( VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT depthBiasRepresentation_ ) VULKAN_HPP_NOEXCEPT { - queueFamilyIndex = queueFamilyIndex_; + depthBiasRepresentation = depthBiasRepresentation_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DepthBiasRepresentationInfoEXT 
& setDepthBiasExact( VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact_ ) VULKAN_HPP_NOEXCEPT { - queueIndex = queueIndex_; + depthBiasExact = depthBiasExact_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDeviceQueueInfo2 const &() const VULKAN_HPP_NOEXCEPT + operator VkDepthBiasRepresentationInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT + operator VkDepthBiasRepresentationInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -24505,1763 +27179,1801 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, queueFamilyIndex, queueIndex ); + return std::tie( sType, pNext, depthBiasRepresentation, depthBiasExact ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceQueueInfo2 const & ) const = default; + auto operator<=>( DepthBiasRepresentationInfoEXT const & ) const = default; #else - bool operator==( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DepthBiasRepresentationInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && - ( queueIndex == rhs.queueIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthBiasRepresentation == rhs.depthBiasRepresentation ) && + ( depthBiasExact == rhs.depthBiasExact ); # endif } - bool operator!=( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DepthBiasRepresentationInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {}; - uint32_t queueFamilyIndex = {}; - uint32_t queueIndex = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDepthBiasRepresentationInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT depthBiasRepresentation = VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT::eLeastRepresentableValueFormat; + VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact = {}; }; template <> - struct CppType + struct CppType { - using Type = DeviceQueueInfo2; + using Type = DepthBiasRepresentationInfoEXT; }; -#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) - struct DirectFBSurfaceCreateInfoEXT + struct DepthClampRangeEXT { - using NativeType = VkDirectFBSurfaceCreateInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectfbSurfaceCreateInfoEXT; - -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ = {}, - IDirectFB * dfb_ = {}, - IDirectFBSurface * surface_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , dfb( dfb_ ) - , surface( surface_ ) - { - } - - VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( 
DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + using NativeType = VkDepthClampRangeEXT; - DirectFBSurfaceCreateInfoEXT( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : DirectFBSurfaceCreateInfoEXT( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DepthClampRangeEXT( float minDepthClamp_ = {}, float maxDepthClamp_ = {} ) VULKAN_HPP_NOEXCEPT + : minDepthClamp{ minDepthClamp_ } + , maxDepthClamp{ maxDepthClamp_ } { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DirectFBSurfaceCreateInfoEXT & operator=( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DepthClampRangeEXT( DepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DirectFBSurfaceCreateInfoEXT & operator=( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + DepthClampRangeEXT( VkDepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DepthClampRangeEXT( *reinterpret_cast( &rhs ) ) {} -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + DepthClampRangeEXT & operator=( DepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + DepthClampRangeEXT & operator=( VkDepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setDfb( IDirectFB * dfb_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DepthClampRangeEXT & setMinDepthClamp( float minDepthClamp_ ) VULKAN_HPP_NOEXCEPT { - dfb = dfb_; + minDepthClamp = minDepthClamp_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setSurface( IDirectFBSurface * surface_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DepthClampRangeEXT & setMaxDepthClamp( float maxDepthClamp_ ) VULKAN_HPP_NOEXCEPT { - surface = surface_; + maxDepthClamp = maxDepthClamp_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDirectFBSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkDepthClampRangeEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDirectFBSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkDepthClampRangeEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, dfb, surface ); + return std::tie( minDepthClamp, maxDepthClamp ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DirectFBSurfaceCreateInfoEXT const & ) const = default; -# else - bool operator==( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DepthClampRangeEXT const & ) const = default; +#else + bool operator==( DepthClampRangeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dfb == rhs.dfb ) && ( surface == rhs.surface ); -# endif +# else + return ( minDepthClamp == rhs.minDepthClamp ) && ( maxDepthClamp == rhs.maxDepthClamp ); +# endif } - bool operator!=( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DepthClampRangeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectfbSurfaceCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags = {}; - IDirectFB * dfb = {}; - IDirectFBSurface * surface = {}; + float minDepthClamp = {}; + float maxDepthClamp = {}; }; - template <> - struct CppType + struct DescriptorAddressInfoEXT { - using Type = DirectFBSurfaceCreateInfoEXT; - }; -#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + using NativeType = VkDescriptorAddressInfoEXT; - struct DispatchIndirectCommand - { - using NativeType = VkDispatchIndirectCommand; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorAddressInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( uint32_t x_ = {}, uint32_t y_ = {}, uint32_t z_ = {} ) VULKAN_HPP_NOEXCEPT - : x( x_ ) - , y( y_ ) - , z( z_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorAddressInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress address_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , address{ address_ } + , range{ range_ } + , format{ format_ } { } - VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DescriptorAddressInfoEXT( DescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT - : DispatchIndirectCommand( *reinterpret_cast( &rhs ) ) + DescriptorAddressInfoEXT( VkDescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorAddressInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DispatchIndirectCommand & operator=( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DescriptorAddressInfoEXT & operator=( DescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DispatchIndirectCommand & operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorAddressInfoEXT & operator=( VkDescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setX( uint32_t x_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - x = x_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setY( uint32_t y_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setAddress( VULKAN_HPP_NAMESPACE::DeviceAddress address_ ) VULKAN_HPP_NOEXCEPT { - y = y_; + address = address_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setZ( uint32_t z_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT { - z = z_; + range = range_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDispatchIndirectCommand const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + format = format_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT + operator VkDescriptorAddressInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkDescriptorAddressInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( x, y, z ); + return std::tie( sType, pNext, address, range, format ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DispatchIndirectCommand const & ) const = default; + auto operator<=>( DescriptorAddressInfoEXT const & ) const = default; #else - bool operator==( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( address == rhs.address ) && ( range == rhs.range ) && ( format == rhs.format ); # endif } - bool operator!=( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t x = {}; - uint32_t y = {}; - uint32_t z = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorAddressInfoEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress address = {}; + VULKAN_HPP_NAMESPACE::DeviceSize range = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; }; - struct DisplayEventInfoEXT + template <> + struct CppType { - using NativeType = VkDisplayEventInfoEXT; + using Type = DescriptorAddressInfoEXT; + }; + + struct DescriptorBufferBindingInfoEXT + { + using NativeType = VkDescriptorBufferBindingInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayEventInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorBufferBindingInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - DisplayEventInfoEXT( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = 
VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , displayEvent( displayEvent_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorBufferBindingInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress address_ = {}, + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , address{ address_ } + , usage{ usage_ } { } - VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DescriptorBufferBindingInfoEXT( DescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayEventInfoEXT( *reinterpret_cast( &rhs ) ) + DescriptorBufferBindingInfoEXT( VkDescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorBufferBindingInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DisplayEventInfoEXT & operator=( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DescriptorBufferBindingInfoEXT & operator=( DescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DisplayEventInfoEXT & operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorBufferBindingInfoEXT & operator=( VkDescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT & setAddress( VULKAN_HPP_NAMESPACE::DeviceAddress address_ ) VULKAN_HPP_NOEXCEPT { - displayEvent = displayEvent_; + address = address_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDisplayEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + usage = usage_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkDescriptorBufferBindingInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkDescriptorBufferBindingInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, displayEvent ); + return std::tie( sType, pNext, address, usage ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DisplayEventInfoEXT const & ) const = default; + auto operator<=>( DescriptorBufferBindingInfoEXT const & ) const = default; 
#else - bool operator==( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorBufferBindingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayEvent == rhs.displayEvent ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( address == rhs.address ) && ( usage == rhs.usage ); # endif } - bool operator!=( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorBufferBindingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayEventInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorBufferBindingInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress address = {}; + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {}; }; template <> - struct CppType + struct CppType { - using Type = DisplayEventInfoEXT; + using Type = DescriptorBufferBindingInfoEXT; }; - struct DisplayModeParametersKHR + struct DescriptorBufferBindingPushDescriptorBufferHandleEXT { - using NativeType = VkDisplayModeParametersKHR; + using NativeType = VkDescriptorBufferBindingPushDescriptorBufferHandleEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {}, uint32_t refreshRate_ = {} ) VULKAN_HPP_NOEXCEPT - : visibleRegion( visibleRegion_ ) - , refreshRate( refreshRate_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorBufferBindingPushDescriptorBufferHandleEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorBufferBindingPushDescriptorBufferHandleEXT( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , buffer{ buffer_ } { } - VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + DescriptorBufferBindingPushDescriptorBufferHandleEXT( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : DisplayModeParametersKHR( *reinterpret_cast( &rhs ) ) + DescriptorBufferBindingPushDescriptorBufferHandleEXT( VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorBufferBindingPushDescriptorBufferHandleEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DisplayModeParametersKHR & operator=( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DescriptorBufferBindingPushDescriptorBufferHandleEXT & + operator=( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DisplayModeParametersKHR & operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorBufferBindingPushDescriptorBufferHandleEXT & operator=( VkDescriptorBufferBindingPushDescriptorBufferHandleEXT 
const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D const & visibleRegion_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingPushDescriptorBufferHandleEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - visibleRegion = visibleRegion_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingPushDescriptorBufferHandleEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT { - refreshRate = refreshRate_; + buffer = buffer_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDisplayModeParametersKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT + operator VkDescriptorBufferBindingPushDescriptorBufferHandleEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( visibleRegion, refreshRate ); + return std::tie( sType, pNext, buffer ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DisplayModeParametersKHR const & ) const = default; + auto operator<=>( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & ) const = default; #else - bool operator==( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( visibleRegion == rhs.visibleRegion ) && ( refreshRate == rhs.refreshRate ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); # endif } - bool operator!=( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {}; - uint32_t refreshRate = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorBufferBindingPushDescriptorBufferHandleEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; }; - struct DisplayModeCreateInfoKHR + template <> + struct CppType { - using NativeType = VkDisplayModeCreateInfoKHR; + using Type = DescriptorBufferBindingPushDescriptorBufferHandleEXT; + }; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeCreateInfoKHR; + struct DescriptorBufferInfo + { + using NativeType = VkDescriptorBufferInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {}, - 
VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , parameters( parameters_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer{ buffer_ } + , offset{ offset_ } + , range{ range_ } { } - VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : DisplayModeCreateInfoKHR( *reinterpret_cast( &rhs ) ) + DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorBufferInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DisplayModeCreateInfoKHR & operator=( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DescriptorBufferInfo & operator=( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DisplayModeCreateInfoKHR & operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorBufferInfo & operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + buffer = buffer_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + offset = offset_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & parameters_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT { - parameters = parameters_; + range = range_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDisplayModeCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorBufferInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, parameters ); + return std::tie( buffer, offset, range ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( 
DisplayModeCreateInfoKHR const & ) const = default; + auto operator<=>( DescriptorBufferInfo const & ) const = default; #else - bool operator==( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( parameters == rhs.parameters ); + return ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( range == rhs.range ); # endif } - bool operator!=( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {}; - VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; - }; - - template <> - struct CppType - { - using Type = DisplayModeCreateInfoKHR; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize range = {}; }; - struct DisplayModePropertiesKHR + struct DescriptorImageInfo { - using NativeType = VkDisplayModePropertiesKHR; + using NativeType = VkDescriptorImageInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, - VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT - : displayMode( displayMode_ ) - , parameters( parameters_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DescriptorImageInfo( VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, + VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT + : sampler{ sampler_ } + , imageView{ imageView_ } + , imageLayout{ imageLayout_ } { } - VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : DisplayModePropertiesKHR( *reinterpret_cast( &rhs ) ) + DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorImageInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DisplayModePropertiesKHR & operator=( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DescriptorImageInfo & operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DisplayModePropertiesKHR & operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorImageInfo & operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkDisplayModePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT { - 
return *reinterpret_cast( this ); + sampler = sampler_; + return *this; } - operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + imageView = imageView_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT + { + imageLayout = imageLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorImageInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( displayMode, parameters ); + return std::tie( sampler, imageView, imageLayout ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DisplayModePropertiesKHR const & ) const = default; + auto operator<=>( DescriptorImageInfo const & ) const = default; #else - bool operator==( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( displayMode == rhs.displayMode ) && ( parameters == rhs.parameters ); + return ( sampler == rhs.sampler ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout ); # endif } - bool operator!=( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; - VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; + VULKAN_HPP_NAMESPACE::Sampler sampler = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; }; - struct DisplayModeProperties2KHR + union DescriptorDataEXT { - using NativeType = VkDisplayModeProperties2KHR; + using NativeType = VkDescriptorDataEXT; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeProperties2KHR; + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( const VULKAN_HPP_NAMESPACE::Sampler * pSampler_ = {} ) : pSampler( pSampler_ ) {} -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , displayModeProperties( displayModeProperties_ ) + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pDescriptorImageInfo_ ) + : pCombinedImageSampler( pDescriptorImageInfo_ ) { } - VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pDescriptorAddressInfoEXT_ ) + : pUniformTexelBuffer( pDescriptorAddressInfoEXT_ ) + { + } - 
DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - : DisplayModeProperties2KHR( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure_ ) : accelerationStructure( accelerationStructure_ ) {} +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPSampler( const VULKAN_HPP_NAMESPACE::Sampler * pSampler_ ) VULKAN_HPP_NOEXCEPT { + pSampler = pSampler_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DisplayModeProperties2KHR & operator=( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & + setPCombinedImageSampler( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pCombinedImageSampler_ ) VULKAN_HPP_NOEXCEPT + { + pCombinedImageSampler = pCombinedImageSampler_; + return *this; + } - DisplayModeProperties2KHR & operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & + setPInputAttachmentImage( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pInputAttachmentImage_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + pInputAttachmentImage = pInputAttachmentImage_; return *this; } - operator VkDisplayModeProperties2KHR const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPSampledImage( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pSampledImage_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pSampledImage = pSampledImage_; + return *this; } - operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPStorageImage( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pStorageImage_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pStorageImage = pStorageImage_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & + setPUniformTexelBuffer( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pUniformTexelBuffer_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, displayModeProperties ); + pUniformTexelBuffer = pUniformTexelBuffer_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DisplayModeProperties2KHR const & ) const = default; -#else - bool operator==( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & + setPStorageTexelBuffer( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pStorageTexelBuffer_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayModeProperties == rhs.displayModeProperties ); -# endif + pStorageTexelBuffer = pStorageTexelBuffer_; + return *this; } - bool operator!=( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPUniformBuffer( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pUniformBuffer_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + pUniformBuffer = pUniformBuffer_; + return *this; } -#endif - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeProperties2KHR; 
- void * pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {}; - }; + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPStorageBuffer( const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pStorageBuffer_ ) VULKAN_HPP_NOEXCEPT + { + pStorageBuffer = pStorageBuffer_; + return *this; + } - template <> - struct CppType - { - using Type = DisplayModeProperties2KHR; + VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setAccelerationStructure( VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorDataEXT const &() const + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorDataEXT &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + const VULKAN_HPP_NAMESPACE::Sampler * pSampler; + const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pCombinedImageSampler; + const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pInputAttachmentImage; + const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pSampledImage; + const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pStorageImage; + const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pUniformTexelBuffer; + const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pStorageTexelBuffer; + const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pUniformBuffer; + const VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT * pStorageBuffer; + VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure; +#else + const VkSampler * pSampler; + const VkDescriptorImageInfo * pCombinedImageSampler; + const VkDescriptorImageInfo * pInputAttachmentImage; + const VkDescriptorImageInfo * pSampledImage; + const VkDescriptorImageInfo * pStorageImage; + const VkDescriptorAddressInfoEXT * pUniformTexelBuffer; + const VkDescriptorAddressInfoEXT * pStorageTexelBuffer; + const VkDescriptorAddressInfoEXT * pUniformBuffer; + const VkDescriptorAddressInfoEXT * pStorageBuffer; + VkDeviceAddress accelerationStructure; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; - struct DisplayNativeHdrSurfaceCapabilitiesAMD + struct DescriptorGetInfoEXT { - using NativeType = VkDisplayNativeHdrSurfaceCapabilitiesAMD; + using NativeType = VkDescriptorGetInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorGetInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , localDimmingSupport( localDimmingSupport_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + VULKAN_HPP_NAMESPACE::DescriptorDataEXT data_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + , data{ data_ } { } - VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT( DescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - 
DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT - : DisplayNativeHdrSurfaceCapabilitiesAMD( *reinterpret_cast( &rhs ) ) + DescriptorGetInfoEXT( VkDescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorGetInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DescriptorGetInfoEXT & operator=( DescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorGetInfoEXT & operator=( VkDescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT & setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + type = type_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT & setData( VULKAN_HPP_NAMESPACE::DescriptorDataEXT const & data_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, localDimmingSupport ); + data = data_; + return *this; } -#endif +#endif /*VULKAN_HPP_NO_SETTERS*/ -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DisplayNativeHdrSurfaceCapabilitiesAMD const & ) const = default; -#else - bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + operator VkDescriptorGetInfoEXT const &() const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( localDimmingSupport == rhs.localDimmingSupport ); -# endif + return *reinterpret_cast( this ); } - bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + operator VkDescriptorGetInfoEXT &() VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type, data ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorGetInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + VULKAN_HPP_NAMESPACE::DescriptorDataEXT data = {}; }; template <> - struct CppType + struct CppType { - using Type = DisplayNativeHdrSurfaceCapabilitiesAMD; + 
using Type = DescriptorGetInfoEXT; }; - struct DisplayPlaneCapabilitiesKHR + struct DescriptorPoolSize { - using NativeType = VkDisplayPlaneCapabilitiesKHR; + using NativeType = VkDescriptorPoolSize; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {}, - VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {}, - VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {}, - VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {}, - VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {} ) VULKAN_HPP_NOEXCEPT - : supportedAlpha( supportedAlpha_ ) - , minSrcPosition( minSrcPosition_ ) - , maxSrcPosition( maxSrcPosition_ ) - , minSrcExtent( minSrcExtent_ ) - , maxSrcExtent( maxSrcExtent_ ) - , minDstPosition( minDstPosition_ ) - , maxDstPosition( maxDstPosition_ ) - , minDstExtent( minDstExtent_ ) - , maxDstExtent( maxDstExtent_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorPoolSize( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT + : type{ type_ } + , descriptorCount{ descriptorCount_ } { } - VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : DisplayPlaneCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorPoolSize( *reinterpret_cast( &rhs ) ) {} + + DescriptorPoolSize & operator=( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorPoolSize & operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DisplayPlaneCapabilitiesKHR & operator=( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } - DisplayPlaneCapabilitiesKHR & operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + descriptorCount = descriptorCount_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDisplayPlaneCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorPoolSize const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # 
else
-    std::tuple<VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR const &, VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Offset2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
+    std::tuple<VULKAN_HPP_NAMESPACE::DescriptorType const &, uint32_t const &>
 # endif
       reflect() const VULKAN_HPP_NOEXCEPT
     {
-      return std::tie( supportedAlpha, minSrcPosition, maxSrcPosition, minSrcExtent, maxSrcExtent, minDstPosition, maxDstPosition, minDstExtent, maxDstExtent );
+      return std::tie( type, descriptorCount );
     }
 #endif
 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
-    auto operator<=>( DisplayPlaneCapabilitiesKHR const & ) const = default;
+    auto operator<=>( DescriptorPoolSize const & ) const = default;
 #else
-    bool operator==( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
 # if defined( VULKAN_HPP_USE_REFLECT )
       return this->reflect() == rhs.reflect();
 # else
-      return ( supportedAlpha == rhs.supportedAlpha ) && ( minSrcPosition == rhs.minSrcPosition ) && ( maxSrcPosition == rhs.maxSrcPosition ) &&
-             ( minSrcExtent == rhs.minSrcExtent ) && ( maxSrcExtent == rhs.maxSrcExtent ) && ( minDstPosition == rhs.minDstPosition ) &&
-             ( maxDstPosition == rhs.maxDstPosition ) && ( minDstExtent == rhs.minDstExtent ) && ( maxDstExtent == rhs.maxDstExtent );
+      return ( type == rhs.type ) && ( descriptorCount == rhs.descriptorCount );
 # endif
     }
-    bool operator!=( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
   public:
-    VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha = {};
-    VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition = {};
-    VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition = {};
-    VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent = {};
-    VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent = {};
-    VULKAN_HPP_NAMESPACE::Offset2D minDstPosition = {};
-    VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition = {};
-    VULKAN_HPP_NAMESPACE::Extent2D minDstExtent = {};
-    VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent = {};
+    VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    uint32_t descriptorCount = {};
   };
-  struct DisplayPlaneCapabilities2KHR
+  struct DescriptorPoolCreateInfo
   {
-    using NativeType = VkDisplayPlaneCapabilities2KHR;
+    using NativeType = VkDescriptorPoolCreateInfo;
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneCapabilities2KHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolCreateInfo;
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {},
-                                                       void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
-      : pNext( pNext_ )
-      , capabilities( capabilities_ )
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {},
+                                                   uint32_t maxSets_ = {},
+                                                   uint32_t poolSizeCount_ = {},
+                                                   const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ = {},
+                                                   const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , flags{ flags_ }
+      , maxSets{ maxSets_ }
+      , poolSizeCount{ poolSizeCount_ }
+      , pPoolSizes{ pPoolSizes_ }
     {
     }
-    VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - : DisplayPlaneCapabilities2KHR( *reinterpret_cast( &rhs ) ) + DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorPoolCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DisplayPlaneCapabilities2KHR & operator=( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_, + uint32_t maxSets_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & poolSizes_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( static_cast( poolSizes_.size() ) ), pPoolSizes( poolSizes_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - DisplayPlaneCapabilities2KHR & operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorPoolCreateInfo & operator=( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorPoolCreateInfo & operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkDisplayPlaneCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setMaxSets( uint32_t maxSets_ ) VULKAN_HPP_NOEXCEPT + { + maxSets = maxSets_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPoolSizeCount( uint32_t poolSizeCount_ ) VULKAN_HPP_NOEXCEPT + { + poolSizeCount = poolSizeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ ) VULKAN_HPP_NOEXCEPT + { + pPoolSizes = pPoolSizes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorPoolCreateInfo & + setPoolSizes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & poolSizes_ ) VULKAN_HPP_NOEXCEPT + { + poolSizeCount = static_cast( poolSizes_.size() ); + pPoolSizes = poolSizes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDescriptorPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, capabilities ); + return std::tie( sType, pNext, flags, maxSets, poolSizeCount, pPoolSizes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DisplayPlaneCapabilities2KHR const & ) const = default; 
+ auto operator<=>( DescriptorPoolCreateInfo const & ) const = default; #else - bool operator==( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( capabilities == rhs.capabilities ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( maxSets == rhs.maxSets ) && + ( poolSizeCount == rhs.poolSizeCount ) && ( pPoolSizes == rhs.pPoolSizes ); # endif } - bool operator!=( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags = {}; + uint32_t maxSets = {}; + uint32_t poolSizeCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes = {}; }; template <> - struct CppType + struct CppType { - using Type = DisplayPlaneCapabilities2KHR; + using Type = DescriptorPoolCreateInfo; }; - struct DisplayPlaneInfo2KHR + struct DescriptorPoolInlineUniformBlockCreateInfo { - using NativeType = VkDisplayPlaneInfo2KHR; + using NativeType = VkDescriptorPoolInlineUniformBlockCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneInfo2KHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - DisplayPlaneInfo2KHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {}, uint32_t planeIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , mode( mode_ ) - , planeIndex( planeIndex_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo( uint32_t maxInlineUniformBlockBindings_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxInlineUniformBlockBindings{ maxInlineUniformBlockBindings_ } { } - VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - : DisplayPlaneInfo2KHR( *reinterpret_cast( &rhs ) ) + DescriptorPoolInlineUniformBlockCreateInfo( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorPoolInlineUniformBlockCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DisplayPlaneInfo2KHR & operator=( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DescriptorPoolInlineUniformBlockCreateInfo & operator=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - 
DisplayPlaneInfo2KHR & operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorPoolInlineUniformBlockCreateInfo & operator=( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT - { - mode = mode_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo & + setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT { - planeIndex = planeIndex_; + maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDisplayPlaneInfo2KHR const &() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorPoolInlineUniformBlockCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT + operator VkDescriptorPoolInlineUniformBlockCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, mode, planeIndex ); + return std::tie( sType, pNext, maxInlineUniformBlockBindings ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DisplayPlaneInfo2KHR const & ) const = default; + auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfo const & ) const = default; #else - bool operator==( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == rhs.mode ) && ( planeIndex == rhs.planeIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings ); # endif } - bool operator!=( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {}; - uint32_t planeIndex = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo; + const void * pNext = {}; + uint32_t maxInlineUniformBlockBindings = {}; }; template <> - struct CppType + struct CppType { - using Type = DisplayPlaneInfo2KHR; + using Type = 
DescriptorPoolInlineUniformBlockCreateInfo; }; - struct DisplayPlanePropertiesKHR + using DescriptorPoolInlineUniformBlockCreateInfoEXT = DescriptorPoolInlineUniformBlockCreateInfo; + + struct DescriptorSetAllocateInfo { - using NativeType = VkDisplayPlanePropertiesKHR; + using NativeType = VkDescriptorSetAllocateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {}, - uint32_t currentStackIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : currentDisplay( currentDisplay_ ) - , currentStackIndex( currentStackIndex_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetAllocateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {}, + uint32_t descriptorSetCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , descriptorPool{ descriptorPool_ } + , descriptorSetCount{ descriptorSetCount_ } + , pSetLayouts{ pSetLayouts_ } { } - VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : DisplayPlanePropertiesKHR( *reinterpret_cast( &rhs ) ) + DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetAllocateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DisplayPlanePropertiesKHR & operator=( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), descriptorPool( descriptorPool_ ), descriptorSetCount( static_cast( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - DisplayPlanePropertiesKHR & operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorSetAllocateInfo & operator=( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorSetAllocateInfo & operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkDisplayPlanePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + descriptorPool = descriptorPool_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# 
if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( currentDisplay, currentStackIndex ); + descriptorSetCount = descriptorSetCount_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DisplayPlanePropertiesKHR const & ) const = default; -#else - bool operator==( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( currentDisplay == rhs.currentDisplay ) && ( currentStackIndex == rhs.currentStackIndex ); -# endif - } - - bool operator!=( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay = {}; - uint32_t currentStackIndex = {}; - }; - - struct DisplayPlaneProperties2KHR - { - using NativeType = VkDisplayPlaneProperties2KHR; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneProperties2KHR; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , displayPlaneProperties( displayPlaneProperties_ ) - { - } - - VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - : DisplayPlaneProperties2KHR( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT { + pSetLayouts = pSetLayouts_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DisplayPlaneProperties2KHR & operator=( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - DisplayPlaneProperties2KHR & operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetAllocateInfo & + setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + descriptorSetCount = static_cast( setLayouts_.size() ); + pSetLayouts = setLayouts_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDisplayPlaneProperties2KHR const &() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, displayPlaneProperties ); + return std::tie( sType, pNext, descriptorPool, descriptorSetCount, pSetLayouts ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto 
operator<=>( DisplayPlaneProperties2KHR const & ) const = default; + auto operator<=>( DescriptorSetAllocateInfo const & ) const = default; #else - bool operator==( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayPlaneProperties == rhs.displayPlaneProperties ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorPool == rhs.descriptorPool ) && ( descriptorSetCount == rhs.descriptorSetCount ) && + ( pSetLayouts == rhs.pSetLayouts ); # endif } - bool operator!=( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool = {}; + uint32_t descriptorSetCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; }; template <> - struct CppType + struct CppType { - using Type = DisplayPlaneProperties2KHR; + using Type = DescriptorSetAllocateInfo; }; - struct DisplayPowerInfoEXT + struct DescriptorSetBindingReferenceVALVE { - using NativeType = VkDisplayPowerInfoEXT; + using NativeType = VkDescriptorSetBindingReferenceVALVE; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPowerInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetBindingReferenceVALVE; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , powerState( powerState_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetBindingReferenceVALVE( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, + uint32_t binding_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , descriptorSetLayout{ descriptorSetLayout_ } + , binding{ binding_ } { } - VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DescriptorSetBindingReferenceVALVE( DescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayPowerInfoEXT( *reinterpret_cast( &rhs ) ) + DescriptorSetBindingReferenceVALVE( VkDescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetBindingReferenceVALVE( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DisplayPowerInfoEXT & operator=( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DescriptorSetBindingReferenceVALVE & operator=( DescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif 
/*VULKAN_HPP_NO_CONSTRUCTORS*/ - DisplayPowerInfoEXT & operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorSetBindingReferenceVALVE & operator=( VkDescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & + setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT { - powerState = powerState_; + descriptorSetLayout = descriptorSetLayout_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDisplayPowerInfoEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + binding = binding_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetBindingReferenceVALVE const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetBindingReferenceVALVE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, powerState ); + return std::tie( sType, pNext, descriptorSetLayout, binding ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DisplayPowerInfoEXT const & ) const = default; + auto operator<=>( DescriptorSetBindingReferenceVALVE const & ) const = default; #else - bool operator==( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorSetBindingReferenceVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( powerState == rhs.powerState ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetLayout == rhs.descriptorSetLayout ) && ( binding == rhs.binding ); # endif } - bool operator!=( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSetBindingReferenceVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetBindingReferenceVALVE; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {}; + uint32_t binding = {}; }; template <> - struct CppType + struct CppType { - using Type = DisplayPowerInfoEXT; + using Type = 
DescriptorSetBindingReferenceVALVE; }; - struct DisplayPresentInfoKHR + struct DescriptorSetLayoutBinding { - using NativeType = VkDisplayPresentInfoKHR; + using NativeType = VkDescriptorSetLayoutBinding; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPresentInfoKHR; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( uint32_t binding_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + uint32_t descriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ = {} ) VULKAN_HPP_NOEXCEPT + : binding{ binding_ } + , descriptorType{ descriptorType_ } + , descriptorCount{ descriptorCount_ } + , stageFlags{ stageFlags_ } + , pImmutableSamplers{ pImmutableSamplers_ } + { + } -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {}, - VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcRect( srcRect_ ) - , dstRect( dstRect_ ) - , persistent( persistent_ ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetLayoutBinding( *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBinding( uint32_t binding_, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & immutableSamplers_ ) + : binding( binding_ ) + , descriptorType( descriptorType_ ) + , descriptorCount( static_cast( immutableSamplers_.size() ) ) + , stageFlags( stageFlags_ ) + , pImmutableSamplers( immutableSamplers_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : DisplayPresentInfoKHR( *reinterpret_cast( &rhs ) ) + DescriptorSetLayoutBinding & operator=( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorSetLayoutBinding & operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DisplayPresentInfoKHR & operator=( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + { + binding = binding_; + return *this; + } - DisplayPresentInfoKHR & operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + descriptorType = descriptorType_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - 
VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + descriptorCount = descriptorCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D const & srcRect_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT { - srcRect = srcRect_; + stageFlags = stageFlags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setDstRect( VULKAN_HPP_NAMESPACE::Rect2D const & dstRect_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT { - dstRect = dstRect_; + pImmutableSamplers = pImmutableSamplers_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setPersistent( VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBinding & + setImmutableSamplers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & immutableSamplers_ ) VULKAN_HPP_NOEXCEPT { - persistent = persistent_; + descriptorCount = static_cast( immutableSamplers_.size() ); + pImmutableSamplers = immutableSamplers_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDisplayPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetLayoutBinding const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, srcRect, dstRect, persistent ); + return std::tie( binding, descriptorType, descriptorCount, stageFlags, pImmutableSamplers ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DisplayPresentInfoKHR const & ) const = default; + auto operator<=>( DescriptorSetLayoutBinding const & ) const = default; #else - bool operator==( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcRect == rhs.srcRect ) && ( dstRect == rhs.dstRect ) && ( persistent == rhs.persistent ); + return ( binding == rhs.binding ) && ( descriptorType == rhs.descriptorType ) && ( descriptorCount == rhs.descriptorCount ) && + ( stageFlags == rhs.stageFlags ) && ( pImmutableSamplers == rhs.pImmutableSamplers ); # endif } - bool operator!=( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR; - const void * pNext = {}; - 
VULKAN_HPP_NAMESPACE::Rect2D srcRect = {}; - VULKAN_HPP_NAMESPACE::Rect2D dstRect = {}; - VULKAN_HPP_NAMESPACE::Bool32 persistent = {}; - }; - - template <> - struct CppType - { - using Type = DisplayPresentInfoKHR; + uint32_t binding = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers = {}; }; - struct DisplayPropertiesKHR + struct DescriptorSetLayoutBindingFlagsCreateInfo { - using NativeType = VkDisplayPropertiesKHR; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {}, - const char * displayName_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {} ) VULKAN_HPP_NOEXCEPT - : display( display_ ) - , displayName( displayName_ ) - , physicalDimensions( physicalDimensions_ ) - , physicalResolution( physicalResolution_ ) - , supportedTransforms( supportedTransforms_ ) - , planeReorderPossible( planeReorderPossible_ ) - , persistentContent( persistentContent_ ) - { - } - - VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : DisplayPropertiesKHR( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + using NativeType = VkDescriptorSetLayoutBindingFlagsCreateInfo; - DisplayPropertiesKHR & operator=( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; - DisplayPropertiesKHR & operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( uint32_t bindingCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , bindingCount{ bindingCount_ } + , pBindingFlags{ pBindingFlags_ } { - *this = *reinterpret_cast( &rhs ); - return *this; } - operator VkDisplayPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetLayoutBindingFlagsCreateInfo( *reinterpret_cast( &rhs ) ) { - return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBindingFlagsCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bindingFlags_, const void * pNext_ = nullptr ) + 
: pNext( pNext_ ), bindingCount( static_cast( bindingFlags_.size() ) ), pBindingFlags( bindingFlags_.data() ) { - return std::tie( display, displayName, physicalDimensions, physicalResolution, supportedTransforms, planeReorderPossible, persistentContent ); } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - std::strong_ordering operator<=>( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - if ( auto cmp = display <=> rhs.display; cmp != 0 ) - return cmp; - if ( displayName != rhs.displayName ) - if ( auto cmp = strcmp( displayName, rhs.displayName ); cmp != 0 ) - return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; - if ( auto cmp = physicalDimensions <=> rhs.physicalDimensions; cmp != 0 ) - return cmp; - if ( auto cmp = physicalResolution <=> rhs.physicalResolution; cmp != 0 ) - return cmp; - if ( auto cmp = supportedTransforms <=> rhs.supportedTransforms; cmp != 0 ) - return cmp; - if ( auto cmp = planeReorderPossible <=> rhs.planeReorderPossible; cmp != 0 ) - return cmp; - if ( auto cmp = persistentContent <=> rhs.persistentContent; cmp != 0 ) - return cmp; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - return std::strong_ordering::equivalent; - } -#endif + DescriptorSetLayoutBindingFlagsCreateInfo & operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - bool operator==( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutBindingFlagsCreateInfo & operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - return ( display == rhs.display ) && ( ( displayName == rhs.displayName ) || ( strcmp( displayName, rhs.displayName ) == 0 ) ) && - ( physicalDimensions == rhs.physicalDimensions ) && ( physicalResolution == rhs.physicalResolution ) && - ( supportedTransforms == rhs.supportedTransforms ) && ( planeReorderPossible == rhs.planeReorderPossible ) && - ( persistentContent == rhs.persistentContent ); + *this = *reinterpret_cast( &rhs ); + return *this; } - bool operator!=( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + pNext = pNext_; + return *this; } - public: - VULKAN_HPP_NAMESPACE::DisplayKHR display = {}; - const char * displayName = {}; - VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions = {}; - VULKAN_HPP_NAMESPACE::Extent2D physicalResolution = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; - VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible = {}; - VULKAN_HPP_NAMESPACE::Bool32 persistentContent = {}; - }; - - struct DisplayProperties2KHR - { - using NativeType = VkDisplayProperties2KHR; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayProperties2KHR; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , displayProperties( displayProperties_ ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT { + bindingCount = bindingCount_; + return *this; } - 
VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - : DisplayProperties2KHR( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & + setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ ) VULKAN_HPP_NOEXCEPT { + pBindingFlags = pBindingFlags_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DisplayProperties2KHR & operator=( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - DisplayProperties2KHR & operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutBindingFlagsCreateInfo & setBindingFlags( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bindingFlags_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + bindingCount = static_cast( bindingFlags_.size() ); + pBindingFlags = bindingFlags_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDisplayProperties2KHR const &() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetLayoutBindingFlagsCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetLayoutBindingFlagsCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std:: + tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, displayProperties ); + return std::tie( sType, pNext, bindingCount, pBindingFlags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DisplayProperties2KHR const & ) const = default; + auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfo const & ) const = default; #else - bool operator==( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayProperties == rhs.displayProperties ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bindingCount == rhs.bindingCount ) && ( pBindingFlags == rhs.pBindingFlags ); # endif } - bool operator!=( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; + const void * pNext = {}; + uint32_t bindingCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags = {}; }; template <> - struct CppType + struct CppType { - using Type = DisplayProperties2KHR; + using Type = DescriptorSetLayoutBindingFlagsCreateInfo; }; - struct DisplaySurfaceCreateInfoKHR + using DescriptorSetLayoutBindingFlagsCreateInfoEXT = 
DescriptorSetLayoutBindingFlagsCreateInfo; + + struct DescriptorSetLayoutCreateInfo { - using NativeType = VkDisplaySurfaceCreateInfoKHR; + using NativeType = VkDescriptorSetLayoutCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceCreateInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - DisplaySurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = {}, - VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, - uint32_t planeIndex_ = {}, - uint32_t planeStackIndex_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, - float globalAlpha_ = {}, - VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque, - VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , displayMode( displayMode_ ) - , planeIndex( planeIndex_ ) - , planeStackIndex( planeStackIndex_ ) - , transform( transform_ ) - , globalAlpha( globalAlpha_ ) - , alphaMode( alphaMode_ ) - , imageExtent( imageExtent_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {}, + uint32_t bindingCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , bindingCount{ bindingCount_ } + , pBindings{ pBindings_ } { } - VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : DisplaySurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetLayoutCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DisplaySurfaceCreateInfoKHR & operator=( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bindings_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), flags( flags_ ), bindingCount( static_cast( bindings_.size() ) ), pBindings( bindings_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - DisplaySurfaceCreateInfoKHR & operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutCreateInfo & operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorSetLayoutCreateInfo & operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) 
- VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT { - displayMode = displayMode_; + bindingCount = bindingCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & + setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ ) VULKAN_HPP_NOEXCEPT { - planeIndex = planeIndex_; + pBindings = pBindings_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetLayoutCreateInfo & + setBindings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bindings_ ) VULKAN_HPP_NOEXCEPT { - planeStackIndex = planeStackIndex_; + bindingCount = static_cast( bindings_.size() ); + pBindings = bindings_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - transform = transform_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT { - globalAlpha = globalAlpha_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT - { - alphaMode = alphaMode_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT - { - imageExtent = imageExtent_; - return *this; - } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkDisplaySurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -26270,2907 +28982,3040 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, displayMode, planeIndex, planeStackIndex, transform, globalAlpha, alphaMode, imageExtent ); + return std::tie( sType, pNext, flags, 
bindingCount, pBindings ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DisplaySurfaceCreateInfoKHR const & ) const = default; + auto operator<=>( DescriptorSetLayoutCreateInfo const & ) const = default; #else - bool operator==( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( displayMode == rhs.displayMode ) && - ( planeIndex == rhs.planeIndex ) && ( planeStackIndex == rhs.planeStackIndex ) && ( transform == rhs.transform ) && - ( globalAlpha == rhs.globalAlpha ) && ( alphaMode == rhs.alphaMode ) && ( imageExtent == rhs.imageExtent ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( bindingCount == rhs.bindingCount ) && + ( pBindings == rhs.pBindings ); # endif } - bool operator!=( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags = {}; - VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; - uint32_t planeIndex = {}; - uint32_t planeStackIndex = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; - float globalAlpha = {}; - VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque; - VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {}; + uint32_t bindingCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings = {}; }; template <> - struct CppType + struct CppType { - using Type = DisplaySurfaceCreateInfoKHR; + using Type = DescriptorSetLayoutCreateInfo; }; - struct DrawIndexedIndirectCommand + struct DescriptorSetLayoutHostMappingInfoVALVE { - using NativeType = VkDrawIndexedIndirectCommand; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( uint32_t indexCount_ = {}, - uint32_t instanceCount_ = {}, - uint32_t firstIndex_ = {}, - int32_t vertexOffset_ = {}, - uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT - : indexCount( indexCount_ ) - , instanceCount( instanceCount_ ) - , firstIndex( firstIndex_ ) - , vertexOffset( vertexOffset_ ) - , firstInstance( firstInstance_ ) - { - } + using NativeType = VkDescriptorSetLayoutHostMappingInfoVALVE; - VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutHostMappingInfoVALVE; - DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT - : DrawIndexedIndirectCommand( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + 
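// --------------------------------------------------------------------------
// Illustrative usage sketch (editor's addition, not part of the generated
// header or of this patch): how DescriptorSetLayoutBindingFlagsCreateInfo is
// typically chained into DescriptorSetLayoutCreateInfo through pNext. The
// device handle, binding slot and descriptor count are assumptions made for
// the example; the enhanced-mode ArrayProxyNoTemporaries constructors shown
// above fill in the *Count members from the containers.
#include <array>
#include <vulkan/vulkan.hpp>

vk::DescriptorSetLayout makeBindlessLayout( vk::Device device )
{
  std::array<vk::DescriptorSetLayoutBinding, 1> bindings = { vk::DescriptorSetLayoutBinding(
    0, vk::DescriptorType::eCombinedImageSampler, 1024, vk::ShaderStageFlagBits::eFragment ) };

  std::array<vk::DescriptorBindingFlags, 1> bindingFlags = { vk::DescriptorBindingFlagBits::eUpdateAfterBind |
                                                             vk::DescriptorBindingFlagBits::eVariableDescriptorCount };

  vk::DescriptorSetLayoutBindingFlagsCreateInfo flagsInfo( bindingFlags );
  vk::DescriptorSetLayoutCreateInfo layoutInfo( vk::DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool, bindings, &flagsInfo );

  return device.createDescriptorSetLayout( layoutInfo );
}
// --------------------------------------------------------------------------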
DescriptorSetLayoutHostMappingInfoVALVE( size_t descriptorOffset_ = {}, uint32_t descriptorSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , descriptorOffset{ descriptorOffset_ } + , descriptorSize{ descriptorSize_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DrawIndexedIndirectCommand & operator=( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutHostMappingInfoVALVE( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DrawIndexedIndirectCommand & operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutHostMappingInfoVALVE( VkDescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetLayoutHostMappingInfoVALVE( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT - { - indexCount = indexCount_; - return *this; - } + DescriptorSetLayoutHostMappingInfoVALVE & operator=( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutHostMappingInfoVALVE & operator=( VkDescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT { - instanceCount = instanceCount_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - firstIndex = firstIndex_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setDescriptorOffset( size_t descriptorOffset_ ) VULKAN_HPP_NOEXCEPT { - vertexOffset = vertexOffset_; + descriptorOffset = descriptorOffset_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setDescriptorSize( uint32_t descriptorSize_ ) VULKAN_HPP_NOEXCEPT { - firstInstance = firstInstance_; + descriptorSize = descriptorSize_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDrawIndexedIndirectCommand const &() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetLayoutHostMappingInfoVALVE const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetLayoutHostMappingInfoVALVE &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + return std::tie( sType, pNext, descriptorOffset, descriptorSize ); } 
#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DrawIndexedIndirectCommand const & ) const = default; + auto operator<=>( DescriptorSetLayoutHostMappingInfoVALVE const & ) const = default; #else - bool operator==( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( indexCount == rhs.indexCount ) && ( instanceCount == rhs.instanceCount ) && ( firstIndex == rhs.firstIndex ) && - ( vertexOffset == rhs.vertexOffset ) && ( firstInstance == rhs.firstInstance ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorOffset == rhs.descriptorOffset ) && ( descriptorSize == rhs.descriptorSize ); # endif } - bool operator!=( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t indexCount = {}; - uint32_t instanceCount = {}; - uint32_t firstIndex = {}; - int32_t vertexOffset = {}; - uint32_t firstInstance = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutHostMappingInfoVALVE; + void * pNext = {}; + size_t descriptorOffset = {}; + uint32_t descriptorSize = {}; }; - struct DrawIndirectCommand + template <> + struct CppType { - using NativeType = VkDrawIndirectCommand; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DrawIndirectCommand( uint32_t vertexCount_ = {}, - uint32_t instanceCount_ = {}, - uint32_t firstVertex_ = {}, - uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT - : vertexCount( vertexCount_ ) - , instanceCount( instanceCount_ ) - , firstVertex( firstVertex_ ) - , firstInstance( firstInstance_ ) - { - } - - VULKAN_HPP_CONSTEXPR DrawIndirectCommand( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + using Type = DescriptorSetLayoutHostMappingInfoVALVE; + }; - DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT : DrawIndirectCommand( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + struct DescriptorSetLayoutSupport + { + using NativeType = VkDescriptorSetLayoutSupport; - DrawIndirectCommand & operator=( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutSupport; - DrawIndirectCommand & operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( VULKAN_HPP_NAMESPACE::Bool32 supported_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , supported{ supported_ } { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT - { - vertexCount = vertexCount_; - return *this; - } + VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutSupport( 
VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetLayoutSupport( *reinterpret_cast( &rhs ) ) { - instanceCount = instanceCount_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT - { - firstVertex = firstVertex_; - return *this; - } + DescriptorSetLayoutSupport & operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetLayoutSupport & operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT { - firstInstance = firstInstance_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDrawIndirectCommand const &() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetLayoutSupport const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( vertexCount, instanceCount, firstVertex, firstInstance ); + return std::tie( sType, pNext, supported ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DrawIndirectCommand const & ) const = default; + auto operator<=>( DescriptorSetLayoutSupport const & ) const = default; #else - bool operator==( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( vertexCount == rhs.vertexCount ) && ( instanceCount == rhs.instanceCount ) && ( firstVertex == rhs.firstVertex ) && - ( firstInstance == rhs.firstInstance ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supported == rhs.supported ); # endif } - bool operator!=( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t vertexCount = {}; - uint32_t instanceCount = {}; - uint32_t firstVertex = {}; - uint32_t firstInstance = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutSupport; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 supported = {}; }; - struct DrawMeshTasksIndirectCommandEXT + template <> + struct CppType { - using NativeType = VkDrawMeshTasksIndirectCommandEXT; + using Type = DescriptorSetLayoutSupport; + }; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - DrawMeshTasksIndirectCommandEXT( uint32_t groupCountX_ = {}, uint32_t groupCountY_ = {}, uint32_t groupCountZ_ = {} ) VULKAN_HPP_NOEXCEPT - : groupCountX( groupCountX_ ) - , groupCountY( groupCountY_ ) - , groupCountZ( groupCountZ_ ) + using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport; + + struct DescriptorSetVariableDescriptorCountAllocateInfo + { + using NativeType = VkDescriptorSetVariableDescriptorCountAllocateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType 
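// --------------------------------------------------------------------------
// Illustrative usage sketch (editor's addition, not part of the generated
// header): DescriptorSetLayoutSupport is an output structure, normally filled
// by Device::getDescriptorSetLayoutSupport to ask whether a layout that may
// exceed the guaranteed per-stage limits could be created at all. The
// createInfo is assumed to be prepared elsewhere, e.g. as in the sketch above.
#include <vulkan/vulkan.hpp>

bool layoutIsSupported( vk::Device device, vk::DescriptorSetLayoutCreateInfo const & createInfo )
{
  vk::DescriptorSetLayoutSupport support = device.getDescriptorSetLayoutSupport( createInfo );
  return support.supported == VK_TRUE;
}
// --------------------------------------------------------------------------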
structureType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( uint32_t descriptorSetCount_ = {}, + const uint32_t * pDescriptorCounts_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , descriptorSetCount{ descriptorSetCount_ } + , pDescriptorCounts{ pDescriptorCounts_ } { } - VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandEXT( DrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + DescriptorSetVariableDescriptorCountAllocateInfo( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DrawMeshTasksIndirectCommandEXT( VkDrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : DrawMeshTasksIndirectCommandEXT( *reinterpret_cast( &rhs ) ) + DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetVariableDescriptorCountAllocateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DrawMeshTasksIndirectCommandEXT & operator=( DrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetVariableDescriptorCountAllocateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorCounts_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), descriptorSetCount( static_cast( descriptorCounts_.size() ) ), pDescriptorCounts( descriptorCounts_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - DrawMeshTasksIndirectCommandEXT & operator=( VkDrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorSetVariableDescriptorCountAllocateInfo & operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DescriptorSetVariableDescriptorCountAllocateInfo & operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountX( uint32_t groupCountX_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - groupCountX = groupCountX_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountY( uint32_t groupCountY_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT { - groupCountY = groupCountY_; + descriptorSetCount = descriptorSetCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountZ( uint32_t groupCountZ_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setPDescriptorCounts( const uint32_t * pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT { - groupCountZ = groupCountZ_; + pDescriptorCounts = pDescriptorCounts_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator 
VkDrawMeshTasksIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorSetVariableDescriptorCountAllocateInfo & + setDescriptorCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorCounts_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + descriptorSetCount = static_cast( descriptorCounts_.size() ); + pDescriptorCounts = descriptorCounts_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDrawMeshTasksIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetVariableDescriptorCountAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkDescriptorSetVariableDescriptorCountAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( groupCountX, groupCountY, groupCountZ ); + return std::tie( sType, pNext, descriptorSetCount, pDescriptorCounts ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DrawMeshTasksIndirectCommandEXT const & ) const = default; + auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfo const & ) const = default; #else - bool operator==( DrawMeshTasksIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( groupCountX == rhs.groupCountX ) && ( groupCountY == rhs.groupCountY ) && ( groupCountZ == rhs.groupCountZ ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetCount == rhs.descriptorSetCount ) && + ( pDescriptorCounts == rhs.pDescriptorCounts ); # endif } - bool operator!=( DrawMeshTasksIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t groupCountX = {}; - uint32_t groupCountY = {}; - uint32_t groupCountZ = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo; + const void * pNext = {}; + uint32_t descriptorSetCount = {}; + const uint32_t * pDescriptorCounts = {}; }; - struct DrawMeshTasksIndirectCommandNV + template <> + struct CppType { - using NativeType = VkDrawMeshTasksIndirectCommandNV; + using Type = DescriptorSetVariableDescriptorCountAllocateInfo; + }; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( uint32_t taskCount_ = {}, uint32_t firstTask_ = {} ) VULKAN_HPP_NOEXCEPT - : taskCount( taskCount_ ) - , firstTask( firstTask_ ) - { - } + using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo; - VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + struct DescriptorSetVariableDescriptorCountLayoutSupport + { + using NativeType = VkDescriptorSetVariableDescriptorCountLayoutSupport; - DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT - : DrawMeshTasksIndirectCommandNV( *reinterpret_cast( 
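// --------------------------------------------------------------------------
// Illustrative usage sketch (editor's addition, not part of the generated
// header): DescriptorSetVariableDescriptorCountAllocateInfo chains into
// DescriptorSetAllocateInfo to choose the run-time size of a binding that was
// declared with eVariableDescriptorCount. The pool and layout handles are
// assumed to exist already.
#include <array>
#include <vector>
#include <vulkan/vulkan.hpp>

std::vector<vk::DescriptorSet>
  allocateVariableCountSet( vk::Device device, vk::DescriptorPool pool, vk::DescriptorSetLayout layout, uint32_t actualCount )
{
  std::array<uint32_t, 1>                              counts  = { actualCount };  // one entry per set being allocated
  std::array<vk::DescriptorSetLayout, 1>               layouts = { layout };
  vk::DescriptorSetVariableDescriptorCountAllocateInfo countInfo( counts );
  vk::DescriptorSetAllocateInfo                        allocInfo( pool, layouts, &countInfo );
  return device.allocateDescriptorSets( allocInfo );
}
// --------------------------------------------------------------------------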
&rhs ) ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( uint32_t maxVariableDescriptorCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxVariableDescriptorCount{ maxVariableDescriptorCount_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DrawMeshTasksIndirectCommandNV & operator=( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + DescriptorSetVariableDescriptorCountLayoutSupport( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DrawMeshTasksIndirectCommandNV & operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorSetVariableDescriptorCountLayoutSupport( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV & setTaskCount( uint32_t taskCount_ ) VULKAN_HPP_NOEXCEPT - { - taskCount = taskCount_; - return *this; - } + DescriptorSetVariableDescriptorCountLayoutSupport & + operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV & setFirstTask( uint32_t firstTask_ ) VULKAN_HPP_NOEXCEPT + DescriptorSetVariableDescriptorCountLayoutSupport & operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT { - firstTask = firstTask_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDrawMeshTasksIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetVariableDescriptorCountLayoutSupport const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + operator VkDescriptorSetVariableDescriptorCountLayoutSupport &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( taskCount, firstTask ); + return std::tie( sType, pNext, maxVariableDescriptorCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DrawMeshTasksIndirectCommandNV const & ) const = default; + auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupport const & ) const = default; #else - bool operator==( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( taskCount == rhs.taskCount ) && ( firstTask == rhs.firstTask ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount ); # endif } - 
bool operator!=( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t taskCount = {}; - uint32_t firstTask = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport; + void * pNext = {}; + uint32_t maxVariableDescriptorCount = {}; }; - struct DrmFormatModifierProperties2EXT + template <> + struct CppType { - using NativeType = VkDrmFormatModifierProperties2EXT; + using Type = DescriptorSetVariableDescriptorCountLayoutSupport; + }; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT( uint64_t drmFormatModifier_ = {}, - uint32_t drmFormatModifierPlaneCount_ = {}, - VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT - : drmFormatModifier( drmFormatModifier_ ) - , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ) - , drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ ) - { - } + using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport; - VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + struct DescriptorUpdateTemplateEntry + { + using NativeType = VkDescriptorUpdateTemplateEntry; - DrmFormatModifierProperties2EXT( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT - : DrmFormatModifierProperties2EXT( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( uint32_t dstBinding_ = {}, + uint32_t dstArrayElement_ = {}, + uint32_t descriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + size_t offset_ = {}, + size_t stride_ = {} ) VULKAN_HPP_NOEXCEPT + : dstBinding{ dstBinding_ } + , dstArrayElement{ dstArrayElement_ } + , descriptorCount{ descriptorCount_ } + , descriptorType{ descriptorType_ } + , offset{ offset_ } + , stride{ stride_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DrmFormatModifierProperties2EXT & operator=( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DrmFormatModifierProperties2EXT & operator=( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorUpdateTemplateEntry( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } - operator VkDrmFormatModifierProperties2EXT const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } + DescriptorUpdateTemplateEntry & operator=( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - operator VkDrmFormatModifierProperties2EXT &() VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateEntry & operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + *this = *reinterpret_cast( &rhs ); + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# 
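// --------------------------------------------------------------------------
// Illustrative usage sketch (editor's addition, not part of the generated
// header): DescriptorSetVariableDescriptorCountLayoutSupport extends
// DescriptorSetLayoutSupport, so it is usually read through the
// structure-chain overload of getDescriptorSetLayoutSupport (assumed to be
// available in enhanced mode) to learn how large the variable-count binding
// may actually be for a given layout.
#include <vulkan/vulkan.hpp>

uint32_t queryMaxVariableDescriptorCount( vk::Device device, vk::DescriptorSetLayoutCreateInfo const & createInfo )
{
  auto chain = device.getDescriptorSetLayoutSupport<vk::DescriptorSetLayoutSupport,
                                                    vk::DescriptorSetVariableDescriptorCountLayoutSupport>( createInfo );
  return chain.get<vk::DescriptorSetVariableDescriptorCountLayoutSupport>().maxVariableDescriptorCount;
}
// --------------------------------------------------------------------------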
else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures ); + dstBinding = dstBinding_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DrmFormatModifierProperties2EXT const & ) const = default; -#else - bool operator==( DrmFormatModifierProperties2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( drmFormatModifier == rhs.drmFormatModifier ) && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && - ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures ); -# endif + dstArrayElement = dstArrayElement_; + return *this; } - bool operator!=( DrmFormatModifierProperties2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + descriptorCount = descriptorCount_; + return *this; } -#endif - - public: - uint64_t drmFormatModifier = {}; - uint32_t drmFormatModifierPlaneCount = {}; - VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures = {}; - }; - struct DrmFormatModifierPropertiesEXT - { - using NativeType = VkDrmFormatModifierPropertiesEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {}, - uint32_t drmFormatModifierPlaneCount_ = {}, - VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT - : drmFormatModifier( drmFormatModifier_ ) - , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ) - , drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ ) + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT { + descriptorType = descriptorType_; + return *this; } - VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : DrmFormatModifierPropertiesEXT( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setOffset( size_t offset_ ) VULKAN_HPP_NOEXCEPT { + offset = offset_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - DrmFormatModifierPropertiesEXT & operator=( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DrmFormatModifierPropertiesEXT & operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setStride( size_t stride_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + stride = stride_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorUpdateTemplateEntry const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + 
return *reinterpret_cast( this ); } - operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT + operator VkDescriptorUpdateTemplateEntry &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures ); + return std::tie( dstBinding, dstArrayElement, descriptorCount, descriptorType, offset, stride ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DrmFormatModifierPropertiesEXT const & ) const = default; + auto operator<=>( DescriptorUpdateTemplateEntry const & ) const = default; #else - bool operator==( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( drmFormatModifier == rhs.drmFormatModifier ) && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && - ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures ); + return ( dstBinding == rhs.dstBinding ) && ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount ) && + ( descriptorType == rhs.descriptorType ) && ( offset == rhs.offset ) && ( stride == rhs.stride ); # endif } - bool operator!=( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint64_t drmFormatModifier = {}; - uint32_t drmFormatModifierPlaneCount = {}; - VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {}; + uint32_t dstBinding = {}; + uint32_t dstArrayElement = {}; + uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + size_t offset = {}; + size_t stride = {}; }; - struct DrmFormatModifierPropertiesList2EXT - { - using NativeType = VkDrmFormatModifierPropertiesList2EXT; + using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry; + + struct DescriptorUpdateTemplateCreateInfo + { + using NativeType = VkDescriptorUpdateTemplateCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesList2EXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorUpdateTemplateCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT( uint32_t drmFormatModifierCount_ = {}, - VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , drmFormatModifierCount( drmFormatModifierCount_ ) - , pDrmFormatModifierProperties( pDrmFormatModifierProperties_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {}, + uint32_t descriptorUpdateEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ = {}, + 
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, + uint32_t set_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , descriptorUpdateEntryCount{ descriptorUpdateEntryCount_ } + , pDescriptorUpdateEntries{ pDescriptorUpdateEntries_ } + , templateType{ templateType_ } + , descriptorSetLayout{ descriptorSetLayout_ } + , pipelineBindPoint{ pipelineBindPoint_ } + , pipelineLayout{ pipelineLayout_ } + , set{ set_ } { } - VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DrmFormatModifierPropertiesList2EXT( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT - : DrmFormatModifierPropertiesList2EXT( *reinterpret_cast( &rhs ) ) + DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DescriptorUpdateTemplateCreateInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DrmFormatModifierPropertiesList2EXT( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drmFormatModifierProperties_, - void * pNext_ = nullptr ) + DescriptorUpdateTemplateCreateInfo( + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorUpdateEntries_, + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, + uint32_t set_ = {}, + const void * pNext_ = nullptr ) : pNext( pNext_ ) - , drmFormatModifierCount( static_cast( drmFormatModifierProperties_.size() ) ) - , pDrmFormatModifierProperties( drmFormatModifierProperties_.data() ) + , flags( flags_ ) + , descriptorUpdateEntryCount( static_cast( descriptorUpdateEntries_.size() ) ) + , pDescriptorUpdateEntries( descriptorUpdateEntries_.data() ) + , templateType( templateType_ ) + , descriptorSetLayout( descriptorSetLayout_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , pipelineLayout( pipelineLayout_ ) + , set( set_ ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DrmFormatModifierPropertiesList2EXT & operator=( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DescriptorUpdateTemplateCreateInfo & operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - DrmFormatModifierPropertiesList2EXT & operator=( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + DescriptorUpdateTemplateCreateInfo & operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator 
VkDrmFormatModifierPropertiesList2EXT const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkDrmFormatModifierPropertiesList2EXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + flags = flags_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties ); + descriptorUpdateEntryCount = descriptorUpdateEntryCount_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DrmFormatModifierPropertiesList2EXT const & ) const = default; -#else - bool operator==( DrmFormatModifierPropertiesList2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & + setPDescriptorUpdateEntries( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) && - ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties ); -# endif + pDescriptorUpdateEntries = pDescriptorUpdateEntries_; + return *this; } - bool operator!=( DrmFormatModifierPropertiesList2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorUpdateEntries_ ) + VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + descriptorUpdateEntryCount = static_cast( descriptorUpdateEntries_.size() ); + pDescriptorUpdateEntries = descriptorUpdateEntries_.data(); + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesList2EXT; - void * pNext = {}; - uint32_t drmFormatModifierCount = {}; - VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties = {}; - }; - - template <> - struct CppType - { - using Type = DrmFormatModifierPropertiesList2EXT; - }; - - struct DrmFormatModifierPropertiesListEXT - { - using NativeType = VkDrmFormatModifierPropertiesListEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesListEXT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( uint32_t drmFormatModifierCount_ = {}, - VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , drmFormatModifierCount( drmFormatModifierCount_ ) - , pDrmFormatModifierProperties( 
pDrmFormatModifierProperties_ ) + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & + setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT { + templateType = templateType_; + return *this; } - VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : DrmFormatModifierPropertiesListEXT( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & + setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT { + descriptorSetLayout = descriptorSetLayout_; + return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DrmFormatModifierPropertiesListEXT( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drmFormatModifierProperties_, - void * pNext_ = nullptr ) - : pNext( pNext_ ) - , drmFormatModifierCount( static_cast( drmFormatModifierProperties_.size() ) ) - , pDrmFormatModifierProperties( drmFormatModifierProperties_.data() ) + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT { + pipelineBindPoint = pipelineBindPoint_; + return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DrmFormatModifierPropertiesListEXT & operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT + { + pipelineLayout = pipelineLayout_; + return *this; + } - DrmFormatModifierPropertiesListEXT & operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + set = set_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkDrmFormatModifierPropertiesListEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkDescriptorUpdateTemplateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT + operator VkDescriptorUpdateTemplateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties ); + return std::tie( + sType, pNext, flags, descriptorUpdateEntryCount, pDescriptorUpdateEntries, templateType, descriptorSetLayout, pipelineBindPoint, pipelineLayout, set ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DrmFormatModifierPropertiesListEXT const & ) const = default; + auto operator<=>( DescriptorUpdateTemplateCreateInfo const & ) const = default; #else - bool operator==( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == 
rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) && - ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount ) && + ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries ) && ( templateType == rhs.templateType ) && + ( descriptorSetLayout == rhs.descriptorSetLayout ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipelineLayout == rhs.pipelineLayout ) && + ( set == rhs.set ); # endif } - bool operator!=( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT; - void * pNext = {}; - uint32_t drmFormatModifierCount = {}; - VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags = {}; + uint32_t descriptorUpdateEntryCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries = {}; + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet; + VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {}; + uint32_t set = {}; }; template <> - struct CppType + struct CppType { - using Type = DrmFormatModifierPropertiesListEXT; + using Type = DescriptorUpdateTemplateCreateInfo; }; - struct EventCreateInfo + using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo; + + struct DeviceAddressBindingCallbackDataEXT { - using NativeType = VkEventCreateInfo; + using NativeType = VkDeviceAddressBindingCallbackDataEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eEventCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceAddressBindingCallbackDataEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR EventCreateInfo( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceAddressBindingCallbackDataEXT( + VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress baseAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT bindingType_ = VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT::eBind, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , baseAddress{ baseAddress_ } + , size{ size_ } + , bindingType{ bindingType_ } { } - VULKAN_HPP_CONSTEXPR EventCreateInfo( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR 
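// Illustrative sketch, not part of the generated header: how the fluent setters of
// DescriptorUpdateTemplateCreateInfo defined above can be chained. The entry array, the set
// layout parameter and the <vulkan/vulkan.hpp> include path are assumptions chosen for
// illustration; the ArrayProxy-based setDescriptorUpdateEntries overload requires enhanced
// mode (the default, i.e. VULKAN_HPP_DISABLE_ENHANCED_MODE not defined).
#include <array>
#include <vulkan/vulkan.hpp>

inline vk::DescriptorUpdateTemplateCreateInfo
  makeTemplateInfo( vk::DescriptorSetLayout layout, std::array<vk::DescriptorUpdateTemplateEntry, 1> const & entries )
{
  return vk::DescriptorUpdateTemplateCreateInfo{}
    .setDescriptorUpdateEntries( entries )  // fills descriptorUpdateEntryCount and pDescriptorUpdateEntries together
    .setTemplateType( vk::DescriptorUpdateTemplateType::eDescriptorSet )
    .setDescriptorSetLayout( layout );
}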
DeviceAddressBindingCallbackDataEXT( DeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : EventCreateInfo( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + DeviceAddressBindingCallbackDataEXT( VkDeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceAddressBindingCallbackDataEXT( *reinterpret_cast( &rhs ) ) + { + } - EventCreateInfo & operator=( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DeviceAddressBindingCallbackDataEXT & operator=( DeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - EventCreateInfo & operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceAddressBindingCallbackDataEXT & operator=( VkDeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setFlags( VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkEventCreateInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setBaseAddress( VULKAN_HPP_NAMESPACE::DeviceAddress baseAddress_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + baseAddress = baseAddress_; + return *this; } - operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & + setBindingType( VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT bindingType_ ) VULKAN_HPP_NOEXCEPT + { + bindingType = bindingType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceAddressBindingCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceAddressBindingCallbackDataEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags ); + return std::tie( sType, pNext, flags, baseAddress, size, bindingType ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( EventCreateInfo const & ) const = default; + auto operator<=>( DeviceAddressBindingCallbackDataEXT const & ) const = default; #else - bool operator==( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceAddressBindingCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == 
rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( baseAddress == rhs.baseAddress ) && ( size == rhs.size ) && + ( bindingType == rhs.bindingType ); # endif } - bool operator!=( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceAddressBindingCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eEventCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceAddressBindingCallbackDataEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress baseAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT bindingType = VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT::eBind; }; template <> - struct CppType + struct CppType { - using Type = EventCreateInfo; + using Type = DeviceAddressBindingCallbackDataEXT; }; - struct ExportFenceCreateInfo + struct DeviceBufferMemoryRequirements { - using NativeType = VkExportFenceCreateInfo; + using NativeType = VkDeviceBufferMemoryRequirements; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceBufferMemoryRequirements; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleTypes( handleTypes_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pCreateInfo{ pCreateInfo_ } { } - VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : ExportFenceCreateInfo( *reinterpret_cast( &rhs ) ) + DeviceBufferMemoryRequirements( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceBufferMemoryRequirements( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ExportFenceCreateInfo & operator=( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DeviceBufferMemoryRequirements & operator=( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ExportFenceCreateInfo & operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceBufferMemoryRequirements & operator=( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & setPNext( const void * pNext_ 
) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements & setPCreateInfo( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT { - handleTypes = handleTypes_; + pCreateInfo = pCreateInfo_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkExportFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkDeviceBufferMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkDeviceBufferMemoryRequirements &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, handleTypes ); + return std::tie( sType, pNext, pCreateInfo ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExportFenceCreateInfo const & ) const = default; + auto operator<=>( DeviceBufferMemoryRequirements const & ) const = default; #else - bool operator==( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceBufferMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ); # endif } - bool operator!=( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceBufferMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceBufferMemoryRequirements; + const void * pNext = {}; + const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo = {}; }; template <> - struct CppType + struct CppType { - using Type = ExportFenceCreateInfo; + using Type = DeviceBufferMemoryRequirements; }; - using ExportFenceCreateInfoKHR = ExportFenceCreateInfo; -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - struct ExportFenceWin32HandleInfoKHR + using DeviceBufferMemoryRequirementsKHR = DeviceBufferMemoryRequirements; + + struct DeviceQueueCreateInfo { - using NativeType = VkExportFenceWin32HandleInfoKHR; + using NativeType = VkDeviceQueueCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceWin32HandleInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueCreateInfo; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * 
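// Illustrative sketch, not part of the generated header: querying memory requirements for a
// buffer that has not been created yet via the DeviceBufferMemoryRequirements wrapper above.
// The buffer size and usage are placeholder values, and the enhanced-mode
// Device::getBufferMemoryRequirements overload with the default dispatcher
// (Vulkan 1.3 / VK_KHR_maintenance4) is assumed to be available.
#include <vulkan/vulkan.hpp>

inline vk::MemoryRequirements2 queryBufferRequirements( vk::Device device )
{
  vk::BufferCreateInfo bufferInfo{};
  bufferInfo.setSize( 4096 ).setUsage( vk::BufferUsageFlagBits::eUniformBuffer );

  vk::DeviceBufferMemoryRequirements info{};
  info.setPCreateInfo( &bufferInfo );  // only the create info is inspected; no VkBuffer is needed

  return device.getBufferMemoryRequirements( info );
}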
pAttributes_ = {}, - DWORD dwAccess_ = {}, - LPCWSTR name_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pAttributes( pAttributes_ ) - , dwAccess( dwAccess_ ) - , name( name_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, + uint32_t queueFamilyIndex_ = {}, + uint32_t queueCount_ = {}, + const float * pQueuePriorities_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , queueFamilyIndex{ queueFamilyIndex_ } + , queueCount{ queueCount_ } + , pQueuePriorities{ pQueuePriorities_ } { } - VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : ExportFenceWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceQueueCreateInfo( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - ExportFenceWin32HandleInfoKHR & operator=( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ExportFenceWin32HandleInfoKHR & operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_, + uint32_t queueFamilyIndex_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queuePriorities_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + , queueCount( static_cast( queuePriorities_.size() ) ) + , pQueuePriorities( queuePriorities_.data() ) { - *this = *reinterpret_cast( &rhs ); - return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + DeviceQueueCreateInfo & operator=( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT + DeviceQueueCreateInfo & operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - pAttributes = pAttributes_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - dwAccess = dwAccess_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - name = name_; + flags = flags_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkExportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT - { - return 
*reinterpret_cast( this ); - } - - operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, pAttributes, dwAccess, name ); - } -# endif - -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExportFenceWin32HandleInfoKHR const & ) const = default; -# else - bool operator==( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ) && ( name == rhs.name ); -# endif - } - - bool operator!=( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -# endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR; - const void * pNext = {}; - const SECURITY_ATTRIBUTES * pAttributes = {}; - DWORD dwAccess = {}; - LPCWSTR name = {}; - }; - - template <> - struct CppType - { - using Type = ExportFenceWin32HandleInfoKHR; - }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - struct ExportMemoryAllocateInfo - { - using NativeType = VkExportMemoryAllocateInfo; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfo; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleTypes( handleTypes_ ) - { - } - VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : ExportMemoryAllocateInfo( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT { + queueFamilyIndex = queueFamilyIndex_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - ExportMemoryAllocateInfo & operator=( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ExportMemoryAllocateInfo & operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setQueueCount( uint32_t queueCount_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + queueCount = queueCount_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setPQueuePriorities( const float * pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + pQueuePriorities = pQueuePriorities_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceQueueCreateInfo & setQueuePriorities( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queuePriorities_ ) VULKAN_HPP_NOEXCEPT { - handleTypes = handleTypes_; + queueCount = static_cast( 
queuePriorities_.size() ); + pQueuePriorities = queuePriorities_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkExportMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkDeviceQueueCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT + operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, handleTypes ); + return std::tie( sType, pNext, flags, queueFamilyIndex, queueCount, pQueuePriorities ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExportMemoryAllocateInfo const & ) const = default; + auto operator<=>( DeviceQueueCreateInfo const & ) const = default; #else - bool operator==( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && + ( queueCount == rhs.queueCount ) && ( pQueuePriorities == rhs.pQueuePriorities ); # endif } - bool operator!=( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {}; + uint32_t queueFamilyIndex = {}; + uint32_t queueCount = {}; + const float * pQueuePriorities = {}; }; template <> - struct CppType + struct CppType { - using Type = ExportMemoryAllocateInfo; + using Type = DeviceQueueCreateInfo; }; - using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo; - struct ExportMemoryAllocateInfoNV + struct PhysicalDeviceFeatures { - using NativeType = VkExportMemoryAllocateInfoNV; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfoNV; + using NativeType = VkPhysicalDeviceFeatures; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleTypes( handleTypes_ ) - { - } +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {}, + 
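// Illustrative sketch, not part of the generated header: the ArrayProxy-based constructor of
// DeviceQueueCreateInfo shown above derives queueCount and pQueuePriorities from a single
// priorities container. Queue family index 0 and a single priority of 1.0f are placeholder
// values; enhanced mode (the default) is assumed.
#include <array>
#include <vulkan/vulkan.hpp>

inline vk::DeviceQueueCreateInfo makeQueueInfo()
{
  static constexpr std::array<float, 1> priorities{ 1.0f };  // static storage keeps pQueuePriorities valid after return
  return vk::DeviceQueueCreateInfo( vk::DeviceQueueCreateFlags{}, /*queueFamilyIndex*/ 0, priorities );
}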
VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {} ) VULKAN_HPP_NOEXCEPT + : robustBufferAccess{ robustBufferAccess_ } + , fullDrawIndexUint32{ fullDrawIndexUint32_ } + , imageCubeArray{ imageCubeArray_ } + , independentBlend{ independentBlend_ } + , geometryShader{ geometryShader_ } + , tessellationShader{ tessellationShader_ } + , sampleRateShading{ sampleRateShading_ } + , dualSrcBlend{ dualSrcBlend_ } + , logicOp{ logicOp_ } + , multiDrawIndirect{ multiDrawIndirect_ } + , drawIndirectFirstInstance{ drawIndirectFirstInstance_ } 
+ , depthClamp{ depthClamp_ } + , depthBiasClamp{ depthBiasClamp_ } + , fillModeNonSolid{ fillModeNonSolid_ } + , depthBounds{ depthBounds_ } + , wideLines{ wideLines_ } + , largePoints{ largePoints_ } + , alphaToOne{ alphaToOne_ } + , multiViewport{ multiViewport_ } + , samplerAnisotropy{ samplerAnisotropy_ } + , textureCompressionETC2{ textureCompressionETC2_ } + , textureCompressionASTC_LDR{ textureCompressionASTC_LDR_ } + , textureCompressionBC{ textureCompressionBC_ } + , occlusionQueryPrecise{ occlusionQueryPrecise_ } + , pipelineStatisticsQuery{ pipelineStatisticsQuery_ } + , vertexPipelineStoresAndAtomics{ vertexPipelineStoresAndAtomics_ } + , fragmentStoresAndAtomics{ fragmentStoresAndAtomics_ } + , shaderTessellationAndGeometryPointSize{ shaderTessellationAndGeometryPointSize_ } + , shaderImageGatherExtended{ shaderImageGatherExtended_ } + , shaderStorageImageExtendedFormats{ shaderStorageImageExtendedFormats_ } + , shaderStorageImageMultisample{ shaderStorageImageMultisample_ } + , shaderStorageImageReadWithoutFormat{ shaderStorageImageReadWithoutFormat_ } + , shaderStorageImageWriteWithoutFormat{ shaderStorageImageWriteWithoutFormat_ } + , shaderUniformBufferArrayDynamicIndexing{ shaderUniformBufferArrayDynamicIndexing_ } + , shaderSampledImageArrayDynamicIndexing{ shaderSampledImageArrayDynamicIndexing_ } + , shaderStorageBufferArrayDynamicIndexing{ shaderStorageBufferArrayDynamicIndexing_ } + , shaderStorageImageArrayDynamicIndexing{ shaderStorageImageArrayDynamicIndexing_ } + , shaderClipDistance{ shaderClipDistance_ } + , shaderCullDistance{ shaderCullDistance_ } + , shaderFloat64{ shaderFloat64_ } + , shaderInt64{ shaderInt64_ } + , shaderInt16{ shaderInt16_ } + , shaderResourceResidency{ shaderResourceResidency_ } + , shaderResourceMinLod{ shaderResourceMinLod_ } + , sparseBinding{ sparseBinding_ } + , sparseResidencyBuffer{ sparseResidencyBuffer_ } + , sparseResidencyImage2D{ sparseResidencyImage2D_ } + , sparseResidencyImage3D{ sparseResidencyImage3D_ } + , sparseResidency2Samples{ sparseResidency2Samples_ } + , sparseResidency4Samples{ sparseResidency4Samples_ } + , sparseResidency8Samples{ sparseResidency8Samples_ } + , sparseResidency16Samples{ sparseResidency16Samples_ } + , sparseResidencyAliased{ sparseResidencyAliased_ } + , variableMultisampleRate{ variableMultisampleRate_ } + , inheritedQueries{ inheritedQueries_ } + { + } - VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : ExportMemoryAllocateInfoNV( *reinterpret_cast( &rhs ) ) + PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFeatures( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ExportMemoryAllocateInfoNV & operator=( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceFeatures & operator=( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ExportMemoryAllocateInfoNV & operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceFeatures & operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + robustBufferAccess = robustBufferAccess_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV & - setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT { - handleTypes = handleTypes_; + fullDrawIndexUint32 = fullDrawIndexUint32_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkExportMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setImageCubeArray( VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + imageCubeArray = imageCubeArray_; + return *this; } - operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setIndependentBlend( VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + independentBlend = independentBlend_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, handleTypes ); + geometryShader = geometryShader_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExportMemoryAllocateInfoNV const & ) const = default; -#else - bool operator==( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); -# endif + tessellationShader = tessellationShader_; + return *this; } - bool operator!=( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSampleRateShading( VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + sampleRateShading = sampleRateShading_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfoNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {}; - }; - - template <> - struct CppType - { - using Type = ExportMemoryAllocateInfoNV; - }; - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - struct ExportMemoryWin32HandleInfoKHR - { - using NativeType = VkExportMemoryWin32HandleInfoKHR; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( 
const SECURITY_ATTRIBUTES * pAttributes_ = {}, - DWORD dwAccess_ = {}, - LPCWSTR name_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pAttributes( pAttributes_ ) - , dwAccess( dwAccess_ ) - , name( name_ ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDualSrcBlend( VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ ) VULKAN_HPP_NOEXCEPT { + dualSrcBlend = dualSrcBlend_; + return *this; } - VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : ExportMemoryWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setLogicOp( VULKAN_HPP_NAMESPACE::Bool32 logicOp_ ) VULKAN_HPP_NOEXCEPT { + logicOp = logicOp_; + return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ExportMemoryWin32HandleInfoKHR & operator=( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ExportMemoryWin32HandleInfoKHR & operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setMultiDrawIndirect( VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + multiDrawIndirect = multiDrawIndirect_; return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + drawIndirectFirstInstance = drawIndirectFirstInstance_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthClamp( VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ ) VULKAN_HPP_NOEXCEPT { - pAttributes = pAttributes_; + depthClamp = depthClamp_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthBiasClamp( VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT { - dwAccess = dwAccess_; + depthBiasClamp = depthBiasClamp_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFillModeNonSolid( VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ ) VULKAN_HPP_NOEXCEPT { - name = name_; + fillModeNonSolid = fillModeNonSolid_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkExportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthBounds( VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + depthBounds = depthBounds_; + return *this; } - operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setWideLines( VULKAN_HPP_NAMESPACE::Bool32 wideLines_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + wideLines = wideLines_; + return *this; } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() 
const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setLargePoints( VULKAN_HPP_NAMESPACE::Bool32 largePoints_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pAttributes, dwAccess, name ); + largePoints = largePoints_; + return *this; } -# endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExportMemoryWin32HandleInfoKHR const & ) const = default; -# else - bool operator==( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setAlphaToOne( VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ) && ( name == rhs.name ); -# endif + alphaToOne = alphaToOne_; + return *this; } - bool operator!=( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setMultiViewport( VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + multiViewport = multiViewport_; + return *this; } -# endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR; - const void * pNext = {}; - const SECURITY_ATTRIBUTES * pAttributes = {}; - DWORD dwAccess = {}; - LPCWSTR name = {}; - }; - - template <> - struct CppType - { - using Type = ExportMemoryWin32HandleInfoKHR; - }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - struct ExportMemoryWin32HandleInfoNV - { - using NativeType = VkExportMemoryWin32HandleInfoNV; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoNV; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES * pAttributes_ = {}, DWORD dwAccess_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pAttributes( pAttributes_ ) - , dwAccess( dwAccess_ ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSamplerAnisotropy( VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ ) VULKAN_HPP_NOEXCEPT { + samplerAnisotropy = samplerAnisotropy_; + return *this; } - VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : ExportMemoryWin32HandleInfoNV( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT { + textureCompressionETC2 = textureCompressionETC2_; + return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - ExportMemoryWin32HandleInfoNV & operator=( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ExportMemoryWin32HandleInfoNV & operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + textureCompressionASTC_LDR = textureCompressionASTC_LDR_; return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - 
VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + textureCompressionBC = textureCompressionBC_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT { - pAttributes = pAttributes_; + occlusionQueryPrecise = occlusionQueryPrecise_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT { - dwAccess = dwAccess_; + pipelineStatisticsQuery = pipelineStatisticsQuery_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkExportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setVertexPipelineStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_; + return *this; } - operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + fragmentStoresAndAtomics = fragmentStoresAndAtomics_; + return *this; } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderTessellationAndGeometryPointSize( VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pAttributes, dwAccess ); + shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_; + return *this; } -# endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExportMemoryWin32HandleInfoNV const & ) const = default; -# else - bool operator==( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ); -# endif + shaderImageGatherExtended = shaderImageGatherExtended_; + return *this; } - bool operator!=( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderStorageImageExtendedFormats( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_; + return *this; } -# endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV; - const void 
* pNext = {}; - const SECURITY_ATTRIBUTES * pAttributes = {}; - DWORD dwAccess = {}; - }; - - template <> - struct CppType - { - using Type = ExportMemoryWin32HandleInfoNV; - }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#if defined( VK_USE_PLATFORM_METAL_EXT ) - struct ExportMetalBufferInfoEXT - { - using NativeType = VkExportMetalBufferInfoEXT; - static const bool allowDuplicate = true; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalBufferInfoEXT; - -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExportMetalBufferInfoEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - MTLBuffer_id mtlBuffer_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memory( memory_ ) - , mtlBuffer( mtlBuffer_ ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderStorageImageMultisample( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT { + shaderStorageImageMultisample = shaderStorageImageMultisample_; + return *this; } - VULKAN_HPP_CONSTEXPR ExportMetalBufferInfoEXT( ExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ExportMetalBufferInfoEXT( VkExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ExportMetalBufferInfoEXT( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderStorageImageReadWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT { + shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_; + return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - ExportMetalBufferInfoEXT & operator=( ExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ExportMetalBufferInfoEXT & operator=( VkExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderStorageImageWriteWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_; return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderUniformBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderSampledImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { - memory = memory_; + shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setMtlBuffer( MTLBuffer_id mtlBuffer_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderStorageBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { - mtlBuffer = mtlBuffer_; + shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_; return *this; } -# endif 
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkExportMetalBufferInfoEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & + setShaderStorageImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_; + return *this; } - operator VkExportMetalBufferInfoEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + shaderClipDistance = shaderClipDistance_; + return *this; } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, memory, mtlBuffer ); + shaderCullDistance = shaderCullDistance_; + return *this; } -# endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExportMetalBufferInfoEXT const & ) const = default; -# else - bool operator==( ExportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( mtlBuffer == rhs.mtlBuffer ); -# endif + shaderFloat64 = shaderFloat64_; + return *this; } - bool operator!=( ExportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderInt64( VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + shaderInt64 = shaderInt64_; + return *this; } -# endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalBufferInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - MTLBuffer_id mtlBuffer = {}; - }; - - template <> - struct CppType - { - using Type = ExportMetalBufferInfoEXT; - }; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - -#if defined( VK_USE_PLATFORM_METAL_EXT ) - struct ExportMetalCommandQueueInfoEXT - { - using NativeType = VkExportMetalCommandQueueInfoEXT; - - static const bool allowDuplicate = true; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalCommandQueueInfoEXT; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExportMetalCommandQueueInfoEXT( VULKAN_HPP_NAMESPACE::Queue queue_ = {}, - MTLCommandQueue_id mtlCommandQueue_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , queue( queue_ ) - , mtlCommandQueue( mtlCommandQueue_ ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderInt16( VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ ) VULKAN_HPP_NOEXCEPT { + shaderInt16 = shaderInt16_; + return *this; } - VULKAN_HPP_CONSTEXPR ExportMetalCommandQueueInfoEXT( ExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT + { + shaderResourceResidency = 
shaderResourceResidency_; + return *this; + } - ExportMetalCommandQueueInfoEXT( VkExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ExportMetalCommandQueueInfoEXT( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT { + shaderResourceMinLod = shaderResourceMinLod_; + return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ExportMetalCommandQueueInfoEXT & operator=( ExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseBinding( VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ ) VULKAN_HPP_NOEXCEPT + { + sparseBinding = sparseBinding_; + return *this; + } - ExportMetalCommandQueueInfoEXT & operator=( VkExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + sparseResidencyBuffer = sparseResidencyBuffer_; return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + sparseResidencyImage2D = sparseResidencyImage2D_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setQueue( VULKAN_HPP_NAMESPACE::Queue queue_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT { - queue = queue_; + sparseResidencyImage3D = sparseResidencyImage3D_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setMtlCommandQueue( MTLCommandQueue_id mtlCommandQueue_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT { - mtlCommandQueue = mtlCommandQueue_; + sparseResidency2Samples = sparseResidency2Samples_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkExportMetalCommandQueueInfoEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + sparseResidency4Samples = sparseResidency4Samples_; + return *this; } - operator VkExportMetalCommandQueueInfoEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + sparseResidency8Samples = sparseResidency8Samples_; + return *this; } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, queue, mtlCommandQueue ); + sparseResidency16Samples = sparseResidency16Samples_; + return *this; } -# endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - 
auto operator<=>( ExportMetalCommandQueueInfoEXT const & ) const = default; -# else - bool operator==( ExportMetalCommandQueueInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queue == rhs.queue ) && ( mtlCommandQueue == rhs.mtlCommandQueue ); -# endif + sparseResidencyAliased = sparseResidencyAliased_; + return *this; } - bool operator!=( ExportMetalCommandQueueInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + variableMultisampleRate = variableMultisampleRate_; + return *this; } -# endif - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalCommandQueueInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Queue queue = {}; - MTLCommandQueue_id mtlCommandQueue = {}; - }; - - template <> - struct CppType - { - using Type = ExportMetalCommandQueueInfoEXT; - }; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - -#if defined( VK_USE_PLATFORM_METAL_EXT ) - struct ExportMetalDeviceInfoEXT - { - using NativeType = VkExportMetalDeviceInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalDeviceInfoEXT; - -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExportMetalDeviceInfoEXT( MTLDevice_id mtlDevice_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , mtlDevice( mtlDevice_ ) - { - } - - VULKAN_HPP_CONSTEXPR ExportMetalDeviceInfoEXT( ExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ExportMetalDeviceInfoEXT( VkExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ExportMetalDeviceInfoEXT( *reinterpret_cast( &rhs ) ) - { - } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - ExportMetalDeviceInfoEXT & operator=( ExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ExportMetalDeviceInfoEXT & operator=( VkExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ExportMetalDeviceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ExportMetalDeviceInfoEXT & setMtlDevice( MTLDevice_id mtlDevice_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setInheritedQueries( VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ ) VULKAN_HPP_NOEXCEPT { - mtlDevice = mtlDevice_; + inheritedQueries = inheritedQueries_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkExportMetalDeviceInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkExportMetalDeviceInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= 
VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, mtlDevice ); + return std::tie( robustBufferAccess, + fullDrawIndexUint32, + imageCubeArray, + independentBlend, + geometryShader, + tessellationShader, + sampleRateShading, + dualSrcBlend, + logicOp, + multiDrawIndirect, + drawIndirectFirstInstance, + depthClamp, + depthBiasClamp, + fillModeNonSolid, + depthBounds, + wideLines, + largePoints, + alphaToOne, + multiViewport, + samplerAnisotropy, + textureCompressionETC2, + textureCompressionASTC_LDR, + textureCompressionBC, + occlusionQueryPrecise, + pipelineStatisticsQuery, + vertexPipelineStoresAndAtomics, + fragmentStoresAndAtomics, + shaderTessellationAndGeometryPointSize, + shaderImageGatherExtended, + shaderStorageImageExtendedFormats, + shaderStorageImageMultisample, + shaderStorageImageReadWithoutFormat, + shaderStorageImageWriteWithoutFormat, + shaderUniformBufferArrayDynamicIndexing, + shaderSampledImageArrayDynamicIndexing, + shaderStorageBufferArrayDynamicIndexing, + shaderStorageImageArrayDynamicIndexing, + shaderClipDistance, + shaderCullDistance, + shaderFloat64, + shaderInt64, + shaderInt16, + shaderResourceResidency, + shaderResourceMinLod, + sparseBinding, + sparseResidencyBuffer, + sparseResidencyImage2D, + sparseResidencyImage3D, + sparseResidency2Samples, + sparseResidency4Samples, + sparseResidency8Samples, + sparseResidency16Samples, + sparseResidencyAliased, + variableMultisampleRate, + inheritedQueries ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExportMetalDeviceInfoEXT const & ) const = default; -# else - bool operator==( ExportMetalDeviceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mtlDevice == rhs.mtlDevice ); -# endif +# else + return ( robustBufferAccess == rhs.robustBufferAccess ) && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 ) && + ( imageCubeArray == rhs.imageCubeArray ) && ( independentBlend == rhs.independentBlend ) && ( geometryShader == rhs.geometryShader ) && + ( tessellationShader == rhs.tessellationShader ) && ( sampleRateShading == rhs.sampleRateShading ) && ( dualSrcBlend == rhs.dualSrcBlend ) && + ( logicOp == rhs.logicOp ) && ( multiDrawIndirect == rhs.multiDrawIndirect ) && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance ) && + ( depthClamp == rhs.depthClamp ) && ( depthBiasClamp == rhs.depthBiasClamp ) && ( fillModeNonSolid == rhs.fillModeNonSolid ) && + ( depthBounds == rhs.depthBounds ) && ( wideLines == rhs.wideLines ) && ( largePoints == rhs.largePoints ) && ( alphaToOne == rhs.alphaToOne ) && + ( multiViewport == rhs.multiViewport ) && ( samplerAnisotropy == rhs.samplerAnisotropy ) && + ( textureCompressionETC2 == rhs.textureCompressionETC2 ) && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR ) && + ( textureCompressionBC == rhs.textureCompressionBC ) && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise ) && + ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery ) && ( 
vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics ) && + ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics ) && + ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize ) && + ( shaderImageGatherExtended == rhs.shaderImageGatherExtended ) && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats ) && + ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample ) && + ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat ) && + ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat ) && + ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing ) && + ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing ) && + ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing ) && + ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing ) && ( shaderClipDistance == rhs.shaderClipDistance ) && + ( shaderCullDistance == rhs.shaderCullDistance ) && ( shaderFloat64 == rhs.shaderFloat64 ) && ( shaderInt64 == rhs.shaderInt64 ) && + ( shaderInt16 == rhs.shaderInt16 ) && ( shaderResourceResidency == rhs.shaderResourceResidency ) && + ( shaderResourceMinLod == rhs.shaderResourceMinLod ) && ( sparseBinding == rhs.sparseBinding ) && + ( sparseResidencyBuffer == rhs.sparseResidencyBuffer ) && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D ) && + ( sparseResidencyImage3D == rhs.sparseResidencyImage3D ) && ( sparseResidency2Samples == rhs.sparseResidency2Samples ) && + ( sparseResidency4Samples == rhs.sparseResidency4Samples ) && ( sparseResidency8Samples == rhs.sparseResidency8Samples ) && + ( sparseResidency16Samples == rhs.sparseResidency16Samples ) && ( sparseResidencyAliased == rhs.sparseResidencyAliased ) && + ( variableMultisampleRate == rhs.variableMultisampleRate ) && ( inheritedQueries == rhs.inheritedQueries ); +# endif } - bool operator!=( ExportMetalDeviceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalDeviceInfoEXT; - const void * pNext = {}; - MTLDevice_id mtlDevice = {}; - }; - - template <> - struct CppType - { - using Type = ExportMetalDeviceInfoEXT; + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentBlend = {}; + VULKAN_HPP_NAMESPACE::Bool32 geometryShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 tessellationShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading = {}; + VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend = {}; + VULKAN_HPP_NAMESPACE::Bool32 logicOp = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect = {}; + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClamp = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp = {}; + VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthBounds = {}; + VULKAN_HPP_NAMESPACE::Bool32 wideLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 largePoints = {}; + VULKAN_HPP_NAMESPACE::Bool32 alphaToOne = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiViewport = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy = {}; + VULKAN_HPP_NAMESPACE::Bool32 
textureCompressionETC2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC = {}; + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseBinding = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {}; + VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {}; + VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {}; }; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ -#if defined( VK_USE_PLATFORM_METAL_EXT ) - struct ExportMetalIOSurfaceInfoEXT + struct DeviceCreateInfo { - using NativeType = VkExportMetalIOSurfaceInfoEXT; + using NativeType = VkDeviceCreateInfo; - static const bool allowDuplicate = true; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalIoSurfaceInfoEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceCreateInfo; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - ExportMetalIOSurfaceInfoEXT( VULKAN_HPP_NAMESPACE::Image image_ = {}, IOSurfaceRef ioSurface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , image( image_ ) - , ioSurface( ioSurface_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {}, + uint32_t queueCreateInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ = {}, + uint32_t enabledLayerCount_ = {}, + const char * const * ppEnabledLayerNames_ = {}, + uint32_t enabledExtensionCount_ = {}, + const char * const * ppEnabledExtensionNames_ = {}, + const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * 
pEnabledFeatures_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , queueCreateInfoCount{ queueCreateInfoCount_ } + , pQueueCreateInfos{ pQueueCreateInfos_ } + , enabledLayerCount{ enabledLayerCount_ } + , ppEnabledLayerNames{ ppEnabledLayerNames_ } + , enabledExtensionCount{ enabledExtensionCount_ } + , ppEnabledExtensionNames{ ppEnabledExtensionNames_ } + , pEnabledFeatures{ pEnabledFeatures_ } { } - VULKAN_HPP_CONSTEXPR ExportMetalIOSurfaceInfoEXT( ExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DeviceCreateInfo( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ExportMetalIOSurfaceInfoEXT( VkExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ExportMetalIOSurfaceInfoEXT( *reinterpret_cast( &rhs ) ) + DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceCreateInfo( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueCreateInfos_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledLayerNames_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ = {}, + const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , queueCreateInfoCount( static_cast( queueCreateInfos_.size() ) ) + , pQueueCreateInfos( queueCreateInfos_.data() ) + , enabledLayerCount( static_cast( pEnabledLayerNames_.size() ) ) + , ppEnabledLayerNames( pEnabledLayerNames_.data() ) + , enabledExtensionCount( static_cast( pEnabledExtensionNames_.size() ) ) + , ppEnabledExtensionNames( pEnabledExtensionNames_.data() ) + , pEnabledFeatures( pEnabledFeatures_ ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - ExportMetalIOSurfaceInfoEXT & operator=( ExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DeviceCreateInfo & operator=( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ExportMetalIOSurfaceInfoEXT & operator=( VkExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceCreateInfo & operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - image = image_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setIoSurface( IOSurfaceRef ioSurface_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) VULKAN_HPP_NOEXCEPT { - ioSurface = ioSurface_; + queueCreateInfoCount = queueCreateInfoCount_; return *this; } -# endif 
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkExportMetalIOSurfaceInfoEXT const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkExportMetalIOSurfaceInfoEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & + setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pQueueCreateInfos = pQueueCreateInfos_; + return *this; } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo & setQueueCreateInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueCreateInfos_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, image, ioSurface ); + queueCreateInfoCount = static_cast( queueCreateInfos_.size() ); + pQueueCreateInfos = queueCreateInfos_.data(); + return *this; } -# endif +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExportMetalIOSurfaceInfoEXT const & ) const = default; -# else - bool operator==( ExportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( ioSurface == rhs.ioSurface ); -# endif + enabledLayerCount = enabledLayerCount_; + return *this; } - bool operator!=( ExportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + ppEnabledLayerNames = ppEnabledLayerNames_; + return *this; } -# endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalIoSurfaceInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; - IOSurfaceRef ioSurface = {}; - }; - - template <> - struct CppType - { - using Type = ExportMetalIOSurfaceInfoEXT; - }; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - -#if defined( VK_USE_PLATFORM_METAL_EXT ) - struct ExportMetalObjectCreateInfoEXT - { - using NativeType = VkExportMetalObjectCreateInfoEXT; - - static const bool allowDuplicate = true; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalObjectCreateInfoEXT; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExportMetalObjectCreateInfoEXT( - VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT exportObjectType_ = VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT::eMetalDevice, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , exportObjectType( exportObjectType_ ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo & + setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT { + enabledLayerCount = static_cast( pEnabledLayerNames_.size() ); + ppEnabledLayerNames = pEnabledLayerNames_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR ExportMetalObjectCreateInfoEXT( ExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ExportMetalObjectCreateInfoEXT( 
VkExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ExportMetalObjectCreateInfoEXT( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT { + enabledExtensionCount = enabledExtensionCount_; + return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ExportMetalObjectCreateInfoEXT & operator=( ExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ExportMetalObjectCreateInfoEXT & operator=( VkExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + ppEnabledExtensionNames = ppEnabledExtensionNames_; return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceCreateInfo & + setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + enabledExtensionCount = static_cast( pEnabledExtensionNames_.size() ); + ppEnabledExtensionNames = pEnabledExtensionNames_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectCreateInfoEXT & - setExportObjectType( VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT exportObjectType_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT { - exportObjectType = exportObjectType_; + pEnabledFeatures = pEnabledFeatures_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkExportMetalObjectCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkExportMetalObjectCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, exportObjectType ); - } -# endif - -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExportMetalObjectCreateInfoEXT const & ) const = default; # else - bool operator==( ExportMetalObjectCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exportObjectType == rhs.exportObjectType ); -# endif + return std::tie( sType, + pNext, + flags, + queueCreateInfoCount, + pQueueCreateInfos, + enabledLayerCount, + ppEnabledLayerNames, + enabledExtensionCount, + ppEnabledExtensionNames, + pEnabledFeatures ); } +#endif - bool operator!=( ExportMetalObjectCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( 
DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); - } -# endif + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = queueCreateInfoCount <=> rhs.queueCreateInfoCount; cmp != 0 ) + return cmp; + if ( auto cmp = pQueueCreateInfos <=> rhs.pQueueCreateInfos; cmp != 0 ) + return cmp; + if ( auto cmp = enabledLayerCount <=> rhs.enabledLayerCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < enabledLayerCount; ++i ) + { + if ( ppEnabledLayerNames[i] != rhs.ppEnabledLayerNames[i] ) + if ( auto cmp = strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ); cmp != 0 ) + return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater; + } + if ( auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < enabledExtensionCount; ++i ) + { + if ( ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i] ) + if ( auto cmp = strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ); cmp != 0 ) + return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater; + } + if ( auto cmp = pEnabledFeatures <=> rhs.pEnabledFeatures; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueCreateInfoCount == rhs.queueCreateInfoCount ) && + ( pQueueCreateInfos == rhs.pQueueCreateInfos ) && ( enabledLayerCount == rhs.enabledLayerCount ) && + std::equal( ppEnabledLayerNames, + ppEnabledLayerNames + enabledLayerCount, + rhs.ppEnabledLayerNames, + []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } ) && + ( enabledExtensionCount == rhs.enabledExtensionCount ) && + std::equal( ppEnabledExtensionNames, + ppEnabledExtensionNames + enabledExtensionCount, + rhs.ppEnabledExtensionNames, + []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } ) && + ( pEnabledFeatures == rhs.pEnabledFeatures ); + } + + bool operator!=( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalObjectCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT exportObjectType = VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT::eMetalDevice; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags = {}; + uint32_t queueCreateInfoCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo * pQueueCreateInfos = {}; + uint32_t enabledLayerCount = {}; + const char * const * ppEnabledLayerNames = {}; + uint32_t enabledExtensionCount = {}; + const char * const * ppEnabledExtensionNames = {}; + const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures = {}; }; template <> - struct CppType + struct CppType { - using Type = ExportMetalObjectCreateInfoEXT; + using Type = DeviceCreateInfo; }; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ -#if defined( VK_USE_PLATFORM_METAL_EXT ) - struct ExportMetalObjectsInfoEXT + struct DeviceMemoryReportCallbackDataEXT { - using NativeType = 
VkExportMetalObjectsInfoEXT; + using NativeType = VkDeviceMemoryReportCallbackDataEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalObjectsInfoEXT; - -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExportMetalObjectsInfoEXT( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext( pNext_ ) {} - - VULKAN_HPP_CONSTEXPR ExportMetalObjectsInfoEXT( ExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryReportCallbackDataEXT; - ExportMetalObjectsInfoEXT( VkExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ExportMetalObjectsInfoEXT( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( + VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type_ = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate, + uint64_t memoryObjectId_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, + uint64_t objectHandle_ = {}, + uint32_t heapIndex_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , type{ type_ } + , memoryObjectId{ memoryObjectId_ } + , size{ size_ } + , objectType{ objectType_ } + , objectHandle{ objectHandle_ } + , heapIndex{ heapIndex_ } { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ExportMetalObjectsInfoEXT & operator=( ExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ExportMetalObjectsInfoEXT & operator=( VkExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceMemoryReportCallbackDataEXT( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceMemoryReportCallbackDataEXT( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceMemoryReportCallbackDataEXT & operator=( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceMemoryReportCallbackDataEXT & operator=( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkExportMetalObjectsInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkDeviceMemoryReportCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkExportMetalObjectsInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkDeviceMemoryReportCallbackDataEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, 
pNext ); + return std::tie( sType, pNext, flags, type, memoryObjectId, size, objectType, objectHandle, heapIndex ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExportMetalObjectsInfoEXT const & ) const = default; -# else - bool operator==( ExportMetalObjectsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceMemoryReportCallbackDataEXT const & ) const = default; +#else + bool operator==( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( type == rhs.type ) && ( memoryObjectId == rhs.memoryObjectId ) && + ( size == rhs.size ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) && ( heapIndex == rhs.heapIndex ); +# endif } - bool operator!=( ExportMetalObjectsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalObjectsInfoEXT; - const void * pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryReportCallbackDataEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate; + uint64_t memoryObjectId = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; + uint64_t objectHandle = {}; + uint32_t heapIndex = {}; }; template <> - struct CppType + struct CppType { - using Type = ExportMetalObjectsInfoEXT; + using Type = DeviceMemoryReportCallbackDataEXT; }; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ -#if defined( VK_USE_PLATFORM_METAL_EXT ) - struct ExportMetalSharedEventInfoEXT + typedef void( VKAPI_PTR * PFN_DeviceMemoryReportCallbackEXT )( const VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT * pCallbackData, + void * pUserData ); + + struct DeviceDeviceMemoryReportCreateInfoEXT { - using NativeType = VkExportMetalSharedEventInfoEXT; + using NativeType = VkDeviceDeviceMemoryReportCreateInfoEXT; static const bool allowDuplicate = true; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalSharedEventInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExportMetalSharedEventInfoEXT( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - VULKAN_HPP_NAMESPACE::Event event_ = {}, - MTLSharedEvent_id mtlSharedEvent_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , semaphore( semaphore_ ) - , event( event_ ) - , mtlSharedEvent( mtlSharedEvent_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::PFN_DeviceMemoryReportCallbackEXT pfnUserCallback_ = {}, + 
void * pUserData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , pfnUserCallback{ pfnUserCallback_ } + , pUserData{ pUserData_ } { } - VULKAN_HPP_CONSTEXPR ExportMetalSharedEventInfoEXT( ExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ExportMetalSharedEventInfoEXT( VkExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ExportMetalSharedEventInfoEXT( *reinterpret_cast( &rhs ) ) + DeviceDeviceMemoryReportCreateInfoEXT( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceDeviceMemoryReportCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ExportMetalSharedEventInfoEXT & operator=( ExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# if defined( __clang__ ) +# pragma clang diagnostic ignored "-Wunknown-warning-option" +# endif +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + VULKAN_HPP_DEPRECATED( "This constructor is deprecated. Use the one taking function pointer types from the vk-namespace instead." ) - ExportMetalSharedEventInfoEXT & operator=( VkExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceDeviceMemoryReportCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_, + PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_, + void * pUserData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : DeviceDeviceMemoryReportCreateInfoEXT( flags_, + reinterpret_cast( pfnUserCallback_ ), + pUserData_, + pNext_ ) { - *this = *reinterpret_cast( &rhs ); + } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +# endif + + DeviceDeviceMemoryReportCreateInfoEXT & operator=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceDeviceMemoryReportCreateInfoEXT & operator=( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { - semaphore = semaphore_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setEvent( VULKAN_HPP_NAMESPACE::Event event_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & + setPfnUserCallback( VULKAN_HPP_NAMESPACE::PFN_DeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT { - event = event_; + pfnUserCallback = pfnUserCallback_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setMtlSharedEvent( MTLSharedEvent_id mtlSharedEvent_ ) 
VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT { - mtlSharedEvent = mtlSharedEvent_; + pUserData = pUserData_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkExportMetalSharedEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# if defined( __clang__ ) +# pragma clang diagnostic ignored "-Wunknown-warning-option" +# endif +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." ) + + DeviceDeviceMemoryReportCreateInfoEXT & setPfnUserCallback( PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return setPfnUserCallback( reinterpret_cast( pfnUserCallback_ ) ); } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +# endif +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkExportMetalSharedEventInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkDeviceDeviceMemoryReportCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkDeviceDeviceMemoryReportCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT const &, + VULKAN_HPP_NAMESPACE::PFN_DeviceMemoryReportCallbackEXT const &, + void * const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, semaphore, event, mtlSharedEvent ); + return std::tie( sType, pNext, flags, pfnUserCallback, pUserData ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExportMetalSharedEventInfoEXT const & ) const = default; -# else - bool operator==( ExportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +#if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( event == rhs.event ) && - ( mtlSharedEvent == rhs.mtlSharedEvent ); -# endif +#else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pfnUserCallback == rhs.pfnUserCallback ) && + ( pUserData == rhs.pUserData ); +#endif } - bool operator!=( ExportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalSharedEventInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - VULKAN_HPP_NAMESPACE::Event event = {}; - MTLSharedEvent_id mtlSharedEvent = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::PFN_DeviceMemoryReportCallbackEXT pfnUserCallback = {}; + void * 
pUserData = {}; }; template <> - struct CppType + struct CppType { - using Type = ExportMetalSharedEventInfoEXT; + using Type = DeviceDeviceMemoryReportCreateInfoEXT; }; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ -#if defined( VK_USE_PLATFORM_METAL_EXT ) - struct ExportMetalTextureInfoEXT + struct DeviceDiagnosticsConfigCreateInfoNV { - using NativeType = VkExportMetalTextureInfoEXT; + using NativeType = VkDeviceDiagnosticsConfigCreateInfoNV; - static const bool allowDuplicate = true; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalTextureInfoEXT; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExportMetalTextureInfoEXT( VULKAN_HPP_NAMESPACE::Image image_ = {}, - VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, - VULKAN_HPP_NAMESPACE::BufferView bufferView_ = {}, - VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, - MTLTexture_id mtlTexture_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , image( image_ ) - , imageView( imageView_ ) - , bufferView( bufferView_ ) - , plane( plane_ ) - , mtlTexture( mtlTexture_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } { } - VULKAN_HPP_CONSTEXPR ExportMetalTextureInfoEXT( ExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ExportMetalTextureInfoEXT( VkExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ExportMetalTextureInfoEXT( *reinterpret_cast( &rhs ) ) + DeviceDiagnosticsConfigCreateInfoNV( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceDiagnosticsConfigCreateInfoNV( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ExportMetalTextureInfoEXT & operator=( ExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DeviceDiagnosticsConfigCreateInfoNV & operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ExportMetalTextureInfoEXT & operator=( VkExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceDiagnosticsConfigCreateInfoNV & operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT - { - image = image_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setImageView( 
VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT - { - imageView = imageView_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView_ ) VULKAN_HPP_NOEXCEPT - { - bufferView = bufferView_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setPlane( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ ) VULKAN_HPP_NOEXCEPT - { - plane = plane_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setMtlTexture( MTLTexture_id mtlTexture_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT { - mtlTexture = mtlTexture_; + flags = flags_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkExportMetalTextureInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkDeviceDiagnosticsConfigCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkExportMetalTextureInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, image, imageView, bufferView, plane, mtlTexture ); + return std::tie( sType, pNext, flags ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExportMetalTextureInfoEXT const & ) const = default; -# else - bool operator==( ExportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceDiagnosticsConfigCreateInfoNV const & ) const = default; +#else + bool operator==( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( imageView == rhs.imageView ) && ( bufferView == rhs.bufferView ) && - ( plane == rhs.plane ) && ( mtlTexture == rhs.mtlTexture ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif } - bool operator!=( ExportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalTextureInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; - VULKAN_HPP_NAMESPACE::ImageView imageView = {}; - VULKAN_HPP_NAMESPACE::BufferView bufferView = {}; - VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; - MTLTexture_id mtlTexture = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags = {}; }; template <> - struct CppType + struct CppType { - using Type = 
ExportMetalTextureInfoEXT; + using Type = DeviceDiagnosticsConfigCreateInfoNV; }; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - struct ExportSemaphoreCreateInfo + struct DeviceEventInfoEXT { - using NativeType = VkExportSemaphoreCreateInfo; + using NativeType = VkDeviceEventInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceEventInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleTypes( handleTypes_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceEvent{ deviceEvent_ } { } - VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : ExportSemaphoreCreateInfo( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceEventInfoEXT( *reinterpret_cast( &rhs ) ) {} - ExportSemaphoreCreateInfo & operator=( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DeviceEventInfoEXT & operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ExportSemaphoreCreateInfo & operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceEventInfoEXT & operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo & - setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & setDeviceEvent( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT { - handleTypes = handleTypes_; + deviceEvent = deviceEvent_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkExportSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkDeviceEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 
14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, handleTypes ); + return std::tie( sType, pNext, deviceEvent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExportSemaphoreCreateInfo const & ) const = default; + auto operator<=>( DeviceEventInfoEXT const & ) const = default; #else - bool operator==( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceEvent == rhs.deviceEvent ); # endif } - bool operator!=( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug; }; template <> - struct CppType + struct CppType { - using Type = ExportSemaphoreCreateInfo; + using Type = DeviceEventInfoEXT; }; - using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo; -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - struct ExportSemaphoreWin32HandleInfoKHR + struct DeviceFaultAddressInfoEXT { - using NativeType = VkExportSemaphoreWin32HandleInfoKHR; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreWin32HandleInfoKHR; + using NativeType = VkDeviceFaultAddressInfoEXT; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {}, - DWORD dwAccess_ = {}, - LPCWSTR name_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pAttributes( pAttributes_ ) - , dwAccess( dwAccess_ ) - , name( name_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceFaultAddressInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT addressType_ = VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT::eNone, + VULKAN_HPP_NAMESPACE::DeviceAddress reportedAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize addressPrecision_ = {} ) VULKAN_HPP_NOEXCEPT + : addressType{ addressType_ } + , reportedAddress{ reportedAddress_ } + , addressPrecision{ addressPrecision_ } { } - VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DeviceFaultAddressInfoEXT( DeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : ExportSemaphoreWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + DeviceFaultAddressInfoEXT( VkDeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceFaultAddressInfoEXT( *reinterpret_cast( &rhs ) ) { } -# 
endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - ExportSemaphoreWin32HandleInfoKHR & operator=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ExportSemaphoreWin32HandleInfoKHR & operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + DeviceFaultAddressInfoEXT & operator=( DeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + DeviceFaultAddressInfoEXT & operator=( VkDeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setAddressType( VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT addressType_ ) VULKAN_HPP_NOEXCEPT { - pAttributes = pAttributes_; + addressType = addressType_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setReportedAddress( VULKAN_HPP_NAMESPACE::DeviceAddress reportedAddress_ ) VULKAN_HPP_NOEXCEPT { - dwAccess = dwAccess_; + reportedAddress = reportedAddress_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setAddressPrecision( VULKAN_HPP_NAMESPACE::DeviceSize addressPrecision_ ) VULKAN_HPP_NOEXCEPT { - name = name_; + addressPrecision = addressPrecision_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkExportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkDeviceFaultAddressInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkDeviceFaultAddressInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pAttributes, dwAccess, name ); + return std::tie( addressType, reportedAddress, addressPrecision ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExportSemaphoreWin32HandleInfoKHR const & ) const = default; -# else - bool operator==( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceFaultAddressInfoEXT const & ) const = default; +#else + bool operator==( DeviceFaultAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == 
rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ) && ( name == rhs.name ); -# endif +# else + return ( addressType == rhs.addressType ) && ( reportedAddress == rhs.reportedAddress ) && ( addressPrecision == rhs.addressPrecision ); +# endif } - bool operator!=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceFaultAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR; - const void * pNext = {}; - const SECURITY_ATTRIBUTES * pAttributes = {}; - DWORD dwAccess = {}; - LPCWSTR name = {}; + VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT addressType = VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT::eNone; + VULKAN_HPP_NAMESPACE::DeviceAddress reportedAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize addressPrecision = {}; }; - template <> - struct CppType + struct DeviceFaultCountsEXT { - using Type = ExportSemaphoreWin32HandleInfoKHR; - }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + using NativeType = VkDeviceFaultCountsEXT; - struct ExtensionProperties - { - using NativeType = VkExtensionProperties; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceFaultCountsEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( std::array const & extensionName_ = {}, - uint32_t specVersion_ = {} ) VULKAN_HPP_NOEXCEPT - : extensionName( extensionName_ ) - , specVersion( specVersion_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceFaultCountsEXT( uint32_t addressInfoCount_ = {}, + uint32_t vendorInfoCount_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize vendorBinarySize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , addressInfoCount{ addressInfoCount_ } + , vendorInfoCount{ vendorInfoCount_ } + , vendorBinarySize{ vendorBinarySize_ } { } - VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DeviceFaultCountsEXT( DeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT : ExtensionProperties( *reinterpret_cast( &rhs ) ) + DeviceFaultCountsEXT( VkDeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceFaultCountsEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ExtensionProperties & operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DeviceFaultCountsEXT & operator=( DeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ExtensionProperties & operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceFaultCountsEXT & operator=( VkDeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkExtensionProperties const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
DeviceFaultCountsEXT & setAddressInfoCount( uint32_t addressInfoCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + addressInfoCount = addressInfoCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setVendorInfoCount( uint32_t vendorInfoCount_ ) VULKAN_HPP_NOEXCEPT + { + vendorInfoCount = vendorInfoCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setVendorBinarySize( VULKAN_HPP_NAMESPACE::DeviceSize vendorBinarySize_ ) VULKAN_HPP_NOEXCEPT + { + vendorBinarySize = vendorBinarySize_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceFaultCountsEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceFaultCountsEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple const &, uint32_t const &> + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( extensionName, specVersion ); + return std::tie( sType, pNext, addressInfoCount, vendorInfoCount, vendorBinarySize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExtensionProperties const & ) const = default; + auto operator<=>( DeviceFaultCountsEXT const & ) const = default; #else - bool operator==( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceFaultCountsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( extensionName == rhs.extensionName ) && ( specVersion == rhs.specVersion ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( addressInfoCount == rhs.addressInfoCount ) && ( vendorInfoCount == rhs.vendorInfoCount ) && + ( vendorBinarySize == rhs.vendorBinarySize ); # endif } - bool operator!=( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceFaultCountsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ArrayWrapper1D extensionName = {}; - uint32_t specVersion = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceFaultCountsEXT; + void * pNext = {}; + uint32_t addressInfoCount = {}; + uint32_t vendorInfoCount = {}; + VULKAN_HPP_NAMESPACE::DeviceSize vendorBinarySize = {}; }; - struct ExternalMemoryProperties + template <> + struct CppType { - using NativeType = VkExternalMemoryProperties; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : externalMemoryFeatures( externalMemoryFeatures_ ) - , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) - , compatibleHandleTypes( compatibleHandleTypes_ ) - { - } + using Type = DeviceFaultCountsEXT; + }; - VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + struct DeviceFaultVendorInfoEXT + { + using NativeType = VkDeviceFaultVendorInfoEXT; - ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : ExternalMemoryProperties( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT( std::array const & description_ = {}, + uint64_t vendorFaultCode_ = {}, + uint64_t vendorFaultData_ = {} ) VULKAN_HPP_NOEXCEPT + : description{ description_ } + , vendorFaultCode{ vendorFaultCode_ } + , vendorFaultData{ vendorFaultData_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ExternalMemoryProperties & operator=( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ExternalMemoryProperties & operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT( DeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - operator VkExternalMemoryProperties const &() const VULKAN_HPP_NOEXCEPT + DeviceFaultVendorInfoEXT( VkDeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceFaultVendorInfoEXT( *reinterpret_cast( &rhs ) ) { - return *reinterpret_cast( this ); } - operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceFaultVendorInfoEXT( std::string const & description_, uint64_t vendorFaultCode_ = {}, uint64_t vendorFaultData_ = {} ) + : vendorFaultCode( vendorFaultCode_ ), vendorFaultData( vendorFaultData_ ) { - return *reinterpret_cast( this ); + VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); +# if defined( _WIN32 ) + strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); +# else + strncpy( description, description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); +# endif } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes ); - } -#endif + DeviceFaultVendorInfoEXT & operator=( DeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExternalMemoryProperties const & ) const = default; -#else - bool operator==( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + DeviceFaultVendorInfoEXT & operator=( VkDeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( externalMemoryFeatures == rhs.externalMemoryFeatures ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && - ( compatibleHandleTypes == rhs.compatibleHandleTypes ); -# endif + *this = *reinterpret_cast( &rhs ); + return *this; } - bool operator!=( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setDescription( std::array description_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + description = description_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {}; - }; - using ExternalMemoryPropertiesKHR = ExternalMemoryProperties; - - struct 
ExternalBufferProperties - { - using NativeType = VkExternalBufferProperties; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalBufferProperties; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExternalBufferProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , externalMemoryProperties( externalMemoryProperties_ ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceFaultVendorInfoEXT & setDescription( std::string const & description_ ) VULKAN_HPP_NOEXCEPT { + VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); +# if defined( _WIN32 ) + strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); +# else + strncpy( description, description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); +# endif + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR ExternalBufferProperties( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : ExternalBufferProperties( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setVendorFaultCode( uint64_t vendorFaultCode_ ) VULKAN_HPP_NOEXCEPT { + vendorFaultCode = vendorFaultCode_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - ExternalBufferProperties & operator=( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ExternalBufferProperties & operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setVendorFaultData( uint64_t vendorFaultData_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + vendorFaultData = vendorFaultData_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkExternalBufferProperties const &() const VULKAN_HPP_NOEXCEPT + operator VkDeviceFaultVendorInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT + operator VkDeviceFaultVendorInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple const &, uint64_t const &, uint64_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, externalMemoryProperties ); + return std::tie( description, vendorFaultCode, vendorFaultData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExternalBufferProperties const & ) const = default; -#else - bool operator==( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + std::strong_ordering operator<=>( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryProperties == rhs.externalMemoryProperties ); -# endif + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = vendorFaultCode <=> rhs.vendorFaultCode; cmp != 0 ) + return cmp; + if ( auto cmp = vendorFaultData <=> rhs.vendorFaultData; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; } +#endif - bool operator!=( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( strcmp( description, rhs.description ) == 0 ) && ( vendorFaultCode == rhs.vendorFaultCode ) && ( vendorFaultData == rhs.vendorFaultData ); + } + + bool operator!=( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {}; - }; - - template <> - struct CppType - { - using Type = ExternalBufferProperties; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + uint64_t vendorFaultCode = {}; + uint64_t vendorFaultData = {}; }; - using ExternalBufferPropertiesKHR = ExternalBufferProperties; - struct ExternalFenceProperties + struct DeviceFaultInfoEXT { - using NativeType = VkExternalFenceProperties; + using NativeType = VkDeviceFaultInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFenceProperties; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceFaultInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExternalFenceProperties( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) - , compatibleHandleTypes( compatibleHandleTypes_ ) - , externalFenceFeatures( externalFenceFeatures_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT( std::array const & description_ = {}, + VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * pAddressInfos_ = {}, + VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * pVendorInfos_ = {}, + void * pVendorBinaryData_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , description{ description_ } + , pAddressInfos{ pAddressInfos_ } + , pVendorInfos{ pVendorInfos_ } + , pVendorBinaryData{ pVendorBinaryData_ } { } - VULKAN_HPP_CONSTEXPR ExternalFenceProperties( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT( DeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : ExternalFenceProperties( *reinterpret_cast( &rhs ) ) + DeviceFaultInfoEXT( VkDeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceFaultInfoEXT( *reinterpret_cast( &rhs ) ) {} + + DeviceFaultInfoEXT & operator=( DeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# else + DeviceFaultInfoEXT( DeviceFaultInfoEXT const & ) = delete; + DeviceFaultInfoEXT & operator=( DeviceFaultInfoEXT const & ) = 
delete; + + DeviceFaultInfoEXT( DeviceFaultInfoEXT && rhs ) VULKAN_HPP_NOEXCEPT + : pNext{ rhs.pNext } + , pAddressInfos{ rhs.pAddressInfos } + , pVendorInfos{ rhs.pVendorInfos } + , pVendorBinaryData{ rhs.pVendorBinaryData } { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + memcpy( description, rhs.description, VK_MAX_DESCRIPTION_SIZE ); - ExternalFenceProperties & operator=( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + rhs.pNext = nullptr; + memset( rhs.description, 0, VK_MAX_DESCRIPTION_SIZE ); + rhs.pAddressInfos = nullptr; + rhs.pVendorInfos = nullptr; + rhs.pVendorBinaryData = nullptr; + } - ExternalFenceProperties & operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceFaultInfoEXT & operator=( DeviceFaultInfoEXT && rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + free( pAddressInfos ); + free( pVendorInfos ); + free( pVendorBinaryData ); + + pNext = rhs.pNext; + memcpy( description, rhs.description, VK_MAX_DESCRIPTION_SIZE ); + pAddressInfos = rhs.pAddressInfos; + pVendorInfos = rhs.pVendorInfos; + pVendorBinaryData = rhs.pVendorBinaryData; + + rhs.pNext = nullptr; + memset( rhs.description, 0, VK_MAX_DESCRIPTION_SIZE ); + rhs.pAddressInfos = nullptr; + rhs.pVendorInfos = nullptr; + rhs.pVendorBinaryData = nullptr; + return *this; } - operator VkExternalFenceProperties const &() const VULKAN_HPP_NOEXCEPT + ~DeviceFaultInfoEXT() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + free( pAddressInfos ); + free( pVendorInfos ); + free( pVendorBinaryData ); } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT + operator VkDeviceFaultInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkDeviceFaultInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -29179,947 +32024,1101 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * const &, + VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * const &, + void * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, exportFromImportedHandleTypes, compatibleHandleTypes, externalFenceFeatures ); + return std::tie( sType, pNext, description, pAddressInfos, pVendorInfos, pVendorBinaryData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExternalFenceProperties const & ) const = default; -#else - bool operator==( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + std::strong_ordering operator<=>( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && - ( compatibleHandleTypes == rhs.compatibleHandleTypes ) && ( externalFenceFeatures == rhs.externalFenceFeatures ); -# endif + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = pAddressInfos <=> rhs.pAddressInfos; cmp != 0 ) + return cmp; + if ( auto cmp = pVendorInfos <=> rhs.pVendorInfos; cmp != 0 ) + return cmp; + if ( auto cmp = pVendorBinaryData <=> rhs.pVendorBinaryData; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; } +#endif - bool operator!=( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( strcmp( description, rhs.description ) == 0 ) && ( pAddressInfos == rhs.pAddressInfos ) && + ( pVendorInfos == rhs.pVendorInfos ) && ( pVendorBinaryData == rhs.pVendorBinaryData ); + } + + bool operator!=( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceFaultInfoEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT * pAddressInfos = {}; + VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * pVendorInfos = {}; + void * pVendorBinaryData = {}; }; template <> - struct CppType + struct CppType { - using Type = ExternalFenceProperties; + using Type = DeviceFaultInfoEXT; }; - using ExternalFencePropertiesKHR = ExternalFenceProperties; -#if defined( VK_USE_PLATFORM_ANDROID_KHR ) - struct ExternalFormatANDROID + struct DeviceFaultVendorBinaryHeaderVersionOneEXT { - using NativeType = VkExternalFormatANDROID; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatANDROID; + using NativeType = VkDeviceFaultVendorBinaryHeaderVersionOneEXT; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( uint64_t externalFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , externalFormat( externalFormat_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT( + uint32_t headerSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT headerVersion_ = VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT::eOne, + uint32_t vendorID_ = {}, + uint32_t deviceID_ = {}, + uint32_t driverVersion_ = {}, + std::array const & pipelineCacheUUID_ = {}, + uint32_t applicationNameOffset_ = {}, + uint32_t applicationVersion_ = {}, + uint32_t engineNameOffset_ = {}, + uint32_t engineVersion_ = {}, + uint32_t apiVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : headerSize{ headerSize_ } + , headerVersion{ headerVersion_ } + , vendorID{ vendorID_ } + , deviceID{ deviceID_ } + , driverVersion{ driverVersion_ } + , pipelineCacheUUID{ pipelineCacheUUID_ } + , applicationNameOffset{ applicationNameOffset_ } + , applicationVersion{ applicationVersion_ } + , engineNameOffset{ engineNameOffset_ } + , engineVersion{ engineVersion_ } + , apiVersion{ apiVersion_ } { } - VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( 
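// Illustrative sketch (editor's note, not part of the generated header): the fault
// structures defined above are filled by the VK_EXT_device_fault query, which follows
// the usual two-call pattern. Written against the C types the wrappers mirror; assumes
// pfnGetDeviceFaultInfoEXT was loaded via vkGetDeviceProcAddr after VK_ERROR_DEVICE_LOST.
#include <cstdio>
#include <vector>
#include <vulkan/vulkan.h>

static void dumpDeviceFault( VkDevice device, PFN_vkGetDeviceFaultInfoEXT pfnGetDeviceFaultInfoEXT )
{
  // 1) Query how many address/vendor records and how large a vendor binary to expect.
  VkDeviceFaultCountsEXT counts{};
  counts.sType = VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT;
  pfnGetDeviceFaultInfoEXT( device, &counts, nullptr );

  // 2) Provide storage sized from those counts and fetch the fault data itself.
  std::vector<VkDeviceFaultAddressInfoEXT> addressInfos( counts.addressInfoCount );
  std::vector<VkDeviceFaultVendorInfoEXT>  vendorInfos( counts.vendorInfoCount );
  std::vector<uint8_t>                     vendorBinary( static_cast<size_t>( counts.vendorBinarySize ) );

  VkDeviceFaultInfoEXT info{};
  info.sType             = VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT;
  info.pAddressInfos     = addressInfos.data();
  info.pVendorInfos      = vendorInfos.data();
  info.pVendorBinaryData = vendorBinary.data();
  pfnGetDeviceFaultInfoEXT( device, &counts, &info );

  std::printf( "device fault: %s\n", info.description );  // description is null-terminated
}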
ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - : ExternalFormatANDROID( *reinterpret_cast( &rhs ) ) + DeviceFaultVendorBinaryHeaderVersionOneEXT( VkDeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceFaultVendorBinaryHeaderVersionOneEXT( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ExternalFormatANDROID & operator=( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DeviceFaultVendorBinaryHeaderVersionOneEXT & operator=( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ExternalFormatANDROID & operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceFaultVendorBinaryHeaderVersionOneEXT & operator=( VkDeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setHeaderSize( uint32_t headerSize_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + headerSize = headerSize_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & + setHeaderVersion( VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT headerVersion_ ) VULKAN_HPP_NOEXCEPT { - externalFormat = externalFormat_; + headerVersion = headerVersion_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkExternalFormatANDROID const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setVendorID( uint32_t vendorID_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + vendorID = vendorID_; + return *this; } - operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setDeviceID( uint32_t deviceID_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + deviceID = deviceID_; + return *this; } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setDriverVersion( uint32_t driverVersion_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, externalFormat ); + driverVersion = driverVersion_; + return *this; } -# endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExternalFormatANDROID const & ) const = default; -# else - bool operator==( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & + setPipelineCacheUUID( std::array pipelineCacheUUID_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormat == 
rhs.externalFormat ); -# endif + pipelineCacheUUID = pipelineCacheUUID_; + return *this; } - bool operator!=( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApplicationNameOffset( uint32_t applicationNameOffset_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + applicationNameOffset = applicationNameOffset_; + return *this; } -# endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatANDROID; - void * pNext = {}; - uint64_t externalFormat = {}; - }; - - template <> - struct CppType - { - using Type = ExternalFormatANDROID; - }; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - struct ExternalImageFormatProperties - { - using NativeType = VkExternalImageFormatProperties; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalImageFormatProperties; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , externalMemoryProperties( externalMemoryProperties_ ) + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApplicationVersion( uint32_t applicationVersion_ ) VULKAN_HPP_NOEXCEPT { + applicationVersion = applicationVersion_; + return *this; } - VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : ExternalImageFormatProperties( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setEngineNameOffset( uint32_t engineNameOffset_ ) VULKAN_HPP_NOEXCEPT { + engineNameOffset = engineNameOffset_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ExternalImageFormatProperties & operator=( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setEngineVersion( uint32_t engineVersion_ ) VULKAN_HPP_NOEXCEPT + { + engineVersion = engineVersion_; + return *this; + } - ExternalImageFormatProperties & operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApiVersion( uint32_t apiVersion_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + apiVersion = apiVersion_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkExternalImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT + operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT + operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, externalMemoryProperties ); + return std::tie( headerSize, + headerVersion, + vendorID, + 
deviceID, + driverVersion, + pipelineCacheUUID, + applicationNameOffset, + applicationVersion, + engineNameOffset, + engineVersion, + apiVersion ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExternalImageFormatProperties const & ) const = default; + auto operator<=>( DeviceFaultVendorBinaryHeaderVersionOneEXT const & ) const = default; #else - bool operator==( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryProperties == rhs.externalMemoryProperties ); + return ( headerSize == rhs.headerSize ) && ( headerVersion == rhs.headerVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && + ( driverVersion == rhs.driverVersion ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) && + ( applicationNameOffset == rhs.applicationNameOffset ) && ( applicationVersion == rhs.applicationVersion ) && + ( engineNameOffset == rhs.engineNameOffset ) && ( engineVersion == rhs.engineVersion ) && ( apiVersion == rhs.apiVersion ); # endif } - bool operator!=( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {}; + uint32_t headerSize = {}; + VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT headerVersion = VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT::eOne; + uint32_t vendorID = {}; + uint32_t deviceID = {}; + uint32_t driverVersion = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D pipelineCacheUUID = {}; + uint32_t applicationNameOffset = {}; + uint32_t applicationVersion = {}; + uint32_t engineNameOffset = {}; + uint32_t engineVersion = {}; + uint32_t apiVersion = {}; }; - template <> - struct CppType + struct DeviceGroupBindSparseInfo { - using Type = ExternalImageFormatProperties; - }; - using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties; + using NativeType = VkDeviceGroupBindSparseInfo; - struct ImageFormatProperties - { - using NativeType = VkImageFormatProperties; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupBindSparseInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageFormatProperties( VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {}, - uint32_t maxMipLevels_ = {}, - uint32_t maxArrayLayers_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {} ) VULKAN_HPP_NOEXCEPT - : maxExtent( maxExtent_ ) - , maxMipLevels( maxMipLevels_ ) - , maxArrayLayers( maxArrayLayers_ ) - , sampleCounts( sampleCounts_ ) - , maxResourceSize( maxResourceSize_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = {}, uint32_t memoryDeviceIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , resourceDeviceIndex{ resourceDeviceIndex_ } + , 
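// Illustrative sketch (editor's note, not part of the generated header): per the
// extension, a version-one vendor binary blob returned through
// VkDeviceFaultInfoEXT::pVendorBinaryData starts with the header defined above, so it
// can be recovered with a plain memcpy. Parameter names here are placeholders.
#include <cstring>
#include <vulkan/vulkan.h>

static bool readVendorBinaryHeader( const void * blob, size_t blobSize, VkDeviceFaultVendorBinaryHeaderVersionOneEXT & header )
{
  if ( ( blob == nullptr ) || ( blobSize < sizeof( VkDeviceFaultVendorBinaryHeaderVersionOneEXT ) ) )
    return false;
  std::memcpy( &header, blob, sizeof( header ) );
  return header.headerVersion == VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_ONE_EXT;
}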
memoryDeviceIndex{ memoryDeviceIndex_ } { } - VULKAN_HPP_CONSTEXPR ImageFormatProperties( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageFormatProperties( *reinterpret_cast( &rhs ) ) + DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupBindSparseInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImageFormatProperties & operator=( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DeviceGroupBindSparseInfo & operator=( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImageFormatProperties & operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceGroupBindSparseInfo & operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + resourceDeviceIndex = resourceDeviceIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) VULKAN_HPP_NOEXCEPT + { + memoryDeviceIndex = memoryDeviceIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceGroupBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( maxExtent, maxMipLevels, maxArrayLayers, sampleCounts, maxResourceSize ); + return std::tie( sType, pNext, resourceDeviceIndex, memoryDeviceIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageFormatProperties const & ) const = default; + auto operator<=>( DeviceGroupBindSparseInfo const & ) const = default; #else - bool operator==( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( maxExtent == rhs.maxExtent ) && ( maxMipLevels == rhs.maxMipLevels ) && ( maxArrayLayers == rhs.maxArrayLayers ) && - ( sampleCounts == rhs.sampleCounts ) && ( maxResourceSize == rhs.maxResourceSize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( resourceDeviceIndex == rhs.resourceDeviceIndex ) && + ( memoryDeviceIndex == rhs.memoryDeviceIndex ); # endif } - bool operator!=( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceGroupBindSparseInfo const & rhs 
) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::Extent3D maxExtent = {}; - uint32_t maxMipLevels = {}; - uint32_t maxArrayLayers = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {}; - VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo; + const void * pNext = {}; + uint32_t resourceDeviceIndex = {}; + uint32_t memoryDeviceIndex = {}; }; - struct ExternalImageFormatPropertiesNV + template <> + struct CppType { - using NativeType = VkExternalImageFormatPropertiesNV; + using Type = DeviceGroupBindSparseInfo; + }; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - ExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : imageFormatProperties( imageFormatProperties_ ) - , externalMemoryFeatures( externalMemoryFeatures_ ) - , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) - , compatibleHandleTypes( compatibleHandleTypes_ ) + using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo; + + struct DeviceGroupCommandBufferBeginInfo + { + using NativeType = VkDeviceGroupCommandBufferBeginInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupCommandBufferBeginInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceMask{ deviceMask_ } { } - VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : ExternalImageFormatPropertiesNV( *reinterpret_cast( &rhs ) ) + DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupCommandBufferBeginInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ExternalImageFormatPropertiesNV & operator=( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DeviceGroupCommandBufferBeginInfo & operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ExternalImageFormatPropertiesNV & operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceGroupCommandBufferBeginInfo & operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkExternalImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return 
*reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + deviceMask = deviceMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceGroupCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( imageFormatProperties, externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes ); + return std::tie( sType, pNext, deviceMask ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExternalImageFormatPropertiesNV const & ) const = default; + auto operator<=>( DeviceGroupCommandBufferBeginInfo const & ) const = default; #else - bool operator==( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( imageFormatProperties == rhs.imageFormatProperties ) && ( externalMemoryFeatures == rhs.externalMemoryFeatures ) && - ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && ( compatibleHandleTypes == rhs.compatibleHandleTypes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask ); # endif } - bool operator!=( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo; + const void * pNext = {}; + uint32_t deviceMask = {}; }; - struct ExternalMemoryBufferCreateInfo + template <> + struct CppType { - using NativeType = VkExternalMemoryBufferCreateInfo; + using Type = DeviceGroupCommandBufferBeginInfo; + }; + + using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo; + + struct DeviceGroupDeviceCreateInfo + { + using NativeType = VkDeviceGroupDeviceCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryBufferCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupDeviceCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleTypes( handleTypes_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
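// Illustrative sketch (editor's note, not part of the generated header):
// DeviceGroupCommandBufferBeginInfo is consumed by chaining it into
// CommandBufferBeginInfo::pNext; deviceMask selects which devices of the group execute
// the recorded commands. Handles are assumed to exist already.
#include <vulkan/vulkan.hpp>

static void beginForGroupDevices( vk::CommandBuffer cmd, uint32_t groupDeviceMask )
{
  vk::DeviceGroupCommandBufferBeginInfo groupBegin( groupDeviceMask );
  vk::CommandBufferBeginInfo            beginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit );
  beginInfo.pNext = &groupBegin;  // extend the begin info with the device mask
  cmd.begin( beginInfo );
}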
DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = {}, + const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , physicalDeviceCount{ physicalDeviceCount_ } + , pPhysicalDevices{ pPhysicalDevices_ } { } - VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : ExternalMemoryBufferCreateInfo( *reinterpret_cast( &rhs ) ) + DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupDeviceCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ExternalMemoryBufferCreateInfo & operator=( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupDeviceCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & physicalDevices_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), physicalDeviceCount( static_cast( physicalDevices_.size() ) ), pPhysicalDevices( physicalDevices_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - ExternalMemoryBufferCreateInfo & operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceGroupDeviceCreateInfo & operator=( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceGroupDeviceCreateInfo & operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo & - setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT { - handleTypes = handleTypes_; + physicalDeviceCount = physicalDeviceCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkExternalMemoryBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & + setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pPhysicalDevices = pPhysicalDevices_; + return *this; } - operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupDeviceCreateInfo & setPhysicalDevices( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & physicalDevices_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + physicalDeviceCount = static_cast( physicalDevices_.size() ); + pPhysicalDevices = physicalDevices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif 
/*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceGroupDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, handleTypes ); + return std::tie( sType, pNext, physicalDeviceCount, pPhysicalDevices ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExternalMemoryBufferCreateInfo const & ) const = default; + auto operator<=>( DeviceGroupDeviceCreateInfo const & ) const = default; #else - bool operator==( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) && + ( pPhysicalDevices == rhs.pPhysicalDevices ); # endif } - bool operator!=( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo; + const void * pNext = {}; + uint32_t physicalDeviceCount = {}; + const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices = {}; }; template <> - struct CppType + struct CppType { - using Type = ExternalMemoryBufferCreateInfo; + using Type = DeviceGroupDeviceCreateInfo; }; - using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo; - struct ExternalMemoryImageCreateInfo + using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo; + + struct DeviceGroupPresentCapabilitiesKHR { - using NativeType = VkExternalMemoryImageCreateInfo; + using NativeType = VkDeviceGroupPresentCapabilitiesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfo; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleTypes( handleTypes_ ) - { - } - - VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentCapabilitiesKHR; - ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : ExternalMemoryImageCreateInfo( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( std::array const & presentMask_ = {}, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}, + void * 
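// Illustrative sketch (editor's note, not part of the generated header):
// DeviceGroupDeviceCreateInfo is chained into DeviceCreateInfo::pNext so that the
// resulting logical device spans a whole device group. physicalDevices is assumed to
// hold the members of one group, e.g. as returned by
// Instance::enumeratePhysicalDeviceGroups(); default exception-based bindings assumed.
#include <vector>
#include <vulkan/vulkan.hpp>

static vk::Device createGroupedDevice( vk::PhysicalDevice                      anyGroupMember,
                                       std::vector<vk::PhysicalDevice> const & physicalDevices,
                                       vk::DeviceQueueCreateInfo const &       queueInfo )
{
  vk::DeviceGroupDeviceCreateInfo groupInfo( physicalDevices );
  vk::DeviceCreateInfo            createInfo( {}, queueInfo );  // one queue create info, no layers/extensions
  createInfo.pNext = &groupInfo;
  return anyGroupMember.createDevice( createInfo );
}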
pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentMask{ presentMask_ } + , modes{ modes_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ExternalMemoryImageCreateInfo & operator=( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ExternalMemoryImageCreateInfo & operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupPresentCapabilitiesKHR( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + DeviceGroupPresentCapabilitiesKHR & operator=( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo & - setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + DeviceGroupPresentCapabilitiesKHR & operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - handleTypes = handleTypes_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkExternalMemoryImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkDeviceGroupPresentCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple const &, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, handleTypes ); + return std::tie( sType, pNext, presentMask, modes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExternalMemoryImageCreateInfo const & ) const = default; + auto operator<=>( DeviceGroupPresentCapabilitiesKHR const & ) const = default; #else - bool operator==( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentMask == rhs.presentMask ) && ( modes == rhs.modes ); # endif } - bool operator!=( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eDeviceGroupPresentCapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D presentMask = {}; + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {}; }; template <> - struct CppType + struct CppType { - using Type = ExternalMemoryImageCreateInfo; + using Type = DeviceGroupPresentCapabilitiesKHR; }; - using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo; - struct ExternalMemoryImageCreateInfoNV + struct DeviceGroupPresentInfoKHR { - using NativeType = VkExternalMemoryImageCreateInfoNV; + using NativeType = VkDeviceGroupPresentInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfoNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleTypes( handleTypes_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( + uint32_t swapchainCount_ = {}, + const uint32_t * pDeviceMasks_ = {}, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , swapchainCount{ swapchainCount_ } + , pDeviceMasks{ pDeviceMasks_ } + , mode{ mode_ } { } - VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : ExternalMemoryImageCreateInfoNV( *reinterpret_cast( &rhs ) ) + DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupPresentInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ExternalMemoryImageCreateInfoNV & operator=( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupPresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceMasks_, + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), swapchainCount( static_cast( deviceMasks_.size() ) ), pDeviceMasks( deviceMasks_.data() ), mode( mode_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - ExternalMemoryImageCreateInfoNV & operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceGroupPresentInfoKHR & operator=( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceGroupPresentInfoKHR & operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( 
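// Illustrative sketch (editor's note, not part of the generated header):
// DeviceGroupPresentCapabilitiesKHR is purely an output structure. Assuming the default
// exception-based bindings, the wrapper around vkGetDeviceGroupPresentCapabilitiesKHR
// returns it directly; presentMask and modes describe which group members can present
// and in which device-group present modes.
#include <vulkan/vulkan.hpp>

static vk::DeviceGroupPresentModeFlagsKHR queryGroupPresentModes( vk::Device device )
{
  vk::DeviceGroupPresentCapabilitiesKHR caps = device.getGroupPresentCapabilitiesKHR();
  return caps.modes;
}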
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV & - setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT { - handleTypes = handleTypes_; + swapchainCount = swapchainCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkExternalMemoryImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t * pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pDeviceMasks = pDeviceMasks_; + return *this; } - operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupPresentInfoKHR & setDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceMasks_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + swapchainCount = static_cast( deviceMasks_.size() ); + pDeviceMasks = deviceMasks_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceGroupPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, handleTypes ); + return std::tie( sType, pNext, swapchainCount, pDeviceMasks, mode ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExternalMemoryImageCreateInfoNV const & ) const = default; + auto operator<=>( DeviceGroupPresentInfoKHR const & ) const = default; #else - bool operator==( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pDeviceMasks == rhs.pDeviceMasks ) && + ( mode == rhs.mode ); # endif } - bool operator!=( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const uint32_t * pDeviceMasks = {}; + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = 
VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal; }; template <> - struct CppType + struct CppType { - using Type = ExternalMemoryImageCreateInfoNV; + using Type = DeviceGroupPresentInfoKHR; }; - struct ExternalSemaphoreProperties + struct DeviceGroupRenderPassBeginInfo { - using NativeType = VkExternalSemaphoreProperties; + using NativeType = VkDeviceGroupRenderPassBeginInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalSemaphoreProperties; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) - , compatibleHandleTypes( compatibleHandleTypes_ ) - , externalSemaphoreFeatures( externalSemaphoreFeatures_ ) - { - } - - VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupRenderPassBeginInfo; - ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : ExternalSemaphoreProperties( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = {}, + uint32_t deviceRenderAreaCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceMask{ deviceMask_ } + , deviceRenderAreaCount{ deviceRenderAreaCount_ } + , pDeviceRenderAreas{ pDeviceRenderAreas_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - ExternalSemaphoreProperties & operator=( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ExternalSemaphoreProperties & operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - operator VkExternalSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT + DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupRenderPassBeginInfo( *reinterpret_cast( &rhs ) ) { - return *reinterpret_cast( this ); } - operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceRenderAreas_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , deviceMask( deviceMask_ ) + , deviceRenderAreaCount( static_cast( deviceRenderAreas_.size() ) ) + , pDeviceRenderAreas( deviceRenderAreas_.data() ) { - return *reinterpret_cast( this ); } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, 
exportFromImportedHandleTypes, compatibleHandleTypes, externalSemaphoreFeatures ); - } -#endif + DeviceGroupRenderPassBeginInfo & operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ExternalSemaphoreProperties const & ) const = default; -#else - bool operator==( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + DeviceGroupRenderPassBeginInfo & operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && - ( compatibleHandleTypes == rhs.compatibleHandleTypes ) && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures ); -# endif + *this = *reinterpret_cast( &rhs ); + return *this; } - bool operator!=( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + pNext = pNext_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {}; - }; - - template <> - struct CppType - { - using Type = ExternalSemaphoreProperties; - }; - using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties; - - struct FenceCreateInfo - { - using NativeType = VkFenceCreateInfo; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR FenceCreateInfo( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT { + deviceMask = deviceMask_; + return *this; } - VULKAN_HPP_CONSTEXPR FenceCreateInfo( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : FenceCreateInfo( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - FenceCreateInfo & operator=( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - FenceCreateInfo & operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + deviceRenderAreaCount = deviceRenderAreaCount_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & + setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + 
pDeviceRenderAreas = pDeviceRenderAreas_; return *this; } - VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupRenderPassBeginInfo & + setDeviceRenderAreas( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & deviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + deviceRenderAreaCount = static_cast( deviceRenderAreas_.size() ); + pDeviceRenderAreas = deviceRenderAreas_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkDeviceGroupRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std:: + tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags ); + return std::tie( sType, pNext, deviceMask, deviceRenderAreaCount, pDeviceRenderAreas ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( FenceCreateInfo const & ) const = default; + auto operator<=>( DeviceGroupRenderPassBeginInfo const & ) const = default; #else - bool operator==( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask ) && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount ) && + ( pDeviceRenderAreas == rhs.pDeviceRenderAreas ); # endif } - bool operator!=( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo; + const void * pNext = {}; + uint32_t deviceMask = {}; + uint32_t deviceRenderAreaCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas = {}; }; template <> - struct CppType + struct CppType { - using Type = FenceCreateInfo; + using Type = DeviceGroupRenderPassBeginInfo; }; - struct FenceGetFdInfoKHR + using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo; + + struct DeviceGroupSubmitInfo { - using NativeType = VkFenceGetFdInfoKHR; + using NativeType = VkDeviceGroupSubmitInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetFdInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSubmitInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - FenceGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, - 
VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fence( fence_ ) - , handleType( handleType_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = {}, + const uint32_t * pWaitSemaphoreDeviceIndices_ = {}, + uint32_t commandBufferCount_ = {}, + const uint32_t * pCommandBufferDeviceMasks_ = {}, + uint32_t signalSemaphoreCount_ = {}, + const uint32_t * pSignalSemaphoreDeviceIndices_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , waitSemaphoreCount{ waitSemaphoreCount_ } + , pWaitSemaphoreDeviceIndices{ pWaitSemaphoreDeviceIndices_ } + , commandBufferCount{ commandBufferCount_ } + , pCommandBufferDeviceMasks{ pCommandBufferDeviceMasks_ } + , signalSemaphoreCount{ signalSemaphoreCount_ } + , pSignalSemaphoreDeviceIndices{ pSignalSemaphoreDeviceIndices_ } { } - VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : FenceGetFdInfoKHR( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupSubmitInfo( *reinterpret_cast( &rhs ) ) + { + } - FenceGetFdInfoKHR & operator=( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreDeviceIndices_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBufferDeviceMasks_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreDeviceIndices_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , waitSemaphoreCount( static_cast( waitSemaphoreDeviceIndices_.size() ) ) + , pWaitSemaphoreDeviceIndices( waitSemaphoreDeviceIndices_.data() ) + , commandBufferCount( static_cast( commandBufferDeviceMasks_.size() ) ) + , pCommandBufferDeviceMasks( commandBufferDeviceMasks_.data() ) + , signalSemaphoreCount( static_cast( signalSemaphoreDeviceIndices_.size() ) ) + , pSignalSemaphoreDeviceIndices( signalSemaphoreDeviceIndices_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - FenceGetFdInfoKHR & operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceGroupSubmitInfo & operator=( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DeviceGroupSubmitInfo & operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & 
setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT { - fence = fence_; + waitSemaphoreCount = waitSemaphoreCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPWaitSemaphoreDeviceIndices( const uint32_t * pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT { - handleType = handleType_; + pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkFenceGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupSubmitInfo & + setWaitSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + waitSemaphoreCount = static_cast( waitSemaphoreDeviceIndices_.size() ); + pWaitSemaphoreDeviceIndices = waitSemaphoreDeviceIndices_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); - } + commandBufferCount = commandBufferCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPCommandBufferDeviceMasks( const uint32_t * pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT + { + pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupSubmitInfo & + setCommandBufferDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferCount = static_cast( commandBufferDeviceMasks_.size() ); + pCommandBufferDeviceMasks = commandBufferDeviceMasks_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = signalSemaphoreCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPSignalSemaphoreDeviceIndices( const uint32_t * pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DeviceGroupSubmitInfo & + setSignalSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = static_cast( signalSemaphoreDeviceIndices_.size() ); + pSignalSemaphoreDeviceIndices = signalSemaphoreDeviceIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceGroupSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION @@ -30127,555 +33126,707 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + uint32_t const &, + const uint32_t * const &, + uint32_t const &, + const uint32_t * const &, + uint32_t const &, + const uint32_t * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return 
std::tie( sType, pNext, fence, handleType ); + return std::tie( sType, + pNext, + waitSemaphoreCount, + pWaitSemaphoreDeviceIndices, + commandBufferCount, + pCommandBufferDeviceMasks, + signalSemaphoreCount, + pSignalSemaphoreDeviceIndices ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( FenceGetFdInfoKHR const & ) const = default; + auto operator<=>( DeviceGroupSubmitInfo const & ) const = default; #else - bool operator==( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( handleType == rhs.handleType ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices ) && ( commandBufferCount == rhs.commandBufferCount ) && + ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && + ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices ); # endif } - bool operator!=( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Fence fence = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const uint32_t * pWaitSemaphoreDeviceIndices = {}; + uint32_t commandBufferCount = {}; + const uint32_t * pCommandBufferDeviceMasks = {}; + uint32_t signalSemaphoreCount = {}; + const uint32_t * pSignalSemaphoreDeviceIndices = {}; }; template <> - struct CppType + struct CppType { - using Type = FenceGetFdInfoKHR; + using Type = DeviceGroupSubmitInfo; }; -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - struct FenceGetWin32HandleInfoKHR + using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo; + + struct DeviceGroupSwapchainCreateInfoKHR { - using NativeType = VkFenceGetWin32HandleInfoKHR; + using NativeType = VkDeviceGroupSwapchainCreateInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetWin32HandleInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSwapchainCreateInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( - VULKAN_HPP_NAMESPACE::Fence fence_ = {}, - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fence( fence_ ) - , handleType( handleType_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , modes{ modes_ } { } - 
VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : FenceGetWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceGroupSwapchainCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - FenceGetWin32HandleInfoKHR & operator=( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DeviceGroupSwapchainCreateInfoKHR & operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - FenceGetWin32HandleInfoKHR & operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceGroupSwapchainCreateInfoKHR & operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT - { - fence = fence_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT { - handleType = handleType_; + modes = modes_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkFenceGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkDeviceGroupSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, fence, handleType ); + return std::tie( sType, pNext, modes ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( FenceGetWin32HandleInfoKHR const & ) const = default; -# else - bool operator==( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceGroupSwapchainCreateInfoKHR const & ) const = default; +#else + bool operator==( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if 
defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( handleType == rhs.handleType ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( modes == rhs.modes ); +# endif } - bool operator!=( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Fence fence = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {}; }; template <> - struct CppType + struct CppType { - using Type = FenceGetWin32HandleInfoKHR; + using Type = DeviceGroupSwapchainCreateInfoKHR; }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct FilterCubicImageViewImageFormatPropertiesEXT + struct ImageCreateInfo { - using NativeType = VkFilterCubicImageViewImageFormatPropertiesEXT; + using NativeType = VkImageCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, + uint32_t mipLevels_ = {}, + uint32_t arrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , imageType{ imageType_ } + , format{ format_ } + , extent{ extent_ } + , mipLevels{ mipLevels_ } + , arrayLayers{ arrayLayers_ } + , samples{ samples_ } + , tiling{ tiling_ } + , usage{ usage_ } + , sharingMode{ sharingMode_ } + , queueFamilyIndexCount{ queueFamilyIndexCount_ } + , pQueueFamilyIndices{ pQueueFamilyIndices_ } + , initialLayout{ initialLayout_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageCreateInfo( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCreateInfo( 
VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCreateInfo( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ImageType imageType_, + VULKAN_HPP_NAMESPACE::Format format_, + VULKAN_HPP_NAMESPACE::Extent3D extent_, + uint32_t mipLevels_, + uint32_t arrayLayers_, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_, + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + const void * pNext_ = nullptr ) : pNext( pNext_ ) - , filterCubic( filterCubic_ ) - , filterCubicMinmax( filterCubicMinmax_ ) + , flags( flags_ ) + , imageType( imageType_ ) + , format( format_ ) + , extent( extent_ ) + , mipLevels( mipLevels_ ) + , arrayLayers( arrayLayers_ ) + , samples( samples_ ) + , tiling( tiling_ ) + , usage( usage_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) + , pQueueFamilyIndices( queueFamilyIndices_.data() ) + , initialLayout( initialLayout_ ) { } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageCreateInfo & operator=( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : FilterCubicImageViewImageFormatPropertiesEXT( *reinterpret_cast( &rhs ) ) + ImageCreateInfo & operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - FilterCubicImageViewImageFormatPropertiesEXT & operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } - FilterCubicImageViewImageFormatPropertiesEXT & operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + flags = flags_; return *this; } - operator VkFilterCubicImageViewImageFormatPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setImageType( VULKAN_HPP_NAMESPACE::ImageType imageType_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + imageType = imageType_; + return *this; } - operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + format = format_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setExtent( 
VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, filterCubic, filterCubicMinmax ); + extent = extent_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( FilterCubicImageViewImageFormatPropertiesEXT const & ) const = default; -#else - bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setMipLevels( uint32_t mipLevels_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterCubic == rhs.filterCubic ) && ( filterCubicMinmax == rhs.filterCubicMinmax ); -# endif + mipLevels = mipLevels_; + return *this; } - bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setArrayLayers( uint32_t arrayLayers_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + arrayLayers = arrayLayers_; + return *this; } -#endif - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {}; - VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax = {}; - }; + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + { + samples = samples_; + return *this; + } - template <> - struct CppType - { - using Type = FilterCubicImageViewImageFormatPropertiesEXT; - }; + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT + { + tiling = tiling_; + return *this; + } - struct FormatProperties - { - using NativeType = VkFormatProperties; + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR FormatProperties( VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {}, - VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {}, - VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {} ) VULKAN_HPP_NOEXCEPT - : linearTilingFeatures( linearTilingFeatures_ ) - , optimalTilingFeatures( optimalTilingFeatures_ ) - , bufferFeatures( bufferFeatures_ ) + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT { + sharingMode = sharingMode_; + return *this; } - VULKAN_HPP_CONSTEXPR FormatProperties( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } - FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } - FormatProperties & operator=( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageCreateInfo & setQueueFamilyIndices( 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); + pQueueFamilyIndices = queueFamilyIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - FormatProperties & operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + initialLayout = initialLayout_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkFormatProperties const &() const VULKAN_HPP_NOEXCEPT + operator VkImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT + operator VkImageCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( linearTilingFeatures, optimalTilingFeatures, bufferFeatures ); + return std::tie( sType, + pNext, + flags, + imageType, + format, + extent, + mipLevels, + arrayLayers, + samples, + tiling, + usage, + sharingMode, + queueFamilyIndexCount, + pQueueFamilyIndices, + initialLayout ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( FormatProperties const & ) const = default; + auto operator<=>( ImageCreateInfo const & ) const = default; #else - bool operator==( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( linearTilingFeatures == rhs.linearTilingFeatures ) && ( optimalTilingFeatures == rhs.optimalTilingFeatures ) && - ( bufferFeatures == rhs.bufferFeatures ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( imageType == rhs.imageType ) && ( format == rhs.format ) && + ( extent == rhs.extent ) && ( mipLevels == rhs.mipLevels ) && ( arrayLayers == rhs.arrayLayers ) && ( samples == rhs.samples ) && + ( tiling == rhs.tiling ) && ( usage == rhs.usage ) && ( sharingMode == rhs.sharingMode ) && + ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) && + ( initialLayout == rhs.initialLayout ); # endif } - bool operator!=( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures = {}; - VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {}; - VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + uint32_t mipLevels = {}; + uint32_t arrayLayers = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + 
VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * pQueueFamilyIndices = {}; + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; }; - struct FormatProperties2 + template <> + struct CppType { - using NativeType = VkFormatProperties2; + using Type = ImageCreateInfo; + }; + + struct DeviceImageMemoryRequirements + { + using NativeType = VkDeviceImageMemoryRequirements; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties2; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceImageMemoryRequirements; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR FormatProperties2( VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , formatProperties( formatProperties_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ = {}, + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pCreateInfo{ pCreateInfo_ } + , planeAspect{ planeAspect_ } { } - VULKAN_HPP_CONSTEXPR FormatProperties2( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DeviceImageMemoryRequirements( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; - FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties2( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + DeviceImageMemoryRequirements( VkDeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceImageMemoryRequirements( *reinterpret_cast( &rhs ) ) + { + } - FormatProperties2 & operator=( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DeviceImageMemoryRequirements & operator=( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - FormatProperties2 & operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceImageMemoryRequirements & operator=( VkDeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPCreateInfo( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pCreateInfo = pCreateInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) 
VULKAN_HPP_NOEXCEPT + { + planeAspect = planeAspect_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDeviceImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, formatProperties ); + return std::tie( sType, pNext, pCreateInfo, planeAspect ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( FormatProperties2 const & ) const = default; + auto operator<=>( DeviceImageMemoryRequirements const & ) const = default; #else - bool operator==( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatProperties == rhs.formatProperties ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ) && ( planeAspect == rhs.planeAspect ); # endif } - bool operator!=( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties2; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceImageMemoryRequirements; + const void * pNext = {}; + const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; }; template <> - struct CppType + struct CppType { - using Type = FormatProperties2; + using Type = DeviceImageMemoryRequirements; }; - using FormatProperties2KHR = FormatProperties2; - struct FormatProperties3 + using DeviceImageMemoryRequirementsKHR = DeviceImageMemoryRequirements; + + struct ImageSubresource2 { - using NativeType = VkFormatProperties3; + using NativeType = VkImageSubresource2; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties3; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSubresource2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR FormatProperties3( VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingFeatures_ = {}, - VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures_ = {}, - VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , linearTilingFeatures( linearTilingFeatures_ ) - , optimalTilingFeatures( optimalTilingFeatures_ ) - , bufferFeatures( bufferFeatures_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSubresource2( VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageSubresource{ imageSubresource_ } { } - VULKAN_HPP_CONSTEXPR FormatProperties3( FormatProperties3 const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageSubresource2( ImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - FormatProperties3( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties3( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + ImageSubresource2( VkImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageSubresource2( *reinterpret_cast( &rhs ) ) {} - FormatProperties3 & operator=( FormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageSubresource2 & operator=( ImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - FormatProperties3 & operator=( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT + ImageSubresource2 & operator=( VkImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkFormatProperties3 const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageSubresource2 & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkFormatProperties3 &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageSubresource2 & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + imageSubresource = imageSubresource_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageSubresource2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSubresource2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, linearTilingFeatures, optimalTilingFeatures, bufferFeatures ); + return std::tie( sType, pNext, imageSubresource ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( FormatProperties3 const & ) const = default; + auto operator<=>( ImageSubresource2 const & ) const = default; #else - bool operator==( FormatProperties3 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageSubresource2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( linearTilingFeatures == rhs.linearTilingFeatures ) && - ( optimalTilingFeatures == rhs.optimalTilingFeatures ) && ( bufferFeatures == rhs.bufferFeatures ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageSubresource == rhs.imageSubresource ); # endif } - bool operator!=( FormatProperties3 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageSubresource2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties3; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingFeatures = {}; - VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures = {}; - VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSubresource2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource = 
{}; }; template <> - struct CppType + struct CppType { - using Type = FormatProperties3; + using Type = ImageSubresource2; }; - using FormatProperties3KHR = FormatProperties3; - struct FragmentShadingRateAttachmentInfoKHR + using ImageSubresource2EXT = ImageSubresource2; + using ImageSubresource2KHR = ImageSubresource2; + + struct DeviceImageSubresourceInfo { - using NativeType = VkFragmentShadingRateAttachmentInfoKHR; + using NativeType = VkDeviceImageSubresourceInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFragmentShadingRateAttachmentInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceImageSubresourceInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pFragmentShadingRateAttachment( pFragmentShadingRateAttachment_ ) - , shadingRateAttachmentTexelSize( shadingRateAttachmentTexelSize_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceImageSubresourceInfo( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ = {}, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pCreateInfo{ pCreateInfo_ } + , pSubresource{ pSubresource_ } { } - VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DeviceImageSubresourceInfo( DeviceImageSubresourceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - FragmentShadingRateAttachmentInfoKHR( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : FragmentShadingRateAttachmentInfoKHR( *reinterpret_cast( &rhs ) ) + DeviceImageSubresourceInfo( VkDeviceImageSubresourceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceImageSubresourceInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - FragmentShadingRateAttachmentInfoKHR & operator=( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DeviceImageSubresourceInfo & operator=( DeviceImageSubresourceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - FragmentShadingRateAttachmentInfoKHR & operator=( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceImageSubresourceInfo & operator=( VkDeviceImageSubresourceInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & - setPFragmentShadingRateAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
DeviceImageSubresourceInfo & setPCreateInfo( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT { - pFragmentShadingRateAttachment = pFragmentShadingRateAttachment_; + pCreateInfo = pCreateInfo_; return *this; } - VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & - setShadingRateAttachmentTexelSize( VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfo & setPSubresource( const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource_ ) VULKAN_HPP_NOEXCEPT { - shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_; + pSubresource = pSubresource_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkDeviceImageSubresourceInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkDeviceImageSubresourceInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -30684,942 +33835,655 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + const VULKAN_HPP_NAMESPACE::ImageCreateInfo * const &, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pFragmentShadingRateAttachment, shadingRateAttachmentTexelSize ); + return std::tie( sType, pNext, pCreateInfo, pSubresource ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( FragmentShadingRateAttachmentInfoKHR const & ) const = default; + auto operator<=>( DeviceImageSubresourceInfo const & ) const = default; #else - bool operator==( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceImageSubresourceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pFragmentShadingRateAttachment == rhs.pFragmentShadingRateAttachment ) && - ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ) && ( pSubresource == rhs.pSubresource ); # endif } - bool operator!=( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceImageSubresourceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFragmentShadingRateAttachmentInfoKHR; - const void * pNext = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment = {}; - VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceImageSubresourceInfo; + const void * pNext = {}; + const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo = {}; + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource = {}; }; template <> - struct CppType + struct CppType { - using Type = FragmentShadingRateAttachmentInfoKHR; + using Type = DeviceImageSubresourceInfo; }; - struct FramebufferAttachmentImageInfo + using DeviceImageSubresourceInfoKHR = DeviceImageSubresourceInfo; 
+ + struct DeviceMemoryOpaqueCaptureAddressInfo { - using NativeType = VkFramebufferAttachmentImageInfo; + using NativeType = VkDeviceMemoryOpaqueCaptureAddressInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentImageInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, - uint32_t width_ = {}, - uint32_t height_ = {}, - uint32_t layerCount_ = {}, - uint32_t viewFormatCount_ = {}, - const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , usage( usage_ ) - , width( width_ ) - , height( height_ ) - , layerCount( layerCount_ ) - , viewFormatCount( viewFormatCount_ ) - , pViewFormats( pViewFormats_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memory{ memory_ } { } - VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - FramebufferAttachmentImageInfo( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : FramebufferAttachmentImageInfo( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, - uint32_t width_, - uint32_t height_, - uint32_t layerCount_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , usage( usage_ ) - , width( width_ ) - , height( height_ ) - , layerCount( layerCount_ ) - , viewFormatCount( static_cast( viewFormats_.size() ) ) - , pViewFormats( viewFormats_.data() ) + DeviceMemoryOpaqueCaptureAddressInfo( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceMemoryOpaqueCaptureAddressInfo( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - FramebufferAttachmentImageInfo & operator=( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DeviceMemoryOpaqueCaptureAddressInfo & operator=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - FramebufferAttachmentImageInfo & operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceMemoryOpaqueCaptureAddressInfo & operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT - { - usage = usage_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT - { - width = width_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT - { - height = height_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT - { - layerCount = layerCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT - { - viewFormatCount = viewFormatCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT - { - pViewFormats = pViewFormats_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - FramebufferAttachmentImageInfo & - setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT { - viewFormatCount = static_cast( viewFormats_.size() ); - pViewFormats = viewFormats_.data(); + memory = memory_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkFramebufferAttachmentImageInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkDeviceMemoryOpaqueCaptureAddressInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT + operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, usage, width, height, layerCount, viewFormatCount, pViewFormats ); + return std::tie( sType, pNext, memory ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( FramebufferAttachmentImageInfo const & ) const = default; + auto operator<=>( DeviceMemoryOpaqueCaptureAddressInfo const & ) const = default; #else - bool operator==( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) && ( width == rhs.width ) && - ( height == rhs.height ) && ( layerCount == rhs.layerCount ) && ( viewFormatCount == rhs.viewFormatCount ) && ( pViewFormats == 
rhs.pViewFormats ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ); # endif } - bool operator!=( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentImageInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; - uint32_t width = {}; - uint32_t height = {}; - uint32_t layerCount = {}; - uint32_t viewFormatCount = {}; - const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; }; template <> - struct CppType + struct CppType { - using Type = FramebufferAttachmentImageInfo; + using Type = DeviceMemoryOpaqueCaptureAddressInfo; }; - using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo; - struct FramebufferAttachmentsCreateInfo + using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo; + + struct DeviceMemoryOverallocationCreateInfoAMD { - using NativeType = VkFramebufferAttachmentsCreateInfo; + using NativeType = VkDeviceMemoryOverallocationCreateInfoAMD; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentsCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( uint32_t attachmentImageInfoCount_ = {}, - const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , attachmentImageInfoCount( attachmentImageInfoCount_ ) - , pAttachmentImageInfos( pAttachmentImageInfos_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , overallocationBehavior{ overallocationBehavior_ } { } - VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - FramebufferAttachmentsCreateInfo( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : FramebufferAttachmentsCreateInfo( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - FramebufferAttachmentsCreateInfo( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachmentImageInfos_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , attachmentImageInfoCount( static_cast( attachmentImageInfos_.size() ) ) - , pAttachmentImageInfos( attachmentImageInfos_.data() ) + DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : 
DeviceMemoryOverallocationCreateInfoAMD( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - FramebufferAttachmentsCreateInfo & operator=( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DeviceMemoryOverallocationCreateInfoAMD & operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - FramebufferAttachmentsCreateInfo & operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceMemoryOverallocationCreateInfoAMD & operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT - { - attachmentImageInfoCount = attachmentImageInfoCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & - setPAttachmentImageInfos( const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT - { - pAttachmentImageInfos = pAttachmentImageInfos_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - FramebufferAttachmentsCreateInfo & setAttachmentImageInfos( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachmentImageInfos_ ) - VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & + setOverallocationBehavior( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT { - attachmentImageInfoCount = static_cast( attachmentImageInfos_.size() ); - pAttachmentImageInfos = attachmentImageInfos_.data(); + overallocationBehavior = overallocationBehavior_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkFramebufferAttachmentsCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkDeviceMemoryOverallocationCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, attachmentImageInfoCount, pAttachmentImageInfos ); + return std::tie( sType, pNext, overallocationBehavior ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( FramebufferAttachmentsCreateInfo const & ) const = default; + auto operator<=>( DeviceMemoryOverallocationCreateInfoAMD const & ) const = default; #else - bool operator==( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( 
DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentImageInfoCount == rhs.attachmentImageInfoCount ) && - ( pAttachmentImageInfos == rhs.pAttachmentImageInfos ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( overallocationBehavior == rhs.overallocationBehavior ); # endif } - bool operator!=( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo; - const void * pNext = {}; - uint32_t attachmentImageInfoCount = {}; - const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault; }; template <> - struct CppType + struct CppType { - using Type = FramebufferAttachmentsCreateInfo; + using Type = DeviceMemoryOverallocationCreateInfoAMD; }; - using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo; - struct FramebufferCreateInfo +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + union DeviceOrHostAddressConstAMDX { - using NativeType = VkFramebufferCreateInfo; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferCreateInfo; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, - uint32_t attachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {}, - uint32_t width_ = {}, - uint32_t height_ = {}, - uint32_t layers_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , renderPass( renderPass_ ) - , attachmentCount( attachmentCount_ ) - , pAttachments( pAttachments_ ) - , width( width_ ) - , height( height_ ) - , layers( layers_ ) - { - } - - VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : FramebufferCreateInfo( *reinterpret_cast( &rhs ) ) - { - } + using NativeType = VkDeviceOrHostAddressConstAMDX; +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_, - VULKAN_HPP_NAMESPACE::RenderPass renderPass_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, - uint32_t width_ = {}, - uint32_t height_ = {}, - uint32_t layers_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , renderPass( renderPass_ ) - , attachmentCount( static_cast( attachments_.size() ) ) - , pAttachments( attachments_.data() ) - , width( width_ ) - , height( height_ ) - , layers( layers_ ) - { - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + 
VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {} - FramebufferCreateInfo & operator=( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX( const void * hostAddress_ ) : hostAddress( hostAddress_ ) {} +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - FramebufferCreateInfo & operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + deviceAddress = deviceAddress_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX & setHostAddress( const void * hostAddress_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + hostAddress = hostAddress_; return *this; } +# endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + operator VkDeviceOrHostAddressConstAMDX const &() const { - flags = flags_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT + operator VkDeviceOrHostAddressConstAMDX &() { - renderPass = renderPass_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT - { - attachmentCount = attachmentCount_; - return *this; - } +# ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress; + const void * hostAddress; +# else + VkDeviceAddress deviceAddress; + const void * hostAddress; +# endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT + struct DevicePipelineBinaryInternalCacheControlKHR + { + using NativeType = VkDevicePipelineBinaryInternalCacheControlKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePipelineBinaryInternalCacheControlKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DevicePipelineBinaryInternalCacheControlKHR( VULKAN_HPP_NAMESPACE::Bool32 disableInternalCache_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , disableInternalCache{ disableInternalCache_ } { - pAttachments = pAttachments_; - return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - FramebufferCreateInfo & - setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DevicePipelineBinaryInternalCacheControlKHR( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DevicePipelineBinaryInternalCacheControlKHR( VkDevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DevicePipelineBinaryInternalCacheControlKHR( 
*reinterpret_cast( &rhs ) ) { - attachmentCount = static_cast( attachments_.size() ); - pAttachments = attachments_.data(); - return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + DevicePipelineBinaryInternalCacheControlKHR & operator=( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DevicePipelineBinaryInternalCacheControlKHR & operator=( VkDevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - width = width_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DevicePipelineBinaryInternalCacheControlKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - height = height_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setLayers( uint32_t layers_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DevicePipelineBinaryInternalCacheControlKHR & + setDisableInternalCache( VULKAN_HPP_NAMESPACE::Bool32 disableInternalCache_ ) VULKAN_HPP_NOEXCEPT { - layers = layers_; + disableInternalCache = disableInternalCache_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkFramebufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkDevicePipelineBinaryInternalCacheControlKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkDevicePipelineBinaryInternalCacheControlKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, renderPass, attachmentCount, pAttachments, width, height, layers ); + return std::tie( sType, pNext, disableInternalCache ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( FramebufferCreateInfo const & ) const = default; + auto operator<=>( DevicePipelineBinaryInternalCacheControlKHR const & ) const = default; #else - bool operator==( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( renderPass == rhs.renderPass ) && - ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) && ( width == rhs.width ) && ( height == rhs.height ) && - ( layers == rhs.layers ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( disableInternalCache == rhs.disableInternalCache ); # endif } - bool operator!=( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo; - const void * pNext = {}; - 
VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; - uint32_t attachmentCount = {}; - const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {}; - uint32_t width = {}; - uint32_t height = {}; - uint32_t layers = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDevicePipelineBinaryInternalCacheControlKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 disableInternalCache = {}; }; template <> - struct CppType + struct CppType { - using Type = FramebufferCreateInfo; + using Type = DevicePipelineBinaryInternalCacheControlKHR; }; - struct FramebufferMixedSamplesCombinationNV + struct DevicePrivateDataCreateInfo { - using NativeType = VkFramebufferMixedSamplesCombinationNV; + using NativeType = VkDevicePrivateDataCreateInfo; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferMixedSamplesCombinationNV; + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePrivateDataCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( - VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , coverageReductionMode( coverageReductionMode_ ) - , rasterizationSamples( rasterizationSamples_ ) - , depthStencilSamples( depthStencilSamples_ ) - , colorSamples( colorSamples_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo( uint32_t privateDataSlotRequestCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , privateDataSlotRequestCount{ privateDataSlotRequestCount_ } { } - VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT - : FramebufferMixedSamplesCombinationNV( *reinterpret_cast( &rhs ) ) + DevicePrivateDataCreateInfo( VkDevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DevicePrivateDataCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - FramebufferMixedSamplesCombinationNV & operator=( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DevicePrivateDataCreateInfo & operator=( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - FramebufferMixedSamplesCombinationNV & operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT + DevicePrivateDataCreateInfo & operator=( VkDevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkFramebufferMixedSamplesCombinationNV const &() const 
VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo & setPrivateDataSlotRequestCount( uint32_t privateDataSlotRequestCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + privateDataSlotRequestCount = privateDataSlotRequestCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDevicePrivateDataCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDevicePrivateDataCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, coverageReductionMode, rasterizationSamples, depthStencilSamples, colorSamples ); + return std::tie( sType, pNext, privateDataSlotRequestCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( FramebufferMixedSamplesCombinationNV const & ) const = default; + auto operator<=>( DevicePrivateDataCreateInfo const & ) const = default; #else - bool operator==( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DevicePrivateDataCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( coverageReductionMode == rhs.coverageReductionMode ) && - ( rasterizationSamples == rhs.rasterizationSamples ) && ( depthStencilSamples == rhs.depthStencilSamples ) && ( colorSamples == rhs.colorSamples ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( privateDataSlotRequestCount == rhs.privateDataSlotRequestCount ); # endif } - bool operator!=( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DevicePrivateDataCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge; - VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; - VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDevicePrivateDataCreateInfo; + const void * pNext = {}; + uint32_t privateDataSlotRequestCount = {}; }; template <> - struct CppType + struct CppType { - using Type = FramebufferMixedSamplesCombinationNV; + using Type = DevicePrivateDataCreateInfo; }; - struct IndirectCommandsStreamNV + using DevicePrivateDataCreateInfoEXT = DevicePrivateDataCreateInfo; + + struct DeviceQueueGlobalPriorityCreateInfo { - using NativeType = VkIndirectCommandsStreamNV; + using NativeType = VkDeviceQueueGlobalPriorityCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( VULKAN_HPP_NAMESPACE::Buffer 
buffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT - : buffer( buffer_ ) - , offset( offset_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueGlobalPriorityCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DeviceQueueGlobalPriorityCreateInfo( VULKAN_HPP_NAMESPACE::QueueGlobalPriority globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , globalPriority{ globalPriority_ } { } - VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfo( DeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT - : IndirectCommandsStreamNV( *reinterpret_cast( &rhs ) ) + DeviceQueueGlobalPriorityCreateInfo( VkDeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceQueueGlobalPriorityCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - IndirectCommandsStreamNV & operator=( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DeviceQueueGlobalPriorityCreateInfo & operator=( DeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - IndirectCommandsStreamNV & operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceQueueGlobalPriorityCreateInfo & operator=( VkDeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - buffer = buffer_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfo & + setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriority globalPriority_ ) VULKAN_HPP_NOEXCEPT { - offset = offset_; + globalPriority = globalPriority_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkIndirectCommandsStreamNV const &() const VULKAN_HPP_NOEXCEPT + operator VkDeviceQueueGlobalPriorityCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT + operator VkDeviceQueueGlobalPriorityCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( buffer, offset ); + return std::tie( sType, pNext, globalPriority ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( 
IndirectCommandsStreamNV const & ) const = default; + auto operator<=>( DeviceQueueGlobalPriorityCreateInfo const & ) const = default; #else - bool operator==( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceQueueGlobalPriorityCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( buffer == rhs.buffer ) && ( offset == rhs.offset ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriority == rhs.globalPriority ); # endif } - bool operator!=( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceQueueGlobalPriorityCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueueGlobalPriority globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow; }; - struct GeneratedCommandsInfoNV + template <> + struct CppType { - using NativeType = VkGeneratedCommandsInfoNV; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoNV; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, - VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, - uint32_t streamCount_ = {}, - const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ = {}, - uint32_t sequencesCount_ = {}, - VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, - VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, - VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , pipeline( pipeline_ ) - , indirectCommandsLayout( indirectCommandsLayout_ ) - , streamCount( streamCount_ ) - , pStreams( pStreams_ ) - , sequencesCount( sequencesCount_ ) - , preprocessBuffer( preprocessBuffer_ ) - , preprocessOffset( preprocessOffset_ ) - , preprocessSize( preprocessSize_ ) - , sequencesCountBuffer( sequencesCountBuffer_ ) - , sequencesCountOffset( sequencesCountOffset_ ) - , sequencesIndexBuffer( sequencesIndexBuffer_ ) - , sequencesIndexOffset( sequencesIndexOffset_ ) - { - } - - VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : GeneratedCommandsInfoNV( *reinterpret_cast( &rhs ) ) - { - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, - VULKAN_HPP_NAMESPACE::Pipeline pipeline_, - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streams_, - uint32_t sequencesCount_ 
= {}, - VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, - VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, - VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , pipeline( pipeline_ ) - , indirectCommandsLayout( indirectCommandsLayout_ ) - , streamCount( static_cast( streams_.size() ) ) - , pStreams( streams_.data() ) - , sequencesCount( sequencesCount_ ) - , preprocessBuffer( preprocessBuffer_ ) - , preprocessOffset( preprocessOffset_ ) - , preprocessSize( preprocessSize_ ) - , sequencesCountBuffer( sequencesCountBuffer_ ) - , sequencesCountOffset( sequencesCountOffset_ ) - , sequencesIndexBuffer( sequencesIndexBuffer_ ) - , sequencesIndexOffset( sequencesIndexOffset_ ) - { - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - GeneratedCommandsInfoNV & operator=( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - GeneratedCommandsInfoNV & operator=( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT - { - pipelineBindPoint = pipelineBindPoint_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT - { - pipeline = pipeline_; - return *this; - } + using Type = DeviceQueueGlobalPriorityCreateInfo; + }; - VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & - setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT - { - indirectCommandsLayout = indirectCommandsLayout_; - return *this; - } + using DeviceQueueGlobalPriorityCreateInfoEXT = DeviceQueueGlobalPriorityCreateInfo; + using DeviceQueueGlobalPriorityCreateInfoKHR = DeviceQueueGlobalPriorityCreateInfo; - VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT - { - streamCount = streamCount_; - return *this; - } + struct DeviceQueueInfo2 + { + using NativeType = VkDeviceQueueInfo2; - VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ ) VULKAN_HPP_NOEXCEPT - { - pStreams = pStreams_; - return *this; - } + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueInfo2; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - GeneratedCommandsInfoNV & - setStreams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streams_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, + uint32_t queueFamilyIndex_ = {}, + uint32_t queueIndex_ = {}, + const void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , queueFamilyIndex{ queueFamilyIndex_ } + , queueIndex{ queueIndex_ } { - streamCount = static_cast( streams_.size() ); - pStreams = streams_.data(); - return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCount( uint32_t sequencesCount_ ) VULKAN_HPP_NOEXCEPT - { - sequencesCount = sequencesCount_; - return *this; - } + VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessBuffer( VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT - { - preprocessBuffer = preprocessBuffer_; - return *this; - } + DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceQueueInfo2( *reinterpret_cast( &rhs ) ) {} - VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessOffset( VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT - { - preprocessOffset = preprocessOffset_; - return *this; - } + DeviceQueueInfo2 & operator=( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT + DeviceQueueInfo2 & operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - preprocessSize = preprocessSize_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - sequencesCountBuffer = sequencesCountBuffer_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - sequencesCountOffset = sequencesCountOffset_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT { - sequencesIndexBuffer = sequencesIndexBuffer_; + queueFamilyIndex = queueFamilyIndex_; return *this; } - VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) VULKAN_HPP_NOEXCEPT { - sequencesIndexOffset = sequencesIndexOffset_; + queueIndex = queueIndex_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkGeneratedCommandsInfoNV const &() const VULKAN_HPP_NOEXCEPT + operator VkDeviceQueueInfo2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT + operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( 
VULKAN_HPP_USE_REFLECT ) @@ -31628,1335 +34492,1017 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + uint32_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - pipelineBindPoint, - pipeline, - indirectCommandsLayout, - streamCount, - pStreams, - sequencesCount, - preprocessBuffer, - preprocessOffset, - preprocessSize, - sequencesCountBuffer, - sequencesCountOffset, - sequencesIndexBuffer, - sequencesIndexOffset ); + return std::tie( sType, pNext, flags, queueFamilyIndex, queueIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( GeneratedCommandsInfoNV const & ) const = default; + auto operator<=>( DeviceQueueInfo2 const & ) const = default; #else - bool operator==( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipeline == rhs.pipeline ) && - ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && ( streamCount == rhs.streamCount ) && ( pStreams == rhs.pStreams ) && - ( sequencesCount == rhs.sequencesCount ) && ( preprocessBuffer == rhs.preprocessBuffer ) && ( preprocessOffset == rhs.preprocessOffset ) && - ( preprocessSize == rhs.preprocessSize ) && ( sequencesCountBuffer == rhs.sequencesCountBuffer ) && - ( sequencesCountOffset == rhs.sequencesCountOffset ) && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer ) && - ( sequencesIndexOffset == rhs.sequencesIndexOffset ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && + ( queueIndex == rhs.queueIndex ); # endif } - bool operator!=( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; - VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; - uint32_t streamCount = {}; - const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams = {}; - uint32_t sequencesCount = {}; - VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {}; - VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {}; - VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {}; + uint32_t queueFamilyIndex = {}; + uint32_t queueIndex = {}; }; template <> - struct CppType + struct CppType { - using Type = GeneratedCommandsInfoNV; + using Type = DeviceQueueInfo2; }; - struct GeneratedCommandsMemoryRequirementsInfoNV + struct DeviceQueueShaderCoreControlCreateInfoARM { - using NativeType = VkGeneratedCommandsMemoryRequirementsInfoNV; + using NativeType = VkDeviceQueueShaderCoreControlCreateInfoARM; static 
const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueShaderCoreControlCreateInfoARM; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, - VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, - uint32_t maxSequencesCount_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , pipeline( pipeline_ ) - , indirectCommandsLayout( indirectCommandsLayout_ ) - , maxSequencesCount( maxSequencesCount_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceQueueShaderCoreControlCreateInfoARM( uint32_t shaderCoreCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderCoreCount{ shaderCoreCount_ } { } - VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DeviceQueueShaderCoreControlCreateInfoARM( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; - GeneratedCommandsMemoryRequirementsInfoNV( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : GeneratedCommandsMemoryRequirementsInfoNV( *reinterpret_cast( &rhs ) ) + DeviceQueueShaderCoreControlCreateInfoARM( VkDeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceQueueShaderCoreControlCreateInfoARM( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - GeneratedCommandsMemoryRequirementsInfoNV & operator=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DeviceQueueShaderCoreControlCreateInfoARM & operator=( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - GeneratedCommandsMemoryRequirementsInfoNV & operator=( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceQueueShaderCoreControlCreateInfoARM & operator=( VkDeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceQueueShaderCoreControlCreateInfoARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & - setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT - { - pipelineBindPoint = pipelineBindPoint_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT - { - pipeline = pipeline_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 
GeneratedCommandsMemoryRequirementsInfoNV & - setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT - { - indirectCommandsLayout = indirectCommandsLayout_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceQueueShaderCoreControlCreateInfoARM & setShaderCoreCount( uint32_t shaderCoreCount_ ) VULKAN_HPP_NOEXCEPT { - maxSequencesCount = maxSequencesCount_; + shaderCoreCount = shaderCoreCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkGeneratedCommandsMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT + operator VkDeviceQueueShaderCoreControlCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkGeneratedCommandsMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT + operator VkDeviceQueueShaderCoreControlCreateInfoARM &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pipelineBindPoint, pipeline, indirectCommandsLayout, maxSequencesCount ); + return std::tie( sType, pNext, shaderCoreCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( GeneratedCommandsMemoryRequirementsInfoNV const & ) const = default; + auto operator<=>( DeviceQueueShaderCoreControlCreateInfoARM const & ) const = default; #else - bool operator==( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipeline == rhs.pipeline ) && - ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && ( maxSequencesCount == rhs.maxSequencesCount ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreCount == rhs.shaderCoreCount ); # endif } - bool operator!=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; - VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; - uint32_t maxSequencesCount = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueShaderCoreControlCreateInfoARM; + void * pNext = {}; + uint32_t shaderCoreCount = {}; }; template <> - struct CppType + struct CppType { - using Type = GeneratedCommandsMemoryRequirementsInfoNV; + using Type = DeviceQueueShaderCoreControlCreateInfoARM; }; - struct VertexInputBindingDescription + typedef PFN_vkVoidFunction( VKAPI_PTR * PFN_GetInstanceProcAddrLUNARG )( VULKAN_HPP_NAMESPACE::Instance instance, const char 
* pName ); + + struct DirectDriverLoadingInfoLUNARG { - using NativeType = VkVertexInputBindingDescription; + using NativeType = VkDirectDriverLoadingInfoLUNARG; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - VertexInputBindingDescription( uint32_t binding_ = {}, - uint32_t stride_ = {}, - VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex ) VULKAN_HPP_NOEXCEPT - : binding( binding_ ) - , stride( stride_ ) - , inputRate( inputRate_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectDriverLoadingInfoLUNARG; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DirectDriverLoadingInfoLUNARG( VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG flags_ = {}, + VULKAN_HPP_NAMESPACE::PFN_GetInstanceProcAddrLUNARG pfnGetInstanceProcAddr_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , pfnGetInstanceProcAddr{ pfnGetInstanceProcAddr_ } { } - VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DirectDriverLoadingInfoLUNARG( DirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT - : VertexInputBindingDescription( *reinterpret_cast( &rhs ) ) + DirectDriverLoadingInfoLUNARG( VkDirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT + : DirectDriverLoadingInfoLUNARG( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VertexInputBindingDescription & operator=( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DirectDriverLoadingInfoLUNARG & operator=( DirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VertexInputBindingDescription & operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT + DirectDriverLoadingInfoLUNARG & operator=( VkDirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - binding = binding_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG & setFlags( VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG flags_ ) VULKAN_HPP_NOEXCEPT { - stride = stride_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG & + setPfnGetInstanceProcAddr( VULKAN_HPP_NAMESPACE::PFN_GetInstanceProcAddrLUNARG pfnGetInstanceProcAddr_ ) VULKAN_HPP_NOEXCEPT { - inputRate = inputRate_; + pfnGetInstanceProcAddr = pfnGetInstanceProcAddr_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif 
/*VULKAN_HPP_NO_SETTERS*/ - operator VkVertexInputBindingDescription const &() const VULKAN_HPP_NOEXCEPT + operator VkDirectDriverLoadingInfoLUNARG const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT + operator VkDirectDriverLoadingInfoLUNARG &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( binding, stride, inputRate ); + return std::tie( sType, pNext, flags, pfnGetInstanceProcAddr ); } #endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VertexInputBindingDescription const & ) const = default; -#else - bool operator==( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DirectDriverLoadingInfoLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +#if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( binding == rhs.binding ) && ( stride == rhs.stride ) && ( inputRate == rhs.inputRate ); -# endif +#else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pfnGetInstanceProcAddr == rhs.pfnGetInstanceProcAddr ); +#endif } - bool operator!=( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DirectDriverLoadingInfoLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - uint32_t binding = {}; - uint32_t stride = {}; - VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectDriverLoadingInfoLUNARG; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG flags = {}; + VULKAN_HPP_NAMESPACE::PFN_GetInstanceProcAddrLUNARG pfnGetInstanceProcAddr = {}; }; - struct VertexInputAttributeDescription + template <> + struct CppType { - using NativeType = VkVertexInputAttributeDescription; + using Type = DirectDriverLoadingInfoLUNARG; + }; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( uint32_t location_ = {}, - uint32_t binding_ = {}, - VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - uint32_t offset_ = {} ) VULKAN_HPP_NOEXCEPT - : location( location_ ) - , binding( binding_ ) - , format( format_ ) - , offset( offset_ ) + struct DirectDriverLoadingListLUNARG + { + using NativeType = VkDirectDriverLoadingListLUNARG; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectDriverLoadingListLUNARG; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DirectDriverLoadingListLUNARG( + VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG mode_ = VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG::eExclusive, + uint32_t driverCount_ = {}, + const VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG * pDrivers_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , mode{ mode_ } + , driverCount{ driverCount_ } + , pDrivers{ pDrivers_ } { } - VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( VertexInputAttributeDescription 
const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DirectDriverLoadingListLUNARG( DirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT - : VertexInputAttributeDescription( *reinterpret_cast( &rhs ) ) + DirectDriverLoadingListLUNARG( VkDirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT + : DirectDriverLoadingListLUNARG( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VertexInputAttributeDescription & operator=( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DirectDriverLoadingListLUNARG( VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG mode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drivers_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), mode( mode_ ), driverCount( static_cast( drivers_.size() ) ), pDrivers( drivers_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VertexInputAttributeDescription & operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT + DirectDriverLoadingListLUNARG & operator=( DirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DirectDriverLoadingListLUNARG & operator=( VkDirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - location = location_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setMode( VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG mode_ ) VULKAN_HPP_NOEXCEPT { - binding = binding_; + mode = mode_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setDriverCount( uint32_t driverCount_ ) VULKAN_HPP_NOEXCEPT { - format = format_; + driverCount = driverCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & + setPDrivers( const VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG * pDrivers_ ) VULKAN_HPP_NOEXCEPT { - offset = offset_; + pDrivers = pDrivers_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkVertexInputAttributeDescription const &() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + DirectDriverLoadingListLUNARG & setDrivers( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drivers_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + driverCount = static_cast( drivers_.size() ); + pDrivers = drivers_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT + operator 
VkDirectDriverLoadingListLUNARG const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkDirectDriverLoadingListLUNARG &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( location, binding, format, offset ); + return std::tie( sType, pNext, mode, driverCount, pDrivers ); } #endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VertexInputAttributeDescription const & ) const = default; -#else - bool operator==( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DirectDriverLoadingListLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +#if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( location == rhs.location ) && ( binding == rhs.binding ) && ( format == rhs.format ) && ( offset == rhs.offset ); -# endif +#else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == rhs.mode ) && ( driverCount == rhs.driverCount ) && ( pDrivers == rhs.pDrivers ); +#endif } - bool operator!=( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DirectDriverLoadingListLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - uint32_t location = {}; - uint32_t binding = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - uint32_t offset = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectDriverLoadingListLUNARG; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG mode = VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG::eExclusive; + uint32_t driverCount = {}; + const VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG * pDrivers = {}; }; - struct PipelineVertexInputStateCreateInfo + template <> + struct CppType { - using NativeType = VkPipelineVertexInputStateCreateInfo; + using Type = DirectDriverLoadingListLUNARG; + }; + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + struct DirectFBSurfaceCreateInfoEXT + { + using NativeType = VkDirectFBSurfaceCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputStateCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectfbSurfaceCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {}, - uint32_t vertexBindingDescriptionCount_ = {}, - const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ = {}, - uint32_t vertexAttributeDescriptionCount_ = {}, - const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , vertexBindingDescriptionCount( vertexBindingDescriptionCount_ ) - , pVertexBindingDescriptions( pVertexBindingDescriptions_ ) - , vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ ) - , pVertexAttributeDescriptions( pVertexAttributeDescriptions_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ = {}, + IDirectFB * dfb_ = {}, + IDirectFBSurface * surface_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , dfb{ dfb_ } + , surface{ surface_ } { } - VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineVertexInputStateCreateInfo( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineVertexInputStateCreateInfo( - VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDescriptions_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexAttributeDescriptions_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , vertexBindingDescriptionCount( static_cast( vertexBindingDescriptions_.size() ) ) - , pVertexBindingDescriptions( vertexBindingDescriptions_.data() ) - , vertexAttributeDescriptionCount( static_cast( vertexAttributeDescriptions_.size() ) ) - , pVertexAttributeDescriptions( vertexAttributeDescriptions_.data() ) + DirectFBSurfaceCreateInfoEXT( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DirectFBSurfaceCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineVertexInputStateCreateInfo & operator=( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DirectFBSurfaceCreateInfoEXT & operator=( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineVertexInputStateCreateInfo & operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DirectFBSurfaceCreateInfoEXT & operator=( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & - setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT - { - vertexBindingDescriptionCount = vertexBindingDescriptionCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & - setPVertexBindingDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * 
pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT - { - pVertexBindingDescriptions = pVertexBindingDescriptions_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineVertexInputStateCreateInfo & setVertexBindingDescriptions( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDescriptions_ ) - VULKAN_HPP_NOEXCEPT - { - vertexBindingDescriptionCount = static_cast( vertexBindingDescriptions_.size() ); - pVertexBindingDescriptions = vertexBindingDescriptions_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & - setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT - { - vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & - setPVertexAttributeDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setDfb( IDirectFB * dfb_ ) VULKAN_HPP_NOEXCEPT { - pVertexAttributeDescriptions = pVertexAttributeDescriptions_; + dfb = dfb_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptions( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexAttributeDescriptions_ ) - VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setSurface( IDirectFBSurface * surface_ ) VULKAN_HPP_NOEXCEPT { - vertexAttributeDescriptionCount = static_cast( vertexAttributeDescriptions_.size() ); - pVertexAttributeDescriptions = vertexAttributeDescriptions_.data(); + surface = surface_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineVertexInputStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkDirectFBSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkDirectFBSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT const &, + IDirectFB * const &, + IDirectFBSurface * const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( - sType, pNext, flags, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions ); + return std::tie( sType, pNext, flags, dfb, surface ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineVertexInputStateCreateInfo const & ) const = default; -#else - bool operator==( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DirectFBSurfaceCreateInfoEXT const & ) const = default; +# else + bool operator==( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); 
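The LUNARG direct-driver-loading structures above are consumed by chaining a DirectDriverLoadingListLUNARG into InstanceCreateInfo::pNext. A minimal sketch, assuming a hypothetical myDriverGetInstanceProcAddr entry point exported by a statically linked driver (the entry point and extension setup are illustrative, not part of this header):

#include <vulkan/vulkan.hpp>

// Hypothetical driver entry point; a real driver exports its own equivalent.
extern "C" PFN_vkVoidFunction VKAPI_CALL myDriverGetInstanceProcAddr( VkInstance instance, const char * pName );

vk::Instance createInstanceWithDirectDriver()
{
  // Describe the driver by its vkGetInstanceProcAddr-style entry point.
  vk::DirectDriverLoadingInfoLUNARG driverInfo( {}, myDriverGetInstanceProcAddr );

  // eExclusive: use only the listed drivers; eInclusive would add them to the
  // drivers found by the loader's normal discovery.
  vk::DirectDriverLoadingListLUNARG driverList( vk::DirectDriverLoadingModeLUNARG::eExclusive, driverInfo );

  const char * extensions[] = { "VK_LUNARG_direct_driver_loading" };

  vk::ApplicationInfo    appInfo( "direct-driver-demo", 1, nullptr, 0, VK_API_VERSION_1_3 );
  vk::InstanceCreateInfo createInfo{};
  createInfo.pApplicationInfo        = &appInfo;
  createInfo.enabledExtensionCount   = 1;
  createInfo.ppEnabledExtensionNames = extensions;
  createInfo.pNext                   = &driverList;   // chain the driver list

  return vk::createInstance( createInfo );
}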
-# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && - ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount ) && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions ) && - ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount ) && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dfb == rhs.dfb ) && ( surface == rhs.surface ); +# endif } - bool operator!=( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {}; - uint32_t vertexBindingDescriptionCount = {}; - const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions = {}; - uint32_t vertexAttributeDescriptionCount = {}; - const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectfbSurfaceCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags = {}; + IDirectFB * dfb = {}; + IDirectFBSurface * surface = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineVertexInputStateCreateInfo; + using Type = DirectFBSurfaceCreateInfoEXT; }; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ - struct PipelineInputAssemblyStateCreateInfo +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct DispatchGraphCountInfoAMDX { - using NativeType = VkPipelineInputAssemblyStateCreateInfo; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInputAssemblyStateCreateInfo; + using NativeType = VkDispatchGraphCountInfoAMDX; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - PipelineInputAssemblyStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList, - VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , topology( topology_ ) - , primitiveRestartEnable( primitiveRestartEnable_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX( uint32_t count_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX infos_ = {}, + uint64_t stride_ = {} ) VULKAN_HPP_NOEXCEPT + : count{ count_ } + , infos{ infos_ } + , stride{ stride_ } { } - VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX( DispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineInputAssemblyStateCreateInfo( *reinterpret_cast( &rhs ) ) + DispatchGraphCountInfoAMDX( VkDispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : 
DispatchGraphCountInfoAMDX( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PipelineInputAssemblyStateCreateInfo & operator=( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineInputAssemblyStateCreateInfo & operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + DispatchGraphCountInfoAMDX & operator=( DispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + DispatchGraphCountInfoAMDX & operator=( VkDispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & - setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setCount( uint32_t count_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + count = count_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setInfos( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX const & infos_ ) VULKAN_HPP_NOEXCEPT { - topology = topology_; + infos = infos_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & - setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setStride( uint64_t stride_ ) VULKAN_HPP_NOEXCEPT { - primitiveRestartEnable = primitiveRestartEnable_; + stride = stride_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineInputAssemblyStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkDispatchGraphCountInfoAMDX const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkDispatchGraphCountInfoAMDX &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, topology, primitiveRestartEnable ); + return std::tie( count, infos, stride ); } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineInputAssemblyStateCreateInfo const & ) const = default; -#else - bool operator==( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( topology == rhs.topology ) && - ( primitiveRestartEnable == rhs.primitiveRestartEnable ); # endif - } - - bool operator!=( PipelineInputAssemblyStateCreateInfo const 
& rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList; - VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {}; - }; - - template <> - struct CppType - { - using Type = PipelineInputAssemblyStateCreateInfo; + uint32_t count = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX infos = {}; + uint64_t stride = {}; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - struct PipelineTessellationStateCreateInfo +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct DispatchGraphInfoAMDX { - using NativeType = VkPipelineTessellationStateCreateInfo; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationStateCreateInfo; + using NativeType = VkDispatchGraphInfoAMDX; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {}, - uint32_t patchControlPoints_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , patchControlPoints( patchControlPoints_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX( uint32_t nodeIndex_ = {}, + uint32_t payloadCount_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX payloads_ = {}, + uint64_t payloadStride_ = {} ) VULKAN_HPP_NOEXCEPT + : nodeIndex{ nodeIndex_ } + , payloadCount{ payloadCount_ } + , payloads{ payloads_ } + , payloadStride{ payloadStride_ } { } - VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX( DispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineTessellationStateCreateInfo( *reinterpret_cast( &rhs ) ) + DispatchGraphInfoAMDX( VkDispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : DispatchGraphInfoAMDX( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineTessellationStateCreateInfo & operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DispatchGraphInfoAMDX & operator=( DispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineTessellationStateCreateInfo & operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DispatchGraphInfoAMDX & operator=( VkDispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setNodeIndex( uint32_t nodeIndex_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + nodeIndex = nodeIndex_; return *this; } - 
VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & - setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloadCount( uint32_t payloadCount_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + payloadCount = payloadCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setPatchControlPoints( uint32_t patchControlPoints_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloads( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX const & payloads_ ) VULKAN_HPP_NOEXCEPT { - patchControlPoints = patchControlPoints_; + payloads = payloads_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPipelineTessellationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloadStride( uint64_t payloadStride_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + payloadStride = payloadStride_; + return *this; } +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkDispatchGraphInfoAMDX const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + operator VkDispatchGraphInfoAMDX &() VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, patchControlPoints ); + return *reinterpret_cast( this ); } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineTessellationStateCreateInfo const & ) const = default; -#else - bool operator==( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT - { # if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( patchControlPoints == rhs.patchControlPoints ); -# endif - } - - bool operator!=( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + return std::tie( nodeIndex, payloadCount, payloads, payloadStride ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {}; - uint32_t patchControlPoints = {}; - }; - - template <> - struct CppType - { - using Type = PipelineTessellationStateCreateInfo; + uint32_t nodeIndex = {}; + uint32_t payloadCount = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX payloads = {}; + uint64_t payloadStride = {}; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - struct PipelineViewportStateCreateInfo + struct DispatchIndirectCommand { - using NativeType = VkPipelineViewportStateCreateInfo; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportStateCreateInfo; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {}, - uint32_t viewportCount_ = {}, - const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ = {}, - uint32_t scissorCount_ = 
{}, - const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , viewportCount( viewportCount_ ) - , pViewports( pViewports_ ) - , scissorCount( scissorCount_ ) - , pScissors( pScissors_ ) - { - } - - VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineViewportStateCreateInfo( *reinterpret_cast( &rhs ) ) - { - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewports_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & scissors_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , viewportCount( static_cast( viewports_.size() ) ) - , pViewports( viewports_.data() ) - , scissorCount( static_cast( scissors_.size() ) ) - , pScissors( scissors_.data() ) - { - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PipelineViewportStateCreateInfo & operator=( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PipelineViewportStateCreateInfo & operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + using NativeType = VkDispatchIndirectCommand; -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( uint32_t x_ = {}, uint32_t y_ = {}, uint32_t z_ = {} ) VULKAN_HPP_NOEXCEPT + : x{ x_ } + , y{ y_ } + , z{ z_ } { - pNext = pNext_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } + VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT + DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + : DispatchIndirectCommand( *reinterpret_cast( &rhs ) ) { - viewportCount = viewportCount_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPViewports( const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ ) VULKAN_HPP_NOEXCEPT - { - pViewports = pViewports_; - return *this; - } + DispatchIndirectCommand & operator=( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineViewportStateCreateInfo & - setViewports( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewports_ ) VULKAN_HPP_NOEXCEPT + DispatchIndirectCommand & operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT { - viewportCount = static_cast( viewports_.size() ); - pViewports = viewports_.data(); + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 
PipelineViewportStateCreateInfo & setScissorCount( uint32_t scissorCount_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setX( uint32_t x_ ) VULKAN_HPP_NOEXCEPT { - scissorCount = scissorCount_; + x = x_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setY( uint32_t y_ ) VULKAN_HPP_NOEXCEPT { - pScissors = pScissors_; + y = y_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineViewportStateCreateInfo & - setScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & scissors_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setZ( uint32_t z_ ) VULKAN_HPP_NOEXCEPT { - scissorCount = static_cast( scissors_.size() ); - pScissors = scissors_.data(); + z = z_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineViewportStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkDispatchIndirectCommand const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, viewportCount, pViewports, scissorCount, pScissors ); + return std::tie( x, y, z ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineViewportStateCreateInfo const & ) const = default; + auto operator<=>( DispatchIndirectCommand const & ) const = default; #else - bool operator==( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewportCount == rhs.viewportCount ) && - ( pViewports == rhs.pViewports ) && ( scissorCount == rhs.scissorCount ) && ( pScissors == rhs.pScissors ); + return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z ); # endif } - bool operator!=( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags = {}; - uint32_t viewportCount = {}; - const VULKAN_HPP_NAMESPACE::Viewport * pViewports = {}; - uint32_t scissorCount = {}; - const VULKAN_HPP_NAMESPACE::Rect2D * pScissors = {}; - }; - - template <> - struct CppType - { - using Type = PipelineViewportStateCreateInfo; + uint32_t x = {}; + uint32_t y = {}; + uint32_t z = {}; }; - struct PipelineRasterizationStateCreateInfo + struct DisplayEventInfoEXT { - using NativeType = VkPipelineRasterizationStateCreateInfo; + using NativeType = VkDisplayEventInfoEXT; 
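DispatchIndirectCommand above mirrors the C VkDispatchIndirectCommand layout, so it can be written straight into an indirect buffer. A minimal sketch, assuming a host-visible, coherent allocation bound to an eIndirectBuffer-usage buffer and a compute pipeline already bound on the command buffer:

#include <cstring>
#include <vulkan/vulkan.hpp>

void recordIndirectDispatch( vk::Device device, vk::DeviceMemory memory, vk::Buffer buffer, vk::CommandBuffer cmd )
{
  vk::DispatchIndirectCommand args( 64, 1, 1 );   // x, y, z workgroup counts

  void * mapped = device.mapMemory( memory, 0, sizeof( args ) );
  std::memcpy( mapped, &args, sizeof( args ) );   // layout-compatible with VkDispatchIndirectCommand
  device.unmapMemory( memory );

  cmd.dispatchIndirect( buffer, 0 );              // reads the three uint32_t counts at offset 0
}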
static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayEventInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {}, - VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill, - VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {}, - VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise, - VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {}, - float depthBiasConstantFactor_ = {}, - float depthBiasClamp_ = {}, - float depthBiasSlopeFactor_ = {}, - float lineWidth_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , depthClampEnable( depthClampEnable_ ) - , rasterizerDiscardEnable( rasterizerDiscardEnable_ ) - , polygonMode( polygonMode_ ) - , cullMode( cullMode_ ) - , frontFace( frontFace_ ) - , depthBiasEnable( depthBiasEnable_ ) - , depthBiasConstantFactor( depthBiasConstantFactor_ ) - , depthBiasClamp( depthBiasClamp_ ) - , depthBiasSlopeFactor( depthBiasSlopeFactor_ ) - , lineWidth( lineWidth_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplayEventInfoEXT( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , displayEvent{ displayEvent_ } { } - VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineRasterizationStateCreateInfo( *reinterpret_cast( &rhs ) ) + DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayEventInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineRasterizationStateCreateInfo & operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DisplayEventInfoEXT & operator=( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineRasterizationStateCreateInfo & operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DisplayEventInfoEXT & operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & - setFlags( 
VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + displayEvent = displayEvent_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT + operator VkDisplayEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - depthClampEnable = depthClampEnable_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & - setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT + operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT { - rasterizerDiscardEnable = rasterizerDiscardEnable_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT - { - polygonMode = polygonMode_; - return *this; +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, displayEvent ); } +#endif - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayEventInfoEXT const & ) const = default; +#else + bool operator==( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - cullMode = cullMode_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayEvent == rhs.displayEvent ); +# endif } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - frontFace = frontFace_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayEventInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut; + }; + + template <> + struct CppType + { + using Type = DisplayEventInfoEXT; + }; + + struct DisplayModeParametersKHR + { + using NativeType = VkDisplayModeParametersKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {}, uint32_t refreshRate_ = {} ) VULKAN_HPP_NOEXCEPT + : visibleRegion{ visibleRegion_ } + , refreshRate{ refreshRate_ } { - depthBiasEnable = depthBiasEnable_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModeParametersKHR( 
VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayModeParametersKHR( *reinterpret_cast( &rhs ) ) { - depthBiasConstantFactor = depthBiasConstantFactor_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT + DisplayModeParametersKHR & operator=( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayModeParametersKHR & operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - depthBiasClamp = depthBiasClamp_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D const & visibleRegion_ ) VULKAN_HPP_NOEXCEPT { - depthBiasSlopeFactor = depthBiasSlopeFactor_; + visibleRegion = visibleRegion_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setLineWidth( float lineWidth_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) VULKAN_HPP_NOEXCEPT { - lineWidth = lineWidth_; + refreshRate = refreshRate_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineRasterizationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkDisplayModeParametersKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - flags, - depthClampEnable, - rasterizerDiscardEnable, - polygonMode, - cullMode, - frontFace, - depthBiasEnable, - depthBiasConstantFactor, - depthBiasClamp, - depthBiasSlopeFactor, - lineWidth ); + return std::tie( visibleRegion, refreshRate ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineRasterizationStateCreateInfo const & ) const = default; + auto operator<=>( DisplayModeParametersKHR const & ) const = default; #else - bool operator==( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( depthClampEnable == rhs.depthClampEnable ) && - ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable ) && ( polygonMode == rhs.polygonMode ) && ( cullMode == rhs.cullMode ) && - ( frontFace == rhs.frontFace ) && ( depthBiasEnable == rhs.depthBiasEnable ) && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor ) && - ( depthBiasClamp == rhs.depthBiasClamp ) && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor ) && ( lineWidth == rhs.lineWidth ); + return ( visibleRegion == rhs.visibleRegion ) && ( refreshRate == rhs.refreshRate ); # endif } - bool operator!=( 
PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable = {}; - VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable = {}; - VULKAN_HPP_NAMESPACE::PolygonMode polygonMode = VULKAN_HPP_NAMESPACE::PolygonMode::eFill; - VULKAN_HPP_NAMESPACE::CullModeFlags cullMode = {}; - VULKAN_HPP_NAMESPACE::FrontFace frontFace = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise; - VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable = {}; - float depthBiasConstantFactor = {}; - float depthBiasClamp = {}; - float depthBiasSlopeFactor = {}; - float lineWidth = {}; - }; - - template <> - struct CppType - { - using Type = PipelineRasterizationStateCreateInfo; + VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {}; + uint32_t refreshRate = {}; }; - struct PipelineMultisampleStateCreateInfo + struct DisplayModeCreateInfoKHR { - using NativeType = VkPipelineMultisampleStateCreateInfo; + using NativeType = VkDisplayModeCreateInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineMultisampleStateCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeCreateInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - PipelineMultisampleStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {}, - float minSampleShading_ = {}, - const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , rasterizationSamples( rasterizationSamples_ ) - , sampleShadingEnable( sampleShadingEnable_ ) - , minSampleShading( minSampleShading_ ) - , pSampleMask( pSampleMask_ ) - , alphaToCoverageEnable( alphaToCoverageEnable_ ) - , alphaToOneEnable( alphaToOneEnable_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , parameters{ parameters_ } { } - VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineMultisampleStateCreateInfo( *reinterpret_cast( &rhs ) ) + DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayModeCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - 
PipelineMultisampleStateCreateInfo & operator=( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DisplayModeCreateInfoKHR & operator=( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineMultisampleStateCreateInfo & operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DisplayModeCreateInfoKHR & operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & - setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & - setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT - { - rasterizationSamples = rasterizationSamples_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setSampleShadingEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT - { - sampleShadingEnable = sampleShadingEnable_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setMinSampleShading( float minSampleShading_ ) VULKAN_HPP_NOEXCEPT - { - minSampleShading = minSampleShading_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ ) VULKAN_HPP_NOEXCEPT - { - pSampleMask = pSampleMask_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & - setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT - { - alphaToCoverageEnable = alphaToCoverageEnable_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & parameters_ ) VULKAN_HPP_NOEXCEPT { - alphaToOneEnable = alphaToOneEnable_; + parameters = parameters_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineMultisampleStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkDisplayModeCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -32965,1115 +35511,980 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR const &, + 
VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, rasterizationSamples, sampleShadingEnable, minSampleShading, pSampleMask, alphaToCoverageEnable, alphaToOneEnable ); + return std::tie( sType, pNext, flags, parameters ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineMultisampleStateCreateInfo const & ) const = default; + auto operator<=>( DisplayModeCreateInfoKHR const & ) const = default; #else - bool operator==( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rasterizationSamples == rhs.rasterizationSamples ) && - ( sampleShadingEnable == rhs.sampleShadingEnable ) && ( minSampleShading == rhs.minSampleShading ) && ( pSampleMask == rhs.pSampleMask ) && - ( alphaToCoverageEnable == rhs.alphaToCoverageEnable ) && ( alphaToOneEnable == rhs.alphaToOneEnable ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( parameters == rhs.parameters ); # endif } - bool operator!=( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; - VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable = {}; - float minSampleShading = {}; - const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask = {}; - VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {}; - VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineMultisampleStateCreateInfo; + using Type = DisplayModeCreateInfoKHR; }; - struct StencilOpState + struct DisplayModePropertiesKHR { - using NativeType = VkStencilOpState; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR StencilOpState( VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, - VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, - VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, - VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, - uint32_t compareMask_ = {}, - uint32_t writeMask_ = {}, - uint32_t reference_ = {} ) VULKAN_HPP_NOEXCEPT - : failOp( failOp_ ) - , passOp( passOp_ ) - , depthFailOp( depthFailOp_ ) - , compareOp( compareOp_ ) - , compareMask( compareMask_ ) - , writeMask( writeMask_ ) - , reference( reference_ ) - { - } - - VULKAN_HPP_CONSTEXPR StencilOpState( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT : StencilOpState( 
*reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - StencilOpState & operator=( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - StencilOpState & operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 StencilOpState & setFailOp( VULKAN_HPP_NAMESPACE::StencilOp failOp_ ) VULKAN_HPP_NOEXCEPT - { - failOp = failOp_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 StencilOpState & setPassOp( VULKAN_HPP_NAMESPACE::StencilOp passOp_ ) VULKAN_HPP_NOEXCEPT - { - passOp = passOp_; - return *this; - } + using NativeType = VkDisplayModePropertiesKHR; - VULKAN_HPP_CONSTEXPR_14 StencilOpState & setDepthFailOp( VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT + : displayMode{ displayMode_ } + , parameters{ parameters_ } { - depthFailOp = depthFailOp_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 StencilOpState & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT - { - compareOp = compareOp_; - return *this; - } + VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 StencilOpState & setCompareMask( uint32_t compareMask_ ) VULKAN_HPP_NOEXCEPT + DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayModePropertiesKHR( *reinterpret_cast( &rhs ) ) { - compareMask = compareMask_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 StencilOpState & setWriteMask( uint32_t writeMask_ ) VULKAN_HPP_NOEXCEPT - { - writeMask = writeMask_; - return *this; - } + DisplayModePropertiesKHR & operator=( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 StencilOpState & setReference( uint32_t reference_ ) VULKAN_HPP_NOEXCEPT + DisplayModePropertiesKHR & operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - reference = reference_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkStencilOpState const &() const VULKAN_HPP_NOEXCEPT + operator VkDisplayModePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT + operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( failOp, passOp, depthFailOp, compareOp, compareMask, writeMask, reference ); + return std::tie( displayMode, parameters ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( StencilOpState const & ) const = default; + auto operator<=>( DisplayModePropertiesKHR const & ) const = default; #else - bool operator==( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( 
VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( failOp == rhs.failOp ) && ( passOp == rhs.passOp ) && ( depthFailOp == rhs.depthFailOp ) && ( compareOp == rhs.compareOp ) && - ( compareMask == rhs.compareMask ) && ( writeMask == rhs.writeMask ) && ( reference == rhs.reference ); + return ( displayMode == rhs.displayMode ) && ( parameters == rhs.parameters ); # endif } - bool operator!=( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StencilOp failOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; - VULKAN_HPP_NAMESPACE::StencilOp passOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; - VULKAN_HPP_NAMESPACE::StencilOp depthFailOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; - VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; - uint32_t compareMask = {}; - uint32_t writeMask = {}; - uint32_t reference = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; + VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {}; }; - struct PipelineDepthStencilStateCreateInfo + struct DisplayModeProperties2KHR { - using NativeType = VkPipelineDepthStencilStateCreateInfo; + using NativeType = VkDisplayModeProperties2KHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDepthStencilStateCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeProperties2KHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {}, - VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, - VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {}, - VULKAN_HPP_NAMESPACE::StencilOpState front_ = {}, - VULKAN_HPP_NAMESPACE::StencilOpState back_ = {}, - float minDepthBounds_ = {}, - float maxDepthBounds_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , depthTestEnable( depthTestEnable_ ) - , depthWriteEnable( depthWriteEnable_ ) - , depthCompareOp( depthCompareOp_ ) - , depthBoundsTestEnable( depthBoundsTestEnable_ ) - , stencilTestEnable( stencilTestEnable_ ) - , front( front_ ) - , back( back_ ) - , minDepthBounds( minDepthBounds_ ) - , maxDepthBounds( maxDepthBounds_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , displayModeProperties{ displayModeProperties_ } { } - VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineDepthStencilStateCreateInfo( *reinterpret_cast( &rhs ) ) + DisplayModeProperties2KHR( 
VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayModeProperties2KHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineDepthStencilStateCreateInfo & operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DisplayModeProperties2KHR & operator=( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineDepthStencilStateCreateInfo & operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DisplayModeProperties2KHR & operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + operator VkDisplayModeProperties2KHR const &() const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & - setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT { - flags = flags_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - depthTestEnable = depthTestEnable_; - return *this; + return std::tie( sType, pNext, displayModeProperties ); } +#endif - VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayModeProperties2KHR const & ) const = default; +#else + bool operator==( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - depthWriteEnable = depthWriteEnable_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayModeProperties == rhs.displayModeProperties ); +# endif } - VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - depthCompareOp = depthCompareOp_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & - setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT - { - depthBoundsTestEnable = depthBoundsTestEnable_; - return *this; - } + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeProperties2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {}; + }; - VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT - { - stencilTestEnable = stencilTestEnable_; - return *this; - } + template <> + struct CppType + { + using Type = DisplayModeProperties2KHR; + }; - 
VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setFront( VULKAN_HPP_NAMESPACE::StencilOpState const & front_ ) VULKAN_HPP_NOEXCEPT - { - front = front_; - return *this; - } + struct DisplayModeStereoPropertiesNV + { + using NativeType = VkDisplayModeStereoPropertiesNV; - VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setBack( VULKAN_HPP_NAMESPACE::StencilOpState const & back_ ) VULKAN_HPP_NOEXCEPT + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeStereoPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayModeStereoPropertiesNV( VULKAN_HPP_NAMESPACE::Bool32 hdmi3DSupported_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , hdmi3DSupported{ hdmi3DSupported_ } { - back = back_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setMinDepthBounds( float minDepthBounds_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DisplayModeStereoPropertiesNV( DisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModeStereoPropertiesNV( VkDisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayModeStereoPropertiesNV( *reinterpret_cast( &rhs ) ) { - minDepthBounds = minDepthBounds_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setMaxDepthBounds( float maxDepthBounds_ ) VULKAN_HPP_NOEXCEPT + DisplayModeStereoPropertiesNV & operator=( DisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayModeStereoPropertiesNV & operator=( VkDisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - maxDepthBounds = maxDepthBounds_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPipelineDepthStencilStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkDisplayModeStereoPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkDisplayModeStereoPropertiesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - flags, - depthTestEnable, - depthWriteEnable, - depthCompareOp, - depthBoundsTestEnable, - stencilTestEnable, - front, - back, - minDepthBounds, - maxDepthBounds ); + return std::tie( sType, pNext, hdmi3DSupported ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineDepthStencilStateCreateInfo const & ) const = default; + auto operator<=>( DisplayModeStereoPropertiesNV const & ) const = default; #else - bool operator==( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayModeStereoPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( depthTestEnable == rhs.depthTestEnable ) && - ( depthWriteEnable == rhs.depthWriteEnable ) && ( depthCompareOp == rhs.depthCompareOp ) && - ( 
depthBoundsTestEnable == rhs.depthBoundsTestEnable ) && ( stencilTestEnable == rhs.stencilTestEnable ) && ( front == rhs.front ) && - ( back == rhs.back ) && ( minDepthBounds == rhs.minDepthBounds ) && ( maxDepthBounds == rhs.maxDepthBounds ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hdmi3DSupported == rhs.hdmi3DSupported ); # endif } - bool operator!=( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayModeStereoPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable = {}; - VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable = {}; - VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; - VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable = {}; - VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable = {}; - VULKAN_HPP_NAMESPACE::StencilOpState front = {}; - VULKAN_HPP_NAMESPACE::StencilOpState back = {}; - float minDepthBounds = {}; - float maxDepthBounds = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeStereoPropertiesNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hdmi3DSupported = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineDepthStencilStateCreateInfo; + using Type = DisplayModeStereoPropertiesNV; }; - struct PipelineColorBlendAttachmentState + struct DisplayNativeHdrSurfaceCapabilitiesAMD { - using NativeType = VkPipelineColorBlendAttachmentState; + using NativeType = VkDisplayNativeHdrSurfaceCapabilitiesAMD; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {}, - VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, - VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, - VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, - VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, - VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, - VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, - VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {} ) VULKAN_HPP_NOEXCEPT - : blendEnable( blendEnable_ ) - , srcColorBlendFactor( srcColorBlendFactor_ ) - , dstColorBlendFactor( dstColorBlendFactor_ ) - , colorBlendOp( colorBlendOp_ ) - , srcAlphaBlendFactor( srcAlphaBlendFactor_ ) - , dstAlphaBlendFactor( dstAlphaBlendFactor_ ) - , alphaBlendOp( alphaBlendOp_ ) - , colorWriteMask( colorWriteMask_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , localDimmingSupport{ localDimmingSupport_ } { } - VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( PipelineColorBlendAttachmentState const & rhs 
) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineColorBlendAttachmentState( *reinterpret_cast( &rhs ) ) + DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayNativeHdrSurfaceCapabilitiesAMD( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineColorBlendAttachmentState & operator=( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineColorBlendAttachmentState & operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT + DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ ) VULKAN_HPP_NOEXCEPT + operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const &() const VULKAN_HPP_NOEXCEPT { - blendEnable = blendEnable_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & - setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT + operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT { - srcColorBlendFactor = srcColorBlendFactor_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & - setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - dstColorBlendFactor = dstColorBlendFactor_; - return *this; + return std::tie( sType, pNext, localDimmingSupport ); } +#endif - VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayNativeHdrSurfaceCapabilitiesAMD const & ) const = default; +#else + bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { - colorBlendOp = colorBlendOp_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( localDimmingSupport == rhs.localDimmingSupport ); +# endif } - VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & - setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { - srcAlphaBlendFactor = srcAlphaBlendFactor_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & - setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) 
VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport = {}; + }; + + template <> + struct CppType + { + using Type = DisplayNativeHdrSurfaceCapabilitiesAMD; + }; + + struct DisplayPlaneCapabilitiesKHR + { + using NativeType = VkDisplayPlaneCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {} ) VULKAN_HPP_NOEXCEPT + : supportedAlpha{ supportedAlpha_ } + , minSrcPosition{ minSrcPosition_ } + , maxSrcPosition{ maxSrcPosition_ } + , minSrcExtent{ minSrcExtent_ } + , maxSrcExtent{ maxSrcExtent_ } + , minDstPosition{ minDstPosition_ } + , maxDstPosition{ maxDstPosition_ } + , minDstExtent{ minDstExtent_ } + , maxDstExtent{ maxDstExtent_ } { - dstAlphaBlendFactor = dstAlphaBlendFactor_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlaneCapabilitiesKHR( *reinterpret_cast( &rhs ) ) { - alphaBlendOp = alphaBlendOp_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & - setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT + DisplayPlaneCapabilitiesKHR & operator=( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayPlaneCapabilitiesKHR & operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - colorWriteMask = colorWriteMask_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPipelineColorBlendAttachmentState const &() const VULKAN_HPP_NOEXCEPT + operator VkDisplayPlaneCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT + operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( - blendEnable, srcColorBlendFactor, dstColorBlendFactor, colorBlendOp, srcAlphaBlendFactor, dstAlphaBlendFactor, alphaBlendOp, colorWriteMask ); + return std::tie( supportedAlpha, minSrcPosition, maxSrcPosition, minSrcExtent, maxSrcExtent, minDstPosition, maxDstPosition, minDstExtent, maxDstExtent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineColorBlendAttachmentState const & ) 
const = default; + auto operator<=>( DisplayPlaneCapabilitiesKHR const & ) const = default; #else - bool operator==( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( blendEnable == rhs.blendEnable ) && ( srcColorBlendFactor == rhs.srcColorBlendFactor ) && ( dstColorBlendFactor == rhs.dstColorBlendFactor ) && - ( colorBlendOp == rhs.colorBlendOp ) && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) && - ( alphaBlendOp == rhs.alphaBlendOp ) && ( colorWriteMask == rhs.colorWriteMask ); + return ( supportedAlpha == rhs.supportedAlpha ) && ( minSrcPosition == rhs.minSrcPosition ) && ( maxSrcPosition == rhs.maxSrcPosition ) && + ( minSrcExtent == rhs.minSrcExtent ) && ( maxSrcExtent == rhs.maxSrcExtent ) && ( minDstPosition == rhs.minDstPosition ) && + ( maxDstPosition == rhs.maxDstPosition ) && ( minDstExtent == rhs.minDstExtent ) && ( maxDstExtent == rhs.maxDstExtent ); # endif } - bool operator!=( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::Bool32 blendEnable = {}; - VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; - VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; - VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; - VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; - VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; - VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; - VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask = {}; + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha = {}; + VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition = {}; + VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition = {}; + VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent = {}; + VULKAN_HPP_NAMESPACE::Offset2D minDstPosition = {}; + VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition = {}; + VULKAN_HPP_NAMESPACE::Extent2D minDstExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent = {}; }; - struct PipelineColorBlendStateCreateInfo + struct DisplayPlaneCapabilities2KHR { - using NativeType = VkPipelineColorBlendStateCreateInfo; + using NativeType = VkDisplayPlaneCapabilities2KHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendStateCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneCapabilities2KHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {}, - VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear, - uint32_t attachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ = {}, - std::array const & blendConstants_ = {}, - const void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , logicOpEnable( logicOpEnable_ ) - , logicOp( logicOp_ ) - , attachmentCount( attachmentCount_ ) - , pAttachments( pAttachments_ ) - , blendConstants( blendConstants_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , capabilities{ capabilities_ } { } - VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineColorBlendStateCreateInfo( *reinterpret_cast( &rhs ) ) + DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlaneCapabilities2KHR( *reinterpret_cast( &rhs ) ) { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineColorBlendStateCreateInfo( - VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_, - VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_, - VULKAN_HPP_NAMESPACE::LogicOp logicOp_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, - std::array const & blendConstants_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , logicOpEnable( logicOpEnable_ ) - , logicOp( logicOp_ ) - , attachmentCount( static_cast( attachments_.size() ) ) - , pAttachments( attachments_.data() ) - , blendConstants( blendConstants_ ) + DisplayPlaneCapabilities2KHR & operator=( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayPlaneCapabilities2KHR & operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineColorBlendStateCreateInfo & operator=( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + operator VkDisplayPlaneCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } - PipelineColorBlendStateCreateInfo & operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); - return *this; + return *reinterpret_cast( this ); } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return std::tie( sType, pNext, capabilities ); } +#endif - VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlaneCapabilities2KHR const & ) const = default; +#else + bool operator==( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - flags = flags_; - return *this; +# if 
defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( capabilities == rhs.capabilities ); +# endif } - VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setLogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - logicOpEnable = logicOpEnable_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setLogicOp( VULKAN_HPP_NAMESPACE::LogicOp logicOp_ ) VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {}; + }; + + template <> + struct CppType + { + using Type = DisplayPlaneCapabilities2KHR; + }; + + struct DisplayPlaneInfo2KHR + { + using NativeType = VkDisplayPlaneInfo2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneInfo2KHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplayPlaneInfo2KHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {}, uint32_t planeIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , mode{ mode_ } + , planeIndex{ planeIndex_ } { - logicOp = logicOp_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlaneInfo2KHR( *reinterpret_cast( &rhs ) ) { - attachmentCount = attachmentCount_; + } + + DisplayPlaneInfo2KHR & operator=( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayPlaneInfo2KHR & operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & - setPAttachments( const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pAttachments = pAttachments_; + pNext = pNext_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineColorBlendStateCreateInfo & setAttachments( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT { - attachmentCount = static_cast( attachments_.size() ); - pAttachments = attachments_.data(); + mode = mode_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setBlendConstants( std::array blendConstants_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT { - blendConstants = blendConstants_; + planeIndex = planeIndex_; return *this; } -#endif 
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineColorBlendStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkDisplayPlaneInfo2KHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineColorBlendStateCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple const &> + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, logicOpEnable, logicOp, attachmentCount, pAttachments, blendConstants ); + return std::tie( sType, pNext, mode, planeIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineColorBlendStateCreateInfo const & ) const = default; + auto operator<=>( DisplayPlaneInfo2KHR const & ) const = default; #else - bool operator==( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( logicOpEnable == rhs.logicOpEnable ) && - ( logicOp == rhs.logicOp ) && ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) && - ( blendConstants == rhs.blendConstants ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == rhs.mode ) && ( planeIndex == rhs.planeIndex ); # endif } - bool operator!=( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable = {}; - VULKAN_HPP_NAMESPACE::LogicOp logicOp = VULKAN_HPP_NAMESPACE::LogicOp::eClear; - uint32_t attachmentCount = {}; - const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D blendConstants = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {}; + uint32_t planeIndex = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineColorBlendStateCreateInfo; + using Type = DisplayPlaneInfo2KHR; }; - struct PipelineDynamicStateCreateInfo + struct DisplayPlanePropertiesKHR { - using NativeType = VkPipelineDynamicStateCreateInfo; + using NativeType = VkDisplayPlanePropertiesKHR; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDynamicStateCreateInfo; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {}, + uint32_t currentStackIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : currentDisplay{ currentDisplay_ } + , currentStackIndex{ currentStackIndex_ } + { + } -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR 
PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {}, - uint32_t dynamicStateCount_ = {}, - const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , dynamicStateCount( dynamicStateCount_ ) - , pDynamicStates( pDynamicStates_ ) + VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlanePropertiesKHR( *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DisplayPlanePropertiesKHR & operator=( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineDynamicStateCreateInfo( *reinterpret_cast( &rhs ) ) + DisplayPlanePropertiesKHR & operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicStates_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), flags( flags_ ), dynamicStateCount( static_cast( dynamicStates_.size() ) ), pDynamicStates( dynamicStates_.data() ) + operator VkDisplayPlanePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { + return *reinterpret_cast( this ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineDynamicStateCreateInfo & operator=( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } - PipelineDynamicStateCreateInfo & operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); - return *this; + return std::tie( currentDisplay, currentStackIndex ); } +#endif -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPlanePropertiesKHR const & ) const = default; +#else + bool operator==( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( currentDisplay == rhs.currentDisplay ) && ( currentStackIndex == rhs.currentStackIndex ); +# endif } - VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - flags = flags_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setDynamicStateCount( uint32_t dynamicStateCount_ ) VULKAN_HPP_NOEXCEPT + public: + 
VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay = {}; + uint32_t currentStackIndex = {}; + }; + + struct DisplayPlaneProperties2KHR + { + using NativeType = VkDisplayPlaneProperties2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneProperties2KHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , displayPlaneProperties{ displayPlaneProperties_ } { - dynamicStateCount = dynamicStateCount_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPlaneProperties2KHR( *reinterpret_cast( &rhs ) ) { - pDynamicStates = pDynamicStates_; - return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineDynamicStateCreateInfo & - setDynamicStates( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicStates_ ) VULKAN_HPP_NOEXCEPT + DisplayPlaneProperties2KHR & operator=( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayPlaneProperties2KHR & operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { - dynamicStateCount = static_cast( dynamicStates_.size() ); - pDynamicStates = dynamicStates_.data(); + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPipelineDynamicStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkDisplayPlaneProperties2KHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, dynamicStateCount, pDynamicStates ); + return std::tie( sType, pNext, displayPlaneProperties ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineDynamicStateCreateInfo const & ) const = default; + auto operator<=>( DisplayPlaneProperties2KHR const & ) const = default; #else - bool operator==( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dynamicStateCount == rhs.dynamicStateCount ) && - ( pDynamicStates == rhs.pDynamicStates ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayPlaneProperties == rhs.displayPlaneProperties ); # endif } - bool operator!=( PipelineDynamicStateCreateInfo const & rhs ) const 
VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags = {}; - uint32_t dynamicStateCount = {}; - const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineDynamicStateCreateInfo; + using Type = DisplayPlaneProperties2KHR; }; - struct GraphicsPipelineCreateInfo + struct DisplayPowerInfoEXT { - using NativeType = VkGraphicsPipelineCreateInfo; + using NativeType = VkDisplayPowerInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPowerInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, - uint32_t stageCount_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, - VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, - uint32_t subpass_ = {}, - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, - int32_t basePipelineIndex_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , stageCount( stageCount_ ) - , pStages( pStages_ ) - , pVertexInputState( pVertexInputState_ ) - , pInputAssemblyState( pInputAssemblyState_ ) - , pTessellationState( pTessellationState_ ) - , pViewportState( pViewportState_ ) - , pRasterizationState( pRasterizationState_ ) - , pMultisampleState( pMultisampleState_ ) - , pDepthStencilState( pDepthStencilState_ ) - , pColorBlendState( pColorBlendState_ ) - , pDynamicState( pDynamicState_ ) - , layout( layout_ ) - , renderPass( renderPass_ ) - , subpass( subpass_ ) - , basePipelineHandle( basePipelineHandle_ ) - , basePipelineIndex( basePipelineIndex_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , powerState{ 
powerState_ } { } - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : GraphicsPipelineCreateInfo( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_, - const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, - VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, - uint32_t subpass_ = {}, - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, - int32_t basePipelineIndex_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , stageCount( static_cast( stages_.size() ) ) - , pStages( stages_.data() ) - , pVertexInputState( pVertexInputState_ ) - , pInputAssemblyState( pInputAssemblyState_ ) - , pTessellationState( pTessellationState_ ) - , pViewportState( pViewportState_ ) - , pRasterizationState( pRasterizationState_ ) - , pMultisampleState( pMultisampleState_ ) - , pDepthStencilState( pDepthStencilState_ ) - , pColorBlendState( pColorBlendState_ ) - , pDynamicState( pDynamicState_ ) - , layout( layout_ ) - , renderPass( renderPass_ ) - , subpass( subpass_ ) - , basePipelineHandle( basePipelineHandle_ ) - , basePipelineIndex( basePipelineIndex_ ) + DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayPowerInfoEXT( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - GraphicsPipelineCreateInfo & operator=( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DisplayPowerInfoEXT & operator=( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - GraphicsPipelineCreateInfo & operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + DisplayPowerInfoEXT & operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 
GraphicsPipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + powerState = powerState_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + operator VkDisplayPowerInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - stageCount = stageCount_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT + operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT { - pStages = pStages_; - return *this; + return *reinterpret_cast( this ); } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - GraphicsPipelineCreateInfo & - setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - stageCount = static_cast( stages_.size() ); - pStages = stages_.data(); - return *this; + return std::tie( sType, pNext, powerState ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & - setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayPowerInfoEXT const & ) const = default; +#else + bool operator==( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - pVertexInputState = pVertexInputState_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( powerState == rhs.powerState ); +# endif } - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & - setPInputAssemblyState( const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - pInputAssemblyState = pInputAssemblyState_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & - setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT - { - pTessellationState = pTessellationState_; - return *this; - } + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff; + }; - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & - setPViewportState( const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ ) VULKAN_HPP_NOEXCEPT - { - pViewportState = pViewportState_; - return *this; - } + template <> + struct CppType + { + using Type = DisplayPowerInfoEXT; + }; - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & - setPRasterizationState( const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ ) VULKAN_HPP_NOEXCEPT - { - pRasterizationState = pRasterizationState_; - return *this; - } + struct DisplayPresentInfoKHR + { + using 
NativeType = VkDisplayPresentInfoKHR; - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & - setPMultisampleState( const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ ) VULKAN_HPP_NOEXCEPT - { - pMultisampleState = pMultisampleState_; - return *this; - } + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPresentInfoKHR; - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & - setPDepthStencilState( const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {}, + VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcRect{ srcRect_ } + , dstRect{ dstRect_ } + , persistent{ persistent_ } { - pDepthStencilState = pDepthStencilState_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & - setPColorBlendState( const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ ) VULKAN_HPP_NOEXCEPT - { - pColorBlendState = pColorBlendState_; - return *this; - } + VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & - setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT + DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPresentInfoKHR( *reinterpret_cast( &rhs ) ) { - pDynamicState = pDynamicState_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + DisplayPresentInfoKHR & operator=( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayPresentInfoKHR & operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - layout = layout_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - renderPass = renderPass_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D const & srcRect_ ) VULKAN_HPP_NOEXCEPT { - subpass = subpass_; + srcRect = srcRect_; return *this; } - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setDstRect( VULKAN_HPP_NAMESPACE::Rect2D const & dstRect_ ) VULKAN_HPP_NOEXCEPT { - basePipelineHandle = basePipelineHandle_; + dstRect = dstRect_; return *this; } - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setPersistent( 
VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT { - basePipelineIndex = basePipelineIndex_; + persistent = persistent_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkGraphicsPipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkDisplayPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -34082,301 +36493,353 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::Rect2D const &, + VULKAN_HPP_NAMESPACE::Rect2D const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - flags, - stageCount, - pStages, - pVertexInputState, - pInputAssemblyState, - pTessellationState, - pViewportState, - pRasterizationState, - pMultisampleState, - pDepthStencilState, - pColorBlendState, - pDynamicState, - layout, - renderPass, - subpass, - basePipelineHandle, - basePipelineIndex ); + return std::tie( sType, pNext, srcRect, dstRect, persistent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( GraphicsPipelineCreateInfo const & ) const = default; + auto operator<=>( DisplayPresentInfoKHR const & ) const = default; #else - bool operator==( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && - ( pVertexInputState == rhs.pVertexInputState ) && ( pInputAssemblyState == rhs.pInputAssemblyState ) && - ( pTessellationState == rhs.pTessellationState ) && ( pViewportState == rhs.pViewportState ) && - ( pRasterizationState == rhs.pRasterizationState ) && ( pMultisampleState == rhs.pMultisampleState ) && - ( pDepthStencilState == rhs.pDepthStencilState ) && ( pColorBlendState == rhs.pColorBlendState ) && ( pDynamicState == rhs.pDynamicState ) && - ( layout == rhs.layout ) && ( renderPass == rhs.renderPass ) && ( subpass == rhs.subpass ) && ( basePipelineHandle == rhs.basePipelineHandle ) && - ( basePipelineIndex == rhs.basePipelineIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcRect == rhs.srcRect ) && ( dstRect == rhs.dstRect ) && ( persistent == rhs.persistent ); # endif } - bool operator!=( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; - uint32_t stageCount = {}; - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; - const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {}; - const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState = {}; - const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {}; - const 
VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState = {}; - const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState = {}; - const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState = {}; - const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState = {}; - const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState = {}; - const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState = {}; - VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; - VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; - uint32_t subpass = {}; - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; - int32_t basePipelineIndex = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Rect2D srcRect = {}; + VULKAN_HPP_NAMESPACE::Rect2D dstRect = {}; + VULKAN_HPP_NAMESPACE::Bool32 persistent = {}; }; template <> - struct CppType + struct CppType { - using Type = GraphicsPipelineCreateInfo; + using Type = DisplayPresentInfoKHR; }; - struct GraphicsPipelineLibraryCreateInfoEXT + struct DisplayPropertiesKHR { - using NativeType = VkGraphicsPipelineLibraryCreateInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineLibraryCreateInfoEXT; + using NativeType = VkDisplayPropertiesKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryCreateInfoEXT( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {}, + const char * displayName_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {} ) VULKAN_HPP_NOEXCEPT + : display{ display_ } + , displayName{ displayName_ } + , physicalDimensions{ physicalDimensions_ } + , physicalResolution{ physicalResolution_ } + , supportedTransforms{ supportedTransforms_ } + , planeReorderPossible{ planeReorderPossible_ } + , persistentContent{ persistentContent_ } { } - VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryCreateInfoEXT( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - GraphicsPipelineLibraryCreateInfoEXT( VkGraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : GraphicsPipelineLibraryCreateInfoEXT( *reinterpret_cast( &rhs ) ) + DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayPropertiesKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - GraphicsPipelineLibraryCreateInfoEXT & operator=( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DisplayPropertiesKHR & operator=( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - GraphicsPipelineLibraryCreateInfoEXT & operator=( 
VkGraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DisplayPropertiesKHR & operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + operator VkDisplayPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT { - flags = flags_; + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( display, displayName, physicalDimensions, physicalResolution, supportedTransforms, planeReorderPossible, persistentContent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = display <=> rhs.display; cmp != 0 ) + return cmp; + if ( displayName != rhs.displayName ) + if ( auto cmp = strcmp( displayName, rhs.displayName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = physicalDimensions <=> rhs.physicalDimensions; cmp != 0 ) + return cmp; + if ( auto cmp = physicalResolution <=> rhs.physicalResolution; cmp != 0 ) + return cmp; + if ( auto cmp = supportedTransforms <=> rhs.supportedTransforms; cmp != 0 ) + return cmp; + if ( auto cmp = planeReorderPossible <=> rhs.planeReorderPossible; cmp != 0 ) + return cmp; + if ( auto cmp = persistentContent <=> rhs.persistentContent; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( display == rhs.display ) && ( ( displayName == rhs.displayName ) || ( strcmp( displayName, rhs.displayName ) == 0 ) ) && + ( physicalDimensions == rhs.physicalDimensions ) && ( physicalResolution == rhs.physicalResolution ) && + ( supportedTransforms == rhs.supportedTransforms ) && ( planeReorderPossible == rhs.planeReorderPossible ) && + ( persistentContent == rhs.persistentContent ); + } + + bool operator!=( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::DisplayKHR display = {}; + const char * displayName = {}; + VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions = {}; + VULKAN_HPP_NAMESPACE::Extent2D physicalResolution = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; + VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible = {}; + VULKAN_HPP_NAMESPACE::Bool32 persistentContent = {}; + }; + + struct DisplayProperties2KHR + { + using NativeType = VkDisplayProperties2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayProperties2KHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {}, + void * pNext_ = nullptr 
) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , displayProperties{ displayProperties_ } + { + } + + VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayProperties2KHR( *reinterpret_cast( &rhs ) ) + { + } + + DisplayProperties2KHR & operator=( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DisplayProperties2KHR & operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkGraphicsPipelineLibraryCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkDisplayProperties2KHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkGraphicsPipelineLibraryCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags ); + return std::tie( sType, pNext, displayProperties ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( GraphicsPipelineLibraryCreateInfoEXT const & ) const = default; + auto operator<=>( DisplayProperties2KHR const & ) const = default; #else - bool operator==( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayProperties == rhs.displayProperties ); # endif } - bool operator!=( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineLibraryCreateInfoEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {}; }; template <> - struct CppType + struct CppType { - using Type = GraphicsPipelineLibraryCreateInfoEXT; + using Type = DisplayProperties2KHR; }; - struct GraphicsShaderGroupCreateInfoNV + struct DisplaySurfaceCreateInfoKHR { - using NativeType = VkGraphicsShaderGroupCreateInfoNV; + using NativeType = VkDisplaySurfaceCreateInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsShaderGroupCreateInfoNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceCreateInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( uint32_t stageCount_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * 
pVertexInputState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stageCount( stageCount_ ) - , pStages( pStages_ ) - , pVertexInputState( pVertexInputState_ ) - , pTessellationState( pTessellationState_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplaySurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, + uint32_t planeIndex_ = {}, + uint32_t planeStackIndex_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + float globalAlpha_ = {}, + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque, + VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , displayMode{ displayMode_ } + , planeIndex{ planeIndex_ } + , planeStackIndex{ planeStackIndex_ } + , transform{ transform_ } + , globalAlpha{ globalAlpha_ } + , alphaMode{ alphaMode_ } + , imageExtent{ imageExtent_ } { } - VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - GraphicsShaderGroupCreateInfoNV( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : GraphicsShaderGroupCreateInfoNV( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - GraphicsShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_, - const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , stageCount( static_cast( stages_.size() ) ) - , pStages( stages_.data() ) - , pVertexInputState( pVertexInputState_ ) - , pTessellationState( pTessellationState_ ) + DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplaySurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - GraphicsShaderGroupCreateInfoNV & operator=( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DisplaySurfaceCreateInfoKHR & operator=( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - GraphicsShaderGroupCreateInfoNV & operator=( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + DisplaySurfaceCreateInfoKHR & operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - 
VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { - stageCount = stageCount_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & - setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT { - pStages = pStages_; + displayMode = displayMode_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - GraphicsShaderGroupCreateInfoNV & - setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT { - stageCount = static_cast( stages_.size() ); - pStages = stages_.data(); + planeIndex = planeIndex_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & - setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT { - pVertexInputState = pVertexInputState_; + planeStackIndex = planeStackIndex_; return *this; } - VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & - setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT { - pTessellationState = pTessellationState_; + transform = transform_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkGraphicsShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + globalAlpha = globalAlpha_; + return *this; } - operator VkGraphicsShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + alphaMode = alphaMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDisplaySurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -34385,3871 +36848,3531 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + uint32_t const &, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &, + float const &, + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR const &, + VULKAN_HPP_NAMESPACE::Extent2D const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, stageCount, pStages, pVertexInputState, 
pTessellationState ); + return std::tie( sType, pNext, flags, displayMode, planeIndex, planeStackIndex, transform, globalAlpha, alphaMode, imageExtent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( GraphicsShaderGroupCreateInfoNV const & ) const = default; + auto operator<=>( DisplaySurfaceCreateInfoKHR const & ) const = default; #else - bool operator==( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && - ( pVertexInputState == rhs.pVertexInputState ) && ( pTessellationState == rhs.pTessellationState ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( displayMode == rhs.displayMode ) && + ( planeIndex == rhs.planeIndex ) && ( planeStackIndex == rhs.planeStackIndex ) && ( transform == rhs.transform ) && + ( globalAlpha == rhs.globalAlpha ) && ( alphaMode == rhs.alphaMode ) && ( imageExtent == rhs.imageExtent ); # endif } - bool operator!=( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsShaderGroupCreateInfoNV; - const void * pNext = {}; - uint32_t stageCount = {}; - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; - const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {}; - const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; + uint32_t planeIndex = {}; + uint32_t planeStackIndex = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + float globalAlpha = {}; + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque; + VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; }; template <> - struct CppType + struct CppType { - using Type = GraphicsShaderGroupCreateInfoNV; + using Type = DisplaySurfaceCreateInfoKHR; }; - struct GraphicsPipelineShaderGroupsCreateInfoNV + struct DisplaySurfaceStereoCreateInfoNV { - using NativeType = VkGraphicsPipelineShaderGroupsCreateInfoNV; + using NativeType = VkDisplaySurfaceStereoCreateInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceStereoCreateInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( uint32_t groupCount_ = {}, - const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ = {}, - uint32_t pipelineCount_ = {}, - const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , groupCount( 
groupCount_ ) - , pGroups( pGroups_ ) - , pipelineCount( pipelineCount_ ) - , pPipelines( pPipelines_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplaySurfaceStereoCreateInfoNV( VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV stereoType_ = VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV::eNone, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stereoType{ stereoType_ } { } - VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - GraphicsPipelineShaderGroupsCreateInfoNV( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : GraphicsPipelineShaderGroupsCreateInfoNV( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR DisplaySurfaceStereoCreateInfoNV( DisplaySurfaceStereoCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - GraphicsPipelineShaderGroupsCreateInfoNV( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelines_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , groupCount( static_cast( groups_.size() ) ) - , pGroups( groups_.data() ) - , pipelineCount( static_cast( pipelines_.size() ) ) - , pPipelines( pipelines_.data() ) + DisplaySurfaceStereoCreateInfoNV( VkDisplaySurfaceStereoCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplaySurfaceStereoCreateInfoNV( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - GraphicsPipelineShaderGroupsCreateInfoNV & operator=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DisplaySurfaceStereoCreateInfoNV & operator=( DisplaySurfaceStereoCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - GraphicsPipelineShaderGroupsCreateInfoNV & operator=( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + DisplaySurfaceStereoCreateInfoNV & operator=( VkDisplaySurfaceStereoCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceStereoCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT - { - groupCount = groupCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & - setPGroups( const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT - { - pGroups = pGroups_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - GraphicsPipelineShaderGroupsCreateInfoNV & setGroups( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ ) VULKAN_HPP_NOEXCEPT - { - groupCount = static_cast( groups_.size() ); - pGroups = groups_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV 
& setPipelineCount( uint32_t pipelineCount_ ) VULKAN_HPP_NOEXCEPT - { - pipelineCount = pipelineCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPPipelines( const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ ) VULKAN_HPP_NOEXCEPT - { - pPipelines = pPipelines_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - GraphicsPipelineShaderGroupsCreateInfoNV & - setPipelines( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelines_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceStereoCreateInfoNV & setStereoType( VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV stereoType_ ) VULKAN_HPP_NOEXCEPT { - pipelineCount = static_cast( pipelines_.size() ); - pPipelines = pipelines_.data(); + stereoType = stereoType_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkGraphicsPipelineShaderGroupsCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + operator VkDisplaySurfaceStereoCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkGraphicsPipelineShaderGroupsCreateInfoNV &() VULKAN_HPP_NOEXCEPT + operator VkDisplaySurfaceStereoCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, groupCount, pGroups, pipelineCount, pPipelines ); + return std::tie( sType, pNext, stereoType ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( GraphicsPipelineShaderGroupsCreateInfoNV const & ) const = default; + auto operator<=>( DisplaySurfaceStereoCreateInfoNV const & ) const = default; #else - bool operator==( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DisplaySurfaceStereoCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( groupCount == rhs.groupCount ) && ( pGroups == rhs.pGroups ) && - ( pipelineCount == rhs.pipelineCount ) && ( pPipelines == rhs.pPipelines ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stereoType == rhs.stereoType ); # endif } - bool operator!=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DisplaySurfaceStereoCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV; - const void * pNext = {}; - uint32_t groupCount = {}; - const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups = {}; - uint32_t pipelineCount = {}; - const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceStereoCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV stereoType = VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV::eNone; }; template <> - struct CppType + struct CppType { - using Type = GraphicsPipelineShaderGroupsCreateInfoNV; + using Type = DisplaySurfaceStereoCreateInfoNV; }; - struct XYColorEXT + struct DrawIndexedIndirectCommand { 
- using NativeType = VkXYColorEXT; + using NativeType = VkDrawIndexedIndirectCommand; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR XYColorEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT - : x( x_ ) - , y( y_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( uint32_t indexCount_ = {}, + uint32_t instanceCount_ = {}, + uint32_t firstIndex_ = {}, + int32_t vertexOffset_ = {}, + uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT + : indexCount{ indexCount_ } + , instanceCount{ instanceCount_ } + , firstIndex{ firstIndex_ } + , vertexOffset{ vertexOffset_ } + , firstInstance{ firstInstance_ } { } - VULKAN_HPP_CONSTEXPR XYColorEXT( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; - XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT : XYColorEXT( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + : DrawIndexedIndirectCommand( *reinterpret_cast( &rhs ) ) + { + } - XYColorEXT & operator=( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DrawIndexedIndirectCommand & operator=( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - XYColorEXT & operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DrawIndexedIndirectCommand & operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 XYColorEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT { - x = x_; + indexCount = indexCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 XYColorEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT { - y = y_; + instanceCount = instanceCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkXYColorEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + firstIndex = firstIndex_; + return *this; } - operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + vertexOffset = vertexOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT + { + firstInstance = firstInstance_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkDrawIndexedIndirectCommand const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return 
std::tie( x, y ); + return std::tie( indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( XYColorEXT const & ) const = default; + auto operator<=>( DrawIndexedIndirectCommand const & ) const = default; #else - bool operator==( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( x == rhs.x ) && ( y == rhs.y ); + return ( indexCount == rhs.indexCount ) && ( instanceCount == rhs.instanceCount ) && ( firstIndex == rhs.firstIndex ) && + ( vertexOffset == rhs.vertexOffset ) && ( firstInstance == rhs.firstInstance ); # endif } - bool operator!=( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - float x = {}; - float y = {}; + uint32_t indexCount = {}; + uint32_t instanceCount = {}; + uint32_t firstIndex = {}; + int32_t vertexOffset = {}; + uint32_t firstInstance = {}; }; - struct HdrMetadataEXT + struct DrawIndirectCommand { - using NativeType = VkHdrMetadataEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrMetadataEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR HdrMetadataEXT( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {}, - VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {}, - VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {}, - VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {}, - float maxLuminance_ = {}, - float minLuminance_ = {}, - float maxContentLightLevel_ = {}, - float maxFrameAverageLightLevel_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , displayPrimaryRed( displayPrimaryRed_ ) - , displayPrimaryGreen( displayPrimaryGreen_ ) - , displayPrimaryBlue( displayPrimaryBlue_ ) - , whitePoint( whitePoint_ ) - , maxLuminance( maxLuminance_ ) - , minLuminance( minLuminance_ ) - , maxContentLightLevel( maxContentLightLevel_ ) - , maxFrameAverageLightLevel( maxFrameAverageLightLevel_ ) - { - } - - VULKAN_HPP_CONSTEXPR HdrMetadataEXT( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT : HdrMetadataEXT( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - HdrMetadataEXT & operator=( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - HdrMetadataEXT & operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + using NativeType = VkDrawIndirectCommand; -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrawIndirectCommand( uint32_t vertexCount_ = {}, + uint32_t instanceCount_ = {}, + uint32_t firstVertex_ = {}, + uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT + : vertexCount{ vertexCount_ } + , instanceCount{ instanceCount_ } + , firstVertex{ firstVertex_ } + , firstInstance{ firstInstance_ } { - pNext = pNext_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryRed( 
VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT - { - displayPrimaryRed = displayPrimaryRed_; - return *this; - } + VULKAN_HPP_CONSTEXPR DrawIndirectCommand( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT + DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT : DrawIndirectCommand( *reinterpret_cast( &rhs ) ) { - displayPrimaryGreen = displayPrimaryGreen_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT - { - displayPrimaryBlue = displayPrimaryBlue_; - return *this; - } + DrawIndirectCommand & operator=( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT const & whitePoint_ ) VULKAN_HPP_NOEXCEPT + DrawIndirectCommand & operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT { - whitePoint = whitePoint_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxLuminance( float maxLuminance_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT { - maxLuminance = maxLuminance_; + vertexCount = vertexCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMinLuminance( float minLuminance_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT { - minLuminance = minLuminance_; + instanceCount = instanceCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxContentLightLevel( float maxContentLightLevel_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT { - maxContentLightLevel = maxContentLightLevel_; + firstVertex = firstVertex_; return *this; } - VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT { - maxFrameAverageLightLevel = maxFrameAverageLightLevel_; + firstInstance = firstInstance_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkHdrMetadataEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkDrawIndirectCommand const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT + operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - displayPrimaryRed, - displayPrimaryGreen, - displayPrimaryBlue, - whitePoint, - maxLuminance, - minLuminance, - maxContentLightLevel, - maxFrameAverageLightLevel ); + return std::tie( vertexCount, instanceCount, firstVertex, firstInstance ); } #endif #if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( HdrMetadataEXT const & ) const = default; + auto operator<=>( DrawIndirectCommand const & ) const = default; #else - bool operator==( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayPrimaryRed == rhs.displayPrimaryRed ) && - ( displayPrimaryGreen == rhs.displayPrimaryGreen ) && ( displayPrimaryBlue == rhs.displayPrimaryBlue ) && ( whitePoint == rhs.whitePoint ) && - ( maxLuminance == rhs.maxLuminance ) && ( minLuminance == rhs.minLuminance ) && ( maxContentLightLevel == rhs.maxContentLightLevel ) && - ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel ); + return ( vertexCount == rhs.vertexCount ) && ( instanceCount == rhs.instanceCount ) && ( firstVertex == rhs.firstVertex ) && + ( firstInstance == rhs.firstInstance ); # endif } - bool operator!=( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrMetadataEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed = {}; - VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen = {}; - VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue = {}; - VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint = {}; - float maxLuminance = {}; - float minLuminance = {}; - float maxContentLightLevel = {}; - float maxFrameAverageLightLevel = {}; + uint32_t vertexCount = {}; + uint32_t instanceCount = {}; + uint32_t firstVertex = {}; + uint32_t firstInstance = {}; }; - template <> - struct CppType + struct DrawIndirectCountIndirectCommandEXT { - using Type = HdrMetadataEXT; - }; + using NativeType = VkDrawIndirectCountIndirectCommandEXT; - struct HeadlessSurfaceCreateInfoEXT - { - using NativeType = VkHeadlessSurfaceCreateInfoEXT; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrawIndirectCountIndirectCommandEXT( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, + uint32_t stride_ = {}, + uint32_t commandCount_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferAddress{ bufferAddress_ } + , stride{ stride_ } + , commandCount{ commandCount_ } + { + } - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHeadlessSurfaceCreateInfoEXT; + VULKAN_HPP_CONSTEXPR DrawIndirectCountIndirectCommandEXT( DrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) + DrawIndirectCountIndirectCommandEXT( VkDrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DrawIndirectCountIndirectCommandEXT( *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DrawIndirectCountIndirectCommandEXT & operator=( DrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - 
HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : HeadlessSurfaceCreateInfoEXT( *reinterpret_cast( &rhs ) ) + DrawIndirectCountIndirectCommandEXT & operator=( VkDrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - HeadlessSurfaceCreateInfoEXT & operator=( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - HeadlessSurfaceCreateInfoEXT & operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCountIndirectCommandEXT & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + bufferAddress = bufferAddress_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCountIndirectCommandEXT & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + stride = stride_; return *this; } - VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCountIndirectCommandEXT & setCommandCount( uint32_t commandCount_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + commandCount = commandCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkHeadlessSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkDrawIndirectCountIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkDrawIndirectCountIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags ); + return std::tie( bufferAddress, stride, commandCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( HeadlessSurfaceCreateInfoEXT const & ) const = default; + auto operator<=>( DrawIndirectCountIndirectCommandEXT const & ) const = default; #else - bool operator==( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DrawIndirectCountIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); + return ( bufferAddress == rhs.bufferAddress ) && ( stride == rhs.stride ) && ( commandCount == rhs.commandCount ); # endif } - bool operator!=( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DrawIndirectCountIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags = {}; - }; - - template <> - struct CppType - { 
- using Type = HeadlessSurfaceCreateInfoEXT; + VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; + uint32_t stride = {}; + uint32_t commandCount = {}; }; -#if defined( VK_USE_PLATFORM_IOS_MVK ) - struct IOSSurfaceCreateInfoMVK + struct DrawMeshTasksIndirectCommandEXT { - using NativeType = VkIOSSurfaceCreateInfoMVK; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIosSurfaceCreateInfoMVK; + using NativeType = VkDrawMeshTasksIndirectCommandEXT; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {}, - const void * pView_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , pView( pView_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DrawMeshTasksIndirectCommandEXT( uint32_t groupCountX_ = {}, uint32_t groupCountY_ = {}, uint32_t groupCountZ_ = {} ) VULKAN_HPP_NOEXCEPT + : groupCountX{ groupCountX_ } + , groupCountY{ groupCountY_ } + , groupCountZ{ groupCountZ_ } { } - VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandEXT( DrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT - : IOSSurfaceCreateInfoMVK( *reinterpret_cast( &rhs ) ) + DrawMeshTasksIndirectCommandEXT( VkDrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DrawMeshTasksIndirectCommandEXT( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - IOSSurfaceCreateInfoMVK & operator=( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DrawMeshTasksIndirectCommandEXT & operator=( DrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - IOSSurfaceCreateInfoMVK & operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + DrawMeshTasksIndirectCommandEXT & operator=( VkDrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountX( uint32_t groupCountX_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + groupCountX = groupCountX_; return *this; } - VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountY( uint32_t groupCountY_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + groupCountY = groupCountY_; return *this; } - VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountZ( uint32_t groupCountZ_ ) VULKAN_HPP_NOEXCEPT { - pView = pView_; + groupCountZ = groupCountZ_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkIOSSurfaceCreateInfoMVK const &() const 
VULKAN_HPP_NOEXCEPT + operator VkDrawMeshTasksIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT + operator VkDrawMeshTasksIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, pView ); + return std::tie( groupCountX, groupCountY, groupCountZ ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( IOSSurfaceCreateInfoMVK const & ) const = default; -# else - bool operator==( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrawMeshTasksIndirectCommandEXT const & ) const = default; +#else + bool operator==( DrawMeshTasksIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView ); -# endif +# else + return ( groupCountX == rhs.groupCountX ) && ( groupCountY == rhs.groupCountY ) && ( groupCountZ == rhs.groupCountZ ); +# endif } - bool operator!=( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DrawMeshTasksIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags = {}; - const void * pView = {}; - }; - - template <> - struct CppType - { - using Type = IOSSurfaceCreateInfoMVK; + uint32_t groupCountX = {}; + uint32_t groupCountY = {}; + uint32_t groupCountZ = {}; }; -#endif /*VK_USE_PLATFORM_IOS_MVK*/ - struct ImageBlit + struct DrawMeshTasksIndirectCommandNV { - using NativeType = VkImageBlit; + using NativeType = VkDrawMeshTasksIndirectCommandNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 ImageBlit( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, - std::array const & srcOffsets_ = {}, - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, - std::array const & dstOffsets_ = {} ) VULKAN_HPP_NOEXCEPT - : srcSubresource( srcSubresource_ ) - , srcOffsets( srcOffsets_ ) - , dstSubresource( dstSubresource_ ) - , dstOffsets( dstOffsets_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( uint32_t taskCount_ = {}, uint32_t firstTask_ = {} ) VULKAN_HPP_NOEXCEPT + : taskCount{ taskCount_ } + , firstTask{ firstTask_ } { } - VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT : ImageBlit( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( DrawMeshTasksIndirectCommandNV const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; - ImageBlit & operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT + DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DrawMeshTasksIndirectCommandNV( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT - { - srcSubresource = srcSubresource_; - return *this; - } + DrawMeshTasksIndirectCommandNV & operator=( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 ImageBlit & setSrcOffsets( std::array const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT + DrawMeshTasksIndirectCommandNV & operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { - srcOffsets = srcOffsets_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV & setTaskCount( uint32_t taskCount_ ) VULKAN_HPP_NOEXCEPT { - dstSubresource = dstSubresource_; + taskCount = taskCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageBlit & setDstOffsets( std::array const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV & setFirstTask( uint32_t firstTask_ ) VULKAN_HPP_NOEXCEPT { - dstOffsets = dstOffsets_; + firstTask = firstTask_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImageBlit const &() const VULKAN_HPP_NOEXCEPT + operator VkDrawMeshTasksIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageBlit &() VULKAN_HPP_NOEXCEPT + operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple const &, - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( srcSubresource, srcOffsets, dstSubresource, dstOffsets ); + return std::tie( taskCount, firstTask ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageBlit const & ) const = default; + auto operator<=>( DrawMeshTasksIndirectCommandNV const & ) const = default; #else - bool operator==( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( srcSubresource == rhs.srcSubresource ) && ( srcOffsets == rhs.srcOffsets ) && ( dstSubresource == rhs.dstSubresource ) && - ( dstOffsets == rhs.dstOffsets ); + return ( taskCount == rhs.taskCount ) && ( firstTask == rhs.firstTask ); # endif } - bool operator!=( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - 
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D srcOffsets = {}; - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D dstOffsets = {}; + uint32_t taskCount = {}; + uint32_t firstTask = {}; }; - struct ImageCompressionControlEXT + struct DrmFormatModifierProperties2EXT { - using NativeType = VkImageCompressionControlEXT; + using NativeType = VkDrmFormatModifierProperties2EXT; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCompressionControlEXT; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT( uint64_t drmFormatModifier_ = {}, + uint32_t drmFormatModifierPlaneCount_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifier{ drmFormatModifier_ } + , drmFormatModifierPlaneCount{ drmFormatModifierPlaneCount_ } + , drmFormatModifierTilingFeatures{ drmFormatModifierTilingFeatures_ } + { + } -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageCompressionControlEXT( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags_ = {}, - uint32_t compressionControlPlaneCount_ = {}, - VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * pFixedRateFlags_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , compressionControlPlaneCount( compressionControlPlaneCount_ ) - , pFixedRateFlags( pFixedRateFlags_ ) + VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrmFormatModifierProperties2EXT( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DrmFormatModifierProperties2EXT( *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR ImageCompressionControlEXT( ImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DrmFormatModifierProperties2EXT & operator=( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImageCompressionControlEXT( VkImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageCompressionControlEXT( *reinterpret_cast( &rhs ) ) + DrmFormatModifierProperties2EXT & operator=( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ImageCompressionControlEXT( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & fixedRateFlags_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , compressionControlPlaneCount( static_cast( fixedRateFlags_.size() ) ) - , pFixedRateFlags( fixedRateFlags_.data() ) + operator VkDrmFormatModifierProperties2EXT const &() const VULKAN_HPP_NOEXCEPT { + return *reinterpret_cast( this ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImageCompressionControlEXT & operator=( ImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + operator VkDrmFormatModifierProperties2EXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } - ImageCompressionControlEXT & operator=( VkImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT +#if defined( 
VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); - return *this; + return std::tie( drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures ); } +#endif -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrmFormatModifierProperties2EXT const & ) const = default; +#else + bool operator==( DrmFormatModifierProperties2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( drmFormatModifier == rhs.drmFormatModifier ) && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && + ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures ); +# endif } - VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setFlags( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( DrmFormatModifierProperties2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - flags = flags_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setCompressionControlPlaneCount( uint32_t compressionControlPlaneCount_ ) VULKAN_HPP_NOEXCEPT + public: + uint64_t drmFormatModifier = {}; + uint32_t drmFormatModifierPlaneCount = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures = {}; + }; + + struct DrmFormatModifierPropertiesEXT + { + using NativeType = VkDrmFormatModifierPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {}, + uint32_t drmFormatModifierPlaneCount_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifier{ drmFormatModifier_ } + , drmFormatModifierPlaneCount{ drmFormatModifierPlaneCount_ } + , drmFormatModifierTilingFeatures{ drmFormatModifierTilingFeatures_ } { - compressionControlPlaneCount = compressionControlPlaneCount_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & - setPFixedRateFlags( VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * pFixedRateFlags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DrmFormatModifierPropertiesEXT( *reinterpret_cast( &rhs ) ) { - pFixedRateFlags = pFixedRateFlags_; - return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ImageCompressionControlEXT & setFixedRateFlags( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & fixedRateFlags_ ) VULKAN_HPP_NOEXCEPT + DrmFormatModifierPropertiesEXT & operator=( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DrmFormatModifierPropertiesEXT & operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - compressionControlPlaneCount = static_cast( fixedRateFlags_.size() ); - pFixedRateFlags = fixedRateFlags_.data(); + *this = *reinterpret_cast( &rhs ); return *this; } -# endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkImageCompressionControlEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageCompressionControlEXT &() VULKAN_HPP_NOEXCEPT + operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, compressionControlPlaneCount, pFixedRateFlags ); + return std::tie( drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageCompressionControlEXT const & ) const = default; + auto operator<=>( DrmFormatModifierPropertiesEXT const & ) const = default; #else - bool operator==( ImageCompressionControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && - ( compressionControlPlaneCount == rhs.compressionControlPlaneCount ) && ( pFixedRateFlags == rhs.pFixedRateFlags ); + return ( drmFormatModifier == rhs.drmFormatModifier ) && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && + ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures ); # endif } - bool operator!=( ImageCompressionControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCompressionControlEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags = {}; - uint32_t compressionControlPlaneCount = {}; - VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * pFixedRateFlags = {}; - }; - - template <> - struct CppType - { - using Type = ImageCompressionControlEXT; + uint64_t drmFormatModifier = {}; + uint32_t drmFormatModifierPlaneCount = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {}; }; - struct ImageCompressionPropertiesEXT + struct DrmFormatModifierPropertiesList2EXT { - using NativeType = VkImageCompressionPropertiesEXT; + using NativeType = VkDrmFormatModifierPropertiesList2EXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCompressionPropertiesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesList2EXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageCompressionPropertiesEXT( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT imageCompressionFlags_ = {}, - VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageCompressionFlags( imageCompressionFlags_ ) - , imageCompressionFixedRateFlags( imageCompressionFixedRateFlags_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT( uint32_t drmFormatModifierCount_ = {}, + VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , drmFormatModifierCount{ drmFormatModifierCount_ } + , pDrmFormatModifierProperties{ pDrmFormatModifierProperties_ } { } - VULKAN_HPP_CONSTEXPR ImageCompressionPropertiesEXT( ImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImageCompressionPropertiesEXT( VkImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageCompressionPropertiesEXT( *reinterpret_cast( &rhs ) ) + DrmFormatModifierPropertiesList2EXT( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DrmFormatModifierPropertiesList2EXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImageCompressionPropertiesEXT & operator=( ImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DrmFormatModifierPropertiesList2EXT & operator=( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImageCompressionPropertiesEXT & operator=( VkImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + DrmFormatModifierPropertiesList2EXT & operator=( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkImageCompressionPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkDrmFormatModifierPropertiesList2EXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageCompressionPropertiesEXT &() VULKAN_HPP_NOEXCEPT + operator VkDrmFormatModifierPropertiesList2EXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, imageCompressionFlags, imageCompressionFixedRateFlags ); + return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageCompressionPropertiesEXT const & ) const = default; + auto operator<=>( DrmFormatModifierPropertiesList2EXT const & ) const = default; #else - bool operator==( ImageCompressionPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DrmFormatModifierPropertiesList2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCompressionFlags == rhs.imageCompressionFlags ) && - ( imageCompressionFixedRateFlags == rhs.imageCompressionFixedRateFlags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) && + ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties ); # endif } - bool operator!=( ImageCompressionPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DrmFormatModifierPropertiesList2EXT const & rhs ) const 
VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCompressionPropertiesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT imageCompressionFlags = {}; - VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesList2EXT; + void * pNext = {}; + uint32_t drmFormatModifierCount = {}; + VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties = {}; }; template <> - struct CppType + struct CppType { - using Type = ImageCompressionPropertiesEXT; + using Type = DrmFormatModifierPropertiesList2EXT; }; -#if defined( VK_USE_PLATFORM_FUCHSIA ) - struct ImageFormatConstraintsInfoFUCHSIA + struct DrmFormatModifierPropertiesListEXT { - using NativeType = VkImageFormatConstraintsInfoFUCHSIA; + using NativeType = VkDrmFormatModifierPropertiesListEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatConstraintsInfoFUCHSIA; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesListEXT; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo_ = {}, - VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ = {}, - VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_ = {}, - uint64_t sysmemPixelFormat_ = {}, - uint32_t colorSpaceCount_ = {}, - const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageCreateInfo( imageCreateInfo_ ) - , requiredFormatFeatures( requiredFormatFeatures_ ) - , flags( flags_ ) - , sysmemPixelFormat( sysmemPixelFormat_ ) - , colorSpaceCount( colorSpaceCount_ ) - , pColorSpaces( pColorSpaces_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( uint32_t drmFormatModifierCount_ = {}, + VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , drmFormatModifierCount{ drmFormatModifierCount_ } + , pDrmFormatModifierProperties{ pDrmFormatModifierProperties_ } { } - VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImageFormatConstraintsInfoFUCHSIA( VkImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageFormatConstraintsInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DrmFormatModifierPropertiesListEXT( *reinterpret_cast( &rhs ) ) { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ImageFormatConstraintsInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo_, - VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_, - VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_, - uint64_t sysmemPixelFormat_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorSpaces_, - const void 
* pNext_ = nullptr ) - : pNext( pNext_ ) - , imageCreateInfo( imageCreateInfo_ ) - , requiredFormatFeatures( requiredFormatFeatures_ ) - , flags( flags_ ) - , sysmemPixelFormat( sysmemPixelFormat_ ) - , colorSpaceCount( static_cast( colorSpaces_.size() ) ) - , pColorSpaces( colorSpaces_.data() ) + DrmFormatModifierPropertiesListEXT & operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + DrmFormatModifierPropertiesListEXT & operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - ImageFormatConstraintsInfoFUCHSIA & operator=( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImageFormatConstraintsInfoFUCHSIA & operator=( VkImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkDrmFormatModifierPropertiesListEXT const &() const VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); - return *this; + return *reinterpret_cast( this ); } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & - setImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & imageCreateInfo_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - imageCreateInfo = imageCreateInfo_; - return *this; + return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties ); } +#endif - VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & - setRequiredFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrmFormatModifierPropertiesListEXT const & ) const = default; +#else + bool operator==( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - requiredFormatFeatures = requiredFormatFeatures_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) && + ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties ); +# endif } - VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - flags = flags_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setSysmemPixelFormat( uint64_t sysmemPixelFormat_ ) VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT; + void * pNext = {}; + uint32_t drmFormatModifierCount = {}; + VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties = {}; + }; + + template <> + struct CppType + { + using Type = DrmFormatModifierPropertiesListEXT; + }; + + struct 
EventCreateInfo + { + using NativeType = VkEventCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eEventCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR EventCreateInfo( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } { - sysmemPixelFormat = sysmemPixelFormat_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setColorSpaceCount( uint32_t colorSpaceCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR EventCreateInfo( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : EventCreateInfo( *reinterpret_cast( &rhs ) ) {} + + EventCreateInfo & operator=( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + EventCreateInfo & operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - colorSpaceCount = colorSpaceCount_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & - setPColorSpaces( const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pColorSpaces = pColorSpaces_; + pNext = pNext_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ImageFormatConstraintsInfoFUCHSIA & setColorSpaces( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorSpaces_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - colorSpaceCount = static_cast( colorSpaces_.size() ); - pColorSpaces = colorSpaces_.data(); + flags = flags_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImageFormatConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + operator VkEventCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageFormatConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, imageCreateInfo, requiredFormatFeatures, flags, sysmemPixelFormat, colorSpaceCount, pColorSpaces ); + return std::tie( sType, pNext, flags ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageFormatConstraintsInfoFUCHSIA const & ) const = default; -# else - bool operator==( ImageFormatConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( EventCreateInfo const & ) const = default; +#else + bool operator==( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if 
defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCreateInfo == rhs.imageCreateInfo ) && - ( requiredFormatFeatures == rhs.requiredFormatFeatures ) && ( flags == rhs.flags ) && ( sysmemPixelFormat == rhs.sysmemPixelFormat ) && - ( colorSpaceCount == rhs.colorSpaceCount ) && ( pColorSpaces == rhs.pColorSpaces ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif } - bool operator!=( ImageFormatConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatConstraintsInfoFUCHSIA; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo = {}; - VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures = {}; - VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags = {}; - uint64_t sysmemPixelFormat = {}; - uint32_t colorSpaceCount = {}; - const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eEventCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {}; }; template <> - struct CppType + struct CppType { - using Type = ImageFormatConstraintsInfoFUCHSIA; + using Type = EventCreateInfo; }; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ -#if defined( VK_USE_PLATFORM_FUCHSIA ) - struct ImageConstraintsInfoFUCHSIA + struct PipelineLibraryCreateInfoKHR { - using NativeType = VkImageConstraintsInfoFUCHSIA; + using NativeType = VkPipelineLibraryCreateInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageConstraintsInfoFUCHSIA; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLibraryCreateInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA( uint32_t formatConstraintsCount_ = {}, - const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ = {}, - VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, - VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , formatConstraintsCount( formatConstraintsCount_ ) - , pFormatConstraints( pFormatConstraints_ ) - , bufferCollectionConstraints( bufferCollectionConstraints_ ) - , flags( flags_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( uint32_t libraryCount_ = {}, + const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , libraryCount{ libraryCount_ } + , pLibraries{ pLibraries_ } { } - VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA( ImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImageConstraintsInfoFUCHSIA( VkImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageConstraintsInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR 
const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineLibraryCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ImageConstraintsInfoFUCHSIA( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & formatConstraints_, - VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, - VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , formatConstraintsCount( static_cast( formatConstraints_.size() ) ) - , pFormatConstraints( formatConstraints_.data() ) - , bufferCollectionConstraints( bufferCollectionConstraints_ ) - , flags( flags_ ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLibraryCreateInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & libraries_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), libraryCount( static_cast( libraries_.size() ) ), pLibraries( libraries_.data() ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - ImageConstraintsInfoFUCHSIA & operator=( ImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineLibraryCreateInfoKHR & operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImageConstraintsInfoFUCHSIA & operator=( VkImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineLibraryCreateInfoKHR & operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setFormatConstraintsCount( uint32_t formatConstraintsCount_ ) VULKAN_HPP_NOEXCEPT - { - formatConstraintsCount = formatConstraintsCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & - setPFormatConstraints( const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ ) VULKAN_HPP_NOEXCEPT - { - pFormatConstraints = pFormatConstraints_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ImageConstraintsInfoFUCHSIA & setFormatConstraints( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & formatConstraints_ ) - VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setLibraryCount( uint32_t libraryCount_ ) VULKAN_HPP_NOEXCEPT { - formatConstraintsCount = static_cast( formatConstraints_.size() ); - pFormatConstraints = formatConstraints_.data(); + libraryCount = libraryCount_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & - setBufferCollectionConstraints( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ ) VULKAN_HPP_NOEXCEPT { - bufferCollectionConstraints = bufferCollectionConstraints_; + pLibraries = pLibraries_; return *this; } - 
VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLibraryCreateInfoKHR & + setLibraries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & libraries_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + libraryCount = static_cast( libraries_.size() ); + pLibraries = libraries_.data(); return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImageConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineLibraryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, formatConstraintsCount, pFormatConstraints, bufferCollectionConstraints, flags ); + return std::tie( sType, pNext, libraryCount, pLibraries ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageConstraintsInfoFUCHSIA const & ) const = default; -# else - bool operator==( ImageConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineLibraryCreateInfoKHR const & ) const = default; +#else + bool operator==( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatConstraintsCount == rhs.formatConstraintsCount ) && - ( pFormatConstraints == rhs.pFormatConstraints ) && ( bufferCollectionConstraints == rhs.bufferCollectionConstraints ) && ( flags == rhs.flags ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( libraryCount == rhs.libraryCount ) && ( pLibraries == rhs.pLibraries ); +# endif } - bool operator!=( ImageConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageConstraintsInfoFUCHSIA; - const void * pNext = {}; - uint32_t formatConstraintsCount = {}; - const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints = {}; - VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints = {}; - VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLibraryCreateInfoKHR; + const void * pNext = {}; + uint32_t libraryCount = {}; + const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries = {}; }; template <> - struct CppType + struct CppType { - using Type = ImageConstraintsInfoFUCHSIA; + using Type = PipelineLibraryCreateInfoKHR; }; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - struct ImageCopy +#if defined( 
VK_ENABLE_BETA_EXTENSIONS ) + struct ExecutionGraphPipelineCreateInfoAMDX { - using NativeType = VkImageCopy; + using NativeType = VkExecutionGraphPipelineCreateInfoAMDX; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageCopy( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT - : srcSubresource( srcSubresource_ ) - , srcOffset( srcOffset_ ) - , dstSubresource( dstSubresource_ ) - , dstOffset( dstOffset_ ) - , extent( extent_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExecutionGraphPipelineCreateInfoAMDX; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineCreateInfoAMDX( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , stageCount{ stageCount_ } + , pStages{ pStages_ } + , pLibraryInfo{ pLibraryInfo_ } + , layout{ layout_ } + , basePipelineHandle{ basePipelineHandle_ } + , basePipelineIndex{ basePipelineIndex_ } { } - VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCopy( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineCreateInfoAMDX( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImageCopy & operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT + ExecutionGraphPipelineCreateInfoAMDX( VkExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : ExecutionGraphPipelineCreateInfoAMDX( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ExecutionGraphPipelineCreateInfoAMDX( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_, + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , stageCount( static_cast( stages_.size() ) ) + , pStages( stages_.data() ) + , pLibraryInfo( pLibraryInfo_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) { - srcSubresource = srcSubresource_; - return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - 
VULKAN_HPP_CONSTEXPR_14 ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT - { - srcOffset = srcOffset_; - return *this; - } + ExecutionGraphPipelineCreateInfoAMDX & operator=( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + ExecutionGraphPipelineCreateInfoAMDX & operator=( VkExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT { - dstSubresource = dstSubresource_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - dstOffset = dstOffset_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - extent = extent_; + flags = flags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkImageCopy const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + stageCount = stageCount_; + return *this; } - operator VkImageCopy &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pStages = pStages_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ExecutionGraphPipelineCreateInfoAMDX & + setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; } -#endif +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageCopy const & ) const = default; -#else - bool operator==( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & + setPLibraryInfo( const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) && - ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); -# endif + pLibraryInfo = pLibraryInfo_; + return *this; } - bool operator!=( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + layout = 
layout_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; - VULKAN_HPP_NAMESPACE::Extent3D extent = {}; - }; - - struct SubresourceLayout - { - using NativeType = VkSubresourceLayout; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SubresourceLayout( VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {} ) VULKAN_HPP_NOEXCEPT - : offset( offset_ ) - , size( size_ ) - , rowPitch( rowPitch_ ) - , arrayPitch( arrayPitch_ ) - , depthPitch( depthPitch_ ) + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & + setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT { + basePipelineHandle = basePipelineHandle_; + return *this; } - VULKAN_HPP_CONSTEXPR SubresourceLayout( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT : SubresourceLayout( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - SubresourceLayout & operator=( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - SubresourceLayout & operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + basePipelineIndex = basePipelineIndex_; return *this; } +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSubresourceLayout const &() const VULKAN_HPP_NOEXCEPT + operator VkExecutionGraphPipelineCreateInfoAMDX const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT + operator VkExecutionGraphPipelineCreateInfoAMDX &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( offset, size, rowPitch, arrayPitch, depthPitch ); + return std::tie( sType, pNext, flags, stageCount, pStages, pLibraryInfo, layout, basePipelineHandle, basePipelineIndex ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SubresourceLayout const & ) const = default; -#else - bool operator==( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExecutionGraphPipelineCreateInfoAMDX const & ) const = default; +# else + bool operator==( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( offset == rhs.offset ) && ( size == rhs.size ) && ( rowPitch == rhs.rowPitch ) && ( arrayPitch == rhs.arrayPitch ) && - ( depthPitch == rhs.depthPitch ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == 
rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && + ( pLibraryInfo == rhs.pLibraryInfo ) && ( layout == rhs.layout ) && ( basePipelineHandle == rhs.basePipelineHandle ) && + ( basePipelineIndex == rhs.basePipelineIndex ); +# endif } - bool operator!=( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; - VULKAN_HPP_NAMESPACE::DeviceSize rowPitch = {}; - VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {}; - VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExecutionGraphPipelineCreateInfoAMDX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; }; - struct ImageDrmFormatModifierExplicitCreateInfoEXT + template <> + struct CppType { - using NativeType = VkImageDrmFormatModifierExplicitCreateInfoEXT; + using Type = ExecutionGraphPipelineCreateInfoAMDX; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct ExecutionGraphPipelineScratchSizeAMDX + { + using NativeType = VkExecutionGraphPipelineScratchSizeAMDX; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExecutionGraphPipelineScratchSizeAMDX; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_ = {}, - uint32_t drmFormatModifierPlaneCount_ = {}, - const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , drmFormatModifier( drmFormatModifier_ ) - , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ) - , pPlaneLayouts( pPlaneLayouts_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::DeviceSize minSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sizeGranularity_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minSize{ minSize_ } + , maxSize{ maxSize_ } + , sizeGranularity{ sizeGranularity_ } { } - VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageDrmFormatModifierExplicitCreateInfoEXT( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineScratchSizeAMDX( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - 
ImageDrmFormatModifierExplicitCreateInfoEXT( - uint64_t drmFormatModifier_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & planeLayouts_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , drmFormatModifier( drmFormatModifier_ ) - , drmFormatModifierPlaneCount( static_cast( planeLayouts_.size() ) ) - , pPlaneLayouts( planeLayouts_.data() ) + ExecutionGraphPipelineScratchSizeAMDX( VkExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : ExecutionGraphPipelineScratchSizeAMDX( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ExecutionGraphPipelineScratchSizeAMDX & operator=( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ExecutionGraphPipelineScratchSizeAMDX & operator=( VkExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT - { - drmFormatModifier = drmFormatModifier_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & - setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setMinSize( VULKAN_HPP_NAMESPACE::DeviceSize minSize_ ) VULKAN_HPP_NOEXCEPT { - drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_; + minSize = minSize_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & - setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setMaxSize( VULKAN_HPP_NAMESPACE::DeviceSize maxSize_ ) VULKAN_HPP_NOEXCEPT { - pPlaneLayouts = pPlaneLayouts_; + maxSize = maxSize_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ImageDrmFormatModifierExplicitCreateInfoEXT & - setPlaneLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & planeLayouts_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setSizeGranularity( VULKAN_HPP_NAMESPACE::DeviceSize sizeGranularity_ ) VULKAN_HPP_NOEXCEPT { - drmFormatModifierPlaneCount = static_cast( planeLayouts_.size() ); - pPlaneLayouts = planeLayouts_.data(); + sizeGranularity = sizeGranularity_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImageDrmFormatModifierExplicitCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkExecutionGraphPipelineScratchSizeAMDX const &() const 
VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkExecutionGraphPipelineScratchSizeAMDX &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + void * const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, drmFormatModifier, drmFormatModifierPlaneCount, pPlaneLayouts ); + return std::tie( sType, pNext, minSize, maxSize, sizeGranularity ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageDrmFormatModifierExplicitCreateInfoEXT const & ) const = default; -#else - bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExecutionGraphPipelineScratchSizeAMDX const & ) const = default; +# else + bool operator==( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) && - ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && ( pPlaneLayouts == rhs.pPlaneLayouts ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSize == rhs.minSize ) && ( maxSize == rhs.maxSize ) && + ( sizeGranularity == rhs.sizeGranularity ); +# endif } - bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; - const void * pNext = {}; - uint64_t drmFormatModifier = {}; - uint32_t drmFormatModifierPlaneCount = {}; - const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExecutionGraphPipelineScratchSizeAMDX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sizeGranularity = {}; }; template <> - struct CppType + struct CppType { - using Type = ImageDrmFormatModifierExplicitCreateInfoEXT; + using Type = ExecutionGraphPipelineScratchSizeAMDX; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - struct ImageDrmFormatModifierListCreateInfoEXT + struct ExportFenceCreateInfo { - using NativeType = VkImageDrmFormatModifierListCreateInfoEXT; + using NativeType = VkExportFenceCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierListCreateInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( 
uint32_t drmFormatModifierCount_ = {}, - const uint64_t * pDrmFormatModifiers_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , drmFormatModifierCount( drmFormatModifierCount_ ) - , pDrmFormatModifiers( pDrmFormatModifiers_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleTypes{ handleTypes_ } { } - VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageDrmFormatModifierListCreateInfoEXT( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ImageDrmFormatModifierListCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drmFormatModifiers_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), drmFormatModifierCount( static_cast( drmFormatModifiers_.size() ) ), pDrmFormatModifiers( drmFormatModifiers_.data() ) + ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportFenceCreateInfo( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImageDrmFormatModifierListCreateInfoEXT & operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ExportFenceCreateInfo & operator=( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImageDrmFormatModifierListCreateInfoEXT & operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ExportFenceCreateInfo & operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT - { - drmFormatModifierCount = drmFormatModifierCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setPDrmFormatModifiers( const uint64_t * pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT - { - pDrmFormatModifiers = pDrmFormatModifiers_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ImageDrmFormatModifierListCreateInfoEXT & - setDrmFormatModifiers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT { - drmFormatModifierCount = static_cast( drmFormatModifiers_.size() ); - pDrmFormatModifiers = drmFormatModifiers_.data(); + 
handleTypes = handleTypes_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImageDrmFormatModifierListCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkExportFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifiers ); + return std::tie( sType, pNext, handleTypes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageDrmFormatModifierListCreateInfoEXT const & ) const = default; + auto operator<=>( ExportFenceCreateInfo const & ) const = default; #else - bool operator==( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) && - ( pDrmFormatModifiers == rhs.pDrmFormatModifiers ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); # endif } - bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT; - const void * pNext = {}; - uint32_t drmFormatModifierCount = {}; - const uint64_t * pDrmFormatModifiers = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {}; }; template <> - struct CppType + struct CppType { - using Type = ImageDrmFormatModifierListCreateInfoEXT; + using Type = ExportFenceCreateInfo; }; - struct ImageDrmFormatModifierPropertiesEXT + using ExportFenceCreateInfoKHR = ExportFenceCreateInfo; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ExportFenceWin32HandleInfoKHR { - using NativeType = VkImageDrmFormatModifierPropertiesEXT; + using NativeType = VkExportFenceWin32HandleInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierPropertiesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceWin32HandleInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , drmFormatModifier( drmFormatModifier_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {}, + DWORD dwAccess_ = {}, + LPCWSTR name_ = {}, + const void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pAttributes{ pAttributes_ } + , dwAccess{ dwAccess_ } + , name{ name_ } { } - VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageDrmFormatModifierPropertiesEXT( *reinterpret_cast( &rhs ) ) + ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportFenceWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImageDrmFormatModifierPropertiesEXT & operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ExportFenceWin32HandleInfoKHR & operator=( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImageDrmFormatModifierPropertiesEXT & operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ExportFenceWin32HandleInfoKHR & operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkImageDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pAttributes = pAttributes_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, drmFormatModifier ); + return std::tie( sType, pNext, pAttributes, dwAccess, name ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageDrmFormatModifierPropertiesEXT const & ) const = default; -#else - bool operator==( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportFenceWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT 
) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ) && ( name == rhs.name ); +# endif } - bool operator!=( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT; - void * pNext = {}; - uint64_t drmFormatModifier = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR; + const void * pNext = {}; + const SECURITY_ATTRIBUTES * pAttributes = {}; + DWORD dwAccess = {}; + LPCWSTR name = {}; }; template <> - struct CppType + struct CppType { - using Type = ImageDrmFormatModifierPropertiesEXT; + using Type = ExportFenceWin32HandleInfoKHR; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct ImageFormatListCreateInfo + struct ExportMemoryAllocateInfo { - using NativeType = VkImageFormatListCreateInfo; + using NativeType = VkExportMemoryAllocateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatListCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( uint32_t viewFormatCount_ = {}, - const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , viewFormatCount( viewFormatCount_ ) - , pViewFormats( pViewFormats_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleTypes{ handleTypes_ } { } - VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ImageFormatListCreateInfo( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageFormatListCreateInfo( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ImageFormatListCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), viewFormatCount( static_cast( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() ) + ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMemoryAllocateInfo( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImageFormatListCreateInfo & operator=( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ExportMemoryAllocateInfo & operator=( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImageFormatListCreateInfo & operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + 
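// Editor's note: illustrative sketch, not part of the generated header. It assumes the conventional
// `vk` namespace, an already created vk::Device named `device`, and a platform where
// VK_KHR_external_fence_fd is available; the variable names are placeholders. ExportFenceCreateInfo
// is chained into FenceCreateInfo so the resulting fence can later be exported as an opaque FD; on
// Windows, ExportFenceWin32HandleInfoKHR would additionally be chained to control the created
// handle's SECURITY_ATTRIBUTES, access mask and name.
//
//   vk::ExportFenceCreateInfo exportInfo{ vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd };
//   vk::FenceCreateInfo       fenceInfo{};
//   fenceInfo.pNext = &exportInfo;
//   vk::Fence fence = device.createFence( fenceInfo );
//   // The payload can then be retrieved via vk::Device::getFenceFdKHR( vk::FenceGetFdInfoKHR{ ... } ).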
ExportMemoryAllocateInfo & operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT - { - viewFormatCount = viewFormatCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT - { - pViewFormats = pViewFormats_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ImageFormatListCreateInfo & - setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT { - viewFormatCount = static_cast( viewFormats_.size() ); - pViewFormats = viewFormats_.data(); + handleTypes = handleTypes_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImageFormatListCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkExportMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, viewFormatCount, pViewFormats ); + return std::tie( sType, pNext, handleTypes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageFormatListCreateInfo const & ) const = default; + auto operator<=>( ExportMemoryAllocateInfo const & ) const = default; #else - bool operator==( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewFormatCount == rhs.viewFormatCount ) && ( pViewFormats == rhs.pViewFormats ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); # endif } - bool operator!=( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatListCreateInfo; - const void * pNext = {}; - uint32_t viewFormatCount = {}; - const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfo; + const void * pNext = {}; + 
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; }; template <> - struct CppType + struct CppType { - using Type = ImageFormatListCreateInfo; + using Type = ExportMemoryAllocateInfo; }; - using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo; - struct ImageFormatProperties2 + using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo; + + struct ExportMemoryAllocateInfoNV { - using NativeType = VkImageFormatProperties2; + using NativeType = VkExportMemoryAllocateInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatProperties2; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageFormatProperties2( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageFormatProperties( imageFormatProperties_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleTypes{ handleTypes_ } { } - VULKAN_HPP_CONSTEXPR ImageFormatProperties2( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageFormatProperties2( *reinterpret_cast( &rhs ) ) + ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMemoryAllocateInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImageFormatProperties2 & operator=( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ExportMemoryAllocateInfoNV & operator=( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImageFormatProperties2 & operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + ExportMemoryAllocateInfoNV & operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + handleTypes = handleTypes_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExportMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif 
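// Editor's note: illustrative sketch, not generated code. Assumes the `vk` namespace, an existing
// vk::Device `device`, placeholder `size` / `memoryTypeIndex` values chosen beforehand, and
// VK_KHR_external_memory_fd support. ExportMemoryAllocateInfo marks an allocation as exportable;
// the buffer or image later bound to it must itself have been created with
// ExternalMemoryBufferCreateInfo / ExternalMemoryImageCreateInfo listing the same handle type.
//
//   vk::ExportMemoryAllocateInfo exportAlloc{ vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd };
//   vk::MemoryAllocateInfo       allocInfo{ size, memoryTypeIndex, &exportAlloc };
//   vk::DeviceMemory memory = device.allocateMemory( allocInfo );
//   // vk::Device::getMemoryFdKHR( vk::MemoryGetFdInfoKHR{ memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd } )
//   // then yields a file descriptor another API or process can import.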
reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, imageFormatProperties ); + return std::tie( sType, pNext, handleTypes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageFormatProperties2 const & ) const = default; + auto operator<=>( ExportMemoryAllocateInfoNV const & ) const = default; #else - bool operator==( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFormatProperties == rhs.imageFormatProperties ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); # endif } - bool operator!=( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {}; }; template <> - struct CppType + struct CppType { - using Type = ImageFormatProperties2; + using Type = ExportMemoryAllocateInfoNV; }; - using ImageFormatProperties2KHR = ImageFormatProperties2; - struct ImageMemoryBarrier +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ExportMemoryWin32HandleInfoKHR { - using NativeType = VkImageMemoryBarrier; + using NativeType = VkExportMemoryWin32HandleInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - uint32_t srcQueueFamilyIndex_ = {}, - uint32_t dstQueueFamilyIndex_ = {}, - VULKAN_HPP_NAMESPACE::Image image_ = {}, - VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcAccessMask( srcAccessMask_ ) - , dstAccessMask( dstAccessMask_ ) - , oldLayout( oldLayout_ ) - , newLayout( newLayout_ ) - , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) - , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) - , image( image_ ) - , subresourceRange( subresourceRange_ ) - { - } - - VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT : ImageMemoryBarrier( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - ImageMemoryBarrier & operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ImageMemoryBarrier & operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eExportMemoryWin32HandleInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {}, + DWORD dwAccess_ = {}, + LPCWSTR name_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pAttributes{ pAttributes_ } + , dwAccess{ dwAccess_ } + , name{ name_ } { - pNext = pNext_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT - { - srcAccessMask = srcAccessMask_; - return *this; - } + VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMemoryWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) { - dstAccessMask = dstAccessMask_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT - { - oldLayout = oldLayout_; - return *this; - } + ExportMemoryWin32HandleInfoKHR & operator=( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT + ExportMemoryWin32HandleInfoKHR & operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - newLayout = newLayout_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - srcQueueFamilyIndex = srcQueueFamilyIndex_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT { - dstQueueFamilyIndex = dstQueueFamilyIndex_; + pAttributes = pAttributes_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT { - image = image_; + dwAccess = dwAccess_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & - setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT { - subresourceRange = subresourceRange_; + name = name_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImageMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT + operator VkExportMemoryWin32HandleInfoKHR const &() const 
VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT + operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, srcAccessMask, dstAccessMask, oldLayout, newLayout, srcQueueFamilyIndex, dstQueueFamilyIndex, image, subresourceRange ); + return std::tie( sType, pNext, pAttributes, dwAccess, name ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageMemoryBarrier const & ) const = default; -#else - bool operator==( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMemoryWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) && - ( oldLayout == rhs.oldLayout ) && ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && - ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( image == rhs.image ) && ( subresourceRange == rhs.subresourceRange ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ) && ( name == rhs.name ); +# endif } - bool operator!=( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; - VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - uint32_t srcQueueFamilyIndex = {}; - uint32_t dstQueueFamilyIndex = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; - VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR; + const void * pNext = {}; + const SECURITY_ATTRIBUTES * pAttributes = {}; + DWORD dwAccess = {}; + LPCWSTR name = {}; }; template <> - struct CppType + struct CppType { - using Type = ImageMemoryBarrier; + using Type = ExportMemoryWin32HandleInfoKHR; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct ImageMemoryRequirementsInfo2 +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ExportMemoryWin32HandleInfoNV { - using NativeType = VkImageMemoryRequirementsInfo2; + using NativeType = VkExportMemoryWin32HandleInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryRequirementsInfo2; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
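// Editor's note: illustrative sketch, not generated code. Windows-only (VK_USE_PLATFORM_WIN32_KHR,
// VK_KHR_external_memory_win32); assumes the `vk` namespace, an existing vk::Device `device`, and
// placeholder `size` / `memoryTypeIndex` values. ExportMemoryWin32HandleInfoKHR is chained alongside
// ExportMemoryAllocateInfo to control the SECURITY_ATTRIBUTES, access mask and optional name of the
// NT handle that will back the exported allocation; the handle name below is purely illustrative.
//
//   vk::ExportMemoryAllocateInfo       exportAlloc{ vk::ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 };
//   vk::ExportMemoryWin32HandleInfoKHR win32Info{ nullptr /*pAttributes*/, GENERIC_ALL, L"shared-allocation" };
//   exportAlloc.pNext = &win32Info;
//   vk::MemoryAllocateInfo allocInfo{ size, memoryTypeIndex, &exportAlloc };
//   vk::DeviceMemory memory = device.allocateMemory( allocInfo );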
StructureType structureType = StructureType::eExportMemoryWin32HandleInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , image( image_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES * pAttributes_ = {}, DWORD dwAccess_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pAttributes{ pAttributes_ } + , dwAccess{ dwAccess_ } { } - VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageMemoryRequirementsInfo2( *reinterpret_cast( &rhs ) ) + ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMemoryWin32HandleInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImageMemoryRequirementsInfo2 & operator=( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ExportMemoryWin32HandleInfoNV & operator=( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImageMemoryRequirementsInfo2 & operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + ExportMemoryWin32HandleInfoNV & operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT { - image = image_; + pAttributes = pAttributes_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkImageMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + dwAccess = dwAccess_; + return *this; } +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT + operator VkExportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, 
pNext, image ); + return std::tie( sType, pNext, pAttributes, dwAccess ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageMemoryRequirementsInfo2 const & ) const = default; -#else - bool operator==( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMemoryWin32HandleInfoNV const & ) const = default; +# else + bool operator==( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ); +# endif } - bool operator!=( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV; + const void * pNext = {}; + const SECURITY_ATTRIBUTES * pAttributes = {}; + DWORD dwAccess = {}; }; template <> - struct CppType + struct CppType { - using Type = ImageMemoryRequirementsInfo2; + using Type = ExportMemoryWin32HandleInfoNV; }; - using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#if defined( VK_USE_PLATFORM_FUCHSIA ) - struct ImagePipeSurfaceCreateInfoFUCHSIA +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ExportMetalBufferInfoEXT { - using NativeType = VkImagePipeSurfaceCreateInfoFUCHSIA; + using NativeType = VkExportMetalBufferInfoEXT; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalBufferInfoEXT; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {}, - zx_handle_t imagePipeHandle_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , imagePipeHandle( imagePipeHandle_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMetalBufferInfoEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + MTLBuffer_id mtlBuffer_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memory{ memory_ } + , mtlBuffer{ mtlBuffer_ } { } - VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ExportMetalBufferInfoEXT( ExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT - : ImagePipeSurfaceCreateInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + ExportMetalBufferInfoEXT( VkExportMetalBufferInfoEXT const & rhs ) 
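// Editor's note: illustrative sketch, not generated code. The *NV structures above belong to the
// older VK_NV_external_memory_win32 path, which predates VK_KHR_external_memory; new code should
// normally prefer the KHR variants shown earlier. Windows-only; the names are placeholders.
//
//   vk::ExportMemoryAllocateInfoNV    exportAllocNV{ vk::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 };
//   vk::ExportMemoryWin32HandleInfoNV win32InfoNV{ nullptr /*pAttributes*/, GENERIC_ALL };
//   exportAllocNV.pNext = &win32InfoNV;
//   vk::MemoryAllocateInfo allocInfo{ size, memoryTypeIndex, &exportAllocNV };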
VULKAN_HPP_NOEXCEPT + : ExportMetalBufferInfoEXT( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImagePipeSurfaceCreateInfoFUCHSIA & operator=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ExportMetalBufferInfoEXT & operator=( ExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImagePipeSurfaceCreateInfoFUCHSIA & operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + ExportMetalBufferInfoEXT & operator=( VkExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + memory = memory_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setImagePipeHandle( zx_handle_t imagePipeHandle_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setMtlBuffer( MTLBuffer_id mtlBuffer_ ) VULKAN_HPP_NOEXCEPT { - imagePipeHandle = imagePipeHandle_; + mtlBuffer = mtlBuffer_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImagePipeSurfaceCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + operator VkExportMetalBufferInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + operator VkExportMetalBufferInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, imagePipeHandle ); + return std::tie( sType, pNext, memory, mtlBuffer ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - std::strong_ordering operator<=>( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT - { - if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) - return cmp; - if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) - return cmp; - if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) - return cmp; - if ( auto cmp = memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ); cmp != 0 ) - return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; - - return std::strong_ordering::equivalent; - } -# endif - - bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + auto operator<=>( ExportMetalBufferInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && - ( memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ) == 0 ); +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( mtlBuffer == rhs.mtlBuffer ); +# endif } - bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {}; - zx_handle_t imagePipeHandle = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalBufferInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + MTLBuffer_id mtlBuffer = {}; }; template <> - struct CppType + struct CppType { - using Type = ImagePipeSurfaceCreateInfoFUCHSIA; + using Type = ExportMetalBufferInfoEXT; }; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ +#endif /*VK_USE_PLATFORM_METAL_EXT*/ - struct ImagePlaneMemoryRequirementsInfo +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ExportMetalCommandQueueInfoEXT { - using NativeType = VkImagePlaneMemoryRequirementsInfo; + using NativeType = VkExportMetalCommandQueueInfoEXT; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagePlaneMemoryRequirementsInfo; + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalCommandQueueInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - ImagePlaneMemoryRequirementsInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , planeAspect( planeAspect_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMetalCommandQueueInfoEXT( VULKAN_HPP_NAMESPACE::Queue queue_ = {}, + MTLCommandQueue_id mtlCommandQueue_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , queue{ queue_ } + , mtlCommandQueue{ mtlCommandQueue_ } { } - VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ExportMetalCommandQueueInfoEXT( ExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : ImagePlaneMemoryRequirementsInfo( *reinterpret_cast( &rhs ) ) + ExportMetalCommandQueueInfoEXT( VkExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMetalCommandQueueInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - 
ImagePlaneMemoryRequirementsInfo & operator=( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ExportMetalCommandQueueInfoEXT & operator=( ExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImagePlaneMemoryRequirementsInfo & operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ExportMetalCommandQueueInfoEXT & operator=( VkExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setQueue( VULKAN_HPP_NAMESPACE::Queue queue_ ) VULKAN_HPP_NOEXCEPT { - planeAspect = planeAspect_; + queue = queue_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkImagePlaneMemoryRequirementsInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setMtlCommandQueue( MTLCommandQueue_id mtlCommandQueue_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + mtlCommandQueue = mtlCommandQueue_; + return *this; } +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT + operator VkExportMetalCommandQueueInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkExportMetalCommandQueueInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, planeAspect ); + return std::tie( sType, pNext, queue, mtlCommandQueue ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImagePlaneMemoryRequirementsInfo const & ) const = default; -#else - bool operator==( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMetalCommandQueueInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalCommandQueueInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queue == rhs.queue ) && ( mtlCommandQueue == rhs.mtlCommandQueue ); +# endif } - bool operator!=( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportMetalCommandQueueInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalCommandQueueInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Queue queue = {}; + MTLCommandQueue_id mtlCommandQueue = {}; }; template <> - struct CppType + struct CppType { - using Type = ImagePlaneMemoryRequirementsInfo; + using Type = ExportMetalCommandQueueInfoEXT; }; - using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ - struct ImageResolve +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ExportMetalDeviceInfoEXT { - using NativeType = VkImageResolve; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageResolve( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT - : srcSubresource( srcSubresource_ ) - , srcOffset( srcOffset_ ) - , dstSubresource( dstSubresource_ ) - , dstOffset( dstOffset_ ) - , extent( extent_ ) - { - } - - VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT : ImageResolve( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + using NativeType = VkExportMetalDeviceInfoEXT; - ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalDeviceInfoEXT; - ImageResolve & operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMetalDeviceInfoEXT( MTLDevice_id mtlDevice_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , mtlDevice{ mtlDevice_ } { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT - { - srcSubresource = srcSubresource_; - return *this; - } + VULKAN_HPP_CONSTEXPR ExportMetalDeviceInfoEXT( ExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + ExportMetalDeviceInfoEXT( VkExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMetalDeviceInfoEXT( *reinterpret_cast( &rhs ) ) { - srcOffset = srcOffset_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + ExportMetalDeviceInfoEXT & operator=( ExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExportMetalDeviceInfoEXT & operator=( VkExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - dstSubresource = dstSubresource_; + *this = *reinterpret_cast( &rhs ); return *this; } - 
VULKAN_HPP_CONSTEXPR_14 ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMetalDeviceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - dstOffset = dstOffset_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportMetalDeviceInfoEXT & setMtlDevice( MTLDevice_id mtlDevice_ ) VULKAN_HPP_NOEXCEPT { - extent = extent_; + mtlDevice = mtlDevice_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImageResolve const &() const VULKAN_HPP_NOEXCEPT + operator VkExportMetalDeviceInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageResolve &() VULKAN_HPP_NOEXCEPT + operator VkExportMetalDeviceInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); + return std::tie( sType, pNext, mtlDevice ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageResolve const & ) const = default; -#else - bool operator==( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMetalDeviceInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalDeviceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) && - ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mtlDevice == rhs.mtlDevice ); +# endif } - bool operator!=( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportMetalDeviceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; - VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalDeviceInfoEXT; + const void * pNext = {}; + MTLDevice_id mtlDevice = {}; }; - struct ImageResolve2 + template <> + struct CppType { - using NativeType = VkImageResolve2; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageResolve2; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageResolve2( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = 
{}, - VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcSubresource( srcSubresource_ ) - , srcOffset( srcOffset_ ) - , dstSubresource( dstSubresource_ ) - , dstOffset( dstOffset_ ) - , extent( extent_ ) - { - } - - VULKAN_HPP_CONSTEXPR ImageResolve2( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + using Type = ExportMetalDeviceInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ - ImageResolve2( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageResolve2( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ExportMetalIOSurfaceInfoEXT + { + using NativeType = VkExportMetalIOSurfaceInfoEXT; - ImageResolve2 & operator=( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalIoSurfaceInfoEXT; - ImageResolve2 & operator=( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ExportMetalIOSurfaceInfoEXT( VULKAN_HPP_NAMESPACE::Image image_ = {}, IOSurfaceRef ioSurface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , image{ image_ } + , ioSurface{ ioSurface_ } { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + VULKAN_HPP_CONSTEXPR ExportMetalIOSurfaceInfoEXT( ExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + ExportMetalIOSurfaceInfoEXT( VkExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMetalIOSurfaceInfoEXT( *reinterpret_cast( &rhs ) ) { - srcSubresource = srcSubresource_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT + ExportMetalIOSurfaceInfoEXT & operator=( ExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExportMetalIOSurfaceInfoEXT & operator=( VkExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - srcOffset = srcOffset_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - dstSubresource = dstSubresource_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT { - dstOffset = dstOffset_; + image = image_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT + 
VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setIoSurface( IOSurfaceRef ioSurface_ ) VULKAN_HPP_NOEXCEPT { - extent = extent_; + ioSurface = ioSurface_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImageResolve2 const &() const VULKAN_HPP_NOEXCEPT + operator VkExportMetalIOSurfaceInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageResolve2 &() VULKAN_HPP_NOEXCEPT + operator VkExportMetalIOSurfaceInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); + return std::tie( sType, pNext, image, ioSurface ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageResolve2 const & ) const = default; -#else - bool operator==( ImageResolve2 const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMetalIOSurfaceInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && - ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( ioSurface == rhs.ioSurface ); +# endif } - bool operator!=( ImageResolve2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageResolve2; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; - VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; - VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalIoSurfaceInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + IOSurfaceRef ioSurface = {}; }; template <> - struct CppType + struct CppType { - using Type = ImageResolve2; + using Type = ExportMetalIOSurfaceInfoEXT; }; - using ImageResolve2KHR = ImageResolve2; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ - struct ImageSparseMemoryRequirementsInfo2 +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ExportMetalObjectCreateInfoEXT { - using NativeType = VkImageSparseMemoryRequirementsInfo2; + using NativeType = VkExportMetalObjectCreateInfoEXT; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSparseMemoryRequirementsInfo2; + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eExportMetalObjectCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , image( image_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMetalObjectCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT exportObjectType_ = VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT::eMetalDevice, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , exportObjectType{ exportObjectType_ } { } - VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ExportMetalObjectCreateInfoEXT( ExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageSparseMemoryRequirementsInfo2( *reinterpret_cast( &rhs ) ) + ExportMetalObjectCreateInfoEXT( VkExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMetalObjectCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImageSparseMemoryRequirementsInfo2 & operator=( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ExportMetalObjectCreateInfoEXT & operator=( ExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImageSparseMemoryRequirementsInfo2 & operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + ExportMetalObjectCreateInfoEXT & operator=( VkExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectCreateInfoEXT & + setExportObjectType( VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT exportObjectType_ ) VULKAN_HPP_NOEXCEPT { - image = image_; + exportObjectType = exportObjectType_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImageSparseMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT + operator VkExportMetalObjectCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT + operator VkExportMetalObjectCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return 
std::tie( sType, pNext, image ); + return std::tie( sType, pNext, exportObjectType ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageSparseMemoryRequirementsInfo2 const & ) const = default; -#else - bool operator==( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMetalObjectCreateInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalObjectCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exportObjectType == rhs.exportObjectType ); +# endif } - bool operator!=( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportMetalObjectCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalObjectCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT exportObjectType = VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT::eMetalDevice; }; template <> - struct CppType + struct CppType { - using Type = ImageSparseMemoryRequirementsInfo2; + using Type = ExportMetalObjectCreateInfoEXT; }; - using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ - struct ImageStencilUsageCreateInfo +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ExportMetalObjectsInfoEXT { - using NativeType = VkImageStencilUsageCreateInfo; + using NativeType = VkExportMetalObjectsInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageStencilUsageCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalObjectsInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stencilUsage( stencilUsage_ ) - { - } +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMetalObjectsInfoEXT( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } {} - VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ExportMetalObjectsInfoEXT( ExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImageStencilUsageCreateInfo( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageStencilUsageCreateInfo( *reinterpret_cast( &rhs ) ) + ExportMetalObjectsInfoEXT( VkExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMetalObjectsInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImageStencilUsageCreateInfo & operator=( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = 
default; + ExportMetalObjectsInfoEXT & operator=( ExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImageStencilUsageCreateInfo & operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ExportMetalObjectsInfoEXT & operator=( VkExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } +# endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT - { - stencilUsage = stencilUsage_; - return *this; - } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkImageStencilUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkExportMetalObjectsInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkExportMetalObjectsInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, stencilUsage ); + return std::tie( sType, pNext ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageStencilUsageCreateInfo const & ) const = default; -#else - bool operator==( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMetalObjectsInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalObjectsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilUsage == rhs.stencilUsage ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); +# endif } - bool operator!=( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportMetalObjectsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageStencilUsageCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalObjectsInfoEXT; + const void * pNext = {}; }; template <> - struct CppType + struct CppType { - using Type = ImageStencilUsageCreateInfo; + using Type = ExportMetalObjectsInfoEXT; }; - using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ - struct ImageSubresource2EXT +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ExportMetalSharedEventInfoEXT { - using 
NativeType = VkImageSubresource2EXT; + using NativeType = VkExportMetalSharedEventInfoEXT; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSubresource2EXT; + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalSharedEventInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageSubresource2EXT( VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageSubresource( imageSubresource_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMetalSharedEventInfoEXT( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::Event event_ = {}, + MTLSharedEvent_id mtlSharedEvent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , event{ event_ } + , mtlSharedEvent{ mtlSharedEvent_ } { } - VULKAN_HPP_CONSTEXPR ImageSubresource2EXT( ImageSubresource2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ExportMetalSharedEventInfoEXT( ExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImageSubresource2EXT( VkImageSubresource2EXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageSubresource2EXT( *reinterpret_cast( &rhs ) ) + ExportMetalSharedEventInfoEXT( VkExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMetalSharedEventInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImageSubresource2EXT & operator=( ImageSubresource2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ExportMetalSharedEventInfoEXT & operator=( ExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImageSubresource2EXT & operator=( VkImageSubresource2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + ExportMetalSharedEventInfoEXT & operator=( VkExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageSubresource2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageSubresource2EXT & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT { - imageSubresource = imageSubresource_; + semaphore = semaphore_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkImageSubresource2EXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setEvent( VULKAN_HPP_NAMESPACE::Event event_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + event = event_; + return *this; } - operator VkImageSubresource2EXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setMtlSharedEvent( MTLSharedEvent_id mtlSharedEvent_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + 
mtlSharedEvent = mtlSharedEvent_; + return *this; } +# endif /*VULKAN_HPP_NO_SETTERS*/ -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkExportMetalSharedEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalSharedEventInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, imageSubresource ); + return std::tie( sType, pNext, semaphore, event, mtlSharedEvent ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageSubresource2EXT const & ) const = default; -#else - bool operator==( ImageSubresource2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMetalSharedEventInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageSubresource == rhs.imageSubresource ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( event == rhs.event ) && + ( mtlSharedEvent == rhs.mtlSharedEvent ); +# endif } - bool operator!=( ImageSubresource2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSubresource2EXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalSharedEventInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::Event event = {}; + MTLSharedEvent_id mtlSharedEvent = {}; }; template <> - struct CppType + struct CppType { - using Type = ImageSubresource2EXT; + using Type = ExportMetalSharedEventInfoEXT; }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ - struct ImageSwapchainCreateInfoKHR +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ExportMetalTextureInfoEXT { - using NativeType = VkImageSwapchainCreateInfoKHR; + using NativeType = VkExportMetalTextureInfoEXT; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSwapchainCreateInfoKHR; + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalTextureInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , swapchain( swapchain_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportMetalTextureInfoEXT( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::BufferView bufferView_ = {}, + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ = 
VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, + MTLTexture_id mtlTexture_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , image{ image_ } + , imageView{ imageView_ } + , bufferView{ bufferView_ } + , plane{ plane_ } + , mtlTexture{ mtlTexture_ } { } - VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ExportMetalTextureInfoEXT( ExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageSwapchainCreateInfoKHR( *reinterpret_cast( &rhs ) ) + ExportMetalTextureInfoEXT( VkExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportMetalTextureInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImageSwapchainCreateInfoKHR & operator=( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ExportMetalTextureInfoEXT & operator=( ExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImageSwapchainCreateInfoKHR & operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + ExportMetalTextureInfoEXT & operator=( VkExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT { - swapchain = swapchain_; + image = image_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkImageSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + imageView = imageView_; + return *this; } - operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + bufferView = bufferView_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setPlane( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ ) VULKAN_HPP_NOEXCEPT + { + plane = plane_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setMtlTexture( MTLTexture_id mtlTexture_ ) VULKAN_HPP_NOEXCEPT + { + mtlTexture = mtlTexture_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExportMetalTextureInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportMetalTextureInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# 
else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, swapchain ); + return std::tie( sType, pNext, image, imageView, bufferView, plane, mtlTexture ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageSwapchainCreateInfoKHR const & ) const = default; -#else - bool operator==( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportMetalTextureInfoEXT const & ) const = default; +# else + bool operator==( ExportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( imageView == rhs.imageView ) && ( bufferView == rhs.bufferView ) && + ( plane == rhs.plane ) && ( mtlTexture == rhs.mtlTexture ); +# endif } - bool operator!=( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMetalTextureInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + VULKAN_HPP_NAMESPACE::BufferView bufferView = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; + MTLTexture_id mtlTexture = {}; }; template <> - struct CppType + struct CppType { - using Type = ImageSwapchainCreateInfoKHR; + using Type = ExportMetalTextureInfoEXT; }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ - struct ImageViewASTCDecodeModeEXT + struct ExportSemaphoreCreateInfo { - using NativeType = VkImageViewASTCDecodeModeEXT; + using NativeType = VkExportSemaphoreCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAstcDecodeModeEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , decodeMode( decodeMode_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleTypes{ handleTypes_ } { } - VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageViewASTCDecodeModeEXT( 
*reinterpret_cast( &rhs ) ) + ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportSemaphoreCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImageViewASTCDecodeModeEXT & operator=( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ExportSemaphoreCreateInfo & operator=( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImageViewASTCDecodeModeEXT & operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ExportSemaphoreCreateInfo & operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT { - decodeMode = decodeMode_; + handleTypes = handleTypes_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImageViewASTCDecodeModeEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkExportSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT + operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, decodeMode ); + return std::tie( sType, pNext, handleTypes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageViewASTCDecodeModeEXT const & ) const = default; + auto operator<=>( ExportSemaphoreCreateInfo const & ) const = default; #else - bool operator==( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decodeMode == rhs.decodeMode ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); # endif } - bool operator!=( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Format decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreCreateInfo; + const void * pNext = {}; + 
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {}; }; template <> - struct CppType + struct CppType { - using Type = ImageViewASTCDecodeModeEXT; + using Type = ExportSemaphoreCreateInfo; }; - struct ImageViewAddressPropertiesNVX + using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ExportSemaphoreWin32HandleInfoKHR { - using NativeType = VkImageViewAddressPropertiesNVX; + using NativeType = VkExportSemaphoreWin32HandleInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAddressPropertiesNVX; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreWin32HandleInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceAddress( deviceAddress_ ) - , size( size_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {}, + DWORD dwAccess_ = {}, + LPCWSTR name_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pAttributes{ pAttributes_ } + , dwAccess{ dwAccess_ } + , name{ name_ } { } - VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImageViewAddressPropertiesNVX( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageViewAddressPropertiesNVX( *reinterpret_cast( &rhs ) ) + ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ExportSemaphoreWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImageViewAddressPropertiesNVX & operator=( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ExportSemaphoreWin32HandleInfoKHR & operator=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImageViewAddressPropertiesNVX & operator=( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + ExportSemaphoreWin32HandleInfoKHR & operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkImageViewAddressPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkImageViewAddressPropertiesNVX &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pAttributes = pAttributes_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + VULKAN_HPP_CONSTEXPR_14 
ExportSemaphoreWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkExportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std:: - tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, deviceAddress, size ); + return std::tie( sType, pNext, pAttributes, dwAccess, name ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageViewAddressPropertiesNVX const & ) const = default; -#else - bool operator==( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExportSemaphoreWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ) && ( size == rhs.size ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ) && ( name == rhs.name ); +# endif } - bool operator!=( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAddressPropertiesNVX; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR; + const void * pNext = {}; + const SECURITY_ATTRIBUTES * pAttributes = {}; + DWORD dwAccess = {}; + LPCWSTR name = {}; }; template <> - struct CppType + struct CppType { - using Type = ImageViewAddressPropertiesNVX; + using Type = ExportSemaphoreWin32HandleInfoKHR; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct ImageViewCreateInfo + struct ExtensionProperties { - using NativeType = VkImageViewCreateInfo; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCreateInfo; + using NativeType = VkExtensionProperties; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Image image_ = {}, - VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, - VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, - VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , image( image_ ) - 
, viewType( viewType_ ) - , format( format_ ) - , components( components_ ) - , subresourceRange( subresourceRange_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( std::array const & extensionName_ = {}, + uint32_t specVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : extensionName{ extensionName_ } + , specVersion{ specVersion_ } { } - VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ImageViewCreateInfo( *reinterpret_cast( &rhs ) ) + ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT : ExtensionProperties( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImageViewCreateInfo & operator=( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ExtensionProperties & operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImageViewCreateInfo & operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ExtensionProperties & operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + operator VkExtensionProperties const &() const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT { - flags = flags_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, uint32_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - image = image_; - return *this; + return std::tie( extensionName, specVersion ); } +#endif - VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setViewType( VULKAN_HPP_NAMESPACE::ImageViewType viewType_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - viewType = viewType_; - return *this; - } + if ( auto cmp = strcmp( extensionName, rhs.extensionName ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = specVersion <=> rhs.specVersion; cmp != 0 ) + return cmp; - VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT - { - format = format_; - return *this; + return std::strong_ordering::equivalent; } +#endif - VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT + bool operator==( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - components = components_; - return *this; + return ( strcmp( extensionName, rhs.extensionName ) == 0 ) && ( specVersion == rhs.specVersion ); } - VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & - setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - subresourceRange = subresourceRange_; - return *this; + return !operator==( rhs ); } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkImageViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, flags, image, viewType, format, components, subresourceRange ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageViewCreateInfo const & ) const = default; -#else - bool operator==( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( image == rhs.image ) && ( viewType == rhs.viewType ) && - ( format == rhs.format ) && ( components == rhs.components ) && ( subresourceRange == rhs.subresourceRange ); -# endif - } - - bool operator!=( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; - VULKAN_HPP_NAMESPACE::ImageViewType viewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; - VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; - }; - - template <> - struct CppType - { - using Type = ImageViewCreateInfo; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D extensionName = {}; + uint32_t specVersion = {}; }; - struct ImageViewHandleInfoNVX + struct ExternalMemoryProperties { - using NativeType = VkImageViewHandleInfoNVX; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewHandleInfoNVX; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, - VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, - const void * pNext_ = 
nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageView( imageView_ ) - , descriptorType( descriptorType_ ) - , sampler( sampler_ ) - { - } - - VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageViewHandleInfoNVX( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - ImageViewHandleInfoNVX & operator=( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + using NativeType = VkExternalMemoryProperties; - ImageViewHandleInfoNVX & operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : externalMemoryFeatures{ externalMemoryFeatures_ } + , exportFromImportedHandleTypes{ exportFromImportedHandleTypes_ } + , compatibleHandleTypes{ compatibleHandleTypes_ } { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT + ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalMemoryProperties( *reinterpret_cast( &rhs ) ) { - imageView = imageView_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT - { - descriptorType = descriptorType_; - return *this; - } + ExternalMemoryProperties & operator=( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT + ExternalMemoryProperties & operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - sampler = sampler_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkImageViewHandleInfoNVX const &() const VULKAN_HPP_NOEXCEPT + operator VkExternalMemoryProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT + operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, imageView, descriptorType, sampler ); + return std::tie( externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageViewHandleInfoNVX const & ) 
const = default; + auto operator<=>( ExternalMemoryProperties const & ) const = default; #else - bool operator==( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && ( descriptorType == rhs.descriptorType ) && - ( sampler == rhs.sampler ); + return ( externalMemoryFeatures == rhs.externalMemoryFeatures ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && + ( compatibleHandleTypes == rhs.compatibleHandleTypes ); # endif } - bool operator!=( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewHandleInfoNVX; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ImageView imageView = {}; - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; - VULKAN_HPP_NAMESPACE::Sampler sampler = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {}; }; - template <> - struct CppType - { - using Type = ImageViewHandleInfoNVX; - }; + using ExternalMemoryPropertiesKHR = ExternalMemoryProperties; - struct ImageViewMinLodCreateInfoEXT + struct ExternalBufferProperties { - using NativeType = VkImageViewMinLodCreateInfoEXT; + using NativeType = VkExternalBufferProperties; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewMinLodCreateInfoEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT( float minLod_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , minLod( minLod_ ) - { - } - - VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT( ImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalBufferProperties; - ImageViewMinLodCreateInfoEXT( VkImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageViewMinLodCreateInfoEXT( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalBufferProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , externalMemoryProperties{ externalMemoryProperties_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImageViewMinLodCreateInfoEXT & operator=( ImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ExternalBufferProperties( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImageViewMinLodCreateInfoEXT & operator=( VkImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalBufferProperties( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return 
*this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + ExternalBufferProperties & operator=( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT + ExternalBufferProperties & operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - minLod = minLod_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkImageViewMinLodCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkExternalBufferProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageViewMinLodCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, minLod ); + return std::tie( sType, pNext, externalMemoryProperties ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageViewMinLodCreateInfoEXT const & ) const = default; + auto operator<=>( ExternalBufferProperties const & ) const = default; #else - bool operator==( ImageViewMinLodCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minLod == rhs.minLod ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryProperties == rhs.externalMemoryProperties ); # endif } - bool operator!=( ImageViewMinLodCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewMinLodCreateInfoEXT; - const void * pNext = {}; - float minLod = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {}; }; template <> - struct CppType + struct CppType { - using Type = ImageViewMinLodCreateInfoEXT; + using Type = ExternalBufferProperties; }; - struct ImageViewSampleWeightCreateInfoQCOM + using ExternalBufferPropertiesKHR = ExternalBufferProperties; + + struct ExternalFenceProperties { - using NativeType = VkImageViewSampleWeightCreateInfoQCOM; + using NativeType = VkExternalFenceProperties; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewSampleWeightCreateInfoQCOM; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageViewSampleWeightCreateInfoQCOM( VULKAN_HPP_NAMESPACE::Offset2D filterCenter_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D filterSize_ = {}, - uint32_t numPhases_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , filterCenter( filterCenter_ ) - , filterSize( filterSize_ ) - , 
numPhases( numPhases_ ) - { - } - - VULKAN_HPP_CONSTEXPR ImageViewSampleWeightCreateInfoQCOM( ImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ImageViewSampleWeightCreateInfoQCOM( VkImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageViewSampleWeightCreateInfoQCOM( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - ImageViewSampleWeightCreateInfoQCOM & operator=( ImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFenceProperties; - ImageViewSampleWeightCreateInfoQCOM & operator=( VkImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalFenceProperties( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , exportFromImportedHandleTypes{ exportFromImportedHandleTypes_ } + , compatibleHandleTypes{ compatibleHandleTypes_ } + , externalFenceFeatures{ externalFenceFeatures_ } { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + VULKAN_HPP_CONSTEXPR ExternalFenceProperties( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setFilterCenter( VULKAN_HPP_NAMESPACE::Offset2D const & filterCenter_ ) VULKAN_HPP_NOEXCEPT + ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalFenceProperties( *reinterpret_cast( &rhs ) ) { - filterCenter = filterCenter_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setFilterSize( VULKAN_HPP_NAMESPACE::Extent2D const & filterSize_ ) VULKAN_HPP_NOEXCEPT - { - filterSize = filterSize_; - return *this; - } + ExternalFenceProperties & operator=( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setNumPhases( uint32_t numPhases_ ) VULKAN_HPP_NOEXCEPT + ExternalFenceProperties & operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - numPhases = numPhases_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkImageViewSampleWeightCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + operator VkExternalFenceProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageViewSampleWeightCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT + operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -38257,1679 +40380,1704 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + void * const &, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags const &, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags const &, + 
VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, filterCenter, filterSize, numPhases ); + return std::tie( sType, pNext, exportFromImportedHandleTypes, compatibleHandleTypes, externalFenceFeatures ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageViewSampleWeightCreateInfoQCOM const & ) const = default; + auto operator<=>( ExternalFenceProperties const & ) const = default; #else - bool operator==( ImageViewSampleWeightCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterCenter == rhs.filterCenter ) && ( filterSize == rhs.filterSize ) && - ( numPhases == rhs.numPhases ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && + ( compatibleHandleTypes == rhs.compatibleHandleTypes ) && ( externalFenceFeatures == rhs.externalFenceFeatures ); # endif } - bool operator!=( ImageViewSampleWeightCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewSampleWeightCreateInfoQCOM; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Offset2D filterCenter = {}; - VULKAN_HPP_NAMESPACE::Extent2D filterSize = {}; - uint32_t numPhases = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures = {}; }; template <> - struct CppType + struct CppType { - using Type = ImageViewSampleWeightCreateInfoQCOM; + using Type = ExternalFenceProperties; }; - struct ImageViewUsageCreateInfo + using ExternalFencePropertiesKHR = ExternalFenceProperties; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct ExternalFormatANDROID { - using NativeType = VkImageViewUsageCreateInfo; + using NativeType = VkExternalFormatANDROID; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewUsageCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatANDROID; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , usage( usage_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( uint64_t externalFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , externalFormat{ externalFormat_ } { } - VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) 
VULKAN_HPP_NOEXCEPT - : ImageViewUsageCreateInfo( *reinterpret_cast( &rhs ) ) + ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalFormatANDROID( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImageViewUsageCreateInfo & operator=( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ExternalFormatANDROID & operator=( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImageViewUsageCreateInfo & operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ExternalFormatANDROID & operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT { - usage = usage_; + externalFormat = externalFormat_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImageViewUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkExternalFormatANDROID const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, usage ); + return std::tie( sType, pNext, externalFormat ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageViewUsageCreateInfo const & ) const = default; -#else - bool operator==( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalFormatANDROID const & ) const = default; +# else + bool operator==( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormat == rhs.externalFormat ); +# endif } - bool operator!=( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewUsageCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eExternalFormatANDROID; + void * pNext = {}; + uint64_t externalFormat = {}; }; template <> - struct CppType + struct CppType { - using Type = ImageViewUsageCreateInfo; + using Type = ExternalFormatANDROID; }; - using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ -#if defined( VK_USE_PLATFORM_ANDROID_KHR ) - struct ImportAndroidHardwareBufferInfoANDROID +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + struct ExternalFormatQNX { - using NativeType = VkImportAndroidHardwareBufferInfoANDROID; + using NativeType = VkExternalFormatQNX; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportAndroidHardwareBufferInfoANDROID; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatQNX; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer * buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , buffer( buffer_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalFormatQNX( uint64_t externalFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , externalFormat{ externalFormat_ } { } - VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ExternalFormatQNX( ExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - : ImportAndroidHardwareBufferInfoANDROID( *reinterpret_cast( &rhs ) ) - { - } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + ExternalFormatQNX( VkExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT : ExternalFormatQNX( *reinterpret_cast( &rhs ) ) {} - ImportAndroidHardwareBufferInfoANDROID & operator=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ExternalFormatQNX & operator=( ExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImportAndroidHardwareBufferInfoANDROID & operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + ExternalFormatQNX & operator=( VkExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExternalFormatQNX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer * buffer_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExternalFormatQNX & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT { - buffer = buffer_; + externalFormat = externalFormat_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImportAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT + operator VkExternalFormatQNX const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( 
this ); + return *reinterpret_cast( this ); } - operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT + operator VkExternalFormatQNX &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, buffer ); + return std::tie( sType, pNext, externalFormat ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImportAndroidHardwareBufferInfoANDROID const & ) const = default; + auto operator<=>( ExternalFormatQNX const & ) const = default; # else - bool operator==( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExternalFormatQNX const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormat == rhs.externalFormat ); # endif } - bool operator!=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalFormatQNX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID; - const void * pNext = {}; - struct AHardwareBuffer * buffer = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatQNX; + void * pNext = {}; + uint64_t externalFormat = {}; }; template <> - struct CppType + struct CppType { - using Type = ImportAndroidHardwareBufferInfoANDROID; + using Type = ExternalFormatQNX; }; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - struct ImportFenceFdInfoKHR + struct ExternalImageFormatProperties { - using NativeType = VkImportFenceFdInfoKHR; + using NativeType = VkExternalImageFormatProperties; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceFdInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalImageFormatProperties; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( - VULKAN_HPP_NAMESPACE::Fence fence_ = {}, - VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, - int fd_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fence( fence_ ) - , flags( flags_ ) - , handleType( handleType_ ) - , fd( fd_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , externalMemoryProperties{ externalMemoryProperties_ } { } - VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : 
ImportFenceFdInfoKHR( *reinterpret_cast( &rhs ) ) + ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalImageFormatProperties( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - ImportFenceFdInfoKHR & operator=( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImportFenceFdInfoKHR & operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + ExternalImageFormatProperties & operator=( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + ExternalImageFormatProperties & operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + operator VkExternalImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT { - fence = fence_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT { - flags = flags_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - handleType = handleType_; - return *this; + return std::tie( sType, pNext, externalMemoryProperties ); } +#endif - VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalImageFormatProperties const & ) const = default; +#else + bool operator==( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - fd = fd_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryProperties == rhs.externalMemoryProperties ); +# endif } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkImportFenceFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return !operator==( rhs ); } +#endif - operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {}; + }; + + template <> + struct CppType + { + using Type = ExternalImageFormatProperties; + }; + + using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties; + + struct ImageFormatProperties + { + using NativeType = VkImageFormatProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageFormatProperties( VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {}, + uint32_t maxMipLevels_ = {}, + uint32_t 
maxArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {} ) VULKAN_HPP_NOEXCEPT + : maxExtent{ maxExtent_ } + , maxMipLevels{ maxMipLevels_ } + , maxArrayLayers{ maxArrayLayers_ } + , sampleCounts{ sampleCounts_ } + , maxResourceSize{ maxResourceSize_ } + { + } + + VULKAN_HPP_CONSTEXPR ImageFormatProperties( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageFormatProperties( *reinterpret_cast( &rhs ) ) { - return *reinterpret_cast( this ); + } + + ImageFormatProperties & operator=( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageFormatProperties & operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, fence, flags, handleType, fd ); + return std::tie( maxExtent, maxMipLevels, maxArrayLayers, sampleCounts, maxResourceSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImportFenceFdInfoKHR const & ) const = default; + auto operator<=>( ImageFormatProperties const & ) const = default; #else - bool operator==( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && - ( fd == rhs.fd ); + return ( maxExtent == rhs.maxExtent ) && ( maxMipLevels == rhs.maxMipLevels ) && ( maxArrayLayers == rhs.maxArrayLayers ) && + ( sampleCounts == rhs.sampleCounts ) && ( maxResourceSize == rhs.maxResourceSize ); # endif } - bool operator!=( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Fence fence = {}; - VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; - int fd = {}; + VULKAN_HPP_NAMESPACE::Extent3D maxExtent = {}; + uint32_t maxMipLevels = {}; + uint32_t maxArrayLayers = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {}; }; - template <> - struct CppType + struct ExternalImageFormatPropertiesNV { - using Type = ImportFenceFdInfoKHR; - }; + using NativeType = VkExternalImageFormatPropertiesNV; -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - struct ImportFenceWin32HandleInfoKHR - { - using NativeType = VkImportFenceWin32HandleInfoKHR; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + 
ExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : imageFormatProperties{ imageFormatProperties_ } + , externalMemoryFeatures{ externalMemoryFeatures_ } + , exportFromImportedHandleTypes{ exportFromImportedHandleTypes_ } + , compatibleHandleTypes{ compatibleHandleTypes_ } + { + } - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceWin32HandleInfoKHR; + VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( - VULKAN_HPP_NAMESPACE::Fence fence_ = {}, - VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, - HANDLE handle_ = {}, - LPCWSTR name_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fence( fence_ ) - , flags( flags_ ) - , handleType( handleType_ ) - , handle( handle_ ) - , name( name_ ) + ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalImageFormatPropertiesNV( *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ExternalImageFormatPropertiesNV & operator=( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : ImportFenceWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + ExternalImageFormatPropertiesNV & operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImportFenceWin32HandleInfoKHR & operator=( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + operator VkExternalImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } - ImportFenceWin32HandleInfoKHR & operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); - return *this; + return *reinterpret_cast( this ); } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return std::tie( imageFormatProperties, externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes ); } +#endif - VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalImageFormatPropertiesNV const & 
) const = default; +#else + bool operator==( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - fence = fence_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( imageFormatProperties == rhs.imageFormatProperties ) && ( externalMemoryFeatures == rhs.externalMemoryFeatures ) && + ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && ( compatibleHandleTypes == rhs.compatibleHandleTypes ); +# endif } - VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - flags = flags_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & - setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes = {}; + }; + + struct ExternalMemoryAcquireUnmodifiedEXT + { + using NativeType = VkExternalMemoryAcquireUnmodifiedEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryAcquireUnmodifiedEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalMemoryAcquireUnmodifiedEXT( VULKAN_HPP_NAMESPACE::Bool32 acquireUnmodifiedMemory_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , acquireUnmodifiedMemory{ acquireUnmodifiedMemory_ } { - handleType = handleType_; + } + + VULKAN_HPP_CONSTEXPR ExternalMemoryAcquireUnmodifiedEXT( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExternalMemoryAcquireUnmodifiedEXT( VkExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalMemoryAcquireUnmodifiedEXT( *reinterpret_cast( &rhs ) ) + { + } + + ExternalMemoryAcquireUnmodifiedEXT & operator=( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ExternalMemoryAcquireUnmodifiedEXT & operator=( VkExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryAcquireUnmodifiedEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - handle = handle_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryAcquireUnmodifiedEXT & + setAcquireUnmodifiedMemory( VULKAN_HPP_NAMESPACE::Bool32 acquireUnmodifiedMemory_ ) VULKAN_HPP_NOEXCEPT { - name = name_; + acquireUnmodifiedMemory = acquireUnmodifiedMemory_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator 
VkExternalMemoryAcquireUnmodifiedEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkExternalMemoryAcquireUnmodifiedEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, fence, flags, handleType, handle, name ); + return std::tie( sType, pNext, acquireUnmodifiedMemory ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImportFenceWin32HandleInfoKHR const & ) const = default; -# else - bool operator==( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalMemoryAcquireUnmodifiedEXT const & ) const = default; +#else + bool operator==( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && - ( handle == rhs.handle ) && ( name == rhs.name ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireUnmodifiedMemory == rhs.acquireUnmodifiedMemory ); +# endif } - bool operator!=( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Fence fence = {}; - VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; - HANDLE handle = {}; - LPCWSTR name = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryAcquireUnmodifiedEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 acquireUnmodifiedMemory = {}; }; template <> - struct CppType + struct CppType { - using Type = ImportFenceWin32HandleInfoKHR; + using Type = ExternalMemoryAcquireUnmodifiedEXT; }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#if defined( VK_USE_PLATFORM_FUCHSIA ) - struct ImportMemoryBufferCollectionFUCHSIA + struct ExternalMemoryBufferCreateInfo { - using NativeType = VkImportMemoryBufferCollectionFUCHSIA; + using NativeType = VkExternalMemoryBufferCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryBufferCollectionFUCHSIA; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryBufferCreateInfo; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, - uint32_t index_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , collection( collection_ ) - , index( 
index_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleTypes{ handleTypes_ } { } - VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA( ImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImportMemoryBufferCollectionFUCHSIA( VkImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT - : ImportMemoryBufferCollectionFUCHSIA( *reinterpret_cast( &rhs ) ) + ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalMemoryBufferCreateInfo( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImportMemoryBufferCollectionFUCHSIA & operator=( ImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ExternalMemoryBufferCreateInfo & operator=( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImportMemoryBufferCollectionFUCHSIA & operator=( VkImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + ExternalMemoryBufferCreateInfo & operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT - { - collection = collection_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT { - index = index_; + handleTypes = handleTypes_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImportMemoryBufferCollectionFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + operator VkExternalMemoryBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImportMemoryBufferCollectionFUCHSIA &() VULKAN_HPP_NOEXCEPT + operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, collection, index ); + return std::tie( sType, pNext, handleTypes ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( 
ImportMemoryBufferCollectionFUCHSIA const & ) const = default; -# else - bool operator==( ImportMemoryBufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalMemoryBufferCreateInfo const & ) const = default; +#else + bool operator==( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) && ( index == rhs.index ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); +# endif } - bool operator!=( ImportMemoryBufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryBufferCollectionFUCHSIA; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {}; - uint32_t index = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; }; template <> - struct CppType + struct CppType { - using Type = ImportMemoryBufferCollectionFUCHSIA; + using Type = ExternalMemoryBufferCreateInfo; }; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - struct ImportMemoryFdInfoKHR + using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo; + + struct ExternalMemoryImageCreateInfo { - using NativeType = VkImportMemoryFdInfoKHR; + using NativeType = VkExternalMemoryImageCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryFdInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - int fd_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleType( handleType_ ) - , fd( fd_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleTypes{ handleTypes_ } { } - VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : ImportMemoryFdInfoKHR( *reinterpret_cast( &rhs ) ) + ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalMemoryImageCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImportMemoryFdInfoKHR & operator=( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ExternalMemoryImageCreateInfo & 
operator=( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImportMemoryFdInfoKHR & operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + ExternalMemoryImageCreateInfo & operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT { - fd = fd_; + handleTypes = handleTypes_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImportMemoryFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkExternalMemoryImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, handleType, fd ); + return std::tie( sType, pNext, handleTypes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImportMemoryFdInfoKHR const & ) const = default; + auto operator<=>( ExternalMemoryImageCreateInfo const & ) const = default; #else - bool operator==( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( fd == rhs.fd ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); # endif } - bool operator!=( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; - int fd = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; }; template <> - struct CppType + struct CppType { - using Type = ImportMemoryFdInfoKHR; + using Type = 
ExternalMemoryImageCreateInfo; }; - struct ImportMemoryHostPointerInfoEXT + using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo; + + struct ExternalMemoryImageCreateInfoNV { - using NativeType = VkImportMemoryHostPointerInfoEXT; + using NativeType = VkExternalMemoryImageCreateInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryHostPointerInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - void * pHostPointer_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleType( handleType_ ) - , pHostPointer( pHostPointer_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleTypes{ handleTypes_ } { } - VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ImportMemoryHostPointerInfoEXT( *reinterpret_cast( &rhs ) ) + ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalMemoryImageCreateInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImportMemoryHostPointerInfoEXT & operator=( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ExternalMemoryImageCreateInfoNV & operator=( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImportMemoryHostPointerInfoEXT & operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ExternalMemoryImageCreateInfoNV & operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & - setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setPHostPointer( void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV & + setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT { - pHostPointer = pHostPointer_; + handleTypes = handleTypes_; return *this; } -#endif 
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImportMemoryHostPointerInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkExternalMemoryImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std:: - tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, handleType, pHostPointer ); + return std::tie( sType, pNext, handleTypes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImportMemoryHostPointerInfoEXT const & ) const = default; + auto operator<=>( ExternalMemoryImageCreateInfoNV const & ) const = default; #else - bool operator==( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( pHostPointer == rhs.pHostPointer ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); # endif } - bool operator!=( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; - void * pHostPointer = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {}; }; template <> - struct CppType + struct CppType { - using Type = ImportMemoryHostPointerInfoEXT; + using Type = ExternalMemoryImageCreateInfoNV; }; -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - struct ImportMemoryWin32HandleInfoKHR + struct ExternalSemaphoreProperties { - using NativeType = VkImportMemoryWin32HandleInfoKHR; + using NativeType = VkExternalSemaphoreProperties; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalSemaphoreProperties; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - HANDLE handle_ = {}, - LPCWSTR name_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleType( handleType_ ) - , handle( handle_ ) - , name( name_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {}, + 
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , exportFromImportedHandleTypes{ exportFromImportedHandleTypes_ } + , compatibleHandleTypes{ compatibleHandleTypes_ } + , externalSemaphoreFeatures{ externalSemaphoreFeatures_ } { } - VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : ImportMemoryWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : ExternalSemaphoreProperties( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImportMemoryWin32HandleInfoKHR & operator=( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ExternalSemaphoreProperties & operator=( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImportMemoryWin32HandleInfoKHR & operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + ExternalSemaphoreProperties & operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + operator VkExternalSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & - setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT { - handleType = handleType_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT - { - handle = handle_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT - { - name = name_; - return *this; - } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkImportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + void * const &, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags const &, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags const &, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, handleType, handle, name ); + return std::tie( sType, pNext, exportFromImportedHandleTypes, compatibleHandleTypes, externalSemaphoreFeatures ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto 
operator<=>( ImportMemoryWin32HandleInfoKHR const & ) const = default; -# else - bool operator==( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExternalSemaphoreProperties const & ) const = default; +#else + bool operator==( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle ) && ( name == rhs.name ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && + ( compatibleHandleTypes == rhs.compatibleHandleTypes ) && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures ); +# endif } - bool operator!=( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; - HANDLE handle = {}; - LPCWSTR name = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {}; }; template <> - struct CppType + struct CppType { - using Type = ImportMemoryWin32HandleInfoKHR; + using Type = ExternalSemaphoreProperties; }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - struct ImportMemoryWin32HandleInfoNV + using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties; + + struct FenceCreateInfo { - using NativeType = VkImportMemoryWin32HandleInfoNV; + using NativeType = VkFenceCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceCreateInfo; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {}, - HANDLE handle_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleType( handleType_ ) - , handle( handle_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FenceCreateInfo( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } { } - VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR FenceCreateInfo( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : ImportMemoryWin32HandleInfoNV( 
*reinterpret_cast( &rhs ) ) - { - } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : FenceCreateInfo( *reinterpret_cast( &rhs ) ) {} - ImportMemoryWin32HandleInfoNV & operator=( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + FenceCreateInfo & operator=( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImportMemoryWin32HandleInfoNV & operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + FenceCreateInfo & operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & - setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT - { - handleType = handleType_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - handle = handle_; + flags = flags_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT + operator VkFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT + operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, handleType, handle ); + return std::tie( sType, pNext, flags ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImportMemoryWin32HandleInfoNV const & ) const = default; -# else - bool operator==( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FenceCreateInfo const & ) const = default; +#else + bool operator==( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif } - bool operator!=( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eImportMemoryWin32HandleInfoNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType = {}; - HANDLE handle = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {}; }; template <> - struct CppType + struct CppType { - using Type = ImportMemoryWin32HandleInfoNV; + using Type = FenceCreateInfo; }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#if defined( VK_USE_PLATFORM_FUCHSIA ) - struct ImportMemoryZirconHandleInfoFUCHSIA + struct FenceGetFdInfoKHR { - using NativeType = VkImportMemoryZirconHandleInfoFUCHSIA; + using NativeType = VkFenceGetFdInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetFdInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA( - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - zx_handle_t handle_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleType( handleType_ ) - , handle( handle_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + FenceGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fence{ fence_ } + , handleType{ handleType_ } { } - VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImportMemoryZirconHandleInfoFUCHSIA( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT - : ImportMemoryZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) - { - } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : FenceGetFdInfoKHR( *reinterpret_cast( &rhs ) ) {} - ImportMemoryZirconHandleInfoFUCHSIA & operator=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + FenceGetFdInfoKHR & operator=( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImportMemoryZirconHandleInfoFUCHSIA & operator=( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + FenceGetFdInfoKHR & operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & - setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + 
VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT { - handleType = handleType_; + fence = fence_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setHandle( zx_handle_t handle_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { - handle = handle_; + handleType = handleType_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImportMemoryZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + operator VkFenceGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImportMemoryZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + VULKAN_HPP_NAMESPACE::Fence const &, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, handleType, handle ); + return std::tie( sType, pNext, fence, handleType ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - std::strong_ordering operator<=>( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FenceGetFdInfoKHR const & ) const = default; +#else + bool operator==( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) - return cmp; - if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) - return cmp; - if ( auto cmp = handleType <=> rhs.handleType; cmp != 0 ) - return cmp; - if ( auto cmp = memcmp( &handle, &rhs.handle, sizeof( zx_handle_t ) ); cmp != 0 ) - return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; - - return std::strong_ordering::equivalent; - } +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( handleType == rhs.handleType ); # endif - - bool operator==( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && - ( memcmp( &handle, &rhs.handle, sizeof( zx_handle_t ) ) == 0 ); } - bool operator!=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; - zx_handle_t handle = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; }; template <> - struct CppType + struct CppType { - using Type = ImportMemoryZirconHandleInfoFUCHSIA; + using Type = FenceGetFdInfoKHR; }; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ -#if defined( VK_USE_PLATFORM_METAL_EXT ) - struct ImportMetalBufferInfoEXT +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct FenceGetWin32HandleInfoKHR { - using NativeType = VkImportMetalBufferInfoEXT; + using NativeType = VkFenceGetWin32HandleInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalBufferInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetWin32HandleInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImportMetalBufferInfoEXT( MTLBuffer_id mtlBuffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , mtlBuffer( mtlBuffer_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fence{ fence_ } + , handleType{ handleType_ } { } - VULKAN_HPP_CONSTEXPR ImportMetalBufferInfoEXT( ImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImportMetalBufferInfoEXT( VkImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ImportMetalBufferInfoEXT( *reinterpret_cast( &rhs ) ) + FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : FenceGetWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImportMetalBufferInfoEXT & operator=( ImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + FenceGetWin32HandleInfoKHR & operator=( FenceGetWin32HandleInfoKHR const & rhs 
) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImportMetalBufferInfoEXT & operator=( VkImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + FenceGetWin32HandleInfoKHR & operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImportMetalBufferInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImportMetalBufferInfoEXT & setMtlBuffer( MTLBuffer_id mtlBuffer_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT { - mtlBuffer = mtlBuffer_; + fence = fence_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkImportMetalBufferInfoEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + handleType = handleType_; + return *this; } +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImportMetalBufferInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkFenceGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, mtlBuffer ); + return std::tie( sType, pNext, fence, handleType ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImportMetalBufferInfoEXT const & ) const = default; + auto operator<=>( FenceGetWin32HandleInfoKHR const & ) const = default; # else - bool operator==( ImportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mtlBuffer == rhs.mtlBuffer ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( handleType == rhs.handleType ); # endif } - bool operator!=( ImportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalBufferInfoEXT; - const void * pNext = {}; - MTLBuffer_id mtlBuffer = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; }; template <> - struct CppType + struct CppType { - using Type = ImportMetalBufferInfoEXT; + using Type = FenceGetWin32HandleInfoKHR; }; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ 
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#if defined( VK_USE_PLATFORM_METAL_EXT ) - struct ImportMetalIOSurfaceInfoEXT + struct FilterCubicImageViewImageFormatPropertiesEXT { - using NativeType = VkImportMetalIOSurfaceInfoEXT; + using NativeType = VkFilterCubicImageViewImageFormatPropertiesEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalIoSurfaceInfoEXT; - -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImportMetalIOSurfaceInfoEXT( IOSurfaceRef ioSurface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , ioSurface( ioSurface_ ) - { - } - - VULKAN_HPP_CONSTEXPR ImportMetalIOSurfaceInfoEXT( ImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT; - ImportMetalIOSurfaceInfoEXT( VkImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ImportMetalIOSurfaceInfoEXT( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , filterCubic{ filterCubic_ } + , filterCubicMinmax{ filterCubicMinmax_ } { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImportMetalIOSurfaceInfoEXT & operator=( ImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImportMetalIOSurfaceInfoEXT & operator=( VkImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : FilterCubicImageViewImageFormatPropertiesEXT( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImportMetalIOSurfaceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + FilterCubicImageViewImageFormatPropertiesEXT & operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 ImportMetalIOSurfaceInfoEXT & setIoSurface( IOSurfaceRef ioSurface_ ) VULKAN_HPP_NOEXCEPT + FilterCubicImageViewImageFormatPropertiesEXT & operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - ioSurface = ioSurface_; + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkImportMetalIOSurfaceInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkFilterCubicImageViewImageFormatPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImportMetalIOSurfaceInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( 
VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, ioSurface ); + return std::tie( sType, pNext, filterCubic, filterCubicMinmax ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImportMetalIOSurfaceInfoEXT const & ) const = default; -# else - bool operator==( ImportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FilterCubicImageViewImageFormatPropertiesEXT const & ) const = default; +#else + bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ioSurface == rhs.ioSurface ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterCubic == rhs.filterCubic ) && ( filterCubicMinmax == rhs.filterCubicMinmax ); +# endif } - bool operator!=( ImportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalIoSurfaceInfoEXT; - const void * pNext = {}; - IOSurfaceRef ioSurface = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax = {}; }; template <> - struct CppType + struct CppType { - using Type = ImportMetalIOSurfaceInfoEXT; + using Type = FilterCubicImageViewImageFormatPropertiesEXT; }; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ -#if defined( VK_USE_PLATFORM_METAL_EXT ) - struct ImportMetalSharedEventInfoEXT + struct FormatProperties { - using NativeType = VkImportMetalSharedEventInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalSharedEventInfoEXT; + using NativeType = VkFormatProperties; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImportMetalSharedEventInfoEXT( MTLSharedEvent_id mtlSharedEvent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , mtlSharedEvent( mtlSharedEvent_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FormatProperties( VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + : linearTilingFeatures{ linearTilingFeatures_ } + , optimalTilingFeatures{ optimalTilingFeatures_ } + , bufferFeatures{ bufferFeatures_ } { } - VULKAN_HPP_CONSTEXPR ImportMetalSharedEventInfoEXT( ImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR FormatProperties( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImportMetalSharedEventInfoEXT( VkImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ImportMetalSharedEventInfoEXT( *reinterpret_cast( &rhs ) ) - { - } -# endif 
/*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties( *reinterpret_cast( &rhs ) ) {} - ImportMetalSharedEventInfoEXT & operator=( ImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + FormatProperties & operator=( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImportMetalSharedEventInfoEXT & operator=( VkImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + FormatProperties & operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImportMetalSharedEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + operator VkFormatProperties const &() const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 ImportMetalSharedEventInfoEXT & setMtlSharedEvent( MTLSharedEvent_id mtlSharedEvent_ ) VULKAN_HPP_NOEXCEPT + operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT { - mtlSharedEvent = mtlSharedEvent_; - return *this; + return *reinterpret_cast( this ); } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkImportMetalSharedEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return std::tie( linearTilingFeatures, optimalTilingFeatures, bufferFeatures ); } +#endif - operator VkImportMetalSharedEventInfoEXT &() VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FormatProperties const & ) const = default; +#else + bool operator==( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( linearTilingFeatures == rhs.linearTilingFeatures ) && ( optimalTilingFeatures == rhs.optimalTilingFeatures ) && + ( bufferFeatures == rhs.bufferFeatures ); +# endif } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + bool operator!=( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, mtlSharedEvent ); + return !operator==( rhs ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImportMetalSharedEventInfoEXT const & ) const = default; + public: + VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures = {}; + }; + + struct FormatProperties2 + { + using NativeType = VkFormatProperties2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FormatProperties2( VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , formatProperties{ formatProperties_ } + { + } + + VULKAN_HPP_CONSTEXPR FormatProperties2( FormatProperties2 const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; + + FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties2( *reinterpret_cast( &rhs ) ) {} + + FormatProperties2 & operator=( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + FormatProperties2 & operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto # else - bool operator==( ImportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) + return std::tie( sType, pNext, formatProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FormatProperties2 const & ) const = default; +#else + bool operator==( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mtlSharedEvent == rhs.mtlSharedEvent ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatProperties == rhs.formatProperties ); +# endif } - bool operator!=( ImportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalSharedEventInfoEXT; - const void * pNext = {}; - MTLSharedEvent_id mtlSharedEvent = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {}; }; template <> - struct CppType + struct CppType { - using Type = ImportMetalSharedEventInfoEXT; + using Type = FormatProperties2; }; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - -#if defined( VK_USE_PLATFORM_METAL_EXT ) - struct ImportMetalTextureInfoEXT - { - using NativeType = VkImportMetalTextureInfoEXT; - static const bool allowDuplicate = true; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalTextureInfoEXT; + using FormatProperties2KHR = FormatProperties2; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImportMetalTextureInfoEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, - MTLTexture_id mtlTexture_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , plane( plane_ ) - , mtlTexture( mtlTexture_ ) - { - } + struct FormatProperties3 + { + using NativeType = VkFormatProperties3; - VULKAN_HPP_CONSTEXPR ImportMetalTextureInfoEXT( ImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties3; - ImportMetalTextureInfoEXT( VkImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ImportMetalTextureInfoEXT( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
VULKAN_HPP_CONSTEXPR FormatProperties3( VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingFeatures_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , linearTilingFeatures{ linearTilingFeatures_ } + , optimalTilingFeatures{ optimalTilingFeatures_ } + , bufferFeatures{ bufferFeatures_ } { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImportMetalTextureInfoEXT & operator=( ImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ImportMetalTextureInfoEXT & operator=( VkImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + VULKAN_HPP_CONSTEXPR FormatProperties3( FormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + FormatProperties3( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties3( *reinterpret_cast( &rhs ) ) {} - VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setPlane( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ ) VULKAN_HPP_NOEXCEPT - { - plane = plane_; - return *this; - } + FormatProperties3 & operator=( FormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setMtlTexture( MTLTexture_id mtlTexture_ ) VULKAN_HPP_NOEXCEPT + FormatProperties3 & operator=( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT { - mtlTexture = mtlTexture_; + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkImportMetalTextureInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkFormatProperties3 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImportMetalTextureInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkFormatProperties3 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, plane, mtlTexture ); + return std::tie( sType, pNext, linearTilingFeatures, optimalTilingFeatures, bufferFeatures ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImportMetalTextureInfoEXT const & ) const = default; -# else - bool operator==( ImportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FormatProperties3 const & ) const = default; +#else + bool operator==( FormatProperties3 const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( plane == rhs.plane ) && ( mtlTexture == rhs.mtlTexture ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( linearTilingFeatures == rhs.linearTilingFeatures ) && + ( optimalTilingFeatures == rhs.optimalTilingFeatures ) && ( 
bufferFeatures == rhs.bufferFeatures ); +# endif } - bool operator!=( ImportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( FormatProperties3 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalTextureInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; - MTLTexture_id mtlTexture = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties3; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 linearTilingFeatures = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures = {}; }; template <> - struct CppType + struct CppType { - using Type = ImportMetalTextureInfoEXT; + using Type = FormatProperties3; }; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - struct ImportSemaphoreFdInfoKHR + using FormatProperties3KHR = FormatProperties3; + + struct FragmentShadingRateAttachmentInfoKHR { - using NativeType = VkImportSemaphoreFdInfoKHR; + using NativeType = VkFragmentShadingRateAttachmentInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreFdInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFragmentShadingRateAttachmentInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( - VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, - int fd_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , semaphore( semaphore_ ) - , flags( flags_ ) - , handleType( handleType_ ) - , fd( fd_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pFragmentShadingRateAttachment{ pFragmentShadingRateAttachment_ } + , shadingRateAttachmentTexelSize{ shadingRateAttachmentTexelSize_ } { } - VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : ImportSemaphoreFdInfoKHR( *reinterpret_cast( &rhs ) ) + FragmentShadingRateAttachmentInfoKHR( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : FragmentShadingRateAttachmentInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImportSemaphoreFdInfoKHR & operator=( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + FragmentShadingRateAttachmentInfoKHR & operator=( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImportSemaphoreFdInfoKHR & operator=( 
VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + FragmentShadingRateAttachmentInfoKHR & operator=( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT - { - semaphore = semaphore_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & - setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & + setPFragmentShadingRateAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ ) VULKAN_HPP_NOEXCEPT { - handleType = handleType_; + pFragmentShadingRateAttachment = pFragmentShadingRateAttachment_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & + setShadingRateAttachmentTexelSize( VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT { - fd = fd_; + shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkImportSemaphoreFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -39938,555 +42086,552 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &, + VULKAN_HPP_NAMESPACE::Extent2D const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, semaphore, flags, handleType, fd ); + return std::tie( sType, pNext, pFragmentShadingRateAttachment, shadingRateAttachmentTexelSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImportSemaphoreFdInfoKHR const & ) const = default; + auto operator<=>( FragmentShadingRateAttachmentInfoKHR const & ) const = default; #else - bool operator==( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && - ( fd == rhs.fd ); + return ( sType == rhs.sType ) && ( 
pNext == rhs.pNext ) && ( pFragmentShadingRateAttachment == rhs.pFragmentShadingRateAttachment ) && + ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize ); # endif } - bool operator!=( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; - int fd = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFragmentShadingRateAttachmentInfoKHR; + const void * pNext = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment = {}; + VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize = {}; }; template <> - struct CppType + struct CppType { - using Type = ImportSemaphoreFdInfoKHR; + using Type = FragmentShadingRateAttachmentInfoKHR; }; -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - struct ImportSemaphoreWin32HandleInfoKHR + struct FrameBoundaryEXT { - using NativeType = VkImportSemaphoreWin32HandleInfoKHR; + using NativeType = VkFrameBoundaryEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreWin32HandleInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFrameBoundaryEXT; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( - VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, - HANDLE handle_ = {}, - LPCWSTR name_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FrameBoundaryEXT( VULKAN_HPP_NAMESPACE::FrameBoundaryFlagsEXT flags_ = {}, + uint64_t frameID_ = {}, + uint32_t imageCount_ = {}, + const VULKAN_HPP_NAMESPACE::Image * pImages_ = {}, + uint32_t bufferCount_ = {}, + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers_ = {}, + uint64_t tagName_ = {}, + size_t tagSize_ = {}, + const void * pTag_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , frameID{ frameID_ } + , imageCount{ imageCount_ } + , pImages{ pImages_ } + , bufferCount{ bufferCount_ } + , pBuffers{ pBuffers_ } + , tagName{ tagName_ } + , tagSize{ tagSize_ } + , pTag{ pTag_ } + { + } + + VULKAN_HPP_CONSTEXPR FrameBoundaryEXT( FrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + FrameBoundaryEXT( VkFrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT : FrameBoundaryEXT( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + FrameBoundaryEXT( VULKAN_HPP_NAMESPACE::FrameBoundaryFlagsEXT flags_, + uint64_t frameID_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & images_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & buffers_ = {}, + uint64_t tagName_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ = {}, + 
const void * pNext_ = nullptr ) : pNext( pNext_ ) - , semaphore( semaphore_ ) , flags( flags_ ) - , handleType( handleType_ ) - , handle( handle_ ) - , name( name_ ) + , frameID( frameID_ ) + , imageCount( static_cast( images_.size() ) ) + , pImages( images_.data() ) + , bufferCount( static_cast( buffers_.size() ) ) + , pBuffers( buffers_.data() ) + , tagName( tagName_ ) + , tagSize( tag_.size() * sizeof( T ) ) + , pTag( tag_.data() ) { } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + FrameBoundaryEXT & operator=( FrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : ImportSemaphoreWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + FrameBoundaryEXT & operator=( VkFrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImportSemaphoreWin32HandleInfoKHR & operator=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } - ImportSemaphoreWin32HandleInfoKHR & operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setFlags( VULKAN_HPP_NAMESPACE::FrameBoundaryFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + flags = flags_; return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setFrameID( uint64_t frameID_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + frameID = frameID_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setImageCount( uint32_t imageCount_ ) VULKAN_HPP_NOEXCEPT { - semaphore = semaphore_; + imageCount = imageCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPImages( const VULKAN_HPP_NAMESPACE::Image * pImages_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + pImages = pImages_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & - setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FrameBoundaryEXT & setImages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & images_ ) VULKAN_HPP_NOEXCEPT { - handleType = handleType_; + imageCount = static_cast( images_.size() ); + pImages = images_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setBufferCount( uint32_t bufferCount_ ) VULKAN_HPP_NOEXCEPT { - handle = handle_; + bufferCount = bufferCount_; return *this; } - 
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPBuffers( const VULKAN_HPP_NAMESPACE::Buffer * pBuffers_ ) VULKAN_HPP_NOEXCEPT { - name = name_; + pBuffers = pBuffers_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkImportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FrameBoundaryEXT & setBuffers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & buffers_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + bufferCount = static_cast( buffers_.size() ); + pBuffers = buffers_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + tagName = tagName_; + return *this; } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT + { + tagSize = tagSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPTag( const void * pTag_ ) VULKAN_HPP_NOEXCEPT + { + pTag = pTag_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + FrameBoundaryEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tag_ ) VULKAN_HPP_NOEXCEPT + { + tagSize = tag_.size() * sizeof( T ); + pTag = tag_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkFrameBoundaryEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFrameBoundaryEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + VULKAN_HPP_NAMESPACE::FrameBoundaryFlagsEXT const &, + uint64_t const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::Image * const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::Buffer * const &, + uint64_t const &, + size_t const &, + const void * const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, semaphore, flags, handleType, handle, name ); + return std::tie( sType, pNext, flags, frameID, imageCount, pImages, bufferCount, pBuffers, tagName, tagSize, pTag ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImportSemaphoreWin32HandleInfoKHR const & ) const = default; -# else - bool operator==( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FrameBoundaryEXT const & ) const = default; +#else + bool operator==( FrameBoundaryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && - ( handle == rhs.handle ) && ( name == rhs.name ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( frameID == rhs.frameID ) && ( imageCount == rhs.imageCount ) && + ( pImages == rhs.pImages ) && ( bufferCount 
== rhs.bufferCount ) && ( pBuffers == rhs.pBuffers ) && ( tagName == rhs.tagName ) && + ( tagSize == rhs.tagSize ) && ( pTag == rhs.pTag ); +# endif } - bool operator!=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( FrameBoundaryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; - HANDLE handle = {}; - LPCWSTR name = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFrameBoundaryEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::FrameBoundaryFlagsEXT flags = {}; + uint64_t frameID = {}; + uint32_t imageCount = {}; + const VULKAN_HPP_NAMESPACE::Image * pImages = {}; + uint32_t bufferCount = {}; + const VULKAN_HPP_NAMESPACE::Buffer * pBuffers = {}; + uint64_t tagName = {}; + size_t tagSize = {}; + const void * pTag = {}; }; template <> - struct CppType + struct CppType { - using Type = ImportSemaphoreWin32HandleInfoKHR; + using Type = FrameBoundaryEXT; }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#if defined( VK_USE_PLATFORM_FUCHSIA ) - struct ImportSemaphoreZirconHandleInfoFUCHSIA + struct FramebufferAttachmentImageInfo { - using NativeType = VkImportSemaphoreZirconHandleInfoFUCHSIA; + using NativeType = VkFramebufferAttachmentImageInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentImageInfo; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA( - VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, - zx_handle_t zirconHandle_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , semaphore( semaphore_ ) - , flags( flags_ ) - , handleType( handleType_ ) - , zirconHandle( zirconHandle_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + uint32_t width_ = {}, + uint32_t height_ = {}, + uint32_t layerCount_ = {}, + uint32_t viewFormatCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , usage{ usage_ } + , width{ width_ } + , height{ height_ } + , layerCount{ layerCount_ } + , viewFormatCount{ viewFormatCount_ } + , pViewFormats{ pViewFormats_ } { } - VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImportSemaphoreZirconHandleInfoFUCHSIA( 
VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT - : ImportSemaphoreZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + FramebufferAttachmentImageInfo( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : FramebufferAttachmentImageInfo( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImportSemaphoreZirconHandleInfoFUCHSIA & operator=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, + uint32_t width_, + uint32_t height_, + uint32_t layerCount_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , usage( usage_ ) + , width( width_ ) + , height( height_ ) + , layerCount( layerCount_ ) + , viewFormatCount( static_cast( viewFormats_.size() ) ) + , pViewFormats( viewFormats_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - ImportSemaphoreZirconHandleInfoFUCHSIA & operator=( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + FramebufferAttachmentImageInfo & operator=( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + FramebufferAttachmentImageInfo & operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - semaphore = semaphore_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + usage = usage_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & - setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT { - handleType = handleType_; + width = width_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setZirconHandle( zx_handle_t zirconHandle_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT { - zirconHandle = zirconHandle_; + height = height_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkImportSemaphoreZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & 
setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + layerCount = layerCount_; + return *this; } - operator VkImportSemaphoreZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + viewFormatCount = viewFormatCount_; + return *this; } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, semaphore, flags, handleType, zirconHandle ); + pViewFormats = pViewFormats_; + return *this; } -# endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - std::strong_ordering operator<=>( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + FramebufferAttachmentImageInfo & + setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_ ) VULKAN_HPP_NOEXCEPT { - if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) - return cmp; - if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) - return cmp; - if ( auto cmp = semaphore <=> rhs.semaphore; cmp != 0 ) - return cmp; - if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) - return cmp; - if ( auto cmp = handleType <=> rhs.handleType; cmp != 0 ) - return cmp; - if ( auto cmp = memcmp( &zirconHandle, &rhs.zirconHandle, sizeof( zx_handle_t ) ); cmp != 0 ) - return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + viewFormatCount = static_cast( viewFormats_.size() ); + pViewFormats = viewFormats_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - return std::strong_ordering::equivalent; + operator VkFramebufferAttachmentImageInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple # endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, usage, width, height, layerCount, viewFormatCount, pViewFormats ); + } +#endif - bool operator==( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( FramebufferAttachmentImageInfo const & ) const = default; +#else + bool operator==( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && - ( memcmp( &zirconHandle, &rhs.zirconHandle, sizeof( zx_handle_t ) ) == 0 ); +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) && ( width == rhs.width ) && + ( height == rhs.height ) && ( layerCount == rhs.layerCount ) && ( viewFormatCount == rhs.viewFormatCount ) && ( pViewFormats == rhs.pViewFormats ); +# endif } - bool operator!=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + 
bool operator!=( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; - zx_handle_t zirconHandle = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentImageInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + uint32_t width = {}; + uint32_t height = {}; + uint32_t layerCount = {}; + uint32_t viewFormatCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {}; }; template <> - struct CppType + struct CppType { - using Type = ImportSemaphoreZirconHandleInfoFUCHSIA; + using Type = FramebufferAttachmentImageInfo; }; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - struct IndirectCommandsLayoutTokenNV + using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo; + + struct FramebufferAttachmentsCreateInfo { - using NativeType = VkIndirectCommandsLayoutTokenNV; + using NativeType = VkFramebufferAttachmentsCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutTokenNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentsCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( - VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup, - uint32_t stream_ = {}, - uint32_t offset_ = {}, - uint32_t vertexBindingUnit_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ = {}, - uint32_t pushconstantOffset_ = {}, - uint32_t pushconstantSize_ = {}, - VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ = {}, - uint32_t indexTypeCount_ = {}, - const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ = {}, - const uint32_t * pIndexTypeValues_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , tokenType( tokenType_ ) - , stream( stream_ ) - , offset( offset_ ) - , vertexBindingUnit( vertexBindingUnit_ ) - , vertexDynamicStride( vertexDynamicStride_ ) - , pushconstantPipelineLayout( pushconstantPipelineLayout_ ) - , pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ) - , pushconstantOffset( pushconstantOffset_ ) - , pushconstantSize( pushconstantSize_ ) - , indirectStateFlags( indirectStateFlags_ ) - , indexTypeCount( indexTypeCount_ ) - , pIndexTypes( pIndexTypes_ ) - , pIndexTypeValues( pIndexTypeValues_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( uint32_t attachmentImageInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , attachmentImageInfoCount{ attachmentImageInfoCount_ } + , pAttachmentImageInfos{ 
pAttachmentImageInfos_ } { } - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - IndirectCommandsLayoutTokenNV( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT - : IndirectCommandsLayoutTokenNV( *reinterpret_cast( &rhs ) ) + FramebufferAttachmentsCreateInfo( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : FramebufferAttachmentsCreateInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectCommandsLayoutTokenNV( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_, - uint32_t stream_, - uint32_t offset_, - uint32_t vertexBindingUnit_, - VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_, - VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_, - VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_, - uint32_t pushconstantOffset_, - uint32_t pushconstantSize_, - VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypes_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypeValues_ = {}, - const void * pNext_ = nullptr ) + FramebufferAttachmentsCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachmentImageInfos_, + const void * pNext_ = nullptr ) : pNext( pNext_ ) - , tokenType( tokenType_ ) - , stream( stream_ ) - , offset( offset_ ) - , vertexBindingUnit( vertexBindingUnit_ ) - , vertexDynamicStride( vertexDynamicStride_ ) - , pushconstantPipelineLayout( pushconstantPipelineLayout_ ) - , pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ) - , pushconstantOffset( pushconstantOffset_ ) - , pushconstantSize( pushconstantSize_ ) - , indirectStateFlags( indirectStateFlags_ ) - , indexTypeCount( static_cast( indexTypes_.size() ) ) - , pIndexTypes( indexTypes_.data() ) - , pIndexTypeValues( indexTypeValues_.data() ) + , attachmentImageInfoCount( static_cast( attachmentImageInfos_.size() ) ) + , pAttachmentImageInfos( attachmentImageInfos_.data() ) { -# ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( indexTypes_.size() == indexTypeValues_.size() ); -# else - if ( indexTypes_.size() != indexTypeValues_.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING - "::IndirectCommandsLayoutTokenNV::IndirectCommandsLayoutTokenNV: indexTypes_.size() != indexTypeValues_.size()" ); - } -# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - IndirectCommandsLayoutTokenNV & operator=( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + FramebufferAttachmentsCreateInfo & operator=( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - IndirectCommandsLayoutTokenNV & operator=( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT + FramebufferAttachmentsCreateInfo & operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + 
VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ ) VULKAN_HPP_NOEXCEPT - { - tokenType = tokenType_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setStream( uint32_t stream_ ) VULKAN_HPP_NOEXCEPT - { - stream = stream_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT - { - offset = offset_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setVertexBindingUnit( uint32_t vertexBindingUnit_ ) VULKAN_HPP_NOEXCEPT - { - vertexBindingUnit = vertexBindingUnit_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setVertexDynamicStride( VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ ) VULKAN_HPP_NOEXCEPT - { - vertexDynamicStride = vertexDynamicStride_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & - setPushconstantPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ ) VULKAN_HPP_NOEXCEPT - { - pushconstantPipelineLayout = pushconstantPipelineLayout_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & - setPushconstantShaderStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ ) VULKAN_HPP_NOEXCEPT - { - pushconstantShaderStageFlags = pushconstantShaderStageFlags_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantOffset( uint32_t pushconstantOffset_ ) VULKAN_HPP_NOEXCEPT - { - pushconstantOffset = pushconstantOffset_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantSize( uint32_t pushconstantSize_ ) VULKAN_HPP_NOEXCEPT - { - pushconstantSize = pushconstantSize_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & - setIndirectStateFlags( VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ ) VULKAN_HPP_NOEXCEPT - { - indirectStateFlags = indirectStateFlags_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setIndexTypeCount( uint32_t indexTypeCount_ ) VULKAN_HPP_NOEXCEPT - { - indexTypeCount = indexTypeCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPIndexTypes( const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ ) VULKAN_HPP_NOEXCEPT - { - pIndexTypes = pIndexTypes_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectCommandsLayoutTokenNV & - setIndexTypes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypes_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT { - indexTypeCount = static_cast( indexTypes_.size() ); - pIndexTypes = indexTypes_.data(); + attachmentImageInfoCount = attachmentImageInfoCount_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPIndexTypeValues( const uint32_t * pIndexTypeValues_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & + setPAttachmentImageInfos( const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT { - pIndexTypeValues = 
pIndexTypeValues_; + pAttachmentImageInfos = pAttachmentImageInfos_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectCommandsLayoutTokenNV & - setIndexTypeValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypeValues_ ) VULKAN_HPP_NOEXCEPT + FramebufferAttachmentsCreateInfo & setAttachmentImageInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachmentImageInfos_ ) + VULKAN_HPP_NOEXCEPT { - indexTypeCount = static_cast( indexTypeValues_.size() ); - pIndexTypeValues = indexTypeValues_.data(); + attachmentImageInfoCount = static_cast( attachmentImageInfos_.size() ); + pAttachmentImageInfos = attachmentImageInfos_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkIndirectCommandsLayoutTokenNV const &() const VULKAN_HPP_NOEXCEPT + operator VkFramebufferAttachmentsCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkIndirectCommandsLayoutTokenNV &() VULKAN_HPP_NOEXCEPT + operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -40495,221 +42640,180 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - tokenType, - stream, - offset, - vertexBindingUnit, - vertexDynamicStride, - pushconstantPipelineLayout, - pushconstantShaderStageFlags, - pushconstantOffset, - pushconstantSize, - indirectStateFlags, - indexTypeCount, - pIndexTypes, - pIndexTypeValues ); + return std::tie( sType, pNext, attachmentImageInfoCount, pAttachmentImageInfos ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( IndirectCommandsLayoutTokenNV const & ) const = default; + auto operator<=>( FramebufferAttachmentsCreateInfo const & ) const = default; #else - bool operator==( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tokenType == rhs.tokenType ) && ( stream == rhs.stream ) && ( offset == rhs.offset ) && - ( vertexBindingUnit == rhs.vertexBindingUnit ) && ( vertexDynamicStride == rhs.vertexDynamicStride ) && - ( pushconstantPipelineLayout == rhs.pushconstantPipelineLayout ) && ( pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags ) && - ( pushconstantOffset == rhs.pushconstantOffset ) && ( pushconstantSize == rhs.pushconstantSize ) && - ( indirectStateFlags == rhs.indirectStateFlags ) && ( indexTypeCount == rhs.indexTypeCount ) && ( pIndexTypes == rhs.pIndexTypes ) && - ( pIndexTypeValues == rhs.pIndexTypeValues ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentImageInfoCount == rhs.attachmentImageInfoCount ) && + ( pAttachmentImageInfos == rhs.pAttachmentImageInfos ); # endif } - bool operator!=( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eIndirectCommandsLayoutTokenNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup; - uint32_t stream = {}; - uint32_t offset = {}; - uint32_t vertexBindingUnit = {}; - VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride = {}; - VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags = {}; - uint32_t pushconstantOffset = {}; - uint32_t pushconstantSize = {}; - VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags = {}; - uint32_t indexTypeCount = {}; - const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes = {}; - const uint32_t * pIndexTypeValues = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo; + const void * pNext = {}; + uint32_t attachmentImageInfoCount = {}; + const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos = {}; }; template <> - struct CppType + struct CppType { - using Type = IndirectCommandsLayoutTokenNV; + using Type = FramebufferAttachmentsCreateInfo; }; - struct IndirectCommandsLayoutCreateInfoNV + using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo; + + struct FramebufferCreateInfo { - using NativeType = VkIndirectCommandsLayoutCreateInfoNV; + using NativeType = VkFramebufferCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutCreateInfoNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - IndirectCommandsLayoutCreateInfoNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ = {}, - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, - uint32_t tokenCount_ = {}, - const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ = {}, - uint32_t streamCount_ = {}, - const uint32_t * pStreamStrides_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , tokenCount( tokenCount_ ) - , pTokens( pTokens_ ) - , streamCount( streamCount_ ) - , pStreamStrides( pStreamStrides_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {}, + uint32_t width_ = {}, + uint32_t height_ = {}, + uint32_t layers_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , renderPass{ renderPass_ } + , attachmentCount{ attachmentCount_ } + , pAttachments{ pAttachments_ } + , width{ width_ } + , height{ height_ } + , layers{ layers_ } { } - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - IndirectCommandsLayoutCreateInfoNV( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : IndirectCommandsLayoutCreateInfoNV( *reinterpret_cast( &rhs ) 
) + FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : FramebufferCreateInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectCommandsLayoutCreateInfoNV( - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_, - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tokens_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streamStrides_ = {}, - const void * pNext_ = nullptr ) + FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_, + VULKAN_HPP_NAMESPACE::RenderPass renderPass_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, + uint32_t width_ = {}, + uint32_t height_ = {}, + uint32_t layers_ = {}, + const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , tokenCount( static_cast( tokens_.size() ) ) - , pTokens( tokens_.data() ) - , streamCount( static_cast( streamStrides_.size() ) ) - , pStreamStrides( streamStrides_.data() ) + , renderPass( renderPass_ ) + , attachmentCount( static_cast( attachments_.size() ) ) + , pAttachments( attachments_.data() ) + , width( width_ ) + , height( height_ ) + , layers( layers_ ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - IndirectCommandsLayoutCreateInfoNV & operator=( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + FramebufferCreateInfo & operator=( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - IndirectCommandsLayoutCreateInfoNV & operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + FramebufferCreateInfo & operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & - setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT { - pipelineBindPoint = pipelineBindPoint_; + renderPass = renderPass_; return *this; } - VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT { - tokenCount = tokenCount_; + attachmentCount = attachmentCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & - setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ ) VULKAN_HPP_NOEXCEPT + 
VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT { - pTokens = pTokens_; + pAttachments = pAttachments_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectCommandsLayoutCreateInfoNV & - setTokens( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tokens_ ) VULKAN_HPP_NOEXCEPT + FramebufferCreateInfo & + setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT { - tokenCount = static_cast( tokens_.size() ); - pTokens = tokens_.data(); + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT { - streamCount = streamCount_; + width = width_; return *this; } - VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPStreamStrides( const uint32_t * pStreamStrides_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT { - pStreamStrides = pStreamStrides_; + height = height_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectCommandsLayoutCreateInfoNV & - setStreamStrides( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streamStrides_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setLayers( uint32_t layers_ ) VULKAN_HPP_NOEXCEPT { - streamCount = static_cast( streamStrides_.size() ); - pStreamStrides = streamStrides_.data(); + layers = layers_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkIndirectCommandsLayoutCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + operator VkFramebufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkIndirectCommandsLayoutCreateInfoNV &() VULKAN_HPP_NOEXCEPT + operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -40718,375 +42822,284 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + uint32_t const &, + uint32_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, pipelineBindPoint, tokenCount, pTokens, streamCount, pStreamStrides ); + return std::tie( sType, pNext, flags, renderPass, attachmentCount, pAttachments, width, height, layers ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( IndirectCommandsLayoutCreateInfoNV const & ) const = default; + auto operator<=>( FramebufferCreateInfo const & ) const = default; #else - bool operator==( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && - ( tokenCount == rhs.tokenCount ) && ( pTokens == rhs.pTokens ) && ( streamCount == rhs.streamCount ) && ( pStreamStrides == rhs.pStreamStrides ); + return ( 
sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( renderPass == rhs.renderPass ) && + ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) && ( width == rhs.width ) && ( height == rhs.height ) && + ( layers == rhs.layers ); # endif } - bool operator!=( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags = {}; - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; - uint32_t tokenCount = {}; - const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens = {}; - uint32_t streamCount = {}; - const uint32_t * pStreamStrides = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {}; + uint32_t width = {}; + uint32_t height = {}; + uint32_t layers = {}; }; template <> - struct CppType + struct CppType { - using Type = IndirectCommandsLayoutCreateInfoNV; + using Type = FramebufferCreateInfo; }; - struct InitializePerformanceApiInfoINTEL + struct FramebufferMixedSamplesCombinationNV { - using NativeType = VkInitializePerformanceApiInfoINTEL; + using NativeType = VkFramebufferMixedSamplesCombinationNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInitializePerformanceApiInfoINTEL; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferMixedSamplesCombinationNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( void * pUserData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pUserData( pUserData_ ) - { - } - - VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - : InitializePerformanceApiInfoINTEL( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , coverageReductionMode{ coverageReductionMode_ } + , rasterizationSamples{ rasterizationSamples_ } + , depthStencilSamples{ depthStencilSamples_ } + , colorSamples{ colorSamples_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - InitializePerformanceApiInfoINTEL & operator=( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + 
    VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-    InitializePerformanceApiInfoINTEL & operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : FramebufferMixedSamplesCombinationNV( *reinterpret_cast<FramebufferMixedSamplesCombinationNV const *>( &rhs ) )
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const *>( &rhs );
-      return *this;
     }

-#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
-    VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
-    {
-      pNext = pNext_;
-      return *this;
-    }
+    FramebufferMixedSamplesCombinationNV & operator=( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

-    VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT
+    FramebufferMixedSamplesCombinationNV & operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      pUserData = pUserData_;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV const *>( &rhs );
       return *this;
     }
-#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/

-    operator VkInitializePerformanceApiInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
+    operator VkFramebufferMixedSamplesCombinationNV const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( this );
+      return *reinterpret_cast<const VkFramebufferMixedSamplesCombinationNV *>( this );
     }

-    operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkInitializePerformanceApiInfoINTEL *>( this );
+      return *reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( this );
     }

 #if defined( VULKAN_HPP_USE_REFLECT )
 # if 14 <= VULKAN_HPP_CPP_VERSION
     auto
 # else
-    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, void * const &>
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::CoverageReductionModeNV const &,
+               VULKAN_HPP_NAMESPACE::SampleCountFlagBits const &,
+               VULKAN_HPP_NAMESPACE::SampleCountFlags const &,
+               VULKAN_HPP_NAMESPACE::SampleCountFlags const &>
 # endif
       reflect() const VULKAN_HPP_NOEXCEPT
     {
-      return std::tie( sType, pNext, pUserData );
+      return std::tie( sType, pNext, coverageReductionMode, rasterizationSamples, depthStencilSamples, colorSamples );
     }
 #endif

 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
-    auto operator<=>( InitializePerformanceApiInfoINTEL const & ) const = default;
+    auto operator<=>( FramebufferMixedSamplesCombinationNV const & ) const = default;
 #else
-    bool operator==( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
 # if defined( VULKAN_HPP_USE_REFLECT )
       return this->reflect() == rhs.reflect();
 # else
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pUserData == rhs.pUserData );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( coverageReductionMode == rhs.coverageReductionMode ) &&
+             ( rasterizationSamples == rhs.rasterizationSamples ) && ( depthStencilSamples == rhs.depthStencilSamples ) && ( colorSamples == rhs.colorSamples );
 # endif
     }

-    bool operator!=( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL;
-    const void * pNext = {};
-    void * pUserData = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = {};
   };

   template <>
-  struct CppType<StructureType, StructureType::eInitializePerformanceApiInfoINTEL>
+  struct CppType<StructureType, StructureType::eFramebufferMixedSamplesCombinationNV>
   {
-    using Type = InitializePerformanceApiInfoINTEL;
+    using Type = FramebufferMixedSamplesCombinationNV;
   };

-  struct InputAttachmentAspectReference
+  struct GeneratedCommandsInfoEXT
   {
-    using NativeType = VkInputAttachmentAspectReference;
+    using NativeType = VkGeneratedCommandsInfoEXT;

-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( uint32_t subpass_ = {},
-                                                         uint32_t inputAttachmentIndex_ = {},
-                                                         VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT
-      : subpass( subpass_ )
-      , inputAttachmentIndex( inputAttachmentIndex_ )
-      , aspectMask( aspectMask_ )
-    {
-    }
-
-    VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoEXT;

-    InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
-      : InputAttachmentAspectReference( *reinterpret_cast<InputAttachmentAspectReference const *>( &rhs ) )
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoEXT( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ = {},
+                                                   VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet_ = {},
+                                                   VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout_ = {},
+                                                   VULKAN_HPP_NAMESPACE::DeviceAddress indirectAddress_ = {},
+                                                   VULKAN_HPP_NAMESPACE::DeviceSize indirectAddressSize_ = {},
+                                                   VULKAN_HPP_NAMESPACE::DeviceAddress preprocessAddress_ = {},
+                                                   VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {},
+                                                   uint32_t maxSequenceCount_ = {},
+                                                   VULKAN_HPP_NAMESPACE::DeviceAddress sequenceCountAddress_ = {},
+                                                   uint32_t maxDrawCount_ = {},
+                                                   const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , shaderStages{ shaderStages_ }
+      , indirectExecutionSet{ indirectExecutionSet_ }
+      , indirectCommandsLayout{ indirectCommandsLayout_ }
+      , indirectAddress{ indirectAddress_ }
+      , indirectAddressSize{ indirectAddressSize_ }
+      , preprocessAddress{ preprocessAddress_ }
+      , preprocessSize{ preprocessSize_ }
+      , maxSequenceCount{ maxSequenceCount_ }
+      , sequenceCountAddress{ sequenceCountAddress_ }
+      , maxDrawCount{ maxDrawCount_ }
     {
     }
-#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

-    InputAttachmentAspectReference & operator=( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoEXT( GeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-    InputAttachmentAspectReference & operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
+    GeneratedCommandsInfoEXT( VkGeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : GeneratedCommandsInfoEXT( *reinterpret_cast<GeneratedCommandsInfoEXT const *>( &rhs ) )
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference const *>( &rhs );
-      return *this;
     }

-#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
-    VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
-    {
-      subpass = subpass_;
-      return *this;
-    }
+    GeneratedCommandsInfoEXT & operator=( GeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsInfoEXT & operator=( VkGeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - inputAttachmentIndex = inputAttachmentIndex_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - aspectMask = aspectMask_; + pNext = pNext_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkInputAttachmentAspectReference const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( subpass, inputAttachmentIndex, aspectMask ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( InputAttachmentAspectReference const & ) const = default; -#else - bool operator==( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( subpass == rhs.subpass ) && ( inputAttachmentIndex == rhs.inputAttachmentIndex ) && ( aspectMask == rhs.aspectMask ); -# endif - } - - bool operator!=( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - uint32_t subpass = {}; - uint32_t inputAttachmentIndex = {}; - VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; - }; - using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference; - - struct InstanceCreateInfo - { - using NativeType = VkInstanceCreateInfo; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInstanceCreateInfo; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {}, - const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ = {}, - uint32_t enabledLayerCount_ = {}, - const char * const * ppEnabledLayerNames_ = {}, - uint32_t enabledExtensionCount_ = {}, - const char * const * ppEnabledExtensionNames_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , pApplicationInfo( pApplicationInfo_ ) - , enabledLayerCount( enabledLayerCount_ ) - , ppEnabledLayerNames( ppEnabledLayerNames_ ) - , enabledExtensionCount( enabledExtensionCount_ ) - , ppEnabledExtensionNames( ppEnabledExtensionNames_ ) - { - } - - VULKAN_HPP_CONSTEXPR InstanceCreateInfo( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : InstanceCreateInfo( *reinterpret_cast( &rhs ) ) {} - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_, - const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_, - 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledLayerNames_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , pApplicationInfo( pApplicationInfo_ ) - , enabledLayerCount( static_cast( pEnabledLayerNames_.size() ) ) - , ppEnabledLayerNames( pEnabledLayerNames_.data() ) - , enabledExtensionCount( static_cast( pEnabledExtensionNames_.size() ) ) - , ppEnabledExtensionNames( pEnabledExtensionNames_.data() ) - { - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - InstanceCreateInfo & operator=( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - InstanceCreateInfo & operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setShaderStages( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + shaderStages = shaderStages_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & + setIndirectExecutionSet( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + indirectExecutionSet = indirectExecutionSet_; return *this; } - VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & + setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + indirectCommandsLayout = indirectCommandsLayout_; return *this; } - VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setIndirectAddress( VULKAN_HPP_NAMESPACE::DeviceAddress indirectAddress_ ) VULKAN_HPP_NOEXCEPT { - pApplicationInfo = pApplicationInfo_; + indirectAddress = indirectAddress_; return *this; } - VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setIndirectAddressSize( VULKAN_HPP_NAMESPACE::DeviceSize indirectAddressSize_ ) VULKAN_HPP_NOEXCEPT { - enabledLayerCount = enabledLayerCount_; + indirectAddressSize = indirectAddressSize_; return *this; } - VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setPreprocessAddress( VULKAN_HPP_NAMESPACE::DeviceAddress preprocessAddress_ ) VULKAN_HPP_NOEXCEPT { - ppEnabledLayerNames = ppEnabledLayerNames_; + preprocessAddress = preprocessAddress_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - InstanceCreateInfo & - setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT { - enabledLayerCount = static_cast( pEnabledLayerNames_.size() ); - ppEnabledLayerNames = pEnabledLayerNames_.data(); + preprocessSize = preprocessSize_; return *this; } -# endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setMaxSequenceCount( uint32_t maxSequenceCount_ ) VULKAN_HPP_NOEXCEPT { - enabledExtensionCount = enabledExtensionCount_; + maxSequenceCount = maxSequenceCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setSequenceCountAddress( VULKAN_HPP_NAMESPACE::DeviceAddress sequenceCountAddress_ ) VULKAN_HPP_NOEXCEPT { - ppEnabledExtensionNames = ppEnabledExtensionNames_; + sequenceCountAddress = sequenceCountAddress_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - InstanceCreateInfo & - setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setMaxDrawCount( uint32_t maxDrawCount_ ) VULKAN_HPP_NOEXCEPT { - enabledExtensionCount = static_cast( pEnabledExtensionNames_.size() ); - ppEnabledExtensionNames = pEnabledExtensionNames_.data(); + maxDrawCount = maxDrawCount_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkInstanceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkGeneratedCommandsInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkGeneratedCommandsInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -41095,353 +43108,356 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::DeviceAddress const &, + uint32_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, pApplicationInfo, enabledLayerCount, ppEnabledLayerNames, enabledExtensionCount, ppEnabledExtensionNames ); + return std::tie( sType, + pNext, + shaderStages, + indirectExecutionSet, + indirectCommandsLayout, + indirectAddress, + indirectAddressSize, + preprocessAddress, + preprocessSize, + maxSequenceCount, + sequenceCountAddress, + maxDrawCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - std::strong_ordering operator<=>( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT - { - if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) - return cmp; - if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) - return cmp; - if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) - return cmp; - if ( auto cmp = pApplicationInfo <=> rhs.pApplicationInfo; cmp != 0 ) - return cmp; - if ( auto cmp = enabledLayerCount <=> rhs.enabledLayerCount; cmp != 0 ) - return cmp; - for ( size_t i = 0; i < enabledLayerCount; ++i ) - { - if ( ppEnabledLayerNames[i] != rhs.ppEnabledLayerNames[i] ) - if ( auto cmp = strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ); cmp != 0 ) - return cmp < 0 ? 
std::strong_ordering::less : std::strong_ordering::greater; - } - if ( auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0 ) - return cmp; - for ( size_t i = 0; i < enabledExtensionCount; ++i ) - { - if ( ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i] ) - if ( auto cmp = strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ); cmp != 0 ) - return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater; - } - - return std::strong_ordering::equivalent; - } -#endif - - bool operator==( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + auto operator<=>( GeneratedCommandsInfoEXT const & ) const = default; +#else + bool operator==( GeneratedCommandsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pApplicationInfo == rhs.pApplicationInfo ) && - ( enabledLayerCount == rhs.enabledLayerCount ) && - [this, rhs] - { - bool equal = true; - for ( size_t i = 0; equal && ( i < enabledLayerCount ); ++i ) - { - equal = ( ( ppEnabledLayerNames[i] == rhs.ppEnabledLayerNames[i] ) || ( strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ) == 0 ) ); - } - return equal; - }() && ( enabledExtensionCount == rhs.enabledExtensionCount ) && - [this, rhs] - { - bool equal = true; - for ( size_t i = 0; equal && ( i < enabledExtensionCount ); ++i ) - { - equal = ( ( ppEnabledExtensionNames[i] == rhs.ppEnabledExtensionNames[i] ) || - ( strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ) == 0 ) ); - } - return equal; - }(); +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderStages == rhs.shaderStages ) && ( indirectExecutionSet == rhs.indirectExecutionSet ) && + ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && ( indirectAddress == rhs.indirectAddress ) && + ( indirectAddressSize == rhs.indirectAddressSize ) && ( preprocessAddress == rhs.preprocessAddress ) && ( preprocessSize == rhs.preprocessSize ) && + ( maxSequenceCount == rhs.maxSequenceCount ) && ( sequenceCountAddress == rhs.sequenceCountAddress ) && ( maxDrawCount == rhs.maxDrawCount ); +# endif } - bool operator!=( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( GeneratedCommandsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInstanceCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags = {}; - const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo = {}; - uint32_t enabledLayerCount = {}; - const char * const * ppEnabledLayerNames = {}; - uint32_t enabledExtensionCount = {}; - const char * const * ppEnabledExtensionNames = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages = {}; + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress indirectAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize indirectAddressSize = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress preprocessAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {}; + uint32_t maxSequenceCount = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress sequenceCountAddress = {}; + 
uint32_t maxDrawCount = {};
   };

   template <>
-  struct CppType<StructureType, StructureType::eInstanceCreateInfo>
+  struct CppType<StructureType, StructureType::eGeneratedCommandsInfoEXT>
   {
-    using Type = InstanceCreateInfo;
+    using Type = GeneratedCommandsInfoEXT;
   };

-  struct LayerProperties
+  struct IndirectCommandsStreamNV
   {
-    using NativeType = VkLayerProperties;
+    using NativeType = VkIndirectCommandsStreamNV;

-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR_14 LayerProperties( std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & layerName_ = {},
-                                             uint32_t specVersion_ = {},
-                                             uint32_t implementationVersion_ = {},
-                                             std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {} ) VULKAN_HPP_NOEXCEPT
-      : layerName( layerName_ )
-      , specVersion( specVersion_ )
-      , implementationVersion( implementationVersion_ )
-      , description( description_ )
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+                                                   VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT
+      : buffer{ buffer_ }
+      , offset{ offset_ }
     {
     }

-    VULKAN_HPP_CONSTEXPR_14 LayerProperties( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-    LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT : LayerProperties( *reinterpret_cast<LayerProperties const *>( &rhs ) ) {}
-#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+    IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
+      : IndirectCommandsStreamNV( *reinterpret_cast<IndirectCommandsStreamNV const *>( &rhs ) )
+    {
+    }

-    LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    IndirectCommandsStreamNV & operator=( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

-    LayerProperties & operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    IndirectCommandsStreamNV & operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LayerProperties const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV const *>( &rhs );
       return *this;
     }

-    operator VkLayerProperties const &() const VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkLayerProperties *>( this );
+      buffer = buffer_;
+      return *this;
     }

-    operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkLayerProperties *>( this );
+      offset = offset_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkIndirectCommandsStreamNV const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkIndirectCommandsStreamNV *>( this );
+    }
+
+    operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkIndirectCommandsStreamNV *>( this );
     }

 #if defined( VULKAN_HPP_USE_REFLECT )
 # if 14 <= VULKAN_HPP_CPP_VERSION
     auto
 # else
-    std::tuple<VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> const &,
-               uint32_t const &,
-               uint32_t const &,
-               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &>
+    std::tuple<VULKAN_HPP_NAMESPACE::Buffer const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
 # endif
       reflect() const VULKAN_HPP_NOEXCEPT
     {
-      return std::tie( layerName, specVersion, implementationVersion, description );
+      return std::tie( buffer, offset );
     }
 #endif

 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
-    auto operator<=>( LayerProperties const & ) const = default;
+    auto operator<=>( IndirectCommandsStreamNV const & ) const = default;
 #else
-    bool operator==( LayerProperties const
& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( layerName == rhs.layerName ) && ( specVersion == rhs.specVersion ) && ( implementationVersion == rhs.implementationVersion ) && - ( description == rhs.description ); + return ( buffer == rhs.buffer ) && ( offset == rhs.offset ); # endif } - bool operator!=( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ArrayWrapper1D layerName = {}; - uint32_t specVersion = {}; - uint32_t implementationVersion = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; }; -#if defined( VK_USE_PLATFORM_MACOS_MVK ) - struct MacOSSurfaceCreateInfoMVK + struct GeneratedCommandsInfoNV { - using NativeType = VkMacOSSurfaceCreateInfoMVK; + using NativeType = VkGeneratedCommandsInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMacosSurfaceCreateInfoMVK; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoNV; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {}, - const void * pView_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , pView( pView_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, + uint32_t streamCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ = {}, + uint32_t sequencesCount_ = {}, + VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineBindPoint{ pipelineBindPoint_ } + , pipeline{ pipeline_ } + , indirectCommandsLayout{ indirectCommandsLayout_ } + , streamCount{ streamCount_ } + , pStreams{ pStreams_ } + , sequencesCount{ sequencesCount_ } + , preprocessBuffer{ preprocessBuffer_ } + , preprocessOffset{ preprocessOffset_ } + , preprocessSize{ preprocessSize_ } + , sequencesCountBuffer{ sequencesCountBuffer_ } + , sequencesCountOffset{ sequencesCountOffset_ } + , sequencesIndexBuffer{ sequencesIndexBuffer_ } + , sequencesIndexOffset{ sequencesIndexOffset_ } { } - VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - MacOSSurfaceCreateInfoMVK( 
VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT - : MacOSSurfaceCreateInfoMVK( *reinterpret_cast( &rhs ) ) + GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GeneratedCommandsInfoNV( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - MacOSSurfaceCreateInfoMVK & operator=( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streams_, + uint32_t sequencesCount_ = {}, + VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, + VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , pipeline( pipeline_ ) + , indirectCommandsLayout( indirectCommandsLayout_ ) + , streamCount( static_cast( streams_.size() ) ) + , pStreams( streams_.data() ) + , sequencesCount( sequencesCount_ ) + , preprocessBuffer( preprocessBuffer_ ) + , preprocessOffset( preprocessOffset_ ) + , preprocessSize( preprocessSize_ ) + , sequencesCountBuffer( sequencesCountBuffer_ ) + , sequencesCountOffset( sequencesCountOffset_ ) + , sequencesIndexBuffer( sequencesIndexBuffer_ ) + , sequencesIndexOffset( sequencesIndexOffset_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - MacOSSurfaceCreateInfoMVK & operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsInfoNV & operator=( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + GeneratedCommandsInfoNV & operator=( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + pipelineBindPoint = pipelineBindPoint_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT { - pView = pView_; + pipeline = pipeline_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkMacOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & + 
setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + indirectCommandsLayout = indirectCommandsLayout_; + return *this; } - operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + streamCount = streamCount_; + return *this; } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std:: - tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, pView ); + pStreams = pStreams_; + return *this; } -# endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MacOSSurfaceCreateInfoMVK const & ) const = default; -# else - bool operator==( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GeneratedCommandsInfoNV & + setStreams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streams_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView ); -# endif + streamCount = static_cast( streams_.size() ); + pStreams = streams_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - bool operator!=( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCount( uint32_t sequencesCount_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + sequencesCount = sequencesCount_; + return *this; } -# endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags = {}; - const void * pView = {}; - }; - - template <> - struct CppType - { - using Type = MacOSSurfaceCreateInfoMVK; - }; -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ - - struct MappedMemoryRange - { - using NativeType = VkMappedMemoryRange; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMappedMemoryRange; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MappedMemoryRange( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memory( memory_ ) - , offset( offset_ ) - , size( size_ ) + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessBuffer( VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT { + preprocessBuffer = preprocessBuffer_; + return *this; } - VULKAN_HPP_CONSTEXPR MappedMemoryRange( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT : MappedMemoryRange( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - MappedMemoryRange & operator=( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessOffset( 
VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT + { + preprocessOffset = preprocessOffset_; + return *this; + } - MappedMemoryRange & operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + preprocessSize = preprocessSize_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + sequencesCountBuffer = sequencesCountBuffer_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT { - memory = memory_; + sequencesCountOffset = sequencesCountOffset_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT { - offset = offset_; + sequencesIndexBuffer = sequencesIndexBuffer_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT { - size = size_; + sequencesIndexOffset = sequencesIndexOffset_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkMappedMemoryRange const &() const VULKAN_HPP_NOEXCEPT + operator VkGeneratedCommandsInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT + operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -41450,910 +43466,1182 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, memory, offset, size ); + return std::tie( sType, + pNext, + pipelineBindPoint, + pipeline, + indirectCommandsLayout, + streamCount, + pStreams, + sequencesCount, + preprocessBuffer, + preprocessOffset, + preprocessSize, + sequencesCountBuffer, + sequencesCountOffset, + sequencesIndexBuffer, + sequencesIndexOffset ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MappedMemoryRange const & ) const = default; + auto operator<=>( GeneratedCommandsInfoNV const & ) const = default; #else - bool operator==( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( offset == rhs.offset ) && ( size == rhs.size ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( 
pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipeline == rhs.pipeline ) && + ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && ( streamCount == rhs.streamCount ) && ( pStreams == rhs.pStreams ) && + ( sequencesCount == rhs.sequencesCount ) && ( preprocessBuffer == rhs.preprocessBuffer ) && ( preprocessOffset == rhs.preprocessOffset ) && + ( preprocessSize == rhs.preprocessSize ) && ( sequencesCountBuffer == rhs.sequencesCountBuffer ) && + ( sequencesCountOffset == rhs.sequencesCountOffset ) && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer ) && + ( sequencesIndexOffset == rhs.sequencesIndexOffset ); # endif } - bool operator!=( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {}; + uint32_t streamCount = {}; + const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV * pStreams = {}; + uint32_t sequencesCount = {}; + VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {}; + VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {}; + VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {}; }; template <> - struct CppType + struct CppType { - using Type = MappedMemoryRange; + using Type = GeneratedCommandsInfoNV; }; - struct MemoryAllocateFlagsInfo + struct GeneratedCommandsMemoryRequirementsInfoEXT { - using NativeType = VkMemoryAllocateFlagsInfo; + using NativeType = VkGeneratedCommandsMemoryRequirementsInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateFlagsInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsMemoryRequirementsInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {}, - uint32_t deviceMask_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , deviceMask( deviceMask_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout_ = {}, + uint32_t maxSequenceCount_ = {}, + uint32_t maxDrawCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , indirectExecutionSet{ indirectExecutionSet_ } + , indirectCommandsLayout{ indirectCommandsLayout_ } + , maxSequenceCount{ maxSequenceCount_ } + , maxDrawCount{ maxDrawCount_ } { } - 
VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoEXT( GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : MemoryAllocateFlagsInfo( *reinterpret_cast( &rhs ) ) + GeneratedCommandsMemoryRequirementsInfoEXT( VkGeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : GeneratedCommandsMemoryRequirementsInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - MemoryAllocateFlagsInfo & operator=( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + GeneratedCommandsMemoryRequirementsInfoEXT & operator=( GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - MemoryAllocateFlagsInfo & operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsMemoryRequirementsInfoEXT & operator=( VkGeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & + setIndirectExecutionSet( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + indirectExecutionSet = indirectExecutionSet_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & + setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT { - deviceMask = deviceMask_; + indirectCommandsLayout = indirectCommandsLayout_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkMemoryAllocateFlagsInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & setMaxSequenceCount( uint32_t maxSequenceCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + maxSequenceCount = maxSequenceCount_; + return *this; } - operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & setMaxDrawCount( uint32_t maxDrawCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + maxDrawCount = maxDrawCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkGeneratedCommandsMemoryRequirementsInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsMemoryRequirementsInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const 
VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, deviceMask ); + return std::tie( sType, pNext, indirectExecutionSet, indirectCommandsLayout, maxSequenceCount, maxDrawCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MemoryAllocateFlagsInfo const & ) const = default; + auto operator<=>( GeneratedCommandsMemoryRequirementsInfoEXT const & ) const = default; #else - bool operator==( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( deviceMask == rhs.deviceMask ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indirectExecutionSet == rhs.indirectExecutionSet ) && + ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && ( maxSequenceCount == rhs.maxSequenceCount ) && ( maxDrawCount == rhs.maxDrawCount ); # endif } - bool operator!=( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateFlagsInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags = {}; - uint32_t deviceMask = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout = {}; + uint32_t maxSequenceCount = {}; + uint32_t maxDrawCount = {}; }; template <> - struct CppType + struct CppType { - using Type = MemoryAllocateFlagsInfo; + using Type = GeneratedCommandsMemoryRequirementsInfoEXT; }; - using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo; - struct MemoryAllocateInfo + struct GeneratedCommandsMemoryRequirementsInfoNV { - using NativeType = VkMemoryAllocateInfo; + using NativeType = VkGeneratedCommandsMemoryRequirementsInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, - uint32_t memoryTypeIndex_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , allocationSize( allocationSize_ ) - , memoryTypeIndex( memoryTypeIndex_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, + uint32_t maxSequencesCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineBindPoint{ pipelineBindPoint_ } + , pipeline{ pipeline_ } + , indirectCommandsLayout{ indirectCommandsLayout_ } + , maxSequencesCount{ 
maxSequencesCount_ } { } - VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryAllocateInfo( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + GeneratedCommandsMemoryRequirementsInfoNV( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GeneratedCommandsMemoryRequirementsInfoNV( *reinterpret_cast( &rhs ) ) + { + } - MemoryAllocateInfo & operator=( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + GeneratedCommandsMemoryRequirementsInfoNV & operator=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - MemoryAllocateInfo & operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + GeneratedCommandsMemoryRequirementsInfoNV & operator=( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT { - allocationSize = allocationSize_; + pipelineBindPoint = pipelineBindPoint_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT { - memoryTypeIndex = memoryTypeIndex_; + pipeline = pipeline_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & + setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + indirectCommandsLayout = indirectCommandsLayout_; + return *this; } - operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + maxSequencesCount = maxSequencesCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkGeneratedCommandsMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { 
-      return std::tie( sType, pNext, allocationSize, memoryTypeIndex );
+      return std::tie( sType, pNext, pipelineBindPoint, pipeline, indirectCommandsLayout, maxSequencesCount );
     }
 #endif

 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
-    auto operator<=>( MemoryAllocateInfo const & ) const = default;
+    auto operator<=>( GeneratedCommandsMemoryRequirementsInfoNV const & ) const = default;
 #else
-    bool operator==( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
 # if defined( VULKAN_HPP_USE_REFLECT )
       return this->reflect() == rhs.reflect();
 # else
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && ( memoryTypeIndex == rhs.memoryTypeIndex );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipeline == rhs.pipeline ) &&
+             ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && ( maxSequencesCount == rhs.maxSequencesCount );
 # endif
     }

-    bool operator!=( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateInfo;
-    const void * pNext = {};
-    VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
-    uint32_t memoryTypeIndex = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {};
+    uint32_t maxSequencesCount = {};
   };

   template <>
-  struct CppType<StructureType, StructureType::eMemoryAllocateInfo>
+  struct CppType<StructureType, StructureType::eGeneratedCommandsMemoryRequirementsInfoNV>
   {
-    using Type = MemoryAllocateInfo;
+    using Type = GeneratedCommandsMemoryRequirementsInfoNV;
   };

-  struct MemoryBarrier
+  struct GeneratedCommandsPipelineInfoEXT
   {
-    using NativeType = VkMemoryBarrier;
+    using NativeType = VkGeneratedCommandsPipelineInfoEXT;

     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsPipelineInfoEXT;

-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR MemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
-                                        VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
-                                        const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
-      : pNext( pNext_ )
-      , srcAccessMask( srcAccessMask_ )
-      , dstAccessMask( dstAccessMask_ )
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR GeneratedCommandsPipelineInfoEXT( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , pipeline{ pipeline_ }
     {
     }

-    VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryBarrier( *reinterpret_cast<MemoryBarrier const *>( &rhs ) ) {}
-#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
-
-    MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR GeneratedCommandsPipelineInfoEXT( GeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-    MemoryBarrier & operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    GeneratedCommandsPipelineInfoEXT( VkGeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : GeneratedCommandsPipelineInfoEXT( *reinterpret_cast<GeneratedCommandsPipelineInfoEXT const *>( &rhs ) )
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier const *>( &rhs );
-      return *this;
     }

-#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
-    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    GeneratedCommandsPipelineInfoEXT & operator=( GeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    GeneratedCommandsPipelineInfoEXT & operator=( VkGeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      pNext = pNext_;
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsPipelineInfoEXT const *>( &rhs );
       return *this;
     }

-    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsPipelineInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
-      srcAccessMask = srcAccessMask_;
+      pNext = pNext_;
       return *this;
     }

-    VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsPipelineInfoEXT & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
     {
-      dstAccessMask = dstAccessMask_;
+      pipeline = pipeline_;
       return *this;
     }
-#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+#endif /*VULKAN_HPP_NO_SETTERS*/

-    operator VkMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT
+    operator VkGeneratedCommandsPipelineInfoEXT const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkMemoryBarrier *>( this );
+      return *reinterpret_cast<const VkGeneratedCommandsPipelineInfoEXT *>( this );
     }

-    operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT
+    operator VkGeneratedCommandsPipelineInfoEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkMemoryBarrier *>( this );
+      return *reinterpret_cast<VkGeneratedCommandsPipelineInfoEXT *>( this );
     }

 #if defined( VULKAN_HPP_USE_REFLECT )
 # if 14 <= VULKAN_HPP_CPP_VERSION
     auto
 # else
-    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::AccessFlags const &, VULKAN_HPP_NAMESPACE::AccessFlags const &>
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Pipeline const &>
 # endif
      reflect() const VULKAN_HPP_NOEXCEPT
     {
-      return std::tie( sType, pNext, srcAccessMask, dstAccessMask );
+      return std::tie( sType, pNext, pipeline );
     }
 #endif

 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
-    auto operator<=>( MemoryBarrier const & ) const = default;
+    auto operator<=>( GeneratedCommandsPipelineInfoEXT const & ) const = default;
 #else
-    bool operator==( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( GeneratedCommandsPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
 # if defined( VULKAN_HPP_USE_REFLECT )
       return this->reflect() == rhs.reflect();
 # else
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline );
 # endif
     }

-    bool operator!=( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( GeneratedCommandsPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier;
-    const void * pNext = {};
-    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
-    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsPipelineInfoEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
   };

   template <>
-  struct CppType<StructureType, StructureType::eMemoryBarrier>
+  struct CppType<StructureType, StructureType::eGeneratedCommandsPipelineInfoEXT>
   {
-    using Type = MemoryBarrier;
+    using Type = GeneratedCommandsPipelineInfoEXT;
   };

-  struct MemoryDedicatedAllocateInfo
+  struct GeneratedCommandsShaderInfoEXT
   {
-    using NativeType = VkMemoryDedicatedAllocateInfo;
+    using NativeType = VkGeneratedCommandsShaderInfoEXT;

     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedAllocateInfo;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsShaderInfoEXT;

-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( VULKAN_HPP_NAMESPACE::Image image_ = {},
-                                                      VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
-                                                      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
-      : pNext( pNext_ )
-      , image( image_ )
-      , buffer( buffer_ )
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR GeneratedCommandsShaderInfoEXT( uint32_t shaderCount_ = {},
+                                                         const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders_ = {},
+                                                         void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , shaderCount{ shaderCount_ }
+      , pShaders{ pShaders_ }
     {
     }

-    VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR GeneratedCommandsShaderInfoEXT( GeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-    MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-      : MemoryDedicatedAllocateInfo( *reinterpret_cast<MemoryDedicatedAllocateInfo const *>( &rhs ) )
+    GeneratedCommandsShaderInfoEXT( VkGeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : GeneratedCommandsShaderInfoEXT( *reinterpret_cast<GeneratedCommandsShaderInfoEXT const *>( &rhs ) )
     {
     }
-#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

-    MemoryDedicatedAllocateInfo & operator=( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    GeneratedCommandsShaderInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & shaders_,
+                                    void * pNext_ = nullptr )
+      : pNext( pNext_ ), shaderCount( static_cast<uint32_t>( shaders_.size() ) ), pShaders( shaders_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

-    MemoryDedicatedAllocateInfo & operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    GeneratedCommandsShaderInfoEXT & operator=( GeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    GeneratedCommandsShaderInfoEXT & operator=( VkGeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsShaderInfoEXT const *>( &rhs );
       return *this;
     }

-#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
-    VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsShaderInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsShaderInfoEXT & setShaderCount( uint32_t shaderCount_ ) VULKAN_HPP_NOEXCEPT
     {
-      image =
image_; + shaderCount = shaderCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsShaderInfoEXT & setPShaders( const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders_ ) VULKAN_HPP_NOEXCEPT { - buffer = buffer_; + pShaders = pShaders_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkMemoryDedicatedAllocateInfo const &() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GeneratedCommandsShaderInfoEXT & + setShaders( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shaders_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + shaderCount = static_cast( shaders_.size() ); + pShaders = shaders_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT + operator VkGeneratedCommandsShaderInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsShaderInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, image, buffer ); + return std::tie( sType, pNext, shaderCount, pShaders ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MemoryDedicatedAllocateInfo const & ) const = default; + auto operator<=>( GeneratedCommandsShaderInfoEXT const & ) const = default; #else - bool operator==( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( GeneratedCommandsShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCount == rhs.shaderCount ) && ( pShaders == rhs.pShaders ); # endif } - bool operator!=( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( GeneratedCommandsShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Image image = {}; - VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsShaderInfoEXT; + void * pNext = {}; + uint32_t shaderCount = {}; + const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders = {}; }; template <> - struct CppType + struct CppType { - using Type = MemoryDedicatedAllocateInfo; + using Type = GeneratedCommandsShaderInfoEXT; }; - using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo; - struct MemoryDedicatedRequirements + struct LatencyTimingsFrameReportNV { - using NativeType = VkMemoryDedicatedRequirements; + using NativeType = VkLatencyTimingsFrameReportNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedRequirements; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencyTimingsFrameReportNV; -#if !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , prefersDedicatedAllocation( prefersDedicatedAllocation_ ) - , requiresDedicatedAllocation( requiresDedicatedAllocation_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR LatencyTimingsFrameReportNV( uint64_t presentID_ = {}, + uint64_t inputSampleTimeUs_ = {}, + uint64_t simStartTimeUs_ = {}, + uint64_t simEndTimeUs_ = {}, + uint64_t renderSubmitStartTimeUs_ = {}, + uint64_t renderSubmitEndTimeUs_ = {}, + uint64_t presentStartTimeUs_ = {}, + uint64_t presentEndTimeUs_ = {}, + uint64_t driverStartTimeUs_ = {}, + uint64_t driverEndTimeUs_ = {}, + uint64_t osRenderQueueStartTimeUs_ = {}, + uint64_t osRenderQueueEndTimeUs_ = {}, + uint64_t gpuRenderStartTimeUs_ = {}, + uint64_t gpuRenderEndTimeUs_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentID{ presentID_ } + , inputSampleTimeUs{ inputSampleTimeUs_ } + , simStartTimeUs{ simStartTimeUs_ } + , simEndTimeUs{ simEndTimeUs_ } + , renderSubmitStartTimeUs{ renderSubmitStartTimeUs_ } + , renderSubmitEndTimeUs{ renderSubmitEndTimeUs_ } + , presentStartTimeUs{ presentStartTimeUs_ } + , presentEndTimeUs{ presentEndTimeUs_ } + , driverStartTimeUs{ driverStartTimeUs_ } + , driverEndTimeUs{ driverEndTimeUs_ } + , osRenderQueueStartTimeUs{ osRenderQueueStartTimeUs_ } + , osRenderQueueEndTimeUs{ osRenderQueueEndTimeUs_ } + , gpuRenderStartTimeUs{ gpuRenderStartTimeUs_ } + , gpuRenderEndTimeUs{ gpuRenderEndTimeUs_ } { } - VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR LatencyTimingsFrameReportNV( LatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT - : MemoryDedicatedRequirements( *reinterpret_cast( &rhs ) ) + LatencyTimingsFrameReportNV( VkLatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT + : LatencyTimingsFrameReportNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - MemoryDedicatedRequirements & operator=( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + LatencyTimingsFrameReportNV & operator=( LatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - MemoryDedicatedRequirements & operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + LatencyTimingsFrameReportNV & operator=( VkLatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkMemoryDedicatedRequirements const &() const VULKAN_HPP_NOEXCEPT + operator VkLatencyTimingsFrameReportNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT + operator VkLatencyTimingsFrameReportNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT 
{ - return std::tie( sType, pNext, prefersDedicatedAllocation, requiresDedicatedAllocation ); + return std::tie( sType, + pNext, + presentID, + inputSampleTimeUs, + simStartTimeUs, + simEndTimeUs, + renderSubmitStartTimeUs, + renderSubmitEndTimeUs, + presentStartTimeUs, + presentEndTimeUs, + driverStartTimeUs, + driverEndTimeUs, + osRenderQueueStartTimeUs, + osRenderQueueEndTimeUs, + gpuRenderStartTimeUs, + gpuRenderEndTimeUs ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MemoryDedicatedRequirements const & ) const = default; + auto operator<=>( LatencyTimingsFrameReportNV const & ) const = default; #else - bool operator==( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( LatencyTimingsFrameReportNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation ) && - ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentID == rhs.presentID ) && ( inputSampleTimeUs == rhs.inputSampleTimeUs ) && + ( simStartTimeUs == rhs.simStartTimeUs ) && ( simEndTimeUs == rhs.simEndTimeUs ) && ( renderSubmitStartTimeUs == rhs.renderSubmitStartTimeUs ) && + ( renderSubmitEndTimeUs == rhs.renderSubmitEndTimeUs ) && ( presentStartTimeUs == rhs.presentStartTimeUs ) && + ( presentEndTimeUs == rhs.presentEndTimeUs ) && ( driverStartTimeUs == rhs.driverStartTimeUs ) && ( driverEndTimeUs == rhs.driverEndTimeUs ) && + ( osRenderQueueStartTimeUs == rhs.osRenderQueueStartTimeUs ) && ( osRenderQueueEndTimeUs == rhs.osRenderQueueEndTimeUs ) && + ( gpuRenderStartTimeUs == rhs.gpuRenderStartTimeUs ) && ( gpuRenderEndTimeUs == rhs.gpuRenderEndTimeUs ); # endif } - bool operator!=( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( LatencyTimingsFrameReportNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedRequirements; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation = {}; - VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencyTimingsFrameReportNV; + const void * pNext = {}; + uint64_t presentID = {}; + uint64_t inputSampleTimeUs = {}; + uint64_t simStartTimeUs = {}; + uint64_t simEndTimeUs = {}; + uint64_t renderSubmitStartTimeUs = {}; + uint64_t renderSubmitEndTimeUs = {}; + uint64_t presentStartTimeUs = {}; + uint64_t presentEndTimeUs = {}; + uint64_t driverStartTimeUs = {}; + uint64_t driverEndTimeUs = {}; + uint64_t osRenderQueueStartTimeUs = {}; + uint64_t osRenderQueueEndTimeUs = {}; + uint64_t gpuRenderStartTimeUs = {}; + uint64_t gpuRenderEndTimeUs = {}; }; template <> - struct CppType + struct CppType { - using Type = MemoryDedicatedRequirements; + using Type = LatencyTimingsFrameReportNV; }; - using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements; - struct MemoryFdPropertiesKHR + struct GetLatencyMarkerInfoNV { - using NativeType = VkMemoryFdPropertiesKHR; + using NativeType = VkGetLatencyMarkerInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryFdPropertiesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType 
structureType = StructureType::eGetLatencyMarkerInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memoryTypeBits( memoryTypeBits_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GetLatencyMarkerInfoNV( uint32_t timingCount_ = {}, + VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV * pTimings_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , timingCount{ timingCount_ } + , pTimings{ pTimings_ } { } - VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR GetLatencyMarkerInfoNV( GetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : MemoryFdPropertiesKHR( *reinterpret_cast( &rhs ) ) + GetLatencyMarkerInfoNV( VkGetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GetLatencyMarkerInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - MemoryFdPropertiesKHR & operator=( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GetLatencyMarkerInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & timings_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), timingCount( static_cast( timings_.size() ) ), pTimings( timings_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - MemoryFdPropertiesKHR & operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + GetLatencyMarkerInfoNV & operator=( GetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + GetLatencyMarkerInfoNV & operator=( VkGetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkMemoryFdPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GetLatencyMarkerInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GetLatencyMarkerInfoNV & setTimingCount( uint32_t timingCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + timingCount = timingCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GetLatencyMarkerInfoNV & setPTimings( VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV * pTimings_ ) VULKAN_HPP_NOEXCEPT + { + pTimings = pTimings_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GetLatencyMarkerInfoNV & + setTimings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & timings_ ) VULKAN_HPP_NOEXCEPT + { + timingCount = static_cast( timings_.size() ); + pTimings = timings_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkGetLatencyMarkerInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGetLatencyMarkerInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + 
std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, memoryTypeBits ); + return std::tie( sType, pNext, timingCount, pTimings ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MemoryFdPropertiesKHR const & ) const = default; + auto operator<=>( GetLatencyMarkerInfoNV const & ) const = default; #else - bool operator==( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( GetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timingCount == rhs.timingCount ) && ( pTimings == rhs.pTimings ); # endif } - bool operator!=( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( GetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryFdPropertiesKHR; - void * pNext = {}; - uint32_t memoryTypeBits = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGetLatencyMarkerInfoNV; + const void * pNext = {}; + uint32_t timingCount = {}; + VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV * pTimings = {}; }; template <> - struct CppType + struct CppType { - using Type = MemoryFdPropertiesKHR; + using Type = GetLatencyMarkerInfoNV; }; -#if defined( VK_USE_PLATFORM_ANDROID_KHR ) - struct MemoryGetAndroidHardwareBufferInfoANDROID + struct VertexInputBindingDescription { - using NativeType = VkMemoryGetAndroidHardwareBufferInfoANDROID; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; + using NativeType = VkVertexInputBindingDescription; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memory( memory_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VertexInputBindingDescription( uint32_t binding_ = {}, + uint32_t stride_ = {}, + VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex ) VULKAN_HPP_NOEXCEPT + : binding{ binding_ } + , stride{ stride_ } + , inputRate{ inputRate_ } { } - VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; - MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT - : MemoryGetAndroidHardwareBufferInfoANDROID( *reinterpret_cast( &rhs ) ) + VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputBindingDescription( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - MemoryGetAndroidHardwareBufferInfoANDROID & operator=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VertexInputBindingDescription & operator=( VertexInputBindingDescription 
const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - MemoryGetAndroidHardwareBufferInfoANDROID & operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + VertexInputBindingDescription & operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + binding = binding_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT { - memory = memory_; + stride = stride_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkMemoryGetAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + inputRate = inputRate_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT + operator VkVertexInputBindingDescription const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, memory ); + return *reinterpret_cast( this ); } -# endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MemoryGetAndroidHardwareBufferInfoANDROID const & ) const = default; +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto # else - bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ); -# endif + return std::tie( binding, stride, inputRate ); } +#endif - bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VertexInputBindingDescription const & ) const = default; +#else + bool operator==( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( binding == rhs.binding ) && ( stride == rhs.stride ) && ( inputRate == rhs.inputRate ); +# endif + } + + bool operator!=( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; - const void * pNext = {}; - 
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + uint32_t binding = {}; + uint32_t stride = {}; + VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex; }; - template <> - struct CppType + struct VertexInputAttributeDescription { - using Type = MemoryGetAndroidHardwareBufferInfoANDROID; - }; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + using NativeType = VkVertexInputAttributeDescription; - struct MemoryGetFdInfoKHR - { - using NativeType = VkMemoryGetFdInfoKHR; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( uint32_t location_ = {}, + uint32_t binding_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint32_t offset_ = {} ) VULKAN_HPP_NOEXCEPT + : location{ location_ } + , binding{ binding_ } + , format{ format_ } + , offset{ offset_ } + { + } - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetFdInfoKHR; + VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( - VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memory( memory_ ) - , handleType( handleType_ ) + VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputAttributeDescription( *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryGetFdInfoKHR( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + VertexInputAttributeDescription & operator=( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - MemoryGetFdInfoKHR & operator=( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VertexInputAttributeDescription & operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } - MemoryGetFdInfoKHR & operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + location = location_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + binding = binding_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT { - memory = memory_; + format = format_; return *this; } - VULKAN_HPP_CONSTEXPR_14 
MemoryGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT { - handleType = handleType_; + offset = offset_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkMemoryGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkVertexInputAttributeDescription const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, memory, handleType ); + return std::tie( location, binding, format, offset ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MemoryGetFdInfoKHR const & ) const = default; + auto operator<=>( VertexInputAttributeDescription const & ) const = default; #else - bool operator==( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType ); + return ( location == rhs.location ) && ( binding == rhs.binding ) && ( format == rhs.format ) && ( offset == rhs.offset ); # endif } - bool operator!=( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; - }; - - template <> - struct CppType - { - using Type = MemoryGetFdInfoKHR; + uint32_t location = {}; + uint32_t binding = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint32_t offset = {}; }; - struct MemoryGetRemoteAddressInfoNV + struct PipelineVertexInputStateCreateInfo { - using NativeType = VkMemoryGetRemoteAddressInfoNV; + using NativeType = VkPipelineVertexInputStateCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetRemoteAddressInfoNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputStateCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MemoryGetRemoteAddressInfoNV( - VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memory( memory_ ) - , handleType( handleType_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
PipelineVertexInputStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {}, + uint32_t vertexBindingDescriptionCount_ = {}, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ = {}, + uint32_t vertexAttributeDescriptionCount_ = {}, + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , vertexBindingDescriptionCount{ vertexBindingDescriptionCount_ } + , pVertexBindingDescriptions{ pVertexBindingDescriptions_ } + , vertexAttributeDescriptionCount{ vertexAttributeDescriptionCount_ } + , pVertexAttributeDescriptions{ pVertexAttributeDescriptions_ } { } - VULKAN_HPP_CONSTEXPR MemoryGetRemoteAddressInfoNV( MemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - MemoryGetRemoteAddressInfoNV( VkMemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : MemoryGetRemoteAddressInfoNV( *reinterpret_cast( &rhs ) ) + PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineVertexInputStateCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - MemoryGetRemoteAddressInfoNV & operator=( MemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDescriptions_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexAttributeDescriptions_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , vertexBindingDescriptionCount( static_cast( vertexBindingDescriptions_.size() ) ) + , pVertexBindingDescriptions( vertexBindingDescriptions_.data() ) + , vertexAttributeDescriptionCount( static_cast( vertexAttributeDescriptions_.size() ) ) + , pVertexAttributeDescriptions( vertexAttributeDescriptions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - MemoryGetRemoteAddressInfoNV & operator=( VkMemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputStateCreateInfo & operator=( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineVertexInputStateCreateInfo & operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - memory = memory_; + flags = flags_; return *this; } - 
VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & - setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT { - handleType = handleType_; + vertexBindingDescriptionCount = vertexBindingDescriptionCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkMemoryGetRemoteAddressInfoNV const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & + setPVertexBindingDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pVertexBindingDescriptions = pVertexBindingDescriptions_; + return *this; } - operator VkMemoryGetRemoteAddressInfoNV &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputStateCreateInfo & setVertexBindingDescriptions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDescriptions_ ) + VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + vertexBindingDescriptionCount = static_cast( vertexBindingDescriptions_.size() ); + pVertexBindingDescriptions = vertexBindingDescriptions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & + setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & + setPVertexAttributeDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT + { + pVertexAttributeDescriptions = pVertexAttributeDescriptions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptions( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexAttributeDescriptions_ ) + VULKAN_HPP_NOEXCEPT + { + vertexAttributeDescriptionCount = static_cast( vertexAttributeDescriptions_.size() ); + pVertexAttributeDescriptions = vertexAttributeDescriptions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineVertexInputStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -42362,1442 +44650,1464 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, memory, handleType ); + return std::tie( + sType, pNext, flags, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MemoryGetRemoteAddressInfoNV const & ) const = default; + auto operator<=>( PipelineVertexInputStateCreateInfo 
const & ) const = default; #else - bool operator==( MemoryGetRemoteAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount ) && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions ) && + ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount ) && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions ); # endif } - bool operator!=( MemoryGetRemoteAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetRemoteAddressInfoNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {}; + uint32_t vertexBindingDescriptionCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription * pVertexBindingDescriptions = {}; + uint32_t vertexAttributeDescriptionCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions = {}; }; template <> - struct CppType + struct CppType { - using Type = MemoryGetRemoteAddressInfoNV; + using Type = PipelineVertexInputStateCreateInfo; }; -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - struct MemoryGetWin32HandleInfoKHR + struct PipelineInputAssemblyStateCreateInfo { - using NativeType = VkMemoryGetWin32HandleInfoKHR; + using NativeType = VkPipelineInputAssemblyStateCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetWin32HandleInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInputAssemblyStateCreateInfo; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( - VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memory( memory_ ) - , handleType( handleType_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineInputAssemblyStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList, + VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , topology{ topology_ } + , primitiveRestartEnable{ primitiveRestartEnable_ } { } - VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( 
MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : MemoryGetWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineInputAssemblyStateCreateInfo( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - MemoryGetWin32HandleInfoKHR & operator=( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineInputAssemblyStateCreateInfo & operator=( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - MemoryGetWin32HandleInfoKHR & operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineInputAssemblyStateCreateInfo & operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - memory = memory_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & - setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT { - handleType = handleType_; + topology = topology_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkMemoryGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & + setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + primitiveRestartEnable = primitiveRestartEnable_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkPipelineInputAssemblyStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags const &, + VULKAN_HPP_NAMESPACE::PrimitiveTopology const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, 
memory, handleType ); + return std::tie( sType, pNext, flags, topology, primitiveRestartEnable ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MemoryGetWin32HandleInfoKHR const & ) const = default; -# else - bool operator==( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineInputAssemblyStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( topology == rhs.topology ) && + ( primitiveRestartEnable == rhs.primitiveRestartEnable ); +# endif } - bool operator!=( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList; + VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {}; }; template <> - struct CppType + struct CppType { - using Type = MemoryGetWin32HandleInfoKHR; + using Type = PipelineInputAssemblyStateCreateInfo; }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#if defined( VK_USE_PLATFORM_FUCHSIA ) - struct MemoryGetZirconHandleInfoFUCHSIA + struct PipelineTessellationStateCreateInfo { - using NativeType = VkMemoryGetZirconHandleInfoFUCHSIA; + using NativeType = VkPipelineTessellationStateCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationStateCreateInfo; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA( - VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memory( memory_ ) - , handleType( handleType_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {}, + uint32_t patchControlPoints_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , patchControlPoints{ patchControlPoints_ } { } - VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA( 
MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - MemoryGetZirconHandleInfoFUCHSIA( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT - : MemoryGetZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineTessellationStateCreateInfo( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - MemoryGetZirconHandleInfoFUCHSIA & operator=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineTessellationStateCreateInfo & operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - MemoryGetZirconHandleInfoFUCHSIA & operator=( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineTessellationStateCreateInfo & operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - memory = memory_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & - setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setPatchControlPoints( uint32_t patchControlPoints_ ) VULKAN_HPP_NOEXCEPT { - handleType = handleType_; + patchControlPoints = patchControlPoints_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkMemoryGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineTessellationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkMemoryGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags const &, + uint32_t const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, memory, handleType ); + return std::tie( sType, pNext, flags, patchControlPoints ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MemoryGetZirconHandleInfoFUCHSIA const & ) const = default; -# else - bool operator==( 
MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineTessellationStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( patchControlPoints == rhs.patchControlPoints ); +# endif } - bool operator!=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {}; + uint32_t patchControlPoints = {}; }; template <> - struct CppType + struct CppType { - using Type = MemoryGetZirconHandleInfoFUCHSIA; + using Type = PipelineTessellationStateCreateInfo; }; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - struct MemoryHeap + struct PipelineViewportStateCreateInfo { - using NativeType = VkMemoryHeap; + using NativeType = VkPipelineViewportStateCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MemoryHeap( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT - : size( size_ ) - , flags( flags_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {}, + uint32_t viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ = {}, + uint32_t scissorCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , viewportCount{ viewportCount_ } + , pViewports{ pViewports_ } + , scissorCount{ scissorCount_ } + , pScissors{ pScissors_ } { } - VULKAN_HPP_CONSTEXPR MemoryHeap( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryHeap( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - MemoryHeap & operator=( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportStateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } - MemoryHeap & operator=( 
VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewports_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & scissors_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , viewportCount( static_cast( viewports_.size() ) ) + , pViewports( viewports_.data() ) + , scissorCount( static_cast( scissors_.size() ) ) + , pScissors( scissors_.data() ) { - *this = *reinterpret_cast( &rhs ); - return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - operator VkMemoryHeap const &() const VULKAN_HPP_NOEXCEPT + PipelineViewportStateCreateInfo & operator=( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineViewportStateCreateInfo & operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + *this = *reinterpret_cast( &rhs ); + return *this; } - operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( size, flags ); + flags = flags_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MemoryHeap const & ) const = default; -#else - bool operator==( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( size == rhs.size ) && ( flags == rhs.flags ); -# endif + viewportCount = viewportCount_; + return *this; } - bool operator!=( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPViewports( const VULKAN_HPP_NAMESPACE::Viewport * pViewports_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + pViewports = pViewports_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; - VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {}; - }; - - struct MemoryHostPointerPropertiesEXT - { - using NativeType = VkMemoryHostPointerPropertiesEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryHostPointerPropertiesEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memoryTypeBits( memoryTypeBits_ ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportStateCreateInfo & + setViewports( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewports_ ) VULKAN_HPP_NOEXCEPT { + viewportCount = static_cast( viewports_.size() ); + pViewports = viewports_.data(); + return 
*this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : MemoryHostPointerPropertiesEXT( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setScissorCount( uint32_t scissorCount_ ) VULKAN_HPP_NOEXCEPT { + scissorCount = scissorCount_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - MemoryHostPointerPropertiesEXT & operator=( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ ) VULKAN_HPP_NOEXCEPT + { + pScissors = pScissors_; + return *this; + } - MemoryHostPointerPropertiesEXT & operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportStateCreateInfo & + setScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & scissors_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + scissorCount = static_cast( scissors_.size() ); + pScissors = scissors_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkMemoryHostPointerPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineViewportStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, memoryTypeBits ); + return std::tie( sType, pNext, flags, viewportCount, pViewports, scissorCount, pScissors ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MemoryHostPointerPropertiesEXT const & ) const = default; + auto operator<=>( PipelineViewportStateCreateInfo const & ) const = default; #else - bool operator==( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewportCount == rhs.viewportCount ) && + ( pViewports == rhs.pViewports ) && ( scissorCount == rhs.scissorCount ) && ( pScissors == rhs.pScissors ); # endif } - bool operator!=( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT; - void * pNext = {}; - uint32_t memoryTypeBits = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags 
flags = {}; + uint32_t viewportCount = {}; + const VULKAN_HPP_NAMESPACE::Viewport * pViewports = {}; + uint32_t scissorCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pScissors = {}; }; template <> - struct CppType + struct CppType { - using Type = MemoryHostPointerPropertiesEXT; + using Type = PipelineViewportStateCreateInfo; }; - struct MemoryOpaqueCaptureAddressAllocateInfo + struct PipelineRasterizationStateCreateInfo { - using NativeType = VkMemoryOpaqueCaptureAddressAllocateInfo; + using NativeType = VkPipelineRasterizationStateCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( uint64_t opaqueCaptureAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , opaqueCaptureAddress( opaqueCaptureAddress_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {}, + VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill, + VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {}, + VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise, + VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {}, + float depthBiasConstantFactor_ = {}, + float depthBiasClamp_ = {}, + float depthBiasSlopeFactor_ = {}, + float lineWidth_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , depthClampEnable{ depthClampEnable_ } + , rasterizerDiscardEnable{ rasterizerDiscardEnable_ } + , polygonMode{ polygonMode_ } + , cullMode{ cullMode_ } + , frontFace{ frontFace_ } + , depthBiasEnable{ depthBiasEnable_ } + , depthBiasConstantFactor{ depthBiasConstantFactor_ } + , depthBiasClamp{ depthBiasClamp_ } + , depthBiasSlopeFactor{ depthBiasSlopeFactor_ } + , lineWidth{ lineWidth_ } { } - VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - MemoryOpaqueCaptureAddressAllocateInfo( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : MemoryOpaqueCaptureAddressAllocateInfo( *reinterpret_cast( &rhs ) ) + PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationStateCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - MemoryOpaqueCaptureAddressAllocateInfo & operator=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineRasterizationStateCreateInfo & operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - MemoryOpaqueCaptureAddressAllocateInfo & operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + 
PipelineRasterizationStateCreateInfo & operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - opaqueCaptureAddress = opaqueCaptureAddress_; + flags = flags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkMemoryOpaqueCaptureAddressAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + depthClampEnable = depthClampEnable_; + return *this; } - operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & + setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + rasterizerDiscardEnable = rasterizerDiscardEnable_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, opaqueCaptureAddress ); + polygonMode = polygonMode_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MemoryOpaqueCaptureAddressAllocateInfo const & ) const = default; -#else - bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress ); -# endif + cullMode = cullMode_; + return *this; } - bool operator!=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + frontFace = frontFace_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo; - const void * pNext = {}; - uint64_t opaqueCaptureAddress = {}; - }; - - template <> - struct CppType - { - using Type = MemoryOpaqueCaptureAddressAllocateInfo; - }; - using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo; - struct MemoryPriorityAllocateInfoEXT - { - using 
NativeType = VkMemoryPriorityAllocateInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryPriorityAllocateInfoEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( float priority_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , priority( priority_ ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT { + depthBiasEnable = depthBiasEnable_; + return *this; } - VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : MemoryPriorityAllocateInfoEXT( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT { + depthBiasConstantFactor = depthBiasConstantFactor_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - MemoryPriorityAllocateInfoEXT & operator=( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - MemoryPriorityAllocateInfoEXT & operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + depthBiasClamp = depthBiasClamp_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + depthBiasSlopeFactor = depthBiasSlopeFactor_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT & setPriority( float priority_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setLineWidth( float lineWidth_ ) VULKAN_HPP_NOEXCEPT { - priority = priority_; + lineWidth = lineWidth_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkMemoryPriorityAllocateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineRasterizationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, priority ); + return std::tie( sType, + pNext, + flags, + depthClampEnable, + rasterizerDiscardEnable, + polygonMode, + cullMode, + frontFace, + depthBiasEnable, + depthBiasConstantFactor, + depthBiasClamp, + depthBiasSlopeFactor, + lineWidth ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MemoryPriorityAllocateInfoEXT const & ) const = default; + auto operator<=>( PipelineRasterizationStateCreateInfo const & ) const = default; #else - bool operator==( 
MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priority == rhs.priority ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( depthClampEnable == rhs.depthClampEnable ) && + ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable ) && ( polygonMode == rhs.polygonMode ) && ( cullMode == rhs.cullMode ) && + ( frontFace == rhs.frontFace ) && ( depthBiasEnable == rhs.depthBiasEnable ) && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor ) && + ( depthBiasClamp == rhs.depthBiasClamp ) && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor ) && ( lineWidth == rhs.lineWidth ); # endif } - bool operator!=( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT; - const void * pNext = {}; - float priority = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable = {}; + VULKAN_HPP_NAMESPACE::PolygonMode polygonMode = VULKAN_HPP_NAMESPACE::PolygonMode::eFill; + VULKAN_HPP_NAMESPACE::CullModeFlags cullMode = {}; + VULKAN_HPP_NAMESPACE::FrontFace frontFace = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise; + VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable = {}; + float depthBiasConstantFactor = {}; + float depthBiasClamp = {}; + float depthBiasSlopeFactor = {}; + float lineWidth = {}; }; template <> - struct CppType + struct CppType { - using Type = MemoryPriorityAllocateInfoEXT; + using Type = PipelineRasterizationStateCreateInfo; }; - struct MemoryRequirements + struct PipelineMultisampleStateCreateInfo { - using NativeType = VkMemoryRequirements; + using NativeType = VkPipelineMultisampleStateCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MemoryRequirements( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {}, - uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT - : size( size_ ) - , alignment( alignment_ ) - , memoryTypeBits( memoryTypeBits_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineMultisampleStateCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineMultisampleStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {}, + float minSampleShading_ = {}, + const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , 
rasterizationSamples{ rasterizationSamples_ } + , sampleShadingEnable{ sampleShadingEnable_ } + , minSampleShading{ minSampleShading_ } + , pSampleMask{ pSampleMask_ } + , alphaToCoverageEnable{ alphaToCoverageEnable_ } + , alphaToOneEnable{ alphaToOneEnable_ } { } - VULKAN_HPP_CONSTEXPR MemoryRequirements( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryRequirements( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineMultisampleStateCreateInfo( *reinterpret_cast( &rhs ) ) + { + } - MemoryRequirements & operator=( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineMultisampleStateCreateInfo & operator=( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - MemoryRequirements & operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineMultisampleStateCreateInfo & operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & + setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + flags = flags_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & + setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( size, alignment, memoryTypeBits ); + rasterizationSamples = rasterizationSamples_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MemoryRequirements const & ) const = default; -#else - bool operator==( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setSampleShadingEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( size == rhs.size ) && ( alignment == rhs.alignment ) && ( memoryTypeBits == rhs.memoryTypeBits ); -# endif + sampleShadingEnable = sampleShadingEnable_; + return *this; } - bool operator!=( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setMinSampleShading( float minSampleShading_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + minSampleShading = minSampleShading_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; - 
VULKAN_HPP_NAMESPACE::DeviceSize alignment = {}; - uint32_t memoryTypeBits = {}; - }; - - struct MemoryRequirements2 - { - using NativeType = VkMemoryRequirements2; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryRequirements2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MemoryRequirements2( VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memoryRequirements( memoryRequirements_ ) + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask_ ) VULKAN_HPP_NOEXCEPT { + pSampleMask = pSampleMask_; + return *this; } - VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryRequirements2( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & + setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT { + alphaToCoverageEnable = alphaToCoverageEnable_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - MemoryRequirements2 & operator=( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - MemoryRequirements2 & operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + alphaToOneEnable = alphaToOneEnable_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineMultisampleStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT + operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, memoryRequirements ); + return std::tie( sType, pNext, flags, rasterizationSamples, sampleShadingEnable, minSampleShading, pSampleMask, alphaToCoverageEnable, alphaToOneEnable ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MemoryRequirements2 const & ) const = default; + auto operator<=>( PipelineMultisampleStateCreateInfo const & ) const = default; #else - bool operator==( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rasterizationSamples == rhs.rasterizationSamples ) && + ( sampleShadingEnable == rhs.sampleShadingEnable ) && ( minSampleShading == rhs.minSampleShading ) && ( pSampleMask == rhs.pSampleMask ) && + ( alphaToCoverageEnable == rhs.alphaToCoverageEnable ) && ( alphaToOneEnable == 
rhs.alphaToOneEnable ); # endif } - bool operator!=( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryRequirements2; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable = {}; + float minSampleShading = {}; + const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {}; }; template <> - struct CppType + struct CppType { - using Type = MemoryRequirements2; + using Type = PipelineMultisampleStateCreateInfo; }; - using MemoryRequirements2KHR = MemoryRequirements2; - struct MemoryType + struct StencilOpState { - using NativeType = VkMemoryType; + using NativeType = VkStencilOpState; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MemoryType( VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {}, uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : propertyFlags( propertyFlags_ ) - , heapIndex( heapIndex_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR StencilOpState( VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, + VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, + VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, + uint32_t compareMask_ = {}, + uint32_t writeMask_ = {}, + uint32_t reference_ = {} ) VULKAN_HPP_NOEXCEPT + : failOp{ failOp_ } + , passOp{ passOp_ } + , depthFailOp{ depthFailOp_ } + , compareOp{ compareOp_ } + , compareMask{ compareMask_ } + , writeMask{ writeMask_ } + , reference{ reference_ } { } - VULKAN_HPP_CONSTEXPR MemoryType( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR StencilOpState( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default; - MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryType( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT : StencilOpState( *reinterpret_cast( &rhs ) ) {} - MemoryType & operator=( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default; + StencilOpState & operator=( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - MemoryType & operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT + StencilOpState & operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkMemoryType const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setFailOp( VULKAN_HPP_NAMESPACE::StencilOp failOp_ ) VULKAN_HPP_NOEXCEPT { - return 
*reinterpret_cast( this ); + failOp = failOp_; + return *this; } - operator VkMemoryType &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setPassOp( VULKAN_HPP_NAMESPACE::StencilOp passOp_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + passOp = passOp_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setDepthFailOp( VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( propertyFlags, heapIndex ); + depthFailOp = depthFailOp_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MemoryType const & ) const = default; -#else - bool operator==( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( propertyFlags == rhs.propertyFlags ) && ( heapIndex == rhs.heapIndex ); -# endif + compareOp = compareOp_; + return *this; } - bool operator!=( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setCompareMask( uint32_t compareMask_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + compareMask = compareMask_; + return *this; } -#endif - public: - VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {}; - uint32_t heapIndex = {}; - }; + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setWriteMask( uint32_t writeMask_ ) VULKAN_HPP_NOEXCEPT + { + writeMask = writeMask_; + return *this; + } -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - struct MemoryWin32HandlePropertiesKHR - { - using NativeType = VkMemoryWin32HandlePropertiesKHR; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryWin32HandlePropertiesKHR; - -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memoryTypeBits( memoryTypeBits_ ) - { - } - - VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : MemoryWin32HandlePropertiesKHR( *reinterpret_cast( &rhs ) ) - { - } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - MemoryWin32HandlePropertiesKHR & operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - MemoryWin32HandlePropertiesKHR & operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 StencilOpState & setReference( uint32_t reference_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + reference = reference_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkMemoryWin32HandlePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkStencilOpState const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT + operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT 
) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, memoryTypeBits ); + return std::tie( failOp, passOp, depthFailOp, compareOp, compareMask, writeMask, reference ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MemoryWin32HandlePropertiesKHR const & ) const = default; -# else - bool operator==( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( StencilOpState const & ) const = default; +#else + bool operator==( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); -# endif +# else + return ( failOp == rhs.failOp ) && ( passOp == rhs.passOp ) && ( depthFailOp == rhs.depthFailOp ) && ( compareOp == rhs.compareOp ) && + ( compareMask == rhs.compareMask ) && ( writeMask == rhs.writeMask ) && ( reference == rhs.reference ); +# endif } - bool operator!=( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR; - void * pNext = {}; - uint32_t memoryTypeBits = {}; - }; - - template <> - struct CppType - { - using Type = MemoryWin32HandlePropertiesKHR; + VULKAN_HPP_NAMESPACE::StencilOp failOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; + VULKAN_HPP_NAMESPACE::StencilOp passOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep; + VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; + uint32_t compareMask = {}; + uint32_t writeMask = {}; + uint32_t reference = {}; }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#if defined( VK_USE_PLATFORM_FUCHSIA ) - struct MemoryZirconHandlePropertiesFUCHSIA + struct PipelineDepthStencilStateCreateInfo { - using NativeType = VkMemoryZirconHandlePropertiesFUCHSIA; + using NativeType = VkPipelineDepthStencilStateCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryZirconHandlePropertiesFUCHSIA; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDepthStencilStateCreateInfo; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memoryTypeBits( memoryTypeBits_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {}, + VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, + VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {}, 
+ VULKAN_HPP_NAMESPACE::StencilOpState front_ = {},
+ VULKAN_HPP_NAMESPACE::StencilOpState back_ = {},
+ float minDepthBounds_ = {},
+ float maxDepthBounds_ = {},
+ const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+ : pNext{ pNext_ }
+ , flags{ flags_ }
+ , depthTestEnable{ depthTestEnable_ }
+ , depthWriteEnable{ depthWriteEnable_ }
+ , depthCompareOp{ depthCompareOp_ }
+ , depthBoundsTestEnable{ depthBoundsTestEnable_ }
+ , stencilTestEnable{ stencilTestEnable_ }
+ , front{ front_ }
+ , back{ back_ }
+ , minDepthBounds{ minDepthBounds_ }
+ , maxDepthBounds{ maxDepthBounds_ }
 {
 }
- VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- MemoryZirconHandlePropertiesFUCHSIA( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
- : MemoryZirconHandlePropertiesFUCHSIA( *reinterpret_cast<MemoryZirconHandlePropertiesFUCHSIA const *>( &rhs ) )
+ PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+ : PipelineDepthStencilStateCreateInfo( *reinterpret_cast<PipelineDepthStencilStateCreateInfo const *>( &rhs ) )
 {
 }
-# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- MemoryZirconHandlePropertiesFUCHSIA & operator=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+ PipelineDepthStencilStateCreateInfo & operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
- MemoryZirconHandlePropertiesFUCHSIA & operator=( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+ PipelineDepthStencilStateCreateInfo & operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
 {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA const *>( &rhs );
+ *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const *>( &rhs );
 return *this;
 }
- operator VkMemoryZirconHandlePropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+ VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
 {
- return *reinterpret_cast<const VkMemoryZirconHandlePropertiesFUCHSIA *>( this );
+ pNext = pNext_;
+ return *this;
 }
- operator VkMemoryZirconHandlePropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &
+ setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
 {
- return *reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( this );
+ flags = flags_;
+ return *this;
 }
-# if defined( VULKAN_HPP_USE_REFLECT )
-# if 14 <= VULKAN_HPP_CPP_VERSION
- auto
-# else
- std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
-# endif
- reflect() const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT
 {
- return std::tie( sType, pNext, memoryTypeBits );
+ depthTestEnable = depthTestEnable_;
+ return *this;
 }
-# endif
-# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
- auto operator<=>( MemoryZirconHandlePropertiesFUCHSIA const & ) const = default;
-# else
- bool operator==( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT
 {
-# if defined( VULKAN_HPP_USE_REFLECT )
- return this->reflect() == rhs.reflect();
-# else
- return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits );
-# endif
+ depthWriteEnable = depthWriteEnable_;
+ return *this;
 }
- bool operator!=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT
 {
- return !operator==( rhs );
+ depthCompareOp = depthCompareOp_;
+ return *this;
 }
-# endif
-
- public:
- VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryZirconHandlePropertiesFUCHSIA;
- void * pNext = {};
- uint32_t memoryTypeBits = {};
- };
-
- template <>
- struct CppType<StructureType, StructureType::eMemoryZirconHandlePropertiesFUCHSIA>
- {
- using Type = MemoryZirconHandlePropertiesFUCHSIA;
- };
-#endif /*VK_USE_PLATFORM_FUCHSIA*/
-
-#if defined( VK_USE_PLATFORM_METAL_EXT )
- struct MetalSurfaceCreateInfoEXT
- {
- using NativeType = VkMetalSurfaceCreateInfoEXT;
-
- static const bool allowDuplicate = false;
- static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMetalSurfaceCreateInfoEXT;
-# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
- VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {},
- const CAMetalLayer * pLayer_ = {},
- const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
- : pNext( pNext_ )
- , flags( flags_ )
- , pLayer( pLayer_ )
+ VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo &
+ setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT
 {
+ depthBoundsTestEnable = depthBoundsTestEnable_;
+ return *this;
 }
- VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
- : MetalSurfaceCreateInfoEXT( *reinterpret_cast<MetalSurfaceCreateInfoEXT const *>( &rhs ) )
+ VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT
 {
+ stencilTestEnable = stencilTestEnable_;
+ return *this;
 }
-# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
- MetalSurfaceCreateInfoEXT & operator=( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
- MetalSurfaceCreateInfoEXT & operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setFront( VULKAN_HPP_NAMESPACE::StencilOpState const & front_ ) VULKAN_HPP_NOEXCEPT
 {
- *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const *>( &rhs );
+ front = front_;
 return *this;
 }
-# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
- VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setBack( VULKAN_HPP_NAMESPACE::StencilOpState const & back_ ) VULKAN_HPP_NOEXCEPT
 {
- pNext = pNext_;
+ back = back_;
 return *this;
 }
- VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setMinDepthBounds( float minDepthBounds_ ) VULKAN_HPP_NOEXCEPT
 {
- flags = flags_;
+ minDepthBounds = minDepthBounds_;
 return *this;
 }
- VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer * pLayer_ ) VULKAN_HPP_NOEXCEPT
+ VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setMaxDepthBounds( float maxDepthBounds_ ) VULKAN_HPP_NOEXCEPT
 {
- pLayer = pLayer_;
+ maxDepthBounds =
maxDepthBounds_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkMetalSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineDepthStencilStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::CompareOp const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::StencilOpState const &, + VULKAN_HPP_NAMESPACE::StencilOpState const &, + float const &, + float const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, pLayer ); + return std::tie( sType, + pNext, + flags, + depthTestEnable, + depthWriteEnable, + depthCompareOp, + depthBoundsTestEnable, + stencilTestEnable, + front, + back, + minDepthBounds, + maxDepthBounds ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MetalSurfaceCreateInfoEXT const & ) const = default; -# else - bool operator==( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineDepthStencilStateCreateInfo const & ) const = default; +#else + bool operator==( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pLayer == rhs.pLayer ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( depthTestEnable == rhs.depthTestEnable ) && + ( depthWriteEnable == rhs.depthWriteEnable ) && ( depthCompareOp == rhs.depthCompareOp ) && + ( depthBoundsTestEnable == rhs.depthBoundsTestEnable ) && ( stencilTestEnable == rhs.stencilTestEnable ) && ( front == rhs.front ) && + ( back == rhs.back ) && ( minDepthBounds == rhs.minDepthBounds ) && ( maxDepthBounds == rhs.maxDepthBounds ); +# endif } - bool operator!=( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {}; - const CAMetalLayer * pLayer = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable = {}; + VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; + VULKAN_HPP_NAMESPACE::Bool32 
depthBoundsTestEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable = {}; + VULKAN_HPP_NAMESPACE::StencilOpState front = {}; + VULKAN_HPP_NAMESPACE::StencilOpState back = {}; + float minDepthBounds = {}; + float maxDepthBounds = {}; }; template <> - struct CppType + struct CppType { - using Type = MetalSurfaceCreateInfoEXT; + using Type = PipelineDepthStencilStateCreateInfo; }; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - struct MultiDrawIndexedInfoEXT + struct PipelineColorBlendAttachmentState { - using NativeType = VkMultiDrawIndexedInfoEXT; + using NativeType = VkPipelineColorBlendAttachmentState; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT( uint32_t firstIndex_ = {}, uint32_t indexCount_ = {}, int32_t vertexOffset_ = {} ) VULKAN_HPP_NOEXCEPT - : firstIndex( firstIndex_ ) - , indexCount( indexCount_ ) - , vertexOffset( vertexOffset_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {}, + VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, + VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, + VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, + VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {} ) VULKAN_HPP_NOEXCEPT + : blendEnable{ blendEnable_ } + , srcColorBlendFactor{ srcColorBlendFactor_ } + , dstColorBlendFactor{ dstColorBlendFactor_ } + , colorBlendOp{ colorBlendOp_ } + , srcAlphaBlendFactor{ srcAlphaBlendFactor_ } + , dstAlphaBlendFactor{ dstAlphaBlendFactor_ } + , alphaBlendOp{ alphaBlendOp_ } + , colorWriteMask{ colorWriteMask_ } { } - VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT( MultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default; - MultiDrawIndexedInfoEXT( VkMultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : MultiDrawIndexedInfoEXT( *reinterpret_cast( &rhs ) ) + PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineColorBlendAttachmentState( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - MultiDrawIndexedInfoEXT & operator=( MultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineColorBlendAttachmentState & operator=( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - MultiDrawIndexedInfoEXT & operator=( VkMultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendAttachmentState & operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + 
VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ ) VULKAN_HPP_NOEXCEPT { - firstIndex = firstIndex_; + blendEnable = blendEnable_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & + setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT { - indexCount = indexCount_; + srcColorBlendFactor = srcColorBlendFactor_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & + setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT { - vertexOffset = vertexOffset_; + dstColorBlendFactor = dstColorBlendFactor_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkMultiDrawIndexedInfoEXT const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkMultiDrawIndexedInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( firstIndex, indexCount, vertexOffset ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MultiDrawIndexedInfoEXT const & ) const = default; -#else - bool operator==( MultiDrawIndexedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( firstIndex == rhs.firstIndex ) && ( indexCount == rhs.indexCount ) && ( vertexOffset == rhs.vertexOffset ); -# endif - } - - bool operator!=( MultiDrawIndexedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + colorBlendOp = colorBlendOp_; + return *this; } -#endif - - public: - uint32_t firstIndex = {}; - uint32_t indexCount = {}; - int32_t vertexOffset = {}; - }; - - struct MultiDrawInfoEXT - { - using NativeType = VkMultiDrawInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT( uint32_t firstVertex_ = {}, uint32_t vertexCount_ = {} ) VULKAN_HPP_NOEXCEPT - : firstVertex( firstVertex_ ) - , vertexCount( vertexCount_ ) + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & + setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT { + srcAlphaBlendFactor = srcAlphaBlendFactor_; + return *this; } - VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT( MultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - MultiDrawInfoEXT( VkMultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MultiDrawInfoEXT( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - MultiDrawInfoEXT & operator=( MultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - MultiDrawInfoEXT & operator=( VkMultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & + setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + dstAlphaBlendFactor = dstAlphaBlendFactor_; 
return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT { - firstVertex = firstVertex_; + alphaBlendOp = alphaBlendOp_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & + setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT { - vertexCount = vertexCount_; + colorWriteMask = colorWriteMask_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkMultiDrawInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineColorBlendAttachmentState const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkMultiDrawInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( firstVertex, vertexCount ); + return std::tie( + blendEnable, srcColorBlendFactor, dstColorBlendFactor, colorBlendOp, srcAlphaBlendFactor, dstAlphaBlendFactor, alphaBlendOp, colorWriteMask ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MultiDrawInfoEXT const & ) const = default; + auto operator<=>( PipelineColorBlendAttachmentState const & ) const = default; #else - bool operator==( MultiDrawInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( firstVertex == rhs.firstVertex ) && ( vertexCount == rhs.vertexCount ); + return ( blendEnable == rhs.blendEnable ) && ( srcColorBlendFactor == rhs.srcColorBlendFactor ) && ( dstColorBlendFactor == rhs.dstColorBlendFactor ) && + ( colorBlendOp == rhs.colorBlendOp ) && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) && + ( alphaBlendOp == rhs.alphaBlendOp ) && ( colorWriteMask == rhs.colorWriteMask ); # endif } - bool operator!=( MultiDrawInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t firstVertex = {}; - uint32_t vertexCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 blendEnable = {}; + VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; + VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero; + VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd; + VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask = {}; }; - struct MultisamplePropertiesEXT + 
struct PipelineColorBlendStateCreateInfo { - using NativeType = VkMultisamplePropertiesEXT; + using NativeType = VkPipelineColorBlendStateCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisamplePropertiesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendStateCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxSampleLocationGridSize( maxSampleLocationGridSize_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {}, + VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ = {}, + std::array const & blendConstants_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , logicOpEnable{ logicOpEnable_ } + , logicOp{ logicOp_ } + , attachmentCount{ attachmentCount_ } + , pAttachments{ pAttachments_ } + , blendConstants{ blendConstants_ } { } - VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : MultisamplePropertiesEXT( *reinterpret_cast( &rhs ) ) + PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineColorBlendStateCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - MultisamplePropertiesEXT & operator=( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineColorBlendStateCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_, + VULKAN_HPP_NAMESPACE::LogicOp logicOp_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, + std::array const & blendConstants_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , logicOpEnable( logicOpEnable_ ) + , logicOp( logicOp_ ) + , attachmentCount( static_cast( attachments_.size() ) ) + , pAttachments( attachments_.data() ) + , blendConstants( blendConstants_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - MultisamplePropertiesEXT & operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendStateCreateInfo & operator=( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineColorBlendStateCreateInfo & operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkMultisamplePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + flags = flags_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, maxSampleLocationGridSize ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MultisamplePropertiesEXT const & ) const = default; -#else - bool operator==( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ); -# endif - } - - bool operator!=( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {}; - }; - - template <> - struct CppType - { - using Type = MultisamplePropertiesEXT; - }; - - struct MultisampledRenderToSingleSampledInfoEXT - { - using NativeType = VkMultisampledRenderToSingleSampledInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisampledRenderToSingleSampledInfoEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - MultisampledRenderToSingleSampledInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampledEnable_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , multisampledRenderToSingleSampledEnable( multisampledRenderToSingleSampledEnable_ ) - , rasterizationSamples( rasterizationSamples_ ) + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setLogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT { + logicOpEnable = logicOpEnable_; + return *this; } - VULKAN_HPP_CONSTEXPR MultisampledRenderToSingleSampledInfoEXT( MultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - MultisampledRenderToSingleSampledInfoEXT( VkMultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : MultisampledRenderToSingleSampledInfoEXT( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setLogicOp( VULKAN_HPP_NAMESPACE::LogicOp logicOp_ ) VULKAN_HPP_NOEXCEPT { + logicOp = logicOp_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - MultisampledRenderToSingleSampledInfoEXT & operator=( MultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - MultisampledRenderToSingleSampledInfoEXT & operator=( VkMultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setAttachmentCount( uint32_t 
attachmentCount_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + attachmentCount = attachmentCount_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & + setPAttachments( const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + pAttachments = pAttachments_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & - setMultisampledRenderToSingleSampledEnable( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampledEnable_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineColorBlendStateCreateInfo & setAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT { - multisampledRenderToSingleSampledEnable = multisampledRenderToSingleSampledEnable_; + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & - setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setBlendConstants( std::array blendConstants_ ) VULKAN_HPP_NOEXCEPT { - rasterizationSamples = rasterizationSamples_; + blendConstants = blendConstants_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkMultisampledRenderToSingleSampledInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineColorBlendStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkMultisampledRenderToSingleSampledInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkPipelineColorBlendStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -43806,346 +46116,435 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::LogicOp const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, multisampledRenderToSingleSampledEnable, rasterizationSamples ); + return std::tie( sType, pNext, flags, logicOpEnable, logicOp, attachmentCount, pAttachments, blendConstants ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MultisampledRenderToSingleSampledInfoEXT const & ) const = default; + auto operator<=>( PipelineColorBlendStateCreateInfo const & ) const = default; #else - bool operator==( MultisampledRenderToSingleSampledInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multisampledRenderToSingleSampledEnable == rhs.multisampledRenderToSingleSampledEnable ) && - ( rasterizationSamples == rhs.rasterizationSamples ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( logicOpEnable == 
rhs.logicOpEnable ) && + ( logicOp == rhs.logicOp ) && ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) && + ( blendConstants == rhs.blendConstants ); # endif } - bool operator!=( MultisampledRenderToSingleSampledInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisampledRenderToSingleSampledInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampledEnable = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable = {}; + VULKAN_HPP_NAMESPACE::LogicOp logicOp = VULKAN_HPP_NAMESPACE::LogicOp::eClear; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D blendConstants = {}; }; template <> - struct CppType + struct CppType { - using Type = MultisampledRenderToSingleSampledInfoEXT; + using Type = PipelineColorBlendStateCreateInfo; }; - struct MultiviewPerViewAttributesInfoNVX + struct PipelineDynamicStateCreateInfo { - using NativeType = VkMultiviewPerViewAttributesInfoNVX; + using NativeType = VkPipelineDynamicStateCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultiviewPerViewAttributesInfoNVX; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDynamicStateCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , perViewAttributes( perViewAttributes_ ) - , perViewAttributesPositionXOnly( perViewAttributesPositionXOnly_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {}, + uint32_t dynamicStateCount_ = {}, + const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , dynamicStateCount{ dynamicStateCount_ } + , pDynamicStates{ pDynamicStates_ } { } - VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX( MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - MultiviewPerViewAttributesInfoNVX( VkMultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT - : MultiviewPerViewAttributesInfoNVX( *reinterpret_cast( &rhs ) ) + PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineDynamicStateCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - MultiviewPerViewAttributesInfoNVX & operator=( 
MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicStates_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), flags( flags_ ), dynamicStateCount( static_cast( dynamicStates_.size() ) ), pDynamicStates( dynamicStates_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - MultiviewPerViewAttributesInfoNVX & operator=( VkMultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineDynamicStateCreateInfo & operator=( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineDynamicStateCreateInfo & operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPerViewAttributes( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - perViewAttributes = perViewAttributes_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & - setPerViewAttributesPositionXOnly( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setDynamicStateCount( uint32_t dynamicStateCount_ ) VULKAN_HPP_NOEXCEPT { - perViewAttributesPositionXOnly = perViewAttributesPositionXOnly_; + dynamicStateCount = dynamicStateCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkMultiviewPerViewAttributesInfoNVX const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pDynamicStates = pDynamicStates_; + return *this; } - operator VkMultiviewPerViewAttributesInfoNVX &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineDynamicStateCreateInfo & + setDynamicStates( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicStates_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + dynamicStateCount = static_cast( dynamicStates_.size() ); + pDynamicStates = dynamicStates_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineDynamicStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, perViewAttributes, 
perViewAttributesPositionXOnly ); + return std::tie( sType, pNext, flags, dynamicStateCount, pDynamicStates ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MultiviewPerViewAttributesInfoNVX const & ) const = default; + auto operator<=>( PipelineDynamicStateCreateInfo const & ) const = default; #else - bool operator==( MultiviewPerViewAttributesInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perViewAttributes == rhs.perViewAttributes ) && - ( perViewAttributesPositionXOnly == rhs.perViewAttributesPositionXOnly ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dynamicStateCount == rhs.dynamicStateCount ) && + ( pDynamicStates == rhs.pDynamicStates ); # endif } - bool operator!=( MultiviewPerViewAttributesInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultiviewPerViewAttributesInfoNVX; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes = {}; - VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags = {}; + uint32_t dynamicStateCount = {}; + const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates = {}; }; template <> - struct CppType + struct CppType { - using Type = MultiviewPerViewAttributesInfoNVX; + using Type = PipelineDynamicStateCreateInfo; }; - struct MutableDescriptorTypeListEXT + struct GraphicsPipelineCreateInfo { - using NativeType = VkMutableDescriptorTypeListEXT; + using NativeType = VkGraphicsPipelineCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListEXT( uint32_t descriptorTypeCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : descriptorTypeCount( descriptorTypeCount_ ) - , pDescriptorTypes( pDescriptorTypes_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {}, + const 
VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t subpass_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , stageCount{ stageCount_ } + , pStages{ pStages_ } + , pVertexInputState{ pVertexInputState_ } + , pInputAssemblyState{ pInputAssemblyState_ } + , pTessellationState{ pTessellationState_ } + , pViewportState{ pViewportState_ } + , pRasterizationState{ pRasterizationState_ } + , pMultisampleState{ pMultisampleState_ } + , pDepthStencilState{ pDepthStencilState_ } + , pColorBlendState{ pColorBlendState_ } + , pDynamicState{ pDynamicState_ } + , layout{ layout_ } + , renderPass{ renderPass_ } + , subpass{ subpass_ } + , basePipelineHandle{ basePipelineHandle_ } + , basePipelineIndex{ basePipelineIndex_ } { } - VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListEXT( MutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - MutableDescriptorTypeListEXT( VkMutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : MutableDescriptorTypeListEXT( *reinterpret_cast( &rhs ) ) + GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : GraphicsPipelineCreateInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - MutableDescriptorTypeListEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorTypes_ ) - : descriptorTypeCount( static_cast( descriptorTypes_.size() ) ), pDescriptorTypes( descriptorTypes_.data() ) + GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_, + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t subpass_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , stageCount( static_cast( stages_.size() ) ) + , pStages( stages_.data() ) + , pVertexInputState( pVertexInputState_ ) + , pInputAssemblyState( pInputAssemblyState_ ) + , pTessellationState( pTessellationState_ ) + , pViewportState( pViewportState_ ) + , pRasterizationState( pRasterizationState_ ) + , pMultisampleState( pMultisampleState_ ) + 
, pDepthStencilState( pDepthStencilState_ ) + , pColorBlendState( pColorBlendState_ ) + , pDynamicState( pDynamicState_ ) + , layout( layout_ ) + , renderPass( renderPass_ ) + , subpass( subpass_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - MutableDescriptorTypeListEXT & operator=( MutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + GraphicsPipelineCreateInfo & operator=( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - MutableDescriptorTypeListEXT & operator=( VkMutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineCreateInfo & operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListEXT & setDescriptorTypeCount( uint32_t descriptorTypeCount_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - descriptorTypeCount = descriptorTypeCount_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListEXT & - setPDescriptorTypes( const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - pDescriptorTypes = pDescriptorTypes_; + flags = flags_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - MutableDescriptorTypeListEXT & setDescriptorTypes( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorTypes_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT { - descriptorTypeCount = static_cast( descriptorTypes_.size() ); - pDescriptorTypes = descriptorTypes_.data(); + stageCount = stageCount_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkMutableDescriptorTypeListEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pStages = pStages_; + return *this; } - operator VkMutableDescriptorTypeListEXT &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsPipelineCreateInfo & + setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( descriptorTypeCount, pDescriptorTypes ); + pVertexInputState = pVertexInputState_; + return *this; } -#endif -#if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MutableDescriptorTypeListEXT const & ) const = default; -#else - bool operator==( MutableDescriptorTypeListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setPInputAssemblyState( const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( descriptorTypeCount == rhs.descriptorTypeCount ) && ( pDescriptorTypes == rhs.pDescriptorTypes ); -# endif + pInputAssemblyState = pInputAssemblyState_; + return *this; } - bool operator!=( MutableDescriptorTypeListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + pTessellationState = pTessellationState_; + return *this; } -#endif - - public: - uint32_t descriptorTypeCount = {}; - const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes = {}; - }; - using MutableDescriptorTypeListVALVE = MutableDescriptorTypeListEXT; - - struct MutableDescriptorTypeCreateInfoEXT - { - using NativeType = VkMutableDescriptorTypeCreateInfoEXT; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMutableDescriptorTypeCreateInfoEXT; + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setPViewportState( const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState_ ) VULKAN_HPP_NOEXCEPT + { + pViewportState = pViewportState_; + return *this; + } -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoEXT( uint32_t mutableDescriptorTypeListCount_ = {}, - const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , mutableDescriptorTypeListCount( mutableDescriptorTypeListCount_ ) - , pMutableDescriptorTypeLists( pMutableDescriptorTypeLists_ ) + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setPRasterizationState( const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState_ ) VULKAN_HPP_NOEXCEPT { + pRasterizationState = pRasterizationState_; + return *this; } - VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoEXT( MutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setPMultisampleState( const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState_ ) VULKAN_HPP_NOEXCEPT + { + pMultisampleState = pMultisampleState_; + return *this; + } - MutableDescriptorTypeCreateInfoEXT( VkMutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : MutableDescriptorTypeCreateInfoEXT( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setPDepthStencilState( const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT { + pDepthStencilState = pDepthStencilState_; + return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - MutableDescriptorTypeCreateInfoEXT( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & mutableDescriptorTypeLists_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , 
mutableDescriptorTypeListCount( static_cast( mutableDescriptorTypeLists_.size() ) ) - , pMutableDescriptorTypeLists( mutableDescriptorTypeLists_.data() ) + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setPColorBlendState( const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState_ ) VULKAN_HPP_NOEXCEPT { + pColorBlendState = pColorBlendState_; + return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - MutableDescriptorTypeCreateInfoEXT & operator=( MutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & + setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT + { + pDynamicState = pDynamicState_; + return *this; + } - MutableDescriptorTypeCreateInfoEXT & operator=( VkMutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + layout = layout_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + renderPass = renderPass_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT & - setMutableDescriptorTypeListCount( uint32_t mutableDescriptorTypeListCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT { - mutableDescriptorTypeListCount = mutableDescriptorTypeListCount_; + subpass = subpass_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT & - setPMutableDescriptorTypeLists( const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT { - pMutableDescriptorTypeLists = pMutableDescriptorTypeLists_; + basePipelineHandle = basePipelineHandle_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - MutableDescriptorTypeCreateInfoEXT & setMutableDescriptorTypeLists( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & mutableDescriptorTypeLists_ ) - VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT { - mutableDescriptorTypeListCount = static_cast( mutableDescriptorTypeLists_.size() ); - pMutableDescriptorTypeLists = mutableDescriptorTypeLists_.data(); + basePipelineIndex = basePipelineIndex_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkMutableDescriptorTypeCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkGraphicsPipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkMutableDescriptorTypeCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( 
VULKAN_HPP_USE_REFLECT ) @@ -44154,277 +46553,466 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * const &, + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * const &, + const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * const &, + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * const &, + const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * const &, + const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * const &, + const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * const &, + const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * const &, + const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * const &, + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * const &, + VULKAN_HPP_NAMESPACE::PipelineLayout const &, + VULKAN_HPP_NAMESPACE::RenderPass const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::Pipeline const &, + int32_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, mutableDescriptorTypeListCount, pMutableDescriptorTypeLists ); + return std::tie( sType, + pNext, + flags, + stageCount, + pStages, + pVertexInputState, + pInputAssemblyState, + pTessellationState, + pViewportState, + pRasterizationState, + pMultisampleState, + pDepthStencilState, + pColorBlendState, + pDynamicState, + layout, + renderPass, + subpass, + basePipelineHandle, + basePipelineIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MutableDescriptorTypeCreateInfoEXT const & ) const = default; + auto operator<=>( GraphicsPipelineCreateInfo const & ) const = default; #else - bool operator==( MutableDescriptorTypeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mutableDescriptorTypeListCount == rhs.mutableDescriptorTypeListCount ) && - ( pMutableDescriptorTypeLists == rhs.pMutableDescriptorTypeLists ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && + ( pVertexInputState == rhs.pVertexInputState ) && ( pInputAssemblyState == rhs.pInputAssemblyState ) && + ( pTessellationState == rhs.pTessellationState ) && ( pViewportState == rhs.pViewportState ) && + ( pRasterizationState == rhs.pRasterizationState ) && ( pMultisampleState == rhs.pMultisampleState ) && + ( pDepthStencilState == rhs.pDepthStencilState ) && ( pColorBlendState == rhs.pColorBlendState ) && ( pDynamicState == rhs.pDynamicState ) && + ( layout == rhs.layout ) && ( renderPass == rhs.renderPass ) && ( subpass == rhs.subpass ) && ( basePipelineHandle == rhs.basePipelineHandle ) && + ( basePipelineIndex == rhs.basePipelineIndex ); # endif } - bool operator!=( MutableDescriptorTypeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMutableDescriptorTypeCreateInfoEXT; - const void * pNext = {}; - uint32_t mutableDescriptorTypeListCount = {}; - const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists = {}; - }; - - template <> - struct CppType + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {}; + const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo * pInputAssemblyState = {}; + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {}; + const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo * pViewportState = {}; + const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo * pRasterizationState = {}; + const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo * pMultisampleState = {}; + const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo * pDepthStencilState = {}; + const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo * pColorBlendState = {}; + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + uint32_t subpass = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; + }; + + template <> + struct CppType { - using Type = MutableDescriptorTypeCreateInfoEXT; + using Type = GraphicsPipelineCreateInfo; }; - using MutableDescriptorTypeCreateInfoVALVE = MutableDescriptorTypeCreateInfoEXT; - struct PastPresentationTimingGOOGLE + struct GraphicsPipelineLibraryCreateInfoEXT { - using NativeType = VkPastPresentationTimingGOOGLE; + using NativeType = VkGraphicsPipelineLibraryCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( uint32_t presentID_ = {}, - uint64_t desiredPresentTime_ = {}, - uint64_t actualPresentTime_ = {}, - uint64_t earliestPresentTime_ = {}, - uint64_t presentMargin_ = {} ) VULKAN_HPP_NOEXCEPT - : presentID( presentID_ ) - , desiredPresentTime( desiredPresentTime_ ) - , actualPresentTime( actualPresentTime_ ) - , earliestPresentTime( earliestPresentTime_ ) - , presentMargin( presentMargin_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineLibraryCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryCreateInfoEXT( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } { } - VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryCreateInfoEXT( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT - : PastPresentationTimingGOOGLE( *reinterpret_cast( &rhs ) ) + GraphicsPipelineLibraryCreateInfoEXT( VkGraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : GraphicsPipelineLibraryCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PastPresentationTimingGOOGLE & operator=( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + GraphicsPipelineLibraryCreateInfoEXT & operator=( 
GraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PastPresentationTimingGOOGLE & operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + GraphicsPipelineLibraryCreateInfoEXT & operator=( VkGraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPastPresentationTimingGOOGLE const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkGraphicsPipelineLibraryCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGraphicsPipelineLibraryCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( presentID, desiredPresentTime, actualPresentTime, earliestPresentTime, presentMargin ); + return std::tie( sType, pNext, flags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PastPresentationTimingGOOGLE const & ) const = default; + auto operator<=>( GraphicsPipelineLibraryCreateInfoEXT const & ) const = default; #else - bool operator==( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( presentID == rhs.presentID ) && ( desiredPresentTime == rhs.desiredPresentTime ) && ( actualPresentTime == rhs.actualPresentTime ) && - ( earliestPresentTime == rhs.earliestPresentTime ) && ( presentMargin == rhs.presentMargin ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); # endif } - bool operator!=( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t presentID = {}; - uint64_t desiredPresentTime = {}; - uint64_t actualPresentTime = {}; - uint64_t earliestPresentTime = {}; - uint64_t presentMargin = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineLibraryCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags = {}; }; - struct PerformanceConfigurationAcquireInfoINTEL + template <> + struct CppType { - using NativeType = VkPerformanceConfigurationAcquireInfoINTEL; + using Type = GraphicsPipelineLibraryCreateInfoEXT; + }; + + struct GraphicsShaderGroupCreateInfoNV + { + using NativeType = VkGraphicsShaderGroupCreateInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
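// --- Editorial aside: illustrative sketch only, not part of the generated header or this patch ---
// GraphicsPipelineLibraryCreateInfoEXT (above) selects which portion of a graphics
// pipeline a VK_EXT_graphics_pipeline_library "library" pipeline provides. A sketch,
// assuming the rest of the pipeline setup exists elsewhere, of requesting only the
// vertex-input interface; the helper name is invented for illustration.
#include <vulkan/vulkan.hpp>

void requestVertexInputLibrary( vk::GraphicsPipelineCreateInfo &           pipelineInfo,
                                vk::GraphicsPipelineLibraryCreateInfoEXT & libraryInfo )
{
  libraryInfo.setFlags( vk::GraphicsPipelineLibraryFlagBitsEXT::eVertexInputInterface );
  libraryInfo.pNext  = pipelineInfo.pNext;                     // preserve any existing pNext chain
  pipelineInfo.pNext = &libraryInfo;
  pipelineInfo.flags |= vk::PipelineCreateFlagBits::eLibraryKHR;  // mark the pipeline as a library
}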
StructureType::ePerformanceConfigurationAcquireInfoINTEL; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsShaderGroupCreateInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - PerformanceConfigurationAcquireInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = - VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , type( type_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stageCount{ stageCount_ } + , pStages{ pStages_ } + , pVertexInputState{ pVertexInputState_ } + , pTessellationState{ pTessellationState_ } { } - VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - : PerformanceConfigurationAcquireInfoINTEL( *reinterpret_cast( &rhs ) ) + GraphicsShaderGroupCreateInfoNV( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GraphicsShaderGroupCreateInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PerformanceConfigurationAcquireInfoINTEL & operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_, + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , stageCount( static_cast( stages_.size() ) ) + , pStages( stages_.data() ) + , pVertexInputState( pVertexInputState_ ) + , pTessellationState( pTessellationState_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PerformanceConfigurationAcquireInfoINTEL & operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + GraphicsShaderGroupCreateInfoNV & operator=( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + GraphicsShaderGroupCreateInfoNV & operator=( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return 
*this; } - VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & - setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT { - type = type_; + stageCount = stageCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPerformanceConfigurationAcquireInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pStages = pStages_; + return *this; } - operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsShaderGroupCreateInfoNV & + setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & + setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ ) VULKAN_HPP_NOEXCEPT + { + pVertexInputState = pVertexInputState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & + setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ ) VULKAN_HPP_NOEXCEPT + { + pTessellationState = pTessellationState_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkGraphicsShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGraphicsShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, type ); + return std::tie( sType, pNext, stageCount, pStages, pVertexInputState, pTessellationState ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PerformanceConfigurationAcquireInfoINTEL const & ) const = default; + auto operator<=>( GraphicsShaderGroupCreateInfoNV const & ) const = default; #else - bool operator==( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && + ( pVertexInputState == rhs.pVertexInputState ) && ( pTessellationState == rhs.pTessellationState ); # endif } - bool operator!=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = - 
VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsShaderGroupCreateInfoNV; + const void * pNext = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState = {}; + const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState = {}; }; template <> - struct CppType + struct CppType { - using Type = PerformanceConfigurationAcquireInfoINTEL; + using Type = GraphicsShaderGroupCreateInfoNV; }; - struct PerformanceCounterDescriptionKHR + struct GraphicsPipelineShaderGroupsCreateInfoNV { - using NativeType = VkPerformanceCounterDescriptionKHR; + using NativeType = VkGraphicsPipelineShaderGroupsCreateInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterDescriptionKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {}, - std::array const & name_ = {}, - std::array const & category_ = {}, - std::array const & description_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( uint32_t groupCount_ = {}, + const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ = {}, + uint32_t pipelineCount_ = {}, + const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , groupCount{ groupCount_ } + , pGroups{ pGroups_ } + , pipelineCount{ pipelineCount_ } + , pPipelines{ pPipelines_ } + { + } + + VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GraphicsPipelineShaderGroupsCreateInfoNV( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : GraphicsPipelineShaderGroupsCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsPipelineShaderGroupsCreateInfoNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelines_ = {}, + const void * pNext_ = nullptr ) : pNext( pNext_ ) - , flags( flags_ ) - , name( name_ ) - , category( category_ ) - , description( description_ ) + , groupCount( static_cast( groups_.size() ) ) + , pGroups( groups_.data() ) + , pipelineCount( static_cast( pipelines_.size() ) ) + , pPipelines( pipelines_.data() ) { } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + GraphicsPipelineShaderGroupsCreateInfoNV & operator=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PerformanceCounterDescriptionKHR( *reinterpret_cast( &rhs ) ) + 
GraphicsPipelineShaderGroupsCreateInfoNV & operator=( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } - PerformanceCounterDescriptionKHR & operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + groupCount = groupCount_; return *this; } - operator VkPerformanceCounterDescriptionKHR const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & + setPGroups( const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pGroups = pGroups_; + return *this; } - operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsPipelineShaderGroupsCreateInfoNV & setGroups( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + groupCount = static_cast( groups_.size() ); + pGroups = groups_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPipelineCount( uint32_t pipelineCount_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCount = pipelineCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPPipelines( const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ ) VULKAN_HPP_NOEXCEPT + { + pPipelines = pPipelines_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GraphicsPipelineShaderGroupsCreateInfoNV & + setPipelines( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelines_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCount = static_cast( pipelines_.size() ); + pPipelines = pipelines_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkGraphicsPipelineShaderGroupsCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGraphicsPipelineShaderGroupsCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -44432,1330 +47020,1254 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> + const void * const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::Pipeline * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, name, category, description ); + return std::tie( sType, pNext, groupCount, pGroups, pipelineCount, pPipelines ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PerformanceCounterDescriptionKHR const & ) const = default; + auto operator<=>( 
GraphicsPipelineShaderGroupsCreateInfoNV const & ) const = default; #else - bool operator==( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( name == rhs.name ) && ( category == rhs.category ) && - ( description == rhs.description ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( groupCount == rhs.groupCount ) && ( pGroups == rhs.pGroups ) && + ( pipelineCount == rhs.pipelineCount ) && ( pPipelines == rhs.pPipelines ); # endif } - bool operator!=( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D category = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV; + const void * pNext = {}; + uint32_t groupCount = {}; + const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV * pGroups = {}; + uint32_t pipelineCount = {}; + const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines = {}; }; template <> - struct CppType + struct CppType { - using Type = PerformanceCounterDescriptionKHR; + using Type = GraphicsPipelineShaderGroupsCreateInfoNV; }; - struct PerformanceCounterKHR + struct XYColorEXT { - using NativeType = VkPerformanceCounterKHR; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterKHR; + using NativeType = VkXYColorEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 - PerformanceCounterKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric, - VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer, - VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32, - std::array const & uuid_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , unit( unit_ ) - , scope( scope_ ) - , storage( storage_ ) - , uuid( uuid_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR XYColorEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT + : x{ x_ } + , y{ y_ } { } - VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR XYColorEXT( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PerformanceCounterKHR( *reinterpret_cast( &rhs ) ) + XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT : XYColorEXT( *reinterpret_cast( &rhs ) ) {} + + XYColorEXT & operator=( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + 
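// --- Editorial aside: illustrative sketch only, not part of the generated header or this patch ---
// GraphicsPipelineShaderGroupsCreateInfoNV (above) is chained into a
// GraphicsPipelineCreateInfo for VK_NV_device_generated_commands. A sketch of wiring it
// up from caller-provided group and pipeline arrays; the arrays must outlive pipeline
// creation, since only pointers are stored. The helper name is invented for illustration.
#include <vector>
#include <vulkan/vulkan.hpp>

void chainShaderGroups( vk::GraphicsPipelineCreateInfo &                         pipelineInfo,
                        vk::GraphicsPipelineShaderGroupsCreateInfoNV &           shaderGroupsInfo,
                        std::vector<vk::GraphicsShaderGroupCreateInfoNV> const & groups,
                        std::vector<vk::Pipeline> const &                        referencedPipelines )
{
  shaderGroupsInfo.setGroups( groups )              // fills groupCount and pGroups
    .setPipelines( referencedPipelines );           // fills pipelineCount and pPipelines
  shaderGroupsInfo.pNext = pipelineInfo.pNext;      // keep any existing chain intact
  pipelineInfo.pNext     = &shaderGroupsInfo;
}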
XYColorEXT & operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PerformanceCounterKHR & operator=( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 XYColorEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT + { + x = x_; + return *this; + } - PerformanceCounterKHR & operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 XYColorEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + y = y_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPerformanceCounterKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkXYColorEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT + operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple const &> + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, unit, scope, storage, uuid ); + return std::tie( x, y ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PerformanceCounterKHR const & ) const = default; + auto operator<=>( XYColorEXT const & ) const = default; #else - bool operator==( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( unit == rhs.unit ) && ( scope == rhs.scope ) && ( storage == rhs.storage ) && - ( uuid == rhs.uuid ); + return ( x == rhs.x ) && ( y == rhs.y ); # endif } - bool operator!=( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric; - VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer; - VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D uuid = {}; - }; - - template <> - struct CppType - { - using Type = PerformanceCounterKHR; + float x = {}; + float y = {}; }; - union PerformanceCounterResultKHR + struct HdrMetadataEXT { - using NativeType = VkPerformanceCounterResultKHR; -#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) - - VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( int32_t int32_ = {} ) : int32( int32_ ) {} + using NativeType = VkHdrMetadataEXT; - VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( int64_t int64_ ) : int64( int64_ ) {} + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrMetadataEXT; - VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( uint32_t uint32_ ) : uint32( uint32_ ) {} +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && 
!defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR HdrMetadataEXT( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {}, + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {}, + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {}, + VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {}, + float maxLuminance_ = {}, + float minLuminance_ = {}, + float maxContentLightLevel_ = {}, + float maxFrameAverageLightLevel_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , displayPrimaryRed{ displayPrimaryRed_ } + , displayPrimaryGreen{ displayPrimaryGreen_ } + , displayPrimaryBlue{ displayPrimaryBlue_ } + , whitePoint{ whitePoint_ } + , maxLuminance{ maxLuminance_ } + , minLuminance{ minLuminance_ } + , maxContentLightLevel{ maxContentLightLevel_ } + , maxFrameAverageLightLevel{ maxFrameAverageLightLevel_ } + { + } - VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( uint64_t uint64_ ) : uint64( uint64_ ) {} + VULKAN_HPP_CONSTEXPR HdrMetadataEXT( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( float float32_ ) : float32( float32_ ) {} + HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT : HdrMetadataEXT( *reinterpret_cast( &rhs ) ) {} - VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( double float64_ ) : float64( float64_ ) {} -#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ + HdrMetadataEXT & operator=( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setInt32( int32_t int32_ ) VULKAN_HPP_NOEXCEPT + HdrMetadataEXT & operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - int32 = int32_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setInt64( int64_t int64_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - int64 = int64_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setUint32( uint32_t uint32_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT { - uint32 = uint32_; + displayPrimaryRed = displayPrimaryRed_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setUint64( uint64_t uint64_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT { - uint64 = uint64_; + displayPrimaryGreen = displayPrimaryGreen_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setFloat32( float float32_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT { - float32 = float32_; + displayPrimaryBlue = displayPrimaryBlue_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setFloat64( double float64_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT const & whitePoint_ ) VULKAN_HPP_NOEXCEPT { - float64 = float64_; + whitePoint = whitePoint_; return *this; } -#endif 
/*VULKAN_HPP_NO_UNION_SETTERS*/ - operator VkPerformanceCounterResultKHR const &() const + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxLuminance( float maxLuminance_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + maxLuminance = maxLuminance_; + return *this; } - operator VkPerformanceCounterResultKHR &() + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMinLuminance( float minLuminance_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + minLuminance = minLuminance_; + return *this; } - int32_t int32; - int64_t int64; - uint32_t uint32; - uint64_t uint64; - float float32; - double float64; - }; - - struct PerformanceMarkerInfoINTEL - { - using NativeType = VkPerformanceMarkerInfoINTEL; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceMarkerInfoINTEL; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( uint64_t marker_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , marker( marker_ ) + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxContentLightLevel( float maxContentLightLevel_ ) VULKAN_HPP_NOEXCEPT { + maxContentLightLevel = maxContentLightLevel_; + return *this; } - VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - : PerformanceMarkerInfoINTEL( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) VULKAN_HPP_NOEXCEPT { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PerformanceMarkerInfoINTEL & operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PerformanceMarkerInfoINTEL & operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT - { - marker = marker_; + maxFrameAverageLightLevel = maxFrameAverageLightLevel_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPerformanceMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + operator VkHdrMetadataEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT + operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, marker ); + return std::tie( sType, + pNext, + displayPrimaryRed, + displayPrimaryGreen, + displayPrimaryBlue, + whitePoint, + maxLuminance, + minLuminance, + maxContentLightLevel, + maxFrameAverageLightLevel ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PerformanceMarkerInfoINTEL const & ) const = default; + auto operator<=>( HdrMetadataEXT const & ) const = default; #else - bool operator==( 
PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( marker == rhs.marker ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayPrimaryRed == rhs.displayPrimaryRed ) && + ( displayPrimaryGreen == rhs.displayPrimaryGreen ) && ( displayPrimaryBlue == rhs.displayPrimaryBlue ) && ( whitePoint == rhs.whitePoint ) && + ( maxLuminance == rhs.maxLuminance ) && ( minLuminance == rhs.minLuminance ) && ( maxContentLightLevel == rhs.maxContentLightLevel ) && + ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel ); # endif } - bool operator!=( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL; - const void * pNext = {}; - uint64_t marker = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrMetadataEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue = {}; + VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint = {}; + float maxLuminance = {}; + float minLuminance = {}; + float maxContentLightLevel = {}; + float maxFrameAverageLightLevel = {}; }; template <> - struct CppType + struct CppType { - using Type = PerformanceMarkerInfoINTEL; + using Type = HdrMetadataEXT; }; - struct PerformanceOverrideInfoINTEL + struct HdrVividDynamicMetadataHUAWEI { - using NativeType = VkPerformanceOverrideInfoINTEL; + using NativeType = VkHdrVividDynamicMetadataHUAWEI; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceOverrideInfoINTEL; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrVividDynamicMetadataHUAWEI; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( - VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware, - VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, - uint64_t parameter_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , type( type_ ) - , enable( enable_ ) - , parameter( parameter_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + HdrVividDynamicMetadataHUAWEI( size_t dynamicMetadataSize_ = {}, const void * pDynamicMetadata_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dynamicMetadataSize{ dynamicMetadataSize_ } + , pDynamicMetadata{ pDynamicMetadata_ } { } - VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR HdrVividDynamicMetadataHUAWEI( HdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - : PerformanceOverrideInfoINTEL( *reinterpret_cast( &rhs ) ) + HdrVividDynamicMetadataHUAWEI( VkHdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : 
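// --- Editorial aside: illustrative sketch only, not part of the generated header or this patch ---
// A sketch of filling the HdrMetadataEXT wrapper above for an HDR10 / BT.2020 swapchain
// and handing it to vkSetHdrMetadataEXT through the C++ bindings. Assumes the swapchain
// exists and VK_EXT_hdr_metadata is enabled; the luminance values are placeholder
// content-mastering numbers, not recommendations.
#include <vulkan/vulkan.hpp>

void setHdr10Metadata( vk::Device device, vk::SwapchainKHR swapchain )
{
  vk::HdrMetadataEXT metadata{};
  metadata.setDisplayPrimaryRed( { 0.708f, 0.292f } )    // BT.2020 red primary
    .setDisplayPrimaryGreen( { 0.170f, 0.797f } )        // BT.2020 green primary
    .setDisplayPrimaryBlue( { 0.131f, 0.046f } )         // BT.2020 blue primary
    .setWhitePoint( { 0.3127f, 0.3290f } )               // D65 white point
    .setMaxLuminance( 1000.0f )                          // nits
    .setMinLuminance( 0.001f )
    .setMaxContentLightLevel( 1000.0f )
    .setMaxFrameAverageLightLevel( 400.0f );

  // setHdrMetadataEXT takes parallel arrays of swapchains and metadata;
  // a single element is passed here via the ArrayProxy conversions.
  device.setHdrMetadataEXT( swapchain, metadata );
}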
HdrVividDynamicMetadataHUAWEI( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PerformanceOverrideInfoINTEL & operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + HdrVividDynamicMetadataHUAWEI( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicMetadata_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), dynamicMetadataSize( dynamicMetadata_.size() * sizeof( T ) ), pDynamicMetadata( dynamicMetadata_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PerformanceOverrideInfoINTEL & operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + HdrVividDynamicMetadataHUAWEI & operator=( HdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + HdrVividDynamicMetadataHUAWEI & operator=( VkHdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI & setDynamicMetadataSize( size_t dynamicMetadataSize_ ) VULKAN_HPP_NOEXCEPT { - type = type_; + dynamicMetadataSize = dynamicMetadataSize_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI & setPDynamicMetadata( const void * pDynamicMetadata_ ) VULKAN_HPP_NOEXCEPT { - enable = enable_; + pDynamicMetadata = pDynamicMetadata_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + HdrVividDynamicMetadataHUAWEI & setDynamicMetadata( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicMetadata_ ) VULKAN_HPP_NOEXCEPT { - parameter = parameter_; + dynamicMetadataSize = dynamicMetadata_.size() * sizeof( T ); + pDynamicMetadata = dynamicMetadata_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPerformanceOverrideInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + operator VkHdrVividDynamicMetadataHUAWEI const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT + operator VkHdrVividDynamicMetadataHUAWEI &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, type, enable, parameter ); + return std::tie( sType, pNext, dynamicMetadataSize, pDynamicMetadata ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto 
operator<=>( PerformanceOverrideInfoINTEL const & ) const = default; + auto operator<=>( HdrVividDynamicMetadataHUAWEI const & ) const = default; #else - bool operator==( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( HdrVividDynamicMetadataHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( enable == rhs.enable ) && ( parameter == rhs.parameter ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicMetadataSize == rhs.dynamicMetadataSize ) && + ( pDynamicMetadata == rhs.pDynamicMetadata ); # endif } - bool operator!=( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( HdrVividDynamicMetadataHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware; - VULKAN_HPP_NAMESPACE::Bool32 enable = {}; - uint64_t parameter = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrVividDynamicMetadataHUAWEI; + const void * pNext = {}; + size_t dynamicMetadataSize = {}; + const void * pDynamicMetadata = {}; }; template <> - struct CppType + struct CppType { - using Type = PerformanceOverrideInfoINTEL; + using Type = HdrVividDynamicMetadataHUAWEI; }; - struct PerformanceQuerySubmitInfoKHR + struct HeadlessSurfaceCreateInfoEXT { - using NativeType = VkPerformanceQuerySubmitInfoKHR; + using NativeType = VkHeadlessSurfaceCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceQuerySubmitInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHeadlessSurfaceCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( uint32_t counterPassIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , counterPassIndex( counterPassIndex_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } { } - VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PerformanceQuerySubmitInfoKHR( *reinterpret_cast( &rhs ) ) + HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : HeadlessSurfaceCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PerformanceQuerySubmitInfoKHR & operator=( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + HeadlessSurfaceCreateInfoEXT & operator=( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - 
PerformanceQuerySubmitInfoKHR & operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + HeadlessSurfaceCreateInfoEXT & operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & setCounterPassIndex( uint32_t counterPassIndex_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { - counterPassIndex = counterPassIndex_; + flags = flags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPerformanceQuerySubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkHeadlessSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, counterPassIndex ); + return std::tie( sType, pNext, flags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PerformanceQuerySubmitInfoKHR const & ) const = default; + auto operator<=>( HeadlessSurfaceCreateInfoEXT const & ) const = default; #else - bool operator==( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( counterPassIndex == rhs.counterPassIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); # endif } - bool operator!=( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceQuerySubmitInfoKHR; - const void * pNext = {}; - uint32_t counterPassIndex = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags = {}; }; template <> - struct CppType + struct CppType { - using Type = PerformanceQuerySubmitInfoKHR; + using Type = HeadlessSurfaceCreateInfoEXT; }; - struct PerformanceStreamMarkerInfoINTEL + struct HostImageCopyDevicePerformanceQuery { - using NativeType = VkPerformanceStreamMarkerInfoINTEL; + using NativeType = VkHostImageCopyDevicePerformanceQuery; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
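// --- Editorial aside: illustrative sketch only, not part of the generated header or this patch ---
// HeadlessSurfaceCreateInfoEXT (above) carries no real state: its flags are reserved and
// must be zero. A minimal sketch of creating a display-less surface, e.g. for CI testing
// of presentation code paths. Assumes the instance was created with
// VK_EXT_headless_surface enabled and that exceptions are enabled.
#include <vulkan/vulkan.hpp>

vk::SurfaceKHR createHeadlessSurface( vk::Instance instance )
{
  vk::HeadlessSurfaceCreateInfoEXT createInfo{};           // defaults: flags = 0, pNext = nullptr
  return instance.createHeadlessSurfaceEXT( createInfo );  // throws vk::SystemError on failure
}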
StructureType::ePerformanceStreamMarkerInfoINTEL; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( uint32_t marker_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , marker( marker_ ) - { - } - - VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHostImageCopyDevicePerformanceQuery; - PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - : PerformanceStreamMarkerInfoINTEL( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR HostImageCopyDevicePerformanceQuery( VULKAN_HPP_NAMESPACE::Bool32 optimalDeviceAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryLayout_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , optimalDeviceAccess{ optimalDeviceAccess_ } + , identicalMemoryLayout{ identicalMemoryLayout_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PerformanceStreamMarkerInfoINTEL & operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR HostImageCopyDevicePerformanceQuery( HostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PerformanceStreamMarkerInfoINTEL & operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + HostImageCopyDevicePerformanceQuery( VkHostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT + : HostImageCopyDevicePerformanceQuery( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + HostImageCopyDevicePerformanceQuery & operator=( HostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT + HostImageCopyDevicePerformanceQuery & operator=( VkHostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT { - marker = marker_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPerformanceStreamMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + operator VkHostImageCopyDevicePerformanceQuery const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT + operator VkHostImageCopyDevicePerformanceQuery &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, marker ); + return std::tie( sType, pNext, optimalDeviceAccess, identicalMemoryLayout ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PerformanceStreamMarkerInfoINTEL const & ) const = default; + auto operator<=>( HostImageCopyDevicePerformanceQuery const & ) const = default; #else - bool operator==( 
PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( HostImageCopyDevicePerformanceQuery const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( marker == rhs.marker ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( optimalDeviceAccess == rhs.optimalDeviceAccess ) && + ( identicalMemoryLayout == rhs.identicalMemoryLayout ); # endif } - bool operator!=( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( HostImageCopyDevicePerformanceQuery const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL; - const void * pNext = {}; - uint32_t marker = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHostImageCopyDevicePerformanceQuery; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 optimalDeviceAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryLayout = {}; }; template <> - struct CppType + struct CppType { - using Type = PerformanceStreamMarkerInfoINTEL; + using Type = HostImageCopyDevicePerformanceQuery; }; - union PerformanceValueDataINTEL - { - using NativeType = VkPerformanceValueDataINTEL; -#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) - - VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( uint32_t value32_ = {} ) : value32( value32_ ) {} - - VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( uint64_t value64_ ) : value64( value64_ ) {} + using HostImageCopyDevicePerformanceQueryEXT = HostImageCopyDevicePerformanceQuery; - VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( float valueFloat_ ) : valueFloat( valueFloat_ ) {} + struct HostImageLayoutTransitionInfo + { + using NativeType = VkHostImageLayoutTransitionInfo; - VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( const char * valueString_ ) : valueString( valueString_ ) {} -#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHostImageLayoutTransitionInfo; -#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValue32( uint32_t value32_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR HostImageLayoutTransitionInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , image{ image_ } + , oldLayout{ oldLayout_ } + , newLayout{ newLayout_ } + , subresourceRange{ subresourceRange_ } { - value32 = value32_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValue64( uint64_t value64_ ) VULKAN_HPP_NOEXCEPT - { - value64 = value64_; - return *this; - } + VULKAN_HPP_CONSTEXPR HostImageLayoutTransitionInfo( HostImageLayoutTransitionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueFloat( float valueFloat_ ) VULKAN_HPP_NOEXCEPT + HostImageLayoutTransitionInfo( VkHostImageLayoutTransitionInfo const & rhs ) 
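// --- Editorial aside: illustrative sketch only, not part of the generated header or this patch ---
// HostImageCopyDevicePerformanceQuery (above) is an output structure chained onto
// ImageFormatProperties2: it reports whether copying the image on the host gives up
// anything relative to device copies. The sketch below uses the Vulkan 1.4 spellings;
// with VK_EXT_host_image_copy alone the EXT-suffixed names apply instead, and the usage
// flags shown are assumptions for illustration.
#include <vulkan/vulkan.hpp>

bool hostCopyKeepsOptimalAccess( vk::PhysicalDevice physicalDevice, vk::Format format )
{
  vk::PhysicalDeviceImageFormatInfo2 formatInfo{};
  formatInfo.setFormat( format )
    .setType( vk::ImageType::e2D )
    .setTiling( vk::ImageTiling::eOptimal )
    .setUsage( vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eHostTransfer );

  // The templated overload returns a StructureChain with the query filled in.
  auto chain = physicalDevice.getImageFormatProperties2<vk::ImageFormatProperties2,
                                                        vk::HostImageCopyDevicePerformanceQuery>( formatInfo );
  return chain.get<vk::HostImageCopyDevicePerformanceQuery>().optimalDeviceAccess != 0;
}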
VULKAN_HPP_NOEXCEPT + : HostImageLayoutTransitionInfo( *reinterpret_cast( &rhs ) ) { - valueFloat = valueFloat_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueBool( VULKAN_HPP_NAMESPACE::Bool32 valueBool_ ) VULKAN_HPP_NOEXCEPT - { - valueBool = valueBool_; - return *this; - } + HostImageLayoutTransitionInfo & operator=( HostImageLayoutTransitionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueString( const char * valueString_ ) VULKAN_HPP_NOEXCEPT + HostImageLayoutTransitionInfo & operator=( VkHostImageLayoutTransitionInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - valueString = valueString_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ - - operator VkPerformanceValueDataINTEL const &() const - { - return *reinterpret_cast( this ); - } - - operator VkPerformanceValueDataINTEL &() - { - return *reinterpret_cast( this ); - } - -#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS - uint32_t value32; - uint64_t value64; - float valueFloat; - VULKAN_HPP_NAMESPACE::Bool32 valueBool; - const char * valueString; -#else - uint32_t value32; - uint64_t value64; - float valueFloat; - VkBool32 valueBool; - const char * valueString; -#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ - }; - - struct PerformanceValueINTEL - { - using NativeType = VkPerformanceValueINTEL; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 - PerformanceValueINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32, - VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , data( data_ ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { + pNext = pNext_; + return *this; } - VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - : PerformanceValueINTEL( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT { + image = image_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PerformanceValueINTEL & operator=( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PerformanceValueINTEL & operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + oldLayout = oldLayout_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT { - type = type_; + newLayout = newLayout_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL & setData( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & data_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & + setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & 
subresourceRange_ ) VULKAN_HPP_NOEXCEPT { - data = data_; + subresourceRange = subresourceRange_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPerformanceValueINTEL const &() const VULKAN_HPP_NOEXCEPT + operator VkHostImageLayoutTransitionInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT + operator VkHostImageLayoutTransitionInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( type, data ); + return std::tie( sType, pNext, image, oldLayout, newLayout, subresourceRange ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( HostImageLayoutTransitionInfo const & ) const = default; +#else + bool operator==( HostImageLayoutTransitionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( oldLayout == rhs.oldLayout ) && ( newLayout == rhs.newLayout ) && + ( subresourceRange == rhs.subresourceRange ); +# endif + } + + bool operator!=( HostImageLayoutTransitionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32; - VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHostImageLayoutTransitionInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; }; - struct PhysicalDevice16BitStorageFeatures + template <> + struct CppType { - using NativeType = VkPhysicalDevice16BitStorageFeatures; + using Type = HostImageLayoutTransitionInfo; + }; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice16BitStorageFeatures; + using HostImageLayoutTransitionInfoEXT = HostImageLayoutTransitionInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , storageBuffer16BitAccess( storageBuffer16BitAccess_ ) - , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ) - , storagePushConstant16( storagePushConstant16_ ) - , storageInputOutput16( storageInputOutput16_ ) - { - } +#if defined( VK_USE_PLATFORM_IOS_MVK ) + struct IOSSurfaceCreateInfoMVK + { + using NativeType = VkIOSSurfaceCreateInfoMVK; - VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static 
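// --- Editorial aside: illustrative sketch only, not part of the generated header or this patch ---
// A sketch of a host-side layout transition using the HostImageLayoutTransitionInfo
// wrapper above, assuming Vulkan 1.4 host image copy support (with
// VK_EXT_host_image_copy alone the entry point is spelled transitionImageLayoutEXT).
// The helper name and the chosen layouts are assumptions for illustration.
#include <vulkan/vulkan.hpp>

void transitionForHostCopy( vk::Device device, vk::Image image )
{
  vk::ImageSubresourceRange range{ vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 };

  vk::HostImageLayoutTransitionInfo transition{};
  transition.setImage( image )
    .setOldLayout( vk::ImageLayout::eUndefined )
    .setNewLayout( vk::ImageLayout::eGeneral )
    .setSubresourceRange( range );

  // Single element passed via ArrayProxy; throws vk::SystemError on failure
  // when exceptions are enabled.
  device.transitionImageLayout( transition );
}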
const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIosSurfaceCreateInfoMVK; - PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevice16BitStorageFeatures( *reinterpret_cast( &rhs ) ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {}, + const void * pView_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , pView{ pView_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDevice16BitStorageFeatures & operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDevice16BitStorageFeatures & operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + : IOSSurfaceCreateInfoMVK( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + IOSSurfaceCreateInfoMVK & operator=( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & - setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT + IOSSurfaceCreateInfoMVK & operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT { - storageBuffer16BitAccess = storageBuffer16BitAccess_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & - setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & - setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT { - storagePushConstant16 = storagePushConstant16_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & - setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT { - storageInputOutput16 = storageInputOutput16_; + pView = pView_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDevice16BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT + operator VkIOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - 
operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT + operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, storageBuffer16BitAccess, uniformAndStorageBuffer16BitAccess, storagePushConstant16, storageInputOutput16 ); + return std::tie( sType, pNext, flags, pView ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevice16BitStorageFeatures const & ) const = default; -#else - bool operator==( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IOSSurfaceCreateInfoMVK const & ) const = default; +# else + bool operator==( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) && - ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) && ( storagePushConstant16 == rhs.storagePushConstant16 ) && - ( storageInputOutput16 == rhs.storageInputOutput16 ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView ); +# endif } - bool operator!=( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags = {}; + const void * pView = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDevice16BitStorageFeatures; + using Type = IOSSurfaceCreateInfoMVK; }; - using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ - struct PhysicalDevice4444FormatsFeaturesEXT + struct ImageAlignmentControlCreateInfoMESA { - using NativeType = VkPhysicalDevice4444FormatsFeaturesEXT; + using NativeType = VkImageAlignmentControlCreateInfoMESA; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageAlignmentControlCreateInfoMESA; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : 
pNext( pNext_ ) - , formatA4R4G4B4( formatA4R4G4B4_ ) - , formatA4B4G4R4( formatA4B4G4R4_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageAlignmentControlCreateInfoMESA( uint32_t maximumRequestedAlignment_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maximumRequestedAlignment{ maximumRequestedAlignment_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageAlignmentControlCreateInfoMESA( ImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDevice4444FormatsFeaturesEXT( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevice4444FormatsFeaturesEXT( *reinterpret_cast( &rhs ) ) + ImageAlignmentControlCreateInfoMESA( VkImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageAlignmentControlCreateInfoMESA( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDevice4444FormatsFeaturesEXT & operator=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageAlignmentControlCreateInfoMESA & operator=( ImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDevice4444FormatsFeaturesEXT & operator=( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImageAlignmentControlCreateInfoMESA & operator=( VkImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageAlignmentControlCreateInfoMESA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setFormatA4R4G4B4( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ ) VULKAN_HPP_NOEXCEPT - { - formatA4R4G4B4 = formatA4R4G4B4_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setFormatA4B4G4R4( VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageAlignmentControlCreateInfoMESA & setMaximumRequestedAlignment( uint32_t maximumRequestedAlignment_ ) VULKAN_HPP_NOEXCEPT { - formatA4B4G4R4 = formatA4B4G4R4_; + maximumRequestedAlignment = maximumRequestedAlignment_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDevice4444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkImageAlignmentControlCreateInfoMESA const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDevice4444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkImageAlignmentControlCreateInfoMESA &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, formatA4R4G4B4, formatA4B4G4R4 ); + 
return std::tie( sType, pNext, maximumRequestedAlignment ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevice4444FormatsFeaturesEXT const & ) const = default; + auto operator<=>( ImageAlignmentControlCreateInfoMESA const & ) const = default; #else - bool operator==( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageAlignmentControlCreateInfoMESA const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatA4R4G4B4 == rhs.formatA4R4G4B4 ) && ( formatA4B4G4R4 == rhs.formatA4B4G4R4 ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maximumRequestedAlignment == rhs.maximumRequestedAlignment ); # endif } - bool operator!=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageAlignmentControlCreateInfoMESA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4 = {}; - VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4 = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageAlignmentControlCreateInfoMESA; + const void * pNext = {}; + uint32_t maximumRequestedAlignment = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDevice4444FormatsFeaturesEXT; + using Type = ImageAlignmentControlCreateInfoMESA; }; - struct PhysicalDevice8BitStorageFeatures + struct ImageBlit { - using NativeType = VkPhysicalDevice8BitStorageFeatures; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice8BitStorageFeatures; + using NativeType = VkImageBlit; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , storageBuffer8BitAccess( storageBuffer8BitAccess_ ) - , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ) - , storagePushConstant8( storagePushConstant8_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 ImageBlit( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + std::array const & srcOffsets_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + std::array const & dstOffsets_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource{ srcSubresource_ } + , srcOffsets{ srcOffsets_ } + , dstSubresource{ dstSubresource_ } + , dstOffsets{ dstOffsets_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDevice8BitStorageFeatures( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevice8BitStorageFeatures( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT : ImageBlit( 
*reinterpret_cast( &rhs ) ) {} - PhysicalDevice8BitStorageFeatures & operator=( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDevice8BitStorageFeatures & operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + ImageBlit & operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + srcSubresource = srcSubresource_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & - setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageBlit & setSrcOffsets( std::array const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT { - storageBuffer8BitAccess = storageBuffer8BitAccess_; + srcOffsets = srcOffsets_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & - setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT { - uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_; + dstSubresource = dstSubresource_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & - setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageBlit & setDstOffsets( std::array const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT { - storagePushConstant8 = storagePushConstant8_; + dstOffsets = dstOffsets_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDevice8BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT + operator VkImageBlit const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDevice8BitStorageFeatures &() VULKAN_HPP_NOEXCEPT + operator VkImageBlit &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple const &, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, storageBuffer8BitAccess, uniformAndStorageBuffer8BitAccess, storagePushConstant8 ); + return std::tie( srcSubresource, srcOffsets, dstSubresource, dstOffsets ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevice8BitStorageFeatures const & ) const = default; + auto operator<=>( ImageBlit const & ) const = default; #else - bool operator==( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( 
VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) && - ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) && ( storagePushConstant8 == rhs.storagePushConstant8 ); + return ( srcSubresource == rhs.srcSubresource ) && ( srcOffsets == rhs.srcOffsets ) && ( dstSubresource == rhs.dstSubresource ) && + ( dstOffsets == rhs.dstOffsets ); # endif } - bool operator!=( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {}; - }; - - template <> - struct CppType - { - using Type = PhysicalDevice8BitStorageFeatures; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D srcOffsets = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D dstOffsets = {}; }; - using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures; - struct PhysicalDeviceASTCDecodeFeaturesEXT + struct ImageCaptureDescriptorDataInfoEXT { - using NativeType = VkPhysicalDeviceASTCDecodeFeaturesEXT; + using NativeType = VkImageCaptureDescriptorDataInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCaptureDescriptorDataInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , decodeModeSharedExponent( decodeModeSharedExponent_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCaptureDescriptorDataInfoEXT( VULKAN_HPP_NAMESPACE::Image image_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , image{ image_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageCaptureDescriptorDataInfoEXT( ImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceASTCDecodeFeaturesEXT( *reinterpret_cast( &rhs ) ) + ImageCaptureDescriptorDataInfoEXT( VkImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageCaptureDescriptorDataInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceASTCDecodeFeaturesEXT & operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageCaptureDescriptorDataInfoEXT & operator=( ImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceASTCDecodeFeaturesEXT & 
operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImageCaptureDescriptorDataInfoEXT & operator=( VkImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & - setDecodeModeSharedExponent( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageCaptureDescriptorDataInfoEXT & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT { - decodeModeSharedExponent = decodeModeSharedExponent_; + image = image_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceASTCDecodeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkImageCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkImageCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, decodeModeSharedExponent ); + return std::tie( sType, pNext, image ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceASTCDecodeFeaturesEXT const & ) const = default; + auto operator<=>( ImageCaptureDescriptorDataInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decodeModeSharedExponent == rhs.decodeModeSharedExponent ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ); # endif } - bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCaptureDescriptorDataInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceASTCDecodeFeaturesEXT; + using Type = ImageCaptureDescriptorDataInfoEXT; }; - struct PhysicalDeviceAccelerationStructureFeaturesKHR + struct ImageCompressionControlEXT { - using NativeType = VkPhysicalDeviceAccelerationStructureFeaturesKHR; + using NativeType = VkImageCompressionControlEXT; 
static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCompressionControlEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - PhysicalDeviceAccelerationStructureFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , accelerationStructure( accelerationStructure_ ) - , accelerationStructureCaptureReplay( accelerationStructureCaptureReplay_ ) - , accelerationStructureIndirectBuild( accelerationStructureIndirectBuild_ ) - , accelerationStructureHostCommands( accelerationStructureHostCommands_ ) - , descriptorBindingAccelerationStructureUpdateAfterBind( descriptorBindingAccelerationStructureUpdateAfterBind_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCompressionControlEXT( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags_ = {}, + uint32_t compressionControlPlaneCount_ = {}, + VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * pFixedRateFlags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , compressionControlPlaneCount{ compressionControlPlaneCount_ } + , pFixedRateFlags{ pFixedRateFlags_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceAccelerationStructureFeaturesKHR( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageCompressionControlEXT( ImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceAccelerationStructureFeaturesKHR( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceAccelerationStructureFeaturesKHR( *reinterpret_cast( &rhs ) ) + ImageCompressionControlEXT( VkImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageCompressionControlEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PhysicalDeviceAccelerationStructureFeaturesKHR & operator=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceAccelerationStructureFeaturesKHR & operator=( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageCompressionControlEXT( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & fixedRateFlags_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , compressionControlPlaneCount( static_cast( fixedRateFlags_.size() ) ) + , pFixedRateFlags( fixedRateFlags_.data() ) { - *this = *reinterpret_cast( &rhs ); - return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + ImageCompressionControlEXT & operator=( ImageCompressionControlEXT const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageCompressionControlEXT & operator=( VkImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & - setAccelerationStructure( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - accelerationStructure = accelerationStructure_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & - setAccelerationStructureCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setFlags( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { - accelerationStructureCaptureReplay = accelerationStructureCaptureReplay_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & - setAccelerationStructureIndirectBuild( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setCompressionControlPlaneCount( uint32_t compressionControlPlaneCount_ ) VULKAN_HPP_NOEXCEPT { - accelerationStructureIndirectBuild = accelerationStructureIndirectBuild_; + compressionControlPlaneCount = compressionControlPlaneCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & - setAccelerationStructureHostCommands( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & + setPFixedRateFlags( VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * pFixedRateFlags_ ) VULKAN_HPP_NOEXCEPT { - accelerationStructureHostCommands = accelerationStructureHostCommands_; + pFixedRateFlags = pFixedRateFlags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setDescriptorBindingAccelerationStructureUpdateAfterBind( - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageCompressionControlEXT & setFixedRateFlags( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & fixedRateFlags_ ) VULKAN_HPP_NOEXCEPT { - descriptorBindingAccelerationStructureUpdateAfterBind = descriptorBindingAccelerationStructureUpdateAfterBind_; + compressionControlPlaneCount = static_cast( fixedRateFlags_.size() ); + pFixedRateFlags = fixedRateFlags_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceAccelerationStructureFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkImageCompressionControlEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceAccelerationStructureFeaturesKHR &() VULKAN_HPP_NOEXCEPT + operator VkImageCompressionControlEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -45763,117 +48275,91 @@ namespace VULKAN_HPP_NAMESPACE auto # 
else std::tuple + const void * const &, + VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - accelerationStructure, - accelerationStructureCaptureReplay, - accelerationStructureIndirectBuild, - accelerationStructureHostCommands, - descriptorBindingAccelerationStructureUpdateAfterBind ); + return std::tie( sType, pNext, flags, compressionControlPlaneCount, pFixedRateFlags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceAccelerationStructureFeaturesKHR const & ) const = default; + auto operator<=>( ImageCompressionControlEXT const & ) const = default; #else - bool operator==( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageCompressionControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ) && - ( accelerationStructureCaptureReplay == rhs.accelerationStructureCaptureReplay ) && - ( accelerationStructureIndirectBuild == rhs.accelerationStructureIndirectBuild ) && - ( accelerationStructureHostCommands == rhs.accelerationStructureHostCommands ) && - ( descriptorBindingAccelerationStructureUpdateAfterBind == rhs.descriptorBindingAccelerationStructureUpdateAfterBind ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( compressionControlPlaneCount == rhs.compressionControlPlaneCount ) && ( pFixedRateFlags == rhs.pFixedRateFlags ); # endif } - bool operator!=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageCompressionControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure = {}; - VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay = {}; - VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild = {}; - VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCompressionControlEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT flags = {}; + uint32_t compressionControlPlaneCount = {}; + VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * pFixedRateFlags = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceAccelerationStructureFeaturesKHR; + using Type = ImageCompressionControlEXT; }; - struct PhysicalDeviceAccelerationStructurePropertiesKHR + struct ImageCompressionPropertiesEXT { - using NativeType = VkPhysicalDeviceAccelerationStructurePropertiesKHR; + using NativeType = VkImageCompressionPropertiesEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCompressionPropertiesEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - 
VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR( uint64_t maxGeometryCount_ = {}, - uint64_t maxInstanceCount_ = {}, - uint64_t maxPrimitiveCount_ = {}, - uint32_t maxPerStageDescriptorAccelerationStructures_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ = {}, - uint32_t maxDescriptorSetAccelerationStructures_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures_ = {}, - uint32_t minAccelerationStructureScratchOffsetAlignment_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxGeometryCount( maxGeometryCount_ ) - , maxInstanceCount( maxInstanceCount_ ) - , maxPrimitiveCount( maxPrimitiveCount_ ) - , maxPerStageDescriptorAccelerationStructures( maxPerStageDescriptorAccelerationStructures_ ) - , maxPerStageDescriptorUpdateAfterBindAccelerationStructures( maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ ) - , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ ) - , maxDescriptorSetUpdateAfterBindAccelerationStructures( maxDescriptorSetUpdateAfterBindAccelerationStructures_ ) - , minAccelerationStructureScratchOffsetAlignment( minAccelerationStructureScratchOffsetAlignment_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCompressionPropertiesEXT( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT imageCompressionFlags_ = {}, + VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageCompressionFlags{ imageCompressionFlags_ } + , imageCompressionFixedRateFlags{ imageCompressionFixedRateFlags_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceAccelerationStructurePropertiesKHR( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageCompressionPropertiesEXT( ImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceAccelerationStructurePropertiesKHR( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceAccelerationStructurePropertiesKHR( *reinterpret_cast( &rhs ) ) + ImageCompressionPropertiesEXT( VkImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageCompressionPropertiesEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceAccelerationStructurePropertiesKHR & operator=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageCompressionPropertiesEXT & operator=( ImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceAccelerationStructurePropertiesKHR & operator=( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + ImageCompressionPropertiesEXT & operator=( VkImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceAccelerationStructurePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkImageCompressionPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceAccelerationStructurePropertiesKHR &() VULKAN_HPP_NOEXCEPT + operator VkImageCompressionPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - 
return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -45882,668 +48368,717 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT const &, + VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - maxGeometryCount, - maxInstanceCount, - maxPrimitiveCount, - maxPerStageDescriptorAccelerationStructures, - maxPerStageDescriptorUpdateAfterBindAccelerationStructures, - maxDescriptorSetAccelerationStructures, - maxDescriptorSetUpdateAfterBindAccelerationStructures, - minAccelerationStructureScratchOffsetAlignment ); + return std::tie( sType, pNext, imageCompressionFlags, imageCompressionFixedRateFlags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceAccelerationStructurePropertiesKHR const & ) const = default; + auto operator<=>( ImageCompressionPropertiesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageCompressionPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxGeometryCount == rhs.maxGeometryCount ) && ( maxInstanceCount == rhs.maxInstanceCount ) && - ( maxPrimitiveCount == rhs.maxPrimitiveCount ) && - ( maxPerStageDescriptorAccelerationStructures == rhs.maxPerStageDescriptorAccelerationStructures ) && - ( maxPerStageDescriptorUpdateAfterBindAccelerationStructures == rhs.maxPerStageDescriptorUpdateAfterBindAccelerationStructures ) && - ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures ) && - ( maxDescriptorSetUpdateAfterBindAccelerationStructures == rhs.maxDescriptorSetUpdateAfterBindAccelerationStructures ) && - ( minAccelerationStructureScratchOffsetAlignment == rhs.minAccelerationStructureScratchOffsetAlignment ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCompressionFlags == rhs.imageCompressionFlags ) && + ( imageCompressionFixedRateFlags == rhs.imageCompressionFixedRateFlags ); # endif } - bool operator!=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageCompressionPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR; - void * pNext = {}; - uint64_t maxGeometryCount = {}; - uint64_t maxInstanceCount = {}; - uint64_t maxPrimitiveCount = {}; - uint32_t maxPerStageDescriptorAccelerationStructures = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures = {}; - uint32_t maxDescriptorSetAccelerationStructures = {}; - uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures = {}; - uint32_t minAccelerationStructureScratchOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCompressionPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT imageCompressionFlags = {}; + VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceAccelerationStructurePropertiesKHR; + using Type = 
ImageCompressionPropertiesEXT; }; - struct PhysicalDeviceAmigoProfilingFeaturesSEC +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct ImageFormatConstraintsInfoFUCHSIA { - using NativeType = VkPhysicalDeviceAmigoProfilingFeaturesSEC; + using NativeType = VkImageFormatConstraintsInfoFUCHSIA; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatConstraintsInfoFUCHSIA; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceAmigoProfilingFeaturesSEC( VULKAN_HPP_NAMESPACE::Bool32 amigoProfiling_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , amigoProfiling( amigoProfiling_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_ = {}, + uint64_t sysmemPixelFormat_ = {}, + uint32_t colorSpaceCount_ = {}, + const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageCreateInfo{ imageCreateInfo_ } + , requiredFormatFeatures{ requiredFormatFeatures_ } + , flags{ flags_ } + , sysmemPixelFormat{ sysmemPixelFormat_ } + , colorSpaceCount{ colorSpaceCount_ } + , pColorSpaces{ pColorSpaces_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceAmigoProfilingFeaturesSEC( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceAmigoProfilingFeaturesSEC( VkPhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceAmigoProfilingFeaturesSEC( *reinterpret_cast( &rhs ) ) + ImageFormatConstraintsInfoFUCHSIA( VkImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageFormatConstraintsInfoFUCHSIA( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceAmigoProfilingFeaturesSEC & operator=( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageFormatConstraintsInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo_, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_, + VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_, + uint64_t sysmemPixelFormat_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorSpaces_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , imageCreateInfo( imageCreateInfo_ ) + , requiredFormatFeatures( requiredFormatFeatures_ ) + , flags( flags_ ) + , sysmemPixelFormat( sysmemPixelFormat_ ) + , colorSpaceCount( static_cast( colorSpaces_.size() ) ) + , pColorSpaces( colorSpaces_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceAmigoProfilingFeaturesSEC & operator=( VkPhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT + ImageFormatConstraintsInfoFUCHSIA & operator=( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + 
ImageFormatConstraintsInfoFUCHSIA & operator=( VkImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAmigoProfilingFeaturesSEC & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAmigoProfilingFeaturesSEC & setAmigoProfiling( VULKAN_HPP_NAMESPACE::Bool32 amigoProfiling_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & + setImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & imageCreateInfo_ ) VULKAN_HPP_NOEXCEPT { - amigoProfiling = amigoProfiling_; + imageCreateInfo = imageCreateInfo_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceAmigoProfilingFeaturesSEC const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & + setRequiredFormatFeatures( VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + requiredFormatFeatures = requiredFormatFeatures_; + return *this; } - operator VkPhysicalDeviceAmigoProfilingFeaturesSEC &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + flags = flags_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setSysmemPixelFormat( uint64_t sysmemPixelFormat_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, amigoProfiling ); + sysmemPixelFormat = sysmemPixelFormat_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceAmigoProfilingFeaturesSEC const & ) const = default; -#else - bool operator==( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setColorSpaceCount( uint32_t colorSpaceCount_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( amigoProfiling == rhs.amigoProfiling ); -# endif + colorSpaceCount = colorSpaceCount_; + return *this; } - bool operator!=( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & + setPColorSpaces( const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + pColorSpaces = pColorSpaces_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 amigoProfiling = {}; - }; - - template <> - struct CppType - { - using Type = PhysicalDeviceAmigoProfilingFeaturesSEC; - }; - struct PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT - { - using NativeType = 
VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopLayout_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , attachmentFeedbackLoopLayout( attachmentFeedbackLoopLayout_ ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageFormatConstraintsInfoFUCHSIA & setColorSpaces( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorSpaces_ ) VULKAN_HPP_NOEXCEPT { + colorSpaceCount = static_cast( colorSpaces_.size() ); + pColorSpaces = colorSpaces_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR - PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( *reinterpret_cast( &rhs ) ) + operator VkImageFormatConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { + return *reinterpret_cast( this ); } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & - operator=( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & operator=( VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkImageFormatConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); - return *this; + return *reinterpret_cast( this ); } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return std::tie( sType, pNext, imageCreateInfo, requiredFormatFeatures, flags, sysmemPixelFormat, colorSpaceCount, pColorSpaces ); } +# endif - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & - setAttachmentFeedbackLoopLayout( VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopLayout_ ) VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageFormatConstraintsInfoFUCHSIA const & ) const = default; +# else + bool operator==( ImageFormatConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { - attachmentFeedbackLoopLayout = attachmentFeedbackLoopLayout_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCreateInfo == rhs.imageCreateInfo ) && + ( requiredFormatFeatures == rhs.requiredFormatFeatures ) && ( flags == rhs.flags ) && ( sysmemPixelFormat == rhs.sysmemPixelFormat ) && + ( colorSpaceCount == rhs.colorSpaceCount ) && ( pColorSpaces == rhs.pColorSpaces ); +# endif } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator 
VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageFormatConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return !operator==( rhs ); } - - operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, attachmentFeedbackLoopLayout ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & ) const = default; -#else - bool operator==( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentFeedbackLoopLayout == rhs.attachmentFeedbackLoopLayout ); -# endif - } - - bool operator!=( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopLayout = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatConstraintsInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageCreateInfo imageCreateInfo = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures = {}; + VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA flags = {}; + uint64_t sysmemPixelFormat = {}; + uint32_t colorSpaceCount = {}; + const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; + using Type = ImageFormatConstraintsInfoFUCHSIA; }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct ImageConstraintsInfoFUCHSIA { - using NativeType = VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT; + using NativeType = VkImageConstraintsInfoFUCHSIA; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageConstraintsInfoFUCHSIA; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , advancedBlendCoherentOperations( advancedBlendCoherentOperations_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA( uint32_t formatConstraintsCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ = {}, + VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, + VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ = {}, + const void * pNext_ = 
nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , formatConstraintsCount{ formatConstraintsCount_ } + , pFormatConstraints{ pFormatConstraints_ } + , bufferCollectionConstraints{ bufferCollectionConstraints_ } + , flags{ flags_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceBlendOperationAdvancedFeaturesEXT( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA( ImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceBlendOperationAdvancedFeaturesEXT( *reinterpret_cast( &rhs ) ) + ImageConstraintsInfoFUCHSIA( VkImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageConstraintsInfoFUCHSIA( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageConstraintsInfoFUCHSIA( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & formatConstraints_, + VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, + VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , formatConstraintsCount( static_cast( formatConstraints_.size() ) ) + , pFormatConstraints( formatConstraints_.data() ) + , bufferCollectionConstraints( bufferCollectionConstraints_ ) + , flags( flags_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImageConstraintsInfoFUCHSIA & operator=( ImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageConstraintsInfoFUCHSIA & operator=( VkImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & - setAdvancedBlendCoherentOperations( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setFormatConstraintsCount( uint32_t formatConstraintsCount_ ) VULKAN_HPP_NOEXCEPT { - advancedBlendCoherentOperations = advancedBlendCoherentOperations_; + formatConstraintsCount = formatConstraintsCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & + setPFormatConstraints( const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pFormatConstraints = pFormatConstraints_; + return *this; } - 
operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageConstraintsInfoFUCHSIA & setFormatConstraints( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & formatConstraints_ ) + VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + formatConstraintsCount = static_cast( formatConstraints_.size() ); + pFormatConstraints = formatConstraints_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & + setBufferCollectionConstraints( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ ) VULKAN_HPP_NOEXCEPT + { + bufferCollectionConstraints = bufferCollectionConstraints_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, advancedBlendCoherentOperations ); + return std::tie( sType, pNext, formatConstraintsCount, pFormatConstraints, bufferCollectionConstraints, flags ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & ) const = default; -#else - bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageConstraintsInfoFUCHSIA const & ) const = default; +# else + bool operator==( ImageConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatConstraintsCount == rhs.formatConstraintsCount ) && + ( pFormatConstraints == rhs.pFormatConstraints ) && ( bufferCollectionConstraints == rhs.bufferCollectionConstraints ) && ( flags == rhs.flags ); +# endif } - bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageConstraintsInfoFUCHSIA; + const void * pNext = {}; + uint32_t formatConstraintsCount = {}; + const VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints = {}; + VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA 
bufferCollectionConstraints = {}; + VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceBlendOperationAdvancedFeaturesEXT; + using Type = ImageConstraintsInfoFUCHSIA; }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT + struct ImageCopy { - using NativeType = VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT; + using NativeType = VkImageCopy; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageCopy( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource{ srcSubresource_ } + , srcOffset{ srcOffset_ } + , dstSubresource{ dstSubresource_ } + , dstOffset{ dstOffset_ } + , extent{ extent_ } + { + } -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( uint32_t advancedBlendMaxColorAttachments_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ ) - , advancedBlendIndependentBlend( advancedBlendIndependentBlend_ ) - , advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ ) - , advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ ) - , advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ ) - , advancedBlendAllOperations( advancedBlendAllOperations_ ) + VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCopy( *reinterpret_cast( &rhs ) ) {} + + ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageCopy & operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceBlendOperationAdvancedPropertiesEXT( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } - PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceBlendOperationAdvancedPropertiesEXT( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT { 
+ srcOffset = srcOffset_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceBlendOperationAdvancedPropertiesEXT & - operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } - PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + dstOffset = dstOffset_; return *this; } - operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + extent = extent_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT + operator VkImageCopy const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkImageCopy &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - advancedBlendMaxColorAttachments, - advancedBlendIndependentBlend, - advancedBlendNonPremultipliedSrcColor, - advancedBlendNonPremultipliedDstColor, - advancedBlendCorrelatedOverlap, - advancedBlendAllOperations ); + return std::tie( srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & ) const = default; + auto operator<=>( ImageCopy const & ) const = default; #else - bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments ) && - ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend ) && - ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor ) && - ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor ) && - ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap ) && ( advancedBlendAllOperations == rhs.advancedBlendAllOperations ); + return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) && + ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); # endif } - bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT; - void * pNext = {}; - uint32_t 
advancedBlendMaxColorAttachments = {}; - VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend = {}; - VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor = {}; - VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor = {}; - VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap = {}; - VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations = {}; - }; - - template <> - struct CppType - { - using Type = PhysicalDeviceBlendOperationAdvancedPropertiesEXT; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; }; - struct PhysicalDeviceBorderColorSwizzleFeaturesEXT + struct SubresourceLayout { - using NativeType = VkPhysicalDeviceBorderColorSwizzleFeaturesEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT; + using NativeType = VkSubresourceLayout; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , borderColorSwizzle( borderColorSwizzle_ ) - , borderColorSwizzleFromImage( borderColorSwizzleFromImage_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubresourceLayout( VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {} ) VULKAN_HPP_NOEXCEPT + : offset{ offset_ } + , size{ size_ } + , rowPitch{ rowPitch_ } + , arrayPitch{ arrayPitch_ } + , depthPitch{ depthPitch_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SubresourceLayout( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceBorderColorSwizzleFeaturesEXT( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceBorderColorSwizzleFeaturesEXT( *reinterpret_cast( &rhs ) ) + SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT : SubresourceLayout( *reinterpret_cast( &rhs ) ) {} + + SubresourceLayout & operator=( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SubresourceLayout & operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceBorderColorSwizzleFeaturesEXT & operator=( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } - PhysicalDeviceBorderColorSwizzleFeaturesEXT & operator=( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) 
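// Illustrative usage sketch, not part of the generated header: how the ImageCopy
// struct introduced above is typically filled before recording a copy command.
// Assumes the default "vk" namespace alias; the 512x512 single-mip extent is a
// hypothetical example value.
#include <vulkan/vulkan.hpp>

vk::ImageCopy makeFullColorCopyRegion()
{
  // Copy the whole color aspect of mip 0 / layer 0, source and destination at the origin.
  vk::ImageSubresourceLayers layers( vk::ImageAspectFlagBits::eColor, /*mipLevel*/ 0, /*baseArrayLayer*/ 0, /*layerCount*/ 1 );
  return vk::ImageCopy()
    .setSrcSubresource( layers )
    .setSrcOffset( vk::Offset3D( 0, 0, 0 ) )
    .setDstSubresource( layers )
    .setDstOffset( vk::Offset3D( 0, 0, 0 ) )
    .setExtent( vk::Extent3D( 512, 512, 1 ) );
}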
VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + size = size_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setRowPitch( VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + rowPitch = rowPitch_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & - setBorderColorSwizzle( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setArrayPitch( VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ ) VULKAN_HPP_NOEXCEPT { - borderColorSwizzle = borderColorSwizzle_; + arrayPitch = arrayPitch_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & - setBorderColorSwizzleFromImage( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setDepthPitch( VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ ) VULKAN_HPP_NOEXCEPT { - borderColorSwizzleFromImage = borderColorSwizzleFromImage_; + depthPitch = depthPitch_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkSubresourceLayout const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, borderColorSwizzle, borderColorSwizzleFromImage ); + return std::tie( offset, size, rowPitch, arrayPitch, depthPitch ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & ) const = default; + auto operator<=>( SubresourceLayout const & ) const = default; #else - bool operator==( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( borderColorSwizzle == rhs.borderColorSwizzle ) && - ( borderColorSwizzleFromImage == rhs.borderColorSwizzleFromImage ); + return ( offset == rhs.offset ) && ( size == rhs.size ) && ( rowPitch == rhs.rowPitch ) && ( arrayPitch == rhs.arrayPitch ) && + ( depthPitch == rhs.depthPitch ); # endif } - bool operator!=( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle = {}; - VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage = {}; - }; - - template <> - struct CppType 
- { - using Type = PhysicalDeviceBorderColorSwizzleFeaturesEXT; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceSize rowPitch = {}; + VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {}; + VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {}; }; - struct PhysicalDeviceBufferDeviceAddressFeatures + struct ImageDrmFormatModifierExplicitCreateInfoEXT { - using NativeType = VkPhysicalDeviceBufferDeviceAddressFeatures; + using NativeType = VkImageDrmFormatModifierExplicitCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , bufferDeviceAddress( bufferDeviceAddress_ ) - , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ) - , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_ = {}, + uint32_t drmFormatModifierPlaneCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , drmFormatModifier{ drmFormatModifier_ } + , drmFormatModifierPlaneCount{ drmFormatModifierPlaneCount_ } + , pPlaneLayouts{ pPlaneLayouts_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceBufferDeviceAddressFeatures( *reinterpret_cast( &rhs ) ) + ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageDrmFormatModifierExplicitCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceBufferDeviceAddressFeatures & operator=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageDrmFormatModifierExplicitCreateInfoEXT( + uint64_t drmFormatModifier_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & planeLayouts_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , drmFormatModifier( drmFormatModifier_ ) + , drmFormatModifierPlaneCount( static_cast( planeLayouts_.size() ) ) + , pPlaneLayouts( planeLayouts_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceBufferDeviceAddressFeatures & operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( 
ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & - setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT { - bufferDeviceAddress = bufferDeviceAddress_; + drmFormatModifier = drmFormatModifier_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & - setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & + setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT { - bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; + drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & - setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & + setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT { - bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; + pPlaneLayouts = pPlaneLayouts_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceBufferDeviceAddressFeatures const &() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageDrmFormatModifierExplicitCreateInfoEXT & + setPlaneLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & planeLayouts_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + drmFormatModifierPlaneCount = static_cast( planeLayouts_.size() ); + pPlaneLayouts = planeLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageDrmFormatModifierExplicitCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceBufferDeviceAddressFeatures &() VULKAN_HPP_NOEXCEPT + operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -46551,1749 +49086,1877 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + const void * const &, + uint64_t const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::SubresourceLayout * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice ); 
+ return std::tie( sType, pNext, drmFormatModifier, drmFormatModifierPlaneCount, pPlaneLayouts ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceBufferDeviceAddressFeatures const & ) const = default; + auto operator<=>( ImageDrmFormatModifierExplicitCreateInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) && - ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) && - ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) && + ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && ( pPlaneLayouts == rhs.pPlaneLayouts ); # endif } - bool operator!=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; + const void * pNext = {}; + uint64_t drmFormatModifier = {}; + uint32_t drmFormatModifierPlaneCount = {}; + const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceBufferDeviceAddressFeatures; + using Type = ImageDrmFormatModifierExplicitCreateInfoEXT; }; - using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures; - struct PhysicalDeviceBufferDeviceAddressFeaturesEXT + struct ImageDrmFormatModifierListCreateInfoEXT { - using NativeType = VkPhysicalDeviceBufferDeviceAddressFeaturesEXT; + using NativeType = VkImageDrmFormatModifierListCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierListCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , bufferDeviceAddress( bufferDeviceAddress_ ) - , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ) - , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( uint32_t 
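// Illustrative usage sketch, not part of the generated header: building the
// ImageDrmFormatModifierExplicitCreateInfoEXT defined above from explicit
// SubresourceLayout plane descriptions via its ArrayProxyNoTemporaries
// constructor. The linear modifier value and single-plane layout are assumptions;
// real values come from the allocator or a prior driver query.
#include <array>
#include <vulkan/vulkan.hpp>

vk::ImageDrmFormatModifierExplicitCreateInfoEXT
  makeExplicitModifierInfo( const std::array<vk::SubresourceLayout, 1> & planes )
{
  constexpr uint64_t kLinearModifier = 0;  // DRM_FORMAT_MOD_LINEAR from drm_fourcc.h (assumption)
  // The convenience constructor fills drmFormatModifierPlaneCount / pPlaneLayouts
  // from the array; "planes" must outlive any use of the returned struct, since
  // only the pointer is stored.
  return vk::ImageDrmFormatModifierExplicitCreateInfoEXT( kLinearModifier, planes );
}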
drmFormatModifierCount_ = {}, + const uint64_t * pDrmFormatModifiers_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , drmFormatModifierCount{ drmFormatModifierCount_ } + , pDrmFormatModifiers{ pDrmFormatModifiers_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceBufferDeviceAddressFeaturesEXT( *reinterpret_cast( &rhs ) ) + ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageDrmFormatModifierListCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageDrmFormatModifierListCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drmFormatModifiers_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), drmFormatModifierCount( static_cast( drmFormatModifiers_.size() ) ), pDrmFormatModifiers( drmFormatModifiers_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImageDrmFormatModifierListCreateInfoEXT & operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageDrmFormatModifierListCreateInfoEXT & operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & - setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT { - bufferDeviceAddress = bufferDeviceAddress_; + drmFormatModifierCount = drmFormatModifierCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & - setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setPDrmFormatModifiers( const uint64_t * pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT { - bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; + pDrmFormatModifiers = pDrmFormatModifiers_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & - setBufferDeviceAddressMultiDevice( 
VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageDrmFormatModifierListCreateInfoEXT & + setDrmFormatModifiers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT { - bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; + drmFormatModifierCount = static_cast( drmFormatModifiers_.size() ); + pDrmFormatModifiers = drmFormatModifiers_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkImageDrmFormatModifierListCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice ); + return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifiers ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & ) const = default; + auto operator<=>( ImageDrmFormatModifierListCreateInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) && - ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) && - ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) && + ( pDrmFormatModifiers == rhs.pDrmFormatModifiers ); # endif } - bool operator!=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT; + const void * pNext = {}; + uint32_t drmFormatModifierCount = {}; + const uint64_t * pDrmFormatModifiers = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceBufferDeviceAddressFeaturesEXT; + using Type = ImageDrmFormatModifierListCreateInfoEXT; }; - using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT; - struct 
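// Illustrative usage sketch, not part of the generated header: chaining the
// ImageDrmFormatModifierListCreateInfoEXT defined above into an ImageCreateInfo
// so the driver may pick any of the listed modifiers. The image parameters are
// hypothetical, and vk::StructureChain is used here only as one convenient way
// to keep the pNext link and both structs alive together.
#include <vector>
#include <vulkan/vulkan.hpp>

vk::StructureChain<vk::ImageCreateInfo, vk::ImageDrmFormatModifierListCreateInfoEXT>
  makeDrmImageCreateChain( const std::vector<uint64_t> & modifiers )
{
  vk::ImageCreateInfo imageInfo;
  imageInfo.imageType   = vk::ImageType::e2D;
  imageInfo.format      = vk::Format::eR8G8B8A8Unorm;
  imageInfo.extent      = vk::Extent3D( 256, 256, 1 );
  imageInfo.mipLevels   = 1;
  imageInfo.arrayLayers = 1;
  imageInfo.samples     = vk::SampleCountFlagBits::e1;
  imageInfo.tiling      = vk::ImageTiling::eDrmFormatModifierEXT;  // required when this struct is chained
  imageInfo.usage       = vk::ImageUsageFlagBits::eSampled;

  // Count and pointer are filled from the vector; the vector must outlive image creation.
  vk::ImageDrmFormatModifierListCreateInfoEXT modifierList( modifiers );

  // The chain copies both elements and wires modifierList into imageInfo.pNext.
  return vk::StructureChain<vk::ImageCreateInfo, vk::ImageDrmFormatModifierListCreateInfoEXT>( imageInfo, modifierList );
}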
PhysicalDeviceCoherentMemoryFeaturesAMD + struct ImageDrmFormatModifierPropertiesEXT { - using NativeType = VkPhysicalDeviceCoherentMemoryFeaturesAMD; + using NativeType = VkImageDrmFormatModifierPropertiesEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceCoherentMemory( deviceCoherentMemory_ ) - { - } - - VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierPropertiesEXT; - PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceCoherentMemoryFeaturesAMD( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , drmFormatModifier{ drmFormatModifier_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageDrmFormatModifierPropertiesEXT( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + ImageDrmFormatModifierPropertiesEXT & operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD & - setDeviceCoherentMemory( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT + ImageDrmFormatModifierPropertiesEXT & operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - deviceCoherentMemory = deviceCoherentMemory_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT + operator VkImageDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() VULKAN_HPP_NOEXCEPT + operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() 
const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, deviceCoherentMemory ); + return std::tie( sType, pNext, drmFormatModifier ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceCoherentMemoryFeaturesAMD const & ) const = default; + auto operator<=>( ImageDrmFormatModifierPropertiesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceCoherentMemory == rhs.deviceCoherentMemory ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ); # endif } - bool operator!=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT; + void * pNext = {}; + uint64_t drmFormatModifier = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceCoherentMemoryFeaturesAMD; + using Type = ImageDrmFormatModifierPropertiesEXT; }; - struct PhysicalDeviceColorWriteEnableFeaturesEXT + struct ImageFormatListCreateInfo { - using NativeType = VkPhysicalDeviceColorWriteEnableFeaturesEXT; + using NativeType = VkImageFormatListCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatListCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , colorWriteEnable( colorWriteEnable_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( uint32_t viewFormatCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , viewFormatCount{ viewFormatCount_ } + , pViewFormats{ pViewFormats_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceColorWriteEnableFeaturesEXT( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceColorWriteEnableFeaturesEXT( *reinterpret_cast( &rhs ) ) + ImageFormatListCreateInfo( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageFormatListCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceColorWriteEnableFeaturesEXT & operator=( PhysicalDeviceColorWriteEnableFeaturesEXT 
const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageFormatListCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), viewFormatCount( static_cast( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceColorWriteEnableFeaturesEXT & operator=( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImageFormatListCreateInfo & operator=( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageFormatListCreateInfo & operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT & - setColorWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT { - colorWriteEnable = colorWriteEnable_; + viewFormatCount = viewFormatCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceColorWriteEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pViewFormats = pViewFormats_; + return *this; } - operator VkPhysicalDeviceColorWriteEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ImageFormatListCreateInfo & + setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewFormats_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + viewFormatCount = static_cast( viewFormats_.size() ); + pViewFormats = viewFormats_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageFormatListCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, colorWriteEnable ); + return std::tie( sType, pNext, viewFormatCount, pViewFormats ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceColorWriteEnableFeaturesEXT const & ) const = default; + auto operator<=>( ImageFormatListCreateInfo const & ) const = default; #else - bool operator==( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == 
rhs.pNext ) && ( colorWriteEnable == rhs.colorWriteEnable ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewFormatCount == rhs.viewFormatCount ) && ( pViewFormats == rhs.pViewFormats ); # endif } - bool operator!=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatListCreateInfo; + const void * pNext = {}; + uint32_t viewFormatCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pViewFormats = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceColorWriteEnableFeaturesEXT; + using Type = ImageFormatListCreateInfo; }; - struct PhysicalDeviceComputeShaderDerivativesFeaturesNV + using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo; + + struct ImageFormatProperties2 { - using NativeType = VkPhysicalDeviceComputeShaderDerivativesFeaturesNV; + using NativeType = VkImageFormatProperties2; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , computeDerivativeGroupQuads( computeDerivativeGroupQuads_ ) - , computeDerivativeGroupLinear( computeDerivativeGroupLinear_ ) - { - } - - VULKAN_HPP_CONSTEXPR - PhysicalDeviceComputeShaderDerivativesFeaturesNV( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatProperties2; - PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceComputeShaderDerivativesFeaturesNV( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageFormatProperties2( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageFormatProperties{ imageFormatProperties_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + VULKAN_HPP_CONSTEXPR ImageFormatProperties2( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + : 
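// Illustrative usage sketch, not part of the generated header: using the
// ImageFormatListCreateInfo defined above to declare, at image creation time,
// which view formats a mutable-format image will be reinterpreted as. The
// UNORM + SRGB pair and the other image parameters are assumptions for the example.
#include <array>
#include <vulkan/vulkan.hpp>

static constexpr std::array<vk::Format, 2> kViewFormats = { vk::Format::eR8G8B8A8Unorm, vk::Format::eR8G8B8A8Srgb };

vk::StructureChain<vk::ImageCreateInfo, vk::ImageFormatListCreateInfo> makeMutableFormatImageInfo()
{
  vk::ImageCreateInfo imageInfo;
  imageInfo.flags       = vk::ImageCreateFlagBits::eMutableFormat;
  imageInfo.imageType   = vk::ImageType::e2D;
  imageInfo.format      = kViewFormats[0];
  imageInfo.extent      = vk::Extent3D( 1024, 1024, 1 );
  imageInfo.mipLevels   = 1;
  imageInfo.arrayLayers = 1;
  imageInfo.samples     = vk::SampleCountFlagBits::e1;
  imageInfo.tiling      = vk::ImageTiling::eOptimal;
  imageInfo.usage       = vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eSampled;

  // kViewFormats has static storage duration, so the stored pViewFormats pointer stays valid.
  vk::ImageFormatListCreateInfo formatList( kViewFormats );

  return vk::StructureChain<vk::ImageCreateInfo, vk::ImageFormatListCreateInfo>( imageInfo, formatList );
}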
ImageFormatProperties2( *reinterpret_cast( &rhs ) ) { - pNext = pNext_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & - setComputeDerivativeGroupQuads( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT - { - computeDerivativeGroupQuads = computeDerivativeGroupQuads_; - return *this; - } + ImageFormatProperties2 & operator=( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & - setComputeDerivativeGroupLinear( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT + ImageFormatProperties2 & operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT { - computeDerivativeGroupLinear = computeDerivativeGroupLinear_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + operator VkImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV &() VULKAN_HPP_NOEXCEPT + operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, computeDerivativeGroupQuads, computeDerivativeGroupLinear ); + return std::tie( sType, pNext, imageFormatProperties ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & ) const = default; + auto operator<=>( ImageFormatProperties2 const & ) const = default; #else - bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads ) && - ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFormatProperties == rhs.imageFormatProperties ); # endif } - bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads = {}; - VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceComputeShaderDerivativesFeaturesNV; + using Type = ImageFormatProperties2; }; - struct PhysicalDeviceConditionalRenderingFeaturesEXT + using ImageFormatProperties2KHR = ImageFormatProperties2; + + struct 
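// Illustrative usage sketch, not part of the generated header: retrieving the
// ImageFormatProperties2 output structure defined above through the vulkan.hpp
// wrapper of vkGetPhysicalDeviceImageFormatProperties2. Assumes the default
// enhanced mode (errors reported via exceptions) and a valid vk::PhysicalDevice
// obtained elsewhere; the queried format/usage combination is an example choice.
#include <vulkan/vulkan.hpp>

vk::ImageFormatProperties2 queryColorAttachmentLimits( vk::PhysicalDevice physicalDevice )
{
  vk::PhysicalDeviceImageFormatInfo2 info( vk::Format::eR8G8B8A8Unorm,
                                           vk::ImageType::e2D,
                                           vk::ImageTiling::eOptimal,
                                           vk::ImageUsageFlagBits::eColorAttachment );
  // Throws a vk::SystemError (eErrorFormatNotSupported) if the combination is unsupported.
  return physicalDevice.getImageFormatProperties2( info );
}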
ImageMemoryBarrier { - using NativeType = VkPhysicalDeviceConditionalRenderingFeaturesEXT; + using NativeType = VkImageMemoryBarrier; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , conditionalRendering( conditionalRendering_ ) - , inheritedConditionalRendering( inheritedConditionalRendering_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t srcQueueFamilyIndex_ = {}, + uint32_t dstQueueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcAccessMask{ srcAccessMask_ } + , dstAccessMask{ dstAccessMask_ } + , oldLayout{ oldLayout_ } + , newLayout{ newLayout_ } + , srcQueueFamilyIndex{ srcQueueFamilyIndex_ } + , dstQueueFamilyIndex{ dstQueueFamilyIndex_ } + , image{ image_ } + , subresourceRange{ subresourceRange_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceConditionalRenderingFeaturesEXT( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceConditionalRenderingFeaturesEXT( *reinterpret_cast( &rhs ) ) + ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT : ImageMemoryBarrier( *reinterpret_cast( &rhs ) ) {} + + ImageMemoryBarrier & operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageMemoryBarrier & operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } - PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + srcAccessMask = srcAccessMask_; return *this; } -#if !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + dstAccessMask = dstAccessMask_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & - setConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT { - conditionalRendering = conditionalRendering_; + oldLayout = oldLayout_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & - setInheritedConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT { - inheritedConditionalRendering = inheritedConditionalRendering_; + newLayout = newLayout_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; } - operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & + setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT + { + subresourceRange = subresourceRange_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, conditionalRendering, inheritedConditionalRendering ); + return std::tie( sType, pNext, srcAccessMask, dstAccessMask, oldLayout, newLayout, srcQueueFamilyIndex, dstQueueFamilyIndex, image, subresourceRange ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceConditionalRenderingFeaturesEXT const & ) const = default; + auto operator<=>( ImageMemoryBarrier const & ) const = default; #else - bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conditionalRendering == rhs.conditionalRendering ) && - ( inheritedConditionalRendering == 
rhs.inheritedConditionalRendering ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) && + ( oldLayout == rhs.oldLayout ) && ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && + ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( image == rhs.image ) && ( subresourceRange == rhs.subresourceRange ); # endif } - bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering = {}; - VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t srcQueueFamilyIndex = {}; + uint32_t dstQueueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceConditionalRenderingFeaturesEXT; + using Type = ImageMemoryBarrier; }; - struct PhysicalDeviceConservativeRasterizationPropertiesEXT + struct ImageMemoryRequirementsInfo2 { - using NativeType = VkPhysicalDeviceConservativeRasterizationPropertiesEXT; + using NativeType = VkImageMemoryRequirementsInfo2; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryRequirementsInfo2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( float primitiveOverestimationSize_ = {}, - float maxExtraPrimitiveOverestimationSize_ = {}, - float extraPrimitiveOverestimationSizeGranularity_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , primitiveOverestimationSize( primitiveOverestimationSize_ ) - , maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ ) - , extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ ) - , primitiveUnderestimation( primitiveUnderestimation_ ) - , conservativePointAndLineRasterization( conservativePointAndLineRasterization_ ) - , degenerateTrianglesRasterized( degenerateTrianglesRasterized_ ) - , degenerateLinesRasterized( degenerateLinesRasterized_ ) - , fullyCoveredFragmentShaderInputVariable( 
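The ImageMemoryBarrier struct introduced above is the header's core synchronization type. As a rough usage sketch (not part of the generated header; the helper name and the `cmd`/`image` handles are assumed, and vulkan.hpp's default exception-throwing configuration is assumed), the chained setters shown in the hunk are typically used like this to record a layout transition:

#include <vulkan/vulkan.hpp>

// Illustrative helper only: transition a color image from transfer-dst to shader-read.
void recordTransferToShaderReadBarrier( vk::CommandBuffer cmd, vk::Image image )
{
  vk::ImageMemoryBarrier barrier = vk::ImageMemoryBarrier()
                                     .setSrcAccessMask( vk::AccessFlagBits::eTransferWrite )
                                     .setDstAccessMask( vk::AccessFlagBits::eShaderRead )
                                     .setOldLayout( vk::ImageLayout::eTransferDstOptimal )
                                     .setNewLayout( vk::ImageLayout::eShaderReadOnlyOptimal )
                                     .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
                                     .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
                                     .setImage( image )
                                     .setSubresourceRange( { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );

  // One image barrier, no global or buffer barriers.
  cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer, vk::PipelineStageFlagBits::eFragmentShader, {}, nullptr, nullptr, barrier );
}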
fullyCoveredFragmentShaderInputVariable_ ) - , conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , image{ image_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceConservativeRasterizationPropertiesEXT( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceConservativeRasterizationPropertiesEXT( *reinterpret_cast( &rhs ) ) + ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageMemoryRequirementsInfo2( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceConservativeRasterizationPropertiesEXT & - operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageMemoryRequirementsInfo2 & operator=( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImageMemoryRequirementsInfo2 & operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + image = image_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - primitiveOverestimationSize, - maxExtraPrimitiveOverestimationSize, - extraPrimitiveOverestimationSizeGranularity, - primitiveUnderestimation, - conservativePointAndLineRasterization, - degenerateTrianglesRasterized, - degenerateLinesRasterized, - fullyCoveredFragmentShaderInputVariable, - conservativeRasterizationPostDepthCoverage ); + return std::tie( sType, pNext, image ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceConservativeRasterizationPropertiesEXT const & ) const = default; + auto operator<=>( ImageMemoryRequirementsInfo2 
const & ) const = default; #else - bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( primitiveOverestimationSize == rhs.primitiveOverestimationSize ) && - ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize ) && - ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity ) && - ( primitiveUnderestimation == rhs.primitiveUnderestimation ) && - ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization ) && - ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized ) && ( degenerateLinesRasterized == rhs.degenerateLinesRasterized ) && - ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable ) && - ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ); # endif } - bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; - void * pNext = {}; - float primitiveOverestimationSize = {}; - float maxExtraPrimitiveOverestimationSize = {}; - float extraPrimitiveOverestimationSizeGranularity = {}; - VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation = {}; - VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization = {}; - VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized = {}; - VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized = {}; - VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable = {}; - VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT; + using Type = ImageMemoryRequirementsInfo2; }; - struct PhysicalDeviceCooperativeMatrixFeaturesNV + using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct ImagePipeSurfaceCreateInfoFUCHSIA { - using NativeType = VkPhysicalDeviceCooperativeMatrixFeaturesNV; + using NativeType = VkImagePipeSurfaceCreateInfoFUCHSIA; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , cooperativeMatrix( cooperativeMatrix_ ) - , cooperativeMatrixRobustBufferAccess( 
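ImageMemoryRequirementsInfo2 (and its KHR alias) is the query-input struct for vkGetImageMemoryRequirements2. A minimal sketch of the enhanced vulkan.hpp call, assuming an existing `device` and `image` and the default configuration; the helper name is made up for illustration:

#include <vulkan/vulkan.hpp>

// Illustrative helper: size/alignment/memoryTypeBits end up in .memoryRequirements of the result.
vk::MemoryRequirements2 queryImageRequirements( vk::Device device, vk::Image image )
{
  vk::ImageMemoryRequirementsInfo2 info = vk::ImageMemoryRequirementsInfo2().setImage( image );
  return device.getImageMemoryRequirements2( info );
}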
cooperativeMatrixRobustBufferAccess_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {}, + zx_handle_t imagePipeHandle_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , imagePipeHandle{ imagePipeHandle_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceCooperativeMatrixFeaturesNV( *reinterpret_cast( &rhs ) ) + ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImagePipeSurfaceCreateInfoFUCHSIA( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImagePipeSurfaceCreateInfoFUCHSIA & operator=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + ImagePipeSurfaceCreateInfoFUCHSIA & operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & - setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT { - cooperativeMatrix = cooperativeMatrix_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & - setCooperativeMatrixRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setImagePipeHandle( zx_handle_t imagePipeHandle_ ) VULKAN_HPP_NOEXCEPT { - cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_; + imagePipeHandle = imagePipeHandle_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + operator VkImagePipeSurfaceCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() VULKAN_HPP_NOEXCEPT + operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { - return 
*reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, cooperativeMatrix, cooperativeMatrixRobustBufferAccess ); + return std::tie( sType, pNext, flags, imagePipeHandle ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesNV const & ) const = default; -#else - bool operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrix == rhs.cooperativeMatrix ) && - ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess ); + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } # endif + + bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ) == 0 ); } - bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {}; - VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {}; + zx_handle_t imagePipeHandle = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceCooperativeMatrixFeaturesNV; + using Type = ImagePipeSurfaceCreateInfoFUCHSIA; }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - struct PhysicalDeviceCooperativeMatrixPropertiesNV + struct ImagePlaneMemoryRequirementsInfo { - using NativeType = VkPhysicalDeviceCooperativeMatrixPropertiesNV; + using NativeType = VkImagePlaneMemoryRequirementsInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagePlaneMemoryRequirementsInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {}, - void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImagePlaneMemoryRequirementsInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , planeAspect{ planeAspect_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceCooperativeMatrixPropertiesNV( *reinterpret_cast( &rhs ) ) + ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ImagePlaneMemoryRequirementsInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImagePlaneMemoryRequirementsInfo & operator=( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + ImagePlaneMemoryRequirementsInfo & operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + planeAspect = planeAspect_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImagePlaneMemoryRequirementsInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, cooperativeMatrixSupportedStages ); + return std::tie( sType, pNext, planeAspect ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const & ) const = default; + auto operator<=>( ImagePlaneMemoryRequirementsInfo const & ) const = default; #else - bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( 
VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect ); # endif } - bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceCooperativeMatrixPropertiesNV; + using Type = ImagePlaneMemoryRequirementsInfo; }; - struct PhysicalDeviceCornerSampledImageFeaturesNV - { - using NativeType = VkPhysicalDeviceCornerSampledImageFeaturesNV; + using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV; + struct ImageResolve + { + using NativeType = VkImageResolve; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , cornerSampledImage( cornerSampledImage_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageResolve( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource{ srcSubresource_ } + , srcOffset{ srcOffset_ } + , dstSubresource{ dstSubresource_ } + , dstOffset{ dstOffset_ } + , extent{ extent_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceCornerSampledImageFeaturesNV( *reinterpret_cast( &rhs ) ) + ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT : ImageResolve( *reinterpret_cast( &rhs ) ) {} + + ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageResolve & operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceCornerSampledImageFeaturesNV & operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
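ImagePlaneMemoryRequirementsInfo is only meaningful when chained behind ImageMemoryRequirementsInfo2 for an image created with the disjoint flag. A hedged sketch under the same assumptions as above (handles assumed, helper name invented for illustration):

#include <vulkan/vulkan.hpp>

// Illustrative helper: per-plane requirements for plane 0 of a disjoint multi-planar image.
vk::MemoryRequirements2 queryPlane0Requirements( vk::Device device, vk::Image disjointImage )
{
  vk::ImagePlaneMemoryRequirementsInfo planeInfo =
    vk::ImagePlaneMemoryRequirementsInfo().setPlaneAspect( vk::ImageAspectFlagBits::ePlane0 );

  vk::ImageMemoryRequirementsInfo2 info =
    vk::ImageMemoryRequirementsInfo2().setImage( disjointImage ).setPNext( &planeInfo );

  return device.getImageMemoryRequirements2( info );
}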
VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } - PhysicalDeviceCornerSampledImageFeaturesNV & operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + srcOffset = srcOffset_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + dstSubresource = dstSubresource_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV & - setCornerSampledImage( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT { - cornerSampledImage = cornerSampledImage_; + dstOffset = dstOffset_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceCornerSampledImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + extent = extent_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() VULKAN_HPP_NOEXCEPT + operator VkImageResolve const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkImageResolve &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, cornerSampledImage ); + return std::tie( srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const & ) const = default; + auto operator<=>( ImageResolve const & ) const = default; #else - bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cornerSampledImage == rhs.cornerSampledImage ); + return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) && + ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); # endif } - bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage = {}; - }; - - template <> - struct 
CppType - { - using Type = PhysicalDeviceCornerSampledImageFeaturesNV; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; }; - struct PhysicalDeviceCoverageReductionModeFeaturesNV + struct ImageResolve2 { - using NativeType = VkPhysicalDeviceCoverageReductionModeFeaturesNV; + using NativeType = VkImageResolve2; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageResolve2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , coverageReductionMode( coverageReductionMode_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageResolve2( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcSubresource{ srcSubresource_ } + , srcOffset{ srcOffset_ } + , dstSubresource{ dstSubresource_ } + , dstOffset{ dstOffset_ } + , extent{ extent_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceCoverageReductionModeFeaturesNV( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageResolve2( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceCoverageReductionModeFeaturesNV( *reinterpret_cast( &rhs ) ) + ImageResolve2( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageResolve2( *reinterpret_cast( &rhs ) ) {} + + ImageResolve2 & operator=( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageResolve2 & operator=( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } - PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + srcSubresource = srcSubresource_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void * 
pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + srcOffset = srcOffset_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & - setCoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT { - coverageReductionMode = coverageReductionMode_; + dstSubresource = dstSubresource_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + dstOffset = dstOffset_; + return *this; } - operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + extent = extent_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageResolve2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageResolve2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, coverageReductionMode ); + return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceCoverageReductionModeFeaturesNV const & ) const = default; + auto operator<=>( ImageResolve2 const & ) const = default; #else - bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageResolve2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( coverageReductionMode == rhs.coverageReductionMode ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && + ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); # endif } - bool operator!=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageResolve2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageResolve2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; }; template <> - struct CppType + struct CppType { - using Type = 
PhysicalDeviceCoverageReductionModeFeaturesNV; + using Type = ImageResolve2; }; - struct PhysicalDeviceCustomBorderColorFeaturesEXT + using ImageResolve2KHR = ImageResolve2; + + struct ImageSparseMemoryRequirementsInfo2 { - using NativeType = VkPhysicalDeviceCustomBorderColorFeaturesEXT; + using NativeType = VkImageSparseMemoryRequirementsInfo2; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSparseMemoryRequirementsInfo2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , customBorderColors( customBorderColors_ ) - , customBorderColorWithoutFormat( customBorderColorWithoutFormat_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , image{ image_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceCustomBorderColorFeaturesEXT( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceCustomBorderColorFeaturesEXT( *reinterpret_cast( &rhs ) ) + ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageSparseMemoryRequirementsInfo2( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageSparseMemoryRequirementsInfo2 & operator=( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImageSparseMemoryRequirementsInfo2 & operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & - setCustomBorderColors( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ ) VULKAN_HPP_NOEXCEPT - { - customBorderColors = customBorderColors_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & - setCustomBorderColorWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ ) 
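ImageResolve2 is the region struct consumed by vkCmdResolveImage2 through VkResolveImageInfo2 (Vulkan 1.3, or the KHR variant). A sketch, assuming single-layer color images already in the transfer layouts; the function and handle names are illustrative only:

#include <vulkan/vulkan.hpp>

// Illustrative helper: resolve one full-size color region from a multisampled image.
void recordResolve( vk::CommandBuffer cmd, vk::Image msaaSrc, vk::Image singleSampleDst, vk::Extent3D extent )
{
  vk::ImageResolve2 region = vk::ImageResolve2()
                               .setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
                               .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
                               .setExtent( extent );

  vk::ResolveImageInfo2 info = vk::ResolveImageInfo2()
                                 .setSrcImage( msaaSrc )
                                 .setSrcImageLayout( vk::ImageLayout::eTransferSrcOptimal )
                                 .setDstImage( singleSampleDst )
                                 .setDstImageLayout( vk::ImageLayout::eTransferDstOptimal )
                                 .setRegions( region );

  cmd.resolveImage2( info );
}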
VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT { - customBorderColorWithoutFormat = customBorderColorWithoutFormat_; + image = image_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkImageSparseMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceCustomBorderColorFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, customBorderColors, customBorderColorWithoutFormat ); + return std::tie( sType, pNext, image ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceCustomBorderColorFeaturesEXT const & ) const = default; + auto operator<=>( ImageSparseMemoryRequirementsInfo2 const & ) const = default; #else - bool operator==( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( customBorderColors == rhs.customBorderColors ) && - ( customBorderColorWithoutFormat == rhs.customBorderColorWithoutFormat ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ); # endif } - bool operator!=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 customBorderColors = {}; - VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceCustomBorderColorFeaturesEXT; + using Type = ImageSparseMemoryRequirementsInfo2; }; - struct PhysicalDeviceCustomBorderColorPropertiesEXT + using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2; + + struct ImageStencilUsageCreateInfo { - using NativeType = VkPhysicalDeviceCustomBorderColorPropertiesEXT; + using NativeType = VkImageStencilUsageCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageStencilUsageCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( uint32_t maxCustomBorderColorSamplers_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , 
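ImageSparseMemoryRequirementsInfo2 mirrors the previous query struct for sparse images; the enhanced vulkan.hpp wrapper returns a vector. Sketch under the same assumptions (image created with sparse binding flags):

#include <vector>
#include <vulkan/vulkan.hpp>

// Illustrative helper: enumerate sparse memory requirements for a sparse image.
std::vector<vk::SparseImageMemoryRequirements2> querySparseRequirements( vk::Device device, vk::Image sparseImage )
{
  vk::ImageSparseMemoryRequirementsInfo2 info = vk::ImageSparseMemoryRequirementsInfo2().setImage( sparseImage );
  return device.getImageSparseMemoryRequirements2( info );
}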
maxCustomBorderColorSamplers( maxCustomBorderColorSamplers_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stencilUsage{ stencilUsage_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceCustomBorderColorPropertiesEXT( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceCustomBorderColorPropertiesEXT( *reinterpret_cast( &rhs ) ) + ImageStencilUsageCreateInfo( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageStencilUsageCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageStencilUsageCreateInfo & operator=( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImageStencilUsageCreateInfo & operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceCustomBorderColorPropertiesEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + stencilUsage = stencilUsage_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageStencilUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, maxCustomBorderColorSamplers ); + return std::tie( sType, pNext, stencilUsage ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceCustomBorderColorPropertiesEXT const & ) const = default; + auto operator<=>( ImageStencilUsageCreateInfo const & ) const = default; #else - bool operator==( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxCustomBorderColorSamplers == 
rhs.maxCustomBorderColorSamplers ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilUsage == rhs.stencilUsage ); # endif } - bool operator!=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT; - void * pNext = {}; - uint32_t maxCustomBorderColorSamplers = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageStencilUsageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceCustomBorderColorPropertiesEXT; + using Type = ImageStencilUsageCreateInfo; }; - struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV - { - using NativeType = VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + struct ImageSwapchainCreateInfoKHR + { + using NativeType = VkImageSwapchainCreateInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSwapchainCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , swapchain{ swapchain_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) - VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( *reinterpret_cast( &rhs ) ) + ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageSwapchainCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & - operator=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageSwapchainCreateInfoKHR & operator=( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & - operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + ImageSwapchainCreateInfoKHR & operator=( VkImageSwapchainCreateInfoKHR const & rhs ) 
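ImageStencilUsageCreateInfo is consumed through the pNext chain of VkImageCreateInfo to give the stencil aspect its own usage mask. A rough sketch, assuming a depth/stencil format the device supports and the default exception-throwing configuration; the helper name is invented:

#include <vulkan/vulkan.hpp>

// Illustrative helper: depth aspect is sampled, stencil aspect restricted to attachment use only.
vk::Image createDepthStencilImage( vk::Device device, vk::Extent2D extent )
{
  vk::ImageStencilUsageCreateInfo stencilUsage =
    vk::ImageStencilUsageCreateInfo().setStencilUsage( vk::ImageUsageFlagBits::eDepthStencilAttachment );

  vk::ImageCreateInfo info = vk::ImageCreateInfo()
                               .setPNext( &stencilUsage )
                               .setImageType( vk::ImageType::e2D )
                               .setFormat( vk::Format::eD32SfloatS8Uint )
                               .setExtent( { extent.width, extent.height, 1 } )
                               .setMipLevels( 1 )
                               .setArrayLayers( 1 )
                               .setSamples( vk::SampleCountFlagBits::e1 )
                               .setTiling( vk::ImageTiling::eOptimal )
                               .setUsage( vk::ImageUsageFlagBits::eDepthStencilAttachment | vk::ImageUsageFlagBits::eSampled )
                               .setInitialLayout( vk::ImageLayout::eUndefined );

  return device.createImage( info );
}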
VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & - setDedicatedAllocationImageAliasing( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT { - dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_; + swapchain = swapchain_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + operator VkImageSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &() VULKAN_HPP_NOEXCEPT + operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, dedicatedAllocationImageAliasing ); + return std::tie( sType, pNext, swapchain ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & ) const = default; + auto operator<=>( ImageSwapchainCreateInfoKHR const & ) const = default; #else - bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ); # endif } - bool operator!=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + using Type = ImageSwapchainCreateInfoKHR; }; - struct PhysicalDeviceDepthClampZeroOneFeaturesEXT + struct ImageViewASTCDecodeModeEXT { - using NativeType = VkPhysicalDeviceDepthClampZeroOneFeaturesEXT; + 
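ImageSwapchainCreateInfoKHR marks an image as backed by a swapchain rather than ordinary device memory; the bind side then goes through BindImageMemorySwapchainInfoKHR. A hedged sketch assuming VK_KHR_swapchain and an `imageInfo` that matches the swapchain's properties; names are illustrative only:

#include <vulkan/vulkan.hpp>

// Illustrative helper: create an image aliasing swapchain image 0 and bind it (no VkDeviceMemory involved).
vk::Image createSwapchainBackedImage( vk::Device device, vk::SwapchainKHR swapchain, vk::ImageCreateInfo imageInfo )
{
  vk::ImageSwapchainCreateInfoKHR swapchainInfo = vk::ImageSwapchainCreateInfoKHR().setSwapchain( swapchain );
  imageInfo.setPNext( &swapchainInfo );
  vk::Image image = device.createImage( imageInfo );

  vk::BindImageMemorySwapchainInfoKHR bindSwapchain =
    vk::BindImageMemorySwapchainInfoKHR().setSwapchain( swapchain ).setImageIndex( 0 );
  vk::BindImageMemoryInfo bindInfo = vk::BindImageMemoryInfo().setPNext( &bindSwapchain ).setImage( image );
  device.bindImageMemory2( bindInfo );

  return image;
}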
using NativeType = VkImageViewASTCDecodeModeEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAstcDecodeModeEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampZeroOneFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampZeroOne_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , depthClampZeroOne( depthClampZeroOne_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , decodeMode{ decodeMode_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampZeroOneFeaturesEXT( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceDepthClampZeroOneFeaturesEXT( VkPhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceDepthClampZeroOneFeaturesEXT( *reinterpret_cast( &rhs ) ) + ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewASTCDecodeModeEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceDepthClampZeroOneFeaturesEXT & operator=( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageViewASTCDecodeModeEXT & operator=( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceDepthClampZeroOneFeaturesEXT & operator=( VkPhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImageViewASTCDecodeModeEXT & operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampZeroOneFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampZeroOneFeaturesEXT & - setDepthClampZeroOne( VULKAN_HPP_NAMESPACE::Bool32 depthClampZeroOne_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT { - depthClampZeroOne = depthClampZeroOne_; + decodeMode = decodeMode_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceDepthClampZeroOneFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkImageViewASTCDecodeModeEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceDepthClampZeroOneFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return 
*reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, depthClampZeroOne ); + return std::tie( sType, pNext, decodeMode ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & ) const = default; + auto operator<=>( ImageViewASTCDecodeModeEXT const & ) const = default; #else - bool operator==( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClampZeroOne == rhs.depthClampZeroOne ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decodeMode == rhs.decodeMode ); # endif } - bool operator!=( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 depthClampZeroOne = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceDepthClampZeroOneFeaturesEXT; + using Type = ImageViewASTCDecodeModeEXT; }; - struct PhysicalDeviceDepthClipControlFeaturesEXT + struct ImageViewAddressPropertiesNVX { - using NativeType = VkPhysicalDeviceDepthClipControlFeaturesEXT; + using NativeType = VkImageViewAddressPropertiesNVX; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipControl_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , depthClipControl( depthClipControl_ ) - { - } - - VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAddressPropertiesNVX; - PhysicalDeviceDepthClipControlFeaturesEXT( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceDepthClipControlFeaturesEXT( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceAddress{ deviceAddress_ } + , size{ size_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceDepthClipControlFeaturesEXT & operator=( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( 
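ImageViewASTCDecodeModeEXT hangs off VkImageViewCreateInfo::pNext when VK_EXT_astc_decode_mode is enabled at device creation. A sketch, assuming an ASTC-compressed image and an enabled extension; helper name and handles are assumptions:

#include <vulkan/vulkan.hpp>

// Illustrative helper: view an ASTC image but ask the implementation to decode to 8-bit UNORM.
vk::ImageView createAstcUnormView( vk::Device device, vk::Image astcImage )
{
  vk::ImageViewASTCDecodeModeEXT decode = vk::ImageViewASTCDecodeModeEXT().setDecodeMode( vk::Format::eR8G8B8A8Unorm );

  vk::ImageViewCreateInfo viewInfo = vk::ImageViewCreateInfo()
                                       .setPNext( &decode )
                                       .setImage( astcImage )
                                       .setViewType( vk::ImageViewType::e2D )
                                       .setFormat( vk::Format::eAstc4x4UnormBlock )
                                       .setSubresourceRange( { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );

  return device.createImageView( viewInfo );
}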
ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceDepthClipControlFeaturesEXT & operator=( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImageViewAddressPropertiesNVX( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewAddressPropertiesNVX( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + ImageViewAddressPropertiesNVX & operator=( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT & - setDepthClipControl( VULKAN_HPP_NAMESPACE::Bool32 depthClipControl_ ) VULKAN_HPP_NOEXCEPT + ImageViewAddressPropertiesNVX & operator=( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT { - depthClipControl = depthClipControl_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceDepthClipControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkImageViewAddressPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceDepthClipControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkImageViewAddressPropertiesNVX &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std:: + tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, depthClipControl ); + return std::tie( sType, pNext, deviceAddress, size ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceDepthClipControlFeaturesEXT const & ) const = default; + auto operator<=>( ImageViewAddressPropertiesNVX const & ) const = default; #else - bool operator==( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClipControl == rhs.depthClipControl ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ) && ( size == rhs.size ); # endif } - bool operator!=( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 depthClipControl = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAddressPropertiesNVX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceDepthClipControlFeaturesEXT; + using Type = ImageViewAddressPropertiesNVX; }; - struct PhysicalDeviceDepthClipEnableFeaturesEXT + struct 
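// Illustrative usage sketch (not part of the generated header): ImageViewAddressPropertiesNVX,
// defined just above, is an output struct of VK_NVX_image_view_handle that is filled in by
// vk::Device::getImageViewAddressNVX(). Assumes <vulkan/vulkan.hpp> is included, the extension is
// enabled, and that `device` and `view` are valid handles supplied by the caller.
vk::DeviceSize queryImageViewSizeNVX( vk::Device device, vk::ImageView view )
{
  // Throws vk::SystemError on failure in the default (exceptions-enabled) configuration.
  vk::ImageViewAddressPropertiesNVX props = device.getImageViewAddressNVX( view );
  return props.size;  // props.deviceAddress holds the view's device address
}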
ImageViewCaptureDescriptorDataInfoEXT { - using NativeType = VkPhysicalDeviceDepthClipEnableFeaturesEXT; + using NativeType = VkImageViewCaptureDescriptorDataInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCaptureDescriptorDataInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , depthClipEnable( depthClipEnable_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewCaptureDescriptorDataInfoEXT( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageView{ imageView_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageViewCaptureDescriptorDataInfoEXT( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceDepthClipEnableFeaturesEXT( *reinterpret_cast( &rhs ) ) + ImageViewCaptureDescriptorDataInfoEXT( VkImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewCaptureDescriptorDataInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageViewCaptureDescriptorDataInfoEXT & operator=( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImageViewCaptureDescriptorDataInfoEXT & operator=( VkImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageViewCaptureDescriptorDataInfoEXT & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT { - depthClipEnable = depthClipEnable_; + imageView = imageView_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkImageViewCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - 
operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkImageViewCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, depthClipEnable ); + return std::tie( sType, pNext, imageView ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceDepthClipEnableFeaturesEXT const & ) const = default; + auto operator<=>( ImageViewCaptureDescriptorDataInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClipEnable == rhs.depthClipEnable ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ); # endif } - bool operator!=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCaptureDescriptorDataInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceDepthClipEnableFeaturesEXT; + using Type = ImageViewCaptureDescriptorDataInfoEXT; }; - struct PhysicalDeviceDepthStencilResolveProperties + struct ImageViewCreateInfo { - using NativeType = VkPhysicalDeviceDepthStencilResolveProperties; + using NativeType = VkImageViewCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthStencilResolveProperties; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, - VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , supportedDepthResolveModes( supportedDepthResolveModes_ ) - , supportedStencilResolveModes( supportedStencilResolveModes_ ) - , independentResolveNone( independentResolveNone_ ) - , independentResolve( independentResolve_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + 
VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , image{ image_ } + , viewType{ viewType_ } + , format{ format_ } + , components{ components_ } + , subresourceRange{ subresourceRange_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceDepthStencilResolveProperties( *reinterpret_cast( &rhs ) ) + ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ImageViewCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceDepthStencilResolveProperties & operator=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageViewCreateInfo & operator=( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceDepthStencilResolveProperties & operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT + ImageViewCreateInfo & operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceDepthStencilResolveProperties const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceDepthStencilResolveProperties &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setViewType( VULKAN_HPP_NAMESPACE::ImageViewType viewType_ ) VULKAN_HPP_NOEXCEPT + { + viewType = viewType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT + { + components = components_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & + setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT + { + subresourceRange = subresourceRange_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -48301,488 +50964,341 @@ namespace VULKAN_HPP_NAMESPACE auto # else 
std::tuple + const void * const &, + VULKAN_HPP_NAMESPACE::ImageViewCreateFlags const &, + VULKAN_HPP_NAMESPACE::Image const &, + VULKAN_HPP_NAMESPACE::ImageViewType const &, + VULKAN_HPP_NAMESPACE::Format const &, + VULKAN_HPP_NAMESPACE::ComponentMapping const &, + VULKAN_HPP_NAMESPACE::ImageSubresourceRange const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, supportedDepthResolveModes, supportedStencilResolveModes, independentResolveNone, independentResolve ); + return std::tie( sType, pNext, flags, image, viewType, format, components, subresourceRange ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceDepthStencilResolveProperties const & ) const = default; + auto operator<=>( ImageViewCreateInfo const & ) const = default; #else - bool operator==( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) && - ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) && ( independentResolveNone == rhs.independentResolveNone ) && - ( independentResolve == rhs.independentResolve ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( image == rhs.image ) && ( viewType == rhs.viewType ) && + ( format == rhs.format ) && ( components == rhs.components ) && ( subresourceRange == rhs.subresourceRange ); # endif } - bool operator!=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolveProperties; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {}; - VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {}; - VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {}; - VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageViewType viewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceDepthStencilResolveProperties; + using Type = ImageViewCreateInfo; }; - using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties; - struct PhysicalDeviceDescriptorIndexingFeatures + struct ImageViewHandleInfoNVX { - using NativeType = VkPhysicalDeviceDescriptorIndexingFeatures; + using NativeType = VkImageViewHandleInfoNVX; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewHandleInfoNVX; -#if !defined( 
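// Illustrative usage sketch (not part of the generated header): ImageViewCreateInfo, defined
// above, follows the usual vulkan.hpp pattern of a constexpr constructor plus chainable set*()
// members. Assumes <vulkan/vulkan.hpp> is included and that `device`, `image` and `format` are
// valid, caller-provided values; the subresource range selects one color mip and one array layer.
vk::ImageView makeColorView( vk::Device device, vk::Image image, vk::Format format )
{
  vk::ImageViewCreateInfo viewInfo{};
  viewInfo.setImage( image )
    .setViewType( vk::ImageViewType::e2D )
    .setFormat( format )
    .setSubresourceRange( vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
  return device.createImageView( viewInfo );  // throws vk::SystemError on failure
}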
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ) - , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ) - , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ) - , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ) - , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ) - , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ) - , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ) - , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ) - , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ) - , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ) - , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ) - , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ) - , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ) - , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ) - , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ) - , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ) - , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ) - , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ) - , 
descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ) - , runtimeDescriptorArray( runtimeDescriptorArray_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageView{ imageView_ } + , descriptorType{ descriptorType_ } + , sampler{ sampler_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceDescriptorIndexingFeatures( *reinterpret_cast( &rhs ) ) + ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewHandleInfoNVX( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceDescriptorIndexingFeatures & operator=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageViewHandleInfoNVX & operator=( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceDescriptorIndexingFeatures & operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + ImageViewHandleInfoNVX & operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & - setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT { - shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_; + imageView = imageView_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & - setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT { - shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_; + descriptorType = descriptorType_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & - setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & 
setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT { - shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_; + sampler = sampler_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & - setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + operator VkImageViewHandleInfoNVX const &() const VULKAN_HPP_NOEXCEPT { - shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & - setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT { - shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & - setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_; - return *this; + return std::tie( sType, pNext, imageView, descriptorType, sampler ); } +#endif - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & - setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageViewHandleInfoNVX const & ) const = default; +#else + bool operator==( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { - shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && ( descriptorType == rhs.descriptorType ) && + ( sampler == rhs.sampler ); +# endif } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & - setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { - shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & - setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT - { - shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; - return *this; - } + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewHandleInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + VULKAN_HPP_NAMESPACE::Sampler 
sampler = {}; + }; - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & - setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT - { - shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_; - return *this; - } + template <> + struct CppType + { + using Type = ImageViewHandleInfoNVX; + }; - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & - setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT - { - descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_; - return *this; - } + struct ImageViewMinLodCreateInfoEXT + { + using NativeType = VkImageViewMinLodCreateInfoEXT; - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & - setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewMinLodCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT( float minLod_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minLod{ minLod_ } { - descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & - setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT( ImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageViewMinLodCreateInfoEXT( VkImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewMinLodCreateInfoEXT( *reinterpret_cast( &rhs ) ) { - descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & - setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + ImageViewMinLodCreateInfoEXT & operator=( ImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ImageViewMinLodCreateInfoEXT & operator=( VkImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformTexelBufferUpdateAfterBind( - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & 
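// Illustrative usage sketch (not part of the generated header): ImageViewHandleInfoNVX, defined
// above, is the input to vk::Device::getImageViewHandleNVX() from VK_NVX_image_view_handle.
// Assumes <vulkan/vulkan.hpp> is included, the extension is enabled, and that `device`, `view`
// and `sampler` are valid handles; the descriptor type must match how the shader accesses the view.
uint32_t queryCombinedImageSamplerHandleNVX( vk::Device device, vk::ImageView view, vk::Sampler sampler )
{
  vk::ImageViewHandleInfoNVX handleInfo( view, vk::DescriptorType::eCombinedImageSampler, sampler );
  return device.getImageViewHandleNVX( handleInfo );
}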
setDescriptorBindingStorageTexelBufferUpdateAfterBind( - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT { - descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_; + minLod = minLod_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & - setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT + operator VkImageViewMinLodCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & - setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT + operator VkImageViewMinLodCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & - setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT - { - descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & - setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT - { - runtimeDescriptorArray = runtimeDescriptorArray_; - return *this; - } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkPhysicalDeviceDescriptorIndexingFeatures const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceDescriptorIndexingFeatures &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - shaderInputAttachmentArrayDynamicIndexing, - shaderUniformTexelBufferArrayDynamicIndexing, - shaderStorageTexelBufferArrayDynamicIndexing, - shaderUniformBufferArrayNonUniformIndexing, - shaderSampledImageArrayNonUniformIndexing, - shaderStorageBufferArrayNonUniformIndexing, - shaderStorageImageArrayNonUniformIndexing, - shaderInputAttachmentArrayNonUniformIndexing, - shaderUniformTexelBufferArrayNonUniformIndexing, - shaderStorageTexelBufferArrayNonUniformIndexing, - descriptorBindingUniformBufferUpdateAfterBind, - descriptorBindingSampledImageUpdateAfterBind, - descriptorBindingStorageImageUpdateAfterBind, - descriptorBindingStorageBufferUpdateAfterBind, - descriptorBindingUniformTexelBufferUpdateAfterBind, - descriptorBindingStorageTexelBufferUpdateAfterBind, - descriptorBindingUpdateUnusedWhilePending, - descriptorBindingPartiallyBound, - descriptorBindingVariableDescriptorCount, - runtimeDescriptorArray ); + return std::tie( sType, pNext, minLod ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceDescriptorIndexingFeatures const & ) const = default; + auto operator<=>( ImageViewMinLodCreateInfoEXT const & ) const = default; #else - bool operator==( 
PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageViewMinLodCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && - ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) && - ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) && - ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) && - ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) && - ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) && - ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) && - ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) && - ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) && - ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) && - ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) && - ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) && - ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) && - ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) && - ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) && - ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) && - ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) && - ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) && - ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) && - ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) && - ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minLod == rhs.minLod ); # endif } - bool operator!=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageViewMinLodCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {}; - 
VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {}; - VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewMinLodCreateInfoEXT; + const void * pNext = {}; + float minLod = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceDescriptorIndexingFeatures; + using Type = ImageViewMinLodCreateInfoEXT; }; - using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures; - struct PhysicalDeviceDescriptorIndexingProperties + struct ImageViewSampleWeightCreateInfoQCOM { - using NativeType = VkPhysicalDeviceDescriptorIndexingProperties; + using NativeType = VkImageViewSampleWeightCreateInfoQCOM; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingProperties; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewSampleWeightCreateInfoQCOM; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, - uint32_t maxPerStageUpdateAfterBindResources_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, - void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ) - , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ) - , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ) - , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ) - , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ) - , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ) - , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ) - , quadDivergentImplicitLod( quadDivergentImplicitLod_ ) - , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ) - , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ) - , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ) - , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ) - , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ) - , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ) - , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ) - , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ) - , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ) - , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ) - , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ) - , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ) - , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ) - , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ) - , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewSampleWeightCreateInfoQCOM( VULKAN_HPP_NAMESPACE::Offset2D filterCenter_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D filterSize_ = {}, + uint32_t numPhases_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , filterCenter{ filterCenter_ } + , filterSize{ filterSize_ } + , numPhases{ numPhases_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageViewSampleWeightCreateInfoQCOM( ImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceDescriptorIndexingProperties( *reinterpret_cast( &rhs ) ) + ImageViewSampleWeightCreateInfoQCOM( VkImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewSampleWeightCreateInfoQCOM( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - 
PhysicalDeviceDescriptorIndexingProperties & operator=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageViewSampleWeightCreateInfoQCOM & operator=( ImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceDescriptorIndexingProperties & operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + ImageViewSampleWeightCreateInfoQCOM & operator=( VkImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceDescriptorIndexingProperties const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceDescriptorIndexingProperties &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setFilterCenter( VULKAN_HPP_NAMESPACE::Offset2D const & filterCenter_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + filterCenter = filterCenter_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setFilterSize( VULKAN_HPP_NAMESPACE::Extent2D const & filterSize_ ) VULKAN_HPP_NOEXCEPT + { + filterSize = filterSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setNumPhases( uint32_t numPhases_ ) VULKAN_HPP_NOEXCEPT + { + numPhases = numPhases_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImageViewSampleWeightCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewSampleWeightCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -48790,3117 +51306,3059 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - maxUpdateAfterBindDescriptorsInAllPools, - shaderUniformBufferArrayNonUniformIndexingNative, - shaderSampledImageArrayNonUniformIndexingNative, - shaderStorageBufferArrayNonUniformIndexingNative, - shaderStorageImageArrayNonUniformIndexingNative, - shaderInputAttachmentArrayNonUniformIndexingNative, - robustBufferAccessUpdateAfterBind, - quadDivergentImplicitLod, - maxPerStageDescriptorUpdateAfterBindSamplers, - maxPerStageDescriptorUpdateAfterBindUniformBuffers, - maxPerStageDescriptorUpdateAfterBindStorageBuffers, - maxPerStageDescriptorUpdateAfterBindSampledImages, - maxPerStageDescriptorUpdateAfterBindStorageImages, - maxPerStageDescriptorUpdateAfterBindInputAttachments, - maxPerStageUpdateAfterBindResources, - maxDescriptorSetUpdateAfterBindSamplers, - maxDescriptorSetUpdateAfterBindUniformBuffers, - maxDescriptorSetUpdateAfterBindUniformBuffersDynamic, - maxDescriptorSetUpdateAfterBindStorageBuffers, - maxDescriptorSetUpdateAfterBindStorageBuffersDynamic, - maxDescriptorSetUpdateAfterBindSampledImages, - maxDescriptorSetUpdateAfterBindStorageImages, - maxDescriptorSetUpdateAfterBindInputAttachments ); + return std::tie( sType, pNext, filterCenter, filterSize, numPhases ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceDescriptorIndexingProperties const & ) 
const = default; + auto operator<=>( ImageViewSampleWeightCreateInfoQCOM const & ) const = default; #else - bool operator==( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageViewSampleWeightCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) && - ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative ) && - ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative ) && - ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative ) && - ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative ) && - ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) && - ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) && - ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) && - ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) && - ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) && - ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) && - ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) && - ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) && - ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) && - ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) && - ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) && - ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) && - ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) && - ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) && - ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) && - ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) && - ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterCenter == rhs.filterCenter ) && ( filterSize == rhs.filterSize ) && + ( numPhases == rhs.numPhases ); # endif } - bool operator!=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageViewSampleWeightCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingProperties; - void * pNext = {}; - uint32_t maxUpdateAfterBindDescriptorsInAllPools = {}; - 
VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {}; - uint32_t maxPerStageUpdateAfterBindResources = {}; - uint32_t maxDescriptorSetUpdateAfterBindSamplers = {}; - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {}; - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {}; - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {}; - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {}; - uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {}; - uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {}; - uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewSampleWeightCreateInfoQCOM; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Offset2D filterCenter = {}; + VULKAN_HPP_NAMESPACE::Extent2D filterSize = {}; + uint32_t numPhases = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceDescriptorIndexingProperties; + using Type = ImageViewSampleWeightCreateInfoQCOM; }; - using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties; - struct PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE + struct ImageViewSlicedCreateInfoEXT { - using NativeType = VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; + using NativeType = VkImageViewSlicedCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewSlicedCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , descriptorSetHostMapping( descriptorSetHostMapping_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ImageViewSlicedCreateInfoEXT( uint32_t sliceOffset_ = {}, uint32_t sliceCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , sliceOffset{ sliceOffset_ } + , sliceCount{ sliceCount_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageViewSlicedCreateInfoEXT( ImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - 
PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( *reinterpret_cast( &rhs ) ) + ImageViewSlicedCreateInfoEXT( VkImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewSlicedCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & - operator=( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageViewSlicedCreateInfoEXT & operator=( ImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & operator=( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + ImageViewSlicedCreateInfoEXT & operator=( VkImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewSlicedCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & - setDescriptorSetHostMapping( VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageViewSlicedCreateInfoEXT & setSliceOffset( uint32_t sliceOffset_ ) VULKAN_HPP_NOEXCEPT { - descriptorSetHostMapping = descriptorSetHostMapping_; + sliceOffset = sliceOffset_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageViewSlicedCreateInfoEXT & setSliceCount( uint32_t sliceCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + sliceCount = sliceCount_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE &() VULKAN_HPP_NOEXCEPT + operator VkImageViewSlicedCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkImageViewSlicedCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, descriptorSetHostMapping ); + return std::tie( sType, pNext, sliceOffset, sliceCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & ) const = default; + auto operator<=>( ImageViewSlicedCreateInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageViewSlicedCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetHostMapping == 
rhs.descriptorSetHostMapping ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sliceOffset == rhs.sliceOffset ) && ( sliceCount == rhs.sliceCount ); # endif } - bool operator!=( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageViewSlicedCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewSlicedCreateInfoEXT; + const void * pNext = {}; + uint32_t sliceOffset = {}; + uint32_t sliceCount = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; + using Type = ImageViewSlicedCreateInfoEXT; }; - struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV + struct ImageViewUsageCreateInfo { - using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + using NativeType = VkImageViewUsageCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewUsageCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceGeneratedCommands( deviceGeneratedCommands_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , usage{ usage_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( *reinterpret_cast( &rhs ) ) + ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageViewUsageCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageViewUsageCreateInfo & operator=( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + ImageViewUsageCreateInfo & operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setPNext( void * pNext_ ) 
VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & - setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT { - deviceGeneratedCommands = deviceGeneratedCommands_; + usage = usage_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + operator VkImageViewUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV &() VULKAN_HPP_NOEXCEPT + operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, deviceGeneratedCommands ); + return std::tie( sType, pNext, usage ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & ) const = default; + auto operator<=>( ImageViewUsageCreateInfo const & ) const = default; #else - bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceGeneratedCommands == rhs.deviceGeneratedCommands ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage ); # endif } - bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewUsageCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + using Type = ImageViewUsageCreateInfo; }; - struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV + using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct ImportAndroidHardwareBufferInfoANDROID { - using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + using NativeType = VkImportAndroidHardwareBufferInfoANDROID; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eImportAndroidHardwareBufferInfoANDROID; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( uint32_t maxGraphicsShaderGroupCount_ = {}, - uint32_t maxIndirectSequenceCount_ = {}, - uint32_t maxIndirectCommandsTokenCount_ = {}, - uint32_t maxIndirectCommandsStreamCount_ = {}, - uint32_t maxIndirectCommandsTokenOffset_ = {}, - uint32_t maxIndirectCommandsStreamStride_ = {}, - uint32_t minSequencesCountBufferOffsetAlignment_ = {}, - uint32_t minSequencesIndexBufferOffsetAlignment_ = {}, - uint32_t minIndirectCommandsBufferOffsetAlignment_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxGraphicsShaderGroupCount( maxGraphicsShaderGroupCount_ ) - , maxIndirectSequenceCount( maxIndirectSequenceCount_ ) - , maxIndirectCommandsTokenCount( maxIndirectCommandsTokenCount_ ) - , maxIndirectCommandsStreamCount( maxIndirectCommandsStreamCount_ ) - , maxIndirectCommandsTokenOffset( maxIndirectCommandsTokenOffset_ ) - , maxIndirectCommandsStreamStride( maxIndirectCommandsStreamStride_ ) - , minSequencesCountBufferOffsetAlignment( minSequencesCountBufferOffsetAlignment_ ) - , minSequencesIndexBufferOffsetAlignment( minSequencesIndexBufferOffsetAlignment_ ) - , minIndirectCommandsBufferOffsetAlignment( minIndirectCommandsBufferOffsetAlignment_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer * buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , buffer{ buffer_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( *reinterpret_cast( &rhs ) ) + ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportAndroidHardwareBufferInfoANDROID( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & - operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImportAndroidHardwareBufferInfoANDROID & operator=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + ImportAndroidHardwareBufferInfoANDROID & operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator 
VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer * buffer_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + buffer = buffer_; + return *this; } +# endif /*VULKAN_HPP_NO_SETTERS*/ -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + operator VkImportAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - maxGraphicsShaderGroupCount, - maxIndirectSequenceCount, - maxIndirectCommandsTokenCount, - maxIndirectCommandsStreamCount, - maxIndirectCommandsTokenOffset, - maxIndirectCommandsStreamStride, - minSequencesCountBufferOffsetAlignment, - minSequencesIndexBufferOffsetAlignment, - minIndirectCommandsBufferOffsetAlignment ); + return *reinterpret_cast( this ); } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & ) const = default; -#else - bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT { + return *reinterpret_cast( this ); + } + # if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount ) && - ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount ) && ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount ) && - ( maxIndirectCommandsStreamCount == rhs.maxIndirectCommandsStreamCount ) && - ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset ) && - ( maxIndirectCommandsStreamStride == rhs.maxIndirectCommandsStreamStride ) && - ( minSequencesCountBufferOffsetAlignment == rhs.minSequencesCountBufferOffsetAlignment ) && - ( minSequencesIndexBufferOffsetAlignment == rhs.minSequencesIndexBufferOffsetAlignment ) && - ( minIndirectCommandsBufferOffsetAlignment == rhs.minIndirectCommandsBufferOffsetAlignment ); +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, buffer ); + } # endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportAndroidHardwareBufferInfoANDROID const & ) const = default; +# else + bool operator==( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); +# endif } - bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV; - void * pNext = {}; - uint32_t maxGraphicsShaderGroupCount = {}; - uint32_t maxIndirectSequenceCount = {}; - uint32_t maxIndirectCommandsTokenCount = {}; - uint32_t maxIndirectCommandsStreamCount = {}; - uint32_t maxIndirectCommandsTokenOffset = {}; - uint32_t maxIndirectCommandsStreamStride = {}; - uint32_t 
minSequencesCountBufferOffsetAlignment = {}; - uint32_t minSequencesIndexBufferOffsetAlignment = {}; - uint32_t minIndirectCommandsBufferOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID; + const void * pNext = {}; + struct AHardwareBuffer * buffer = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + using Type = ImportAndroidHardwareBufferInfoANDROID; }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - struct PhysicalDeviceDeviceMemoryReportFeaturesEXT + struct ImportFenceFdInfoKHR { - using NativeType = VkPhysicalDeviceDeviceMemoryReportFeaturesEXT; + using NativeType = VkImportFenceFdInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceFdInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceMemoryReport( deviceMemoryReport_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( + VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, + int fd_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fence{ fence_ } + , flags{ flags_ } + , handleType{ handleType_ } + , fd{ fd_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceDeviceMemoryReportFeaturesEXT( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceDeviceMemoryReportFeaturesEXT( *reinterpret_cast( &rhs ) ) + ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportFenceFdInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImportFenceFdInfoKHR & operator=( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImportFenceFdInfoKHR & operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceDeviceMemoryReportFeaturesEXT & - setDeviceMemoryReport( VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT { - deviceMemoryReport = deviceMemoryReport_; + fence = fence_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + flags = flags_; + return *this; } - operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT + { + fd = fd_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportFenceFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, deviceMemoryReport ); + return std::tie( sType, pNext, fence, flags, handleType, fd ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & ) const = default; + auto operator<=>( ImportFenceFdInfoKHR const & ) const = default; #else - bool operator==( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMemoryReport == rhs.deviceMemoryReport ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && + ( fd == rhs.fd ); # endif } - bool operator!=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + int fd = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceDeviceMemoryReportFeaturesEXT; + using Type = ImportFenceFdInfoKHR; }; - struct PhysicalDeviceDiagnosticsConfigFeaturesNV +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ImportFenceWin32HandleInfoKHR { - using NativeType = VkPhysicalDeviceDiagnosticsConfigFeaturesNV; + 
using NativeType = VkImportFenceWin32HandleInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceWin32HandleInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , diagnosticsConfig( diagnosticsConfig_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = {}, + LPCWSTR name_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fence{ fence_ } + , flags{ flags_ } + , handleType{ handleType_ } + , handle{ handle_ } + , name{ name_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceDiagnosticsConfigFeaturesNV( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceDiagnosticsConfigFeaturesNV( *reinterpret_cast( &rhs ) ) + ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportFenceWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImportFenceWin32HandleInfoKHR & operator=( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + ImportFenceWin32HandleInfoKHR & operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & - setDiagnosticsConfig( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT { - diagnosticsConfig = diagnosticsConfig_; + fence = fence_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setFlags( 
VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + flags = flags_; + return *this; } - operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + handleType = handleType_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, diagnosticsConfig ); + return std::tie( sType, pNext, fence, flags, handleType, handle, name ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceDiagnosticsConfigFeaturesNV const & ) const = default; -#else - bool operator==( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportFenceWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( diagnosticsConfig == rhs.diagnosticsConfig ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && + ( handle == rhs.handle ) && ( name == rhs.name ); +# endif } - bool operator!=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + HANDLE handle = {}; + LPCWSTR name = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceDiagnosticsConfigFeaturesNV; + using Type = ImportFenceWin32HandleInfoKHR; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct PhysicalDeviceDiscardRectanglePropertiesEXT +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct ImportMemoryBufferCollectionFUCHSIA { - using NativeType 
= VkPhysicalDeviceDiscardRectanglePropertiesEXT; + using NativeType = VkImportMemoryBufferCollectionFUCHSIA; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryBufferCollectionFUCHSIA; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxDiscardRectangles( maxDiscardRectangles_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, + uint32_t index_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , collection{ collection_ } + , index{ index_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA( ImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceDiscardRectanglePropertiesEXT( *reinterpret_cast( &rhs ) ) + ImportMemoryBufferCollectionFUCHSIA( VkImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryBufferCollectionFUCHSIA( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImportMemoryBufferCollectionFUCHSIA & operator=( ImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImportMemoryBufferCollectionFUCHSIA & operator=( VkImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setCollection( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + collection = collection_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT + { + index = index_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportMemoryBufferCollectionFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkImportMemoryBufferCollectionFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, maxDiscardRectangles ); + return std::tie( sType, pNext, collection, index ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const & ) const = default; -#else - bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryBufferCollectionFUCHSIA const & ) const = default; +# else + bool operator==( ImportMemoryBufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDiscardRectangles == rhs.maxDiscardRectangles ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) && ( index == rhs.index ); +# endif } - bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportMemoryBufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; - void * pNext = {}; - uint32_t maxDiscardRectangles = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryBufferCollectionFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection = {}; + uint32_t index = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceDiscardRectanglePropertiesEXT; + using Type = ImportMemoryBufferCollectionFUCHSIA; }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - struct PhysicalDeviceDriverProperties + struct ImportMemoryFdInfoKHR { - using NativeType = VkPhysicalDeviceDriverProperties; + using NativeType = VkImportMemoryFdInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDriverProperties; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryFdInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, - std::array const & driverName_ = {}, - std::array const & driverInfo_ = {}, - VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , driverID( driverID_ ) - , driverName( driverName_ ) - , driverInfo( driverInfo_ ) - , conformanceVersion( conformanceVersion_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + int fd_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleType{ handleType_ } + , fd{ fd_ } { } - 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceDriverProperties( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceDriverProperties( *reinterpret_cast( &rhs ) ) + ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryFdInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceDriverProperties & operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImportMemoryFdInfoKHR & operator=( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceDriverProperties & operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT + ImportMemoryFdInfoKHR & operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceDriverProperties const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT + { + fd = fd_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportMemoryFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, - VULKAN_HPP_NAMESPACE::ConformanceVersion const &> + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, driverID, driverName, driverInfo, conformanceVersion ); + return std::tie( sType, pNext, handleType, fd ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceDriverProperties const & ) const = default; + auto operator<=>( ImportMemoryFdInfoKHR const & ) const = default; #else - bool operator==( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( driverName == rhs.driverName ) && - ( driverInfo == rhs.driverInfo ) && ( conformanceVersion == rhs.conformanceVersion ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( fd == rhs.fd ); # endif } - bool operator!=( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportMemoryFdInfoKHR const & rhs ) 
const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverName = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverInfo = {}; - VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + int fd = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceDriverProperties; + using Type = ImportMemoryFdInfoKHR; }; - using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties; - struct PhysicalDeviceDrmPropertiesEXT + struct ImportMemoryHostPointerInfoEXT { - using NativeType = VkPhysicalDeviceDrmPropertiesEXT; + using NativeType = VkImportMemoryHostPointerInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDrmPropertiesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryHostPointerInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceDrmPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 hasPrimary_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 hasRender_ = {}, - int64_t primaryMajor_ = {}, - int64_t primaryMinor_ = {}, - int64_t renderMajor_ = {}, - int64_t renderMinor_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , hasPrimary( hasPrimary_ ) - , hasRender( hasRender_ ) - , primaryMajor( primaryMajor_ ) - , primaryMinor( primaryMinor_ ) - , renderMajor( renderMajor_ ) - , renderMinor( renderMinor_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + void * pHostPointer_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleType{ handleType_ } + , pHostPointer{ pHostPointer_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceDrmPropertiesEXT( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceDrmPropertiesEXT( VkPhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceDrmPropertiesEXT( *reinterpret_cast( &rhs ) ) + ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryHostPointerInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceDrmPropertiesEXT & operator=( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImportMemoryHostPointerInfoEXT & operator=( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceDrmPropertiesEXT & operator=( VkPhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImportMemoryHostPointerInfoEXT & operator=( 
VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceDrmPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceDrmPropertiesEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setPHostPointer( void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT + { + pHostPointer = pHostPointer_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportMemoryHostPointerInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std:: + tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, hasPrimary, hasRender, primaryMajor, primaryMinor, renderMajor, renderMinor ); + return std::tie( sType, pNext, handleType, pHostPointer ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceDrmPropertiesEXT const & ) const = default; + auto operator<=>( ImportMemoryHostPointerInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceDrmPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hasPrimary == rhs.hasPrimary ) && ( hasRender == rhs.hasRender ) && - ( primaryMajor == rhs.primaryMajor ) && ( primaryMinor == rhs.primaryMinor ) && ( renderMajor == rhs.renderMajor ) && - ( renderMinor == rhs.renderMinor ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( pHostPointer == rhs.pHostPointer ); # endif } - bool operator!=( PhysicalDeviceDrmPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDrmPropertiesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 hasPrimary = {}; - VULKAN_HPP_NAMESPACE::Bool32 hasRender = {}; - int64_t primaryMajor = {}; - int64_t primaryMinor = {}; - int64_t renderMajor = {}; - int64_t renderMinor = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + void * pHostPointer = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceDrmPropertiesEXT; + using Type = ImportMemoryHostPointerInfoEXT; }; - struct 
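[Usage sketch, not part of the generated header] ImportMemoryFdInfoKHR, defined just above, is consumed by chaining it into a vk::MemoryAllocateInfo before calling vk::Device::allocateMemory. The following is a minimal sketch only: the device handle, file descriptor, allocation size and memory type index are assumed to be supplied by the caller (the fd coming from an earlier export via VK_KHR_external_memory_fd).

#include <vulkan/vulkan.hpp>

// Illustrative only: allocate device memory backed by an externally exported POSIX fd.
// Assumes the device was created with VK_KHR_external_memory / VK_KHR_external_memory_fd enabled;
// all parameters are placeholders provided by the caller.
vk::DeviceMemory importPosixFdMemory( vk::Device device, int fd, vk::DeviceSize size, uint32_t memoryTypeIndex )
{
  // Ownership of 'fd' passes to the implementation if the allocation succeeds.
  vk::ImportMemoryFdInfoKHR importInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, fd );

  // The generated constructors take pNext last, so the import structure is attached by address here.
  vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex, &importInfo );

  return device.allocateMemory( allocInfo );  // throws vk::SystemError on failure in the default exception mode
}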
PhysicalDeviceDynamicRenderingFeatures +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ImportMemoryMetalHandleInfoEXT { - using NativeType = VkPhysicalDeviceDynamicRenderingFeatures; + using NativeType = VkImportMemoryMetalHandleInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDynamicRenderingFeatures; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryMetalHandleInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , dynamicRendering( dynamicRendering_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryMetalHandleInfoEXT( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + void * handle_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleType{ handleType_ } + , handle{ handle_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures( PhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImportMemoryMetalHandleInfoEXT( ImportMemoryMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceDynamicRenderingFeatures( VkPhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceDynamicRenderingFeatures( *reinterpret_cast( &rhs ) ) + ImportMemoryMetalHandleInfoEXT( VkImportMemoryMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryMetalHandleInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceDynamicRenderingFeatures & operator=( PhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImportMemoryMetalHandleInfoEXT & operator=( ImportMemoryMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceDynamicRenderingFeatures & operator=( VkPhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + ImportMemoryMetalHandleInfoEXT & operator=( VkImportMemoryMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryMetalHandleInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures & setDynamicRendering( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportMemoryMetalHandleInfoEXT & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { - dynamicRendering = dynamicRendering_; + handleType = handleType_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceDynamicRenderingFeatures const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportMemoryMetalHandleInfoEXT & setHandle( 
void * handle_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + handle = handle_; + return *this; } +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceDynamicRenderingFeatures &() VULKAN_HPP_NOEXCEPT + operator VkImportMemoryMetalHandleInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkImportMemoryMetalHandleInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std:: + tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, dynamicRendering ); + return std::tie( sType, pNext, handleType, handle ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceDynamicRenderingFeatures const & ) const = default; -#else - bool operator==( PhysicalDeviceDynamicRenderingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryMetalHandleInfoEXT const & ) const = default; +# else + bool operator==( ImportMemoryMetalHandleInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicRendering == rhs.dynamicRendering ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle ); +# endif } - bool operator!=( PhysicalDeviceDynamicRenderingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportMemoryMetalHandleInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDynamicRenderingFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryMetalHandleInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + void * handle = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceDynamicRenderingFeatures; + using Type = ImportMemoryMetalHandleInfoEXT; }; - using PhysicalDeviceDynamicRenderingFeaturesKHR = PhysicalDeviceDynamicRenderingFeatures; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ - struct PhysicalDeviceExclusiveScissorFeaturesNV +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ImportMemoryWin32HandleInfoKHR { - using NativeType = VkPhysicalDeviceExclusiveScissorFeaturesNV; + using NativeType = VkImportMemoryWin32HandleInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , exclusiveScissor( exclusiveScissor_ ) +# if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = {}, + LPCWSTR name_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleType{ handleType_ } + , handle{ handle_ } + , name{ name_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceExclusiveScissorFeaturesNV( *reinterpret_cast( &rhs ) ) + ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceExclusiveScissorFeaturesNV & operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImportMemoryWin32HandleInfoKHR & operator=( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceExclusiveScissorFeaturesNV & operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + ImportMemoryWin32HandleInfoKHR & operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { - exclusiveScissor = exclusiveScissor_; + handleType = handleType_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceExclusiveScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + handle = handle_; + return *this; } - operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + name = name_; + return *this; } +# endif /*VULKAN_HPP_NO_SETTERS*/ -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkImportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) 
+# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, exclusiveScissor ); + return std::tie( sType, pNext, handleType, handle, name ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceExclusiveScissorFeaturesNV const & ) const = default; -#else - bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exclusiveScissor == rhs.exclusiveScissor ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle ) && ( name == rhs.name ); +# endif } - bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + HANDLE handle = {}; + LPCWSTR name = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceExclusiveScissorFeaturesNV; + using Type = ImportMemoryWin32HandleInfoKHR; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct PhysicalDeviceExtendedDynamicState2FeaturesEXT +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ImportMemoryWin32HandleInfoNV { - using NativeType = VkPhysicalDeviceExtendedDynamicState2FeaturesEXT; + using NativeType = VkImportMemoryWin32HandleInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , extendedDynamicState2( extendedDynamicState2_ ) - , extendedDynamicState2LogicOp( extendedDynamicState2LogicOp_ ) - , extendedDynamicState2PatchControlPoints( extendedDynamicState2PatchControlPoints_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {}, + HANDLE handle_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , 
handleType{ handleType_ } + , handle{ handle_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceExtendedDynamicState2FeaturesEXT( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceExtendedDynamicState2FeaturesEXT( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceExtendedDynamicState2FeaturesEXT( *reinterpret_cast( &rhs ) ) + ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryWin32HandleInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceExtendedDynamicState2FeaturesEXT & operator=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImportMemoryWin32HandleInfoNV & operator=( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceExtendedDynamicState2FeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImportMemoryWin32HandleInfoNV & operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & - setExtendedDynamicState2( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ ) VULKAN_HPP_NOEXCEPT - { - extendedDynamicState2 = extendedDynamicState2_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & - setExtendedDynamicState2LogicOp( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT { - extendedDynamicState2LogicOp = extendedDynamicState2LogicOp_; + handleType = handleType_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & - setExtendedDynamicState2PatchControlPoints( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT { - extendedDynamicState2PatchControlPoints = extendedDynamicState2PatchControlPoints_; + handle = handle_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkImportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if 
defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, extendedDynamicState2, extendedDynamicState2LogicOp, extendedDynamicState2PatchControlPoints ); + return std::tie( sType, pNext, handleType, handle ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & ) const = default; -#else - bool operator==( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMemoryWin32HandleInfoNV const & ) const = default; +# else + bool operator==( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedDynamicState2 == rhs.extendedDynamicState2 ) && - ( extendedDynamicState2LogicOp == rhs.extendedDynamicState2LogicOp ) && - ( extendedDynamicState2PatchControlPoints == rhs.extendedDynamicState2PatchControlPoints ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle ); +# endif } - bool operator!=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2 = {}; - VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp = {}; - VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType = {}; + HANDLE handle = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceExtendedDynamicState2FeaturesEXT; + using Type = ImportMemoryWin32HandleInfoNV; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct PhysicalDeviceExtendedDynamicStateFeaturesEXT +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct ImportMemoryZirconHandleInfoFUCHSIA { - using NativeType = VkPhysicalDeviceExtendedDynamicStateFeaturesEXT; + using NativeType = VkImportMemoryZirconHandleInfoFUCHSIA; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , extendedDynamicState( extendedDynamicState_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA( + 
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + zx_handle_t handle_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleType{ handleType_ } + , handle{ handle_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceExtendedDynamicStateFeaturesEXT( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceExtendedDynamicStateFeaturesEXT( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceExtendedDynamicStateFeaturesEXT( *reinterpret_cast( &rhs ) ) + ImportMemoryZirconHandleInfoFUCHSIA( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMemoryZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImportMemoryZirconHandleInfoFUCHSIA & operator=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImportMemoryZirconHandleInfoFUCHSIA & operator=( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & - setExtendedDynamicState( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { - extendedDynamicState = extendedDynamicState_; + handleType = handleType_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setHandle( zx_handle_t handle_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + handle = handle_; + return *this; } +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkImportMemoryZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkImportMemoryZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return 
std::tie( sType, pNext, extendedDynamicState ); + return std::tie( sType, pNext, handleType, handle ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & ) const = default; -#else - bool operator==( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedDynamicState == rhs.extendedDynamicState ); + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = handleType <=> rhs.handleType; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &handle, &rhs.handle, sizeof( zx_handle_t ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } # endif + + bool operator==( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && + ( memcmp( &handle, &rhs.handle, sizeof( zx_handle_t ) ) == 0 ); } - bool operator!=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + zx_handle_t handle = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceExtendedDynamicStateFeaturesEXT; + using Type = ImportMemoryZirconHandleInfoFUCHSIA; }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - struct PhysicalDeviceExternalBufferInfo +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ImportMetalBufferInfoEXT { - using NativeType = VkPhysicalDeviceExternalBufferInfo; + using NativeType = VkImportMetalBufferInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalBufferInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalBufferInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( - VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , usage( usage_ ) - , handleType( handleType_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMetalBufferInfoEXT( MTLBuffer_id mtlBuffer_ = {}, const void * 
pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , mtlBuffer{ mtlBuffer_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImportMetalBufferInfoEXT( ImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceExternalBufferInfo( *reinterpret_cast( &rhs ) ) + ImportMetalBufferInfoEXT( VkImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMetalBufferInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceExternalBufferInfo & operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImportMetalBufferInfoEXT & operator=( ImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceExternalBufferInfo & operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ImportMetalBufferInfoEXT & operator=( VkImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMetalBufferInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT - { - usage = usage_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & - setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportMetalBufferInfoEXT & setMtlBuffer( MTLBuffer_id mtlBuffer_ ) VULKAN_HPP_NOEXCEPT { - handleType = handleType_; + mtlBuffer = mtlBuffer_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceExternalBufferInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkImportMetalBufferInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT + operator VkImportMetalBufferInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, usage, handleType ); + return std::tie( sType, pNext, mtlBuffer ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceExternalBufferInfo const & ) const = default; -#else - bool operator==( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMetalBufferInfoEXT const & ) const = default; +# else + bool operator==( ImportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) && ( handleType == rhs.handleType ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mtlBuffer == rhs.mtlBuffer ); +# endif } - bool operator!=( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalBufferInfoEXT; + const void * pNext = {}; + MTLBuffer_id mtlBuffer = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceExternalBufferInfo; + using Type = ImportMetalBufferInfoEXT; }; - using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ - struct PhysicalDeviceExternalFenceInfo +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ImportMetalIOSurfaceInfoEXT { - using NativeType = VkPhysicalDeviceExternalFenceInfo; + using NativeType = VkImportMetalIOSurfaceInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFenceInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalIoSurfaceInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleType( handleType_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMetalIOSurfaceInfoEXT( IOSurfaceRef ioSurface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , ioSurface{ ioSurface_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImportMetalIOSurfaceInfoEXT( ImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceExternalFenceInfo( *reinterpret_cast( &rhs ) ) + ImportMetalIOSurfaceInfoEXT( VkImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMetalIOSurfaceInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceExternalFenceInfo & operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImportMetalIOSurfaceInfoEXT & operator=( 
ImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceExternalFenceInfo & operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ImportMetalIOSurfaceInfoEXT & operator=( VkImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMetalIOSurfaceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & - setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportMetalIOSurfaceInfoEXT & setIoSurface( IOSurfaceRef ioSurface_ ) VULKAN_HPP_NOEXCEPT { - handleType = handleType_; + ioSurface = ioSurface_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceExternalFenceInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkImportMetalIOSurfaceInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT + operator VkImportMetalIOSurfaceInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, handleType ); + return std::tie( sType, pNext, ioSurface ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceExternalFenceInfo const & ) const = default; -#else - bool operator==( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMetalIOSurfaceInfoEXT const & ) const = default; +# else + bool operator==( ImportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ioSurface == rhs.ioSurface ); +# endif } - bool operator!=( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalIoSurfaceInfoEXT; + const void * pNext = {}; + IOSurfaceRef ioSurface = {}; }; template <> - struct CppType + struct CppType { - using Type = 
PhysicalDeviceExternalFenceInfo; + using Type = ImportMetalIOSurfaceInfoEXT; }; - using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ - struct PhysicalDeviceExternalImageFormatInfo +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ImportMetalSharedEventInfoEXT { - using NativeType = VkPhysicalDeviceExternalImageFormatInfo; + using NativeType = VkImportMetalSharedEventInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalImageFormatInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalSharedEventInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleType( handleType_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMetalSharedEventInfoEXT( MTLSharedEvent_id mtlSharedEvent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , mtlSharedEvent{ mtlSharedEvent_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImportMetalSharedEventInfoEXT( ImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceExternalImageFormatInfo( *reinterpret_cast( &rhs ) ) + ImportMetalSharedEventInfoEXT( VkImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMetalSharedEventInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceExternalImageFormatInfo & operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImportMetalSharedEventInfoEXT & operator=( ImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceExternalImageFormatInfo & operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ImportMetalSharedEventInfoEXT & operator=( VkImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMetalSharedEventInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & - setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportMetalSharedEventInfoEXT & setMtlSharedEvent( MTLSharedEvent_id mtlSharedEvent_ ) VULKAN_HPP_NOEXCEPT { - handleType = handleType_; + mtlSharedEvent = mtlSharedEvent_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator 
VkPhysicalDeviceExternalImageFormatInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkImportMetalSharedEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT + operator VkImportMetalSharedEventInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, handleType ); + return std::tie( sType, pNext, mtlSharedEvent ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceExternalImageFormatInfo const & ) const = default; -#else - bool operator==( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMetalSharedEventInfoEXT const & ) const = default; +# else + bool operator==( ImportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mtlSharedEvent == rhs.mtlSharedEvent ); +# endif } - bool operator!=( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalSharedEventInfoEXT; + const void * pNext = {}; + MTLSharedEvent_id mtlSharedEvent = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceExternalImageFormatInfo; + using Type = ImportMetalSharedEventInfoEXT; }; - using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ - struct PhysicalDeviceExternalMemoryHostPropertiesEXT +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct ImportMetalTextureInfoEXT { - using NativeType = VkPhysicalDeviceExternalMemoryHostPropertiesEXT; + using NativeType = VkImportMetalTextureInfoEXT; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalTextureInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , minImportedHostPointerAlignment( minImportedHostPointerAlignment_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && 
!defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportMetalTextureInfoEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, + MTLTexture_id mtlTexture_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , plane{ plane_ } + , mtlTexture{ mtlTexture_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceExternalMemoryHostPropertiesEXT( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImportMetalTextureInfoEXT( ImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceExternalMemoryHostPropertiesEXT( *reinterpret_cast( &rhs ) ) + ImportMetalTextureInfoEXT( VkImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportMetalTextureInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImportMetalTextureInfoEXT & operator=( ImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImportMetalTextureInfoEXT & operator=( VkImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setPlane( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + plane = plane_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setMtlTexture( MTLTexture_id mtlTexture_ ) VULKAN_HPP_NOEXCEPT + { + mtlTexture = mtlTexture_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportMetalTextureInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMetalTextureInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, minImportedHostPointerAlignment ); + return std::tie( sType, pNext, plane, mtlTexture ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceExternalMemoryHostPropertiesEXT const & ) const = default; -#else - bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportMetalTextureInfoEXT const & ) const = default; 
+# else + bool operator==( ImportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( plane == rhs.plane ) && ( mtlTexture == rhs.mtlTexture ); +# endif } - bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMetalTextureInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; + MTLTexture_id mtlTexture = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceExternalMemoryHostPropertiesEXT; + using Type = ImportMetalTextureInfoEXT; }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ - struct PhysicalDeviceExternalMemoryRDMAFeaturesNV +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + struct ImportScreenBufferInfoQNX { - using NativeType = VkPhysicalDeviceExternalMemoryRDMAFeaturesNV; + using NativeType = VkImportScreenBufferInfoQNX; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportScreenBufferInfoQNX; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , externalMemoryRDMA( externalMemoryRDMA_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportScreenBufferInfoQNX( struct _screen_buffer * buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , buffer{ buffer_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImportScreenBufferInfoQNX( ImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceExternalMemoryRDMAFeaturesNV( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceExternalMemoryRDMAFeaturesNV( *reinterpret_cast( &rhs ) ) + ImportScreenBufferInfoQNX( VkImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportScreenBufferInfoQNX( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceExternalMemoryRDMAFeaturesNV & operator=( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImportScreenBufferInfoQNX & operator=( ImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceExternalMemoryRDMAFeaturesNV & operator=( 
VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + ImportScreenBufferInfoQNX & operator=( VkImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportScreenBufferInfoQNX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV & - setExternalMemoryRDMA( VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportScreenBufferInfoQNX & setBuffer( struct _screen_buffer * buffer_ ) VULKAN_HPP_NOEXCEPT { - externalMemoryRDMA = externalMemoryRDMA_; + buffer = buffer_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + operator VkImportScreenBufferInfoQNX const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV &() VULKAN_HPP_NOEXCEPT + operator VkImportScreenBufferInfoQNX &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, externalMemoryRDMA ); + return std::tie( sType, pNext, buffer ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & ) const = default; -#else - bool operator==( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportScreenBufferInfoQNX const & ) const = default; +# else + bool operator==( ImportScreenBufferInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryRDMA == rhs.externalMemoryRDMA ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); +# endif } - bool operator!=( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportScreenBufferInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportScreenBufferInfoQNX; + const void * pNext = {}; + struct _screen_buffer * buffer = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceExternalMemoryRDMAFeaturesNV; + using Type = ImportScreenBufferInfoQNX; }; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - struct 
PhysicalDeviceExternalSemaphoreInfo + struct ImportSemaphoreFdInfoKHR { - using NativeType = VkPhysicalDeviceExternalSemaphoreInfo; + using NativeType = VkImportSemaphoreFdInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreFdInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( + VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + int fd_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleType( handleType_ ) + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , flags{ flags_ } + , handleType{ handleType_ } + , fd{ fd_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceExternalSemaphoreInfo( *reinterpret_cast( &rhs ) ) + ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportSemaphoreFdInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceExternalSemaphoreInfo & operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImportSemaphoreFdInfoKHR & operator=( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceExternalSemaphoreInfo & operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ImportSemaphoreFdInfoKHR & operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & - setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } 
-#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceExternalSemaphoreInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + fd = fd_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT + operator VkImportSemaphoreFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, handleType ); + return std::tie( sType, pNext, semaphore, flags, handleType, fd ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceExternalSemaphoreInfo const & ) const = default; + auto operator<=>( ImportSemaphoreFdInfoKHR const & ) const = default; #else - bool operator==( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && + ( fd == rhs.fd ); # endif } - bool operator!=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR; const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + int fd = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceExternalSemaphoreInfo; + using Type = ImportSemaphoreFdInfoKHR; }; - using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo; - struct PhysicalDeviceFeatures2 +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct ImportSemaphoreWin32HandleInfoKHR { - using NativeType = VkPhysicalDeviceFeatures2; + using NativeType = VkImportSemaphoreWin32HandleInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFeatures2; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreWin32HandleInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , features( features_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( + 
VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = {}, + LPCWSTR name_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , flags{ flags_ } + , handleType{ handleType_ } + , handle{ handle_ } + , name{ name_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceFeatures2( *reinterpret_cast( &rhs ) ) + ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportSemaphoreWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceFeatures2 & operator=( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImportSemaphoreWin32HandleInfoKHR & operator=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceFeatures2 & operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT + ImportSemaphoreWin32HandleInfoKHR & operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & features_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT { - features = features_; + semaphore = semaphore_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceFeatures2 const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + flags = flags_; + return *this; } - operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + handleType = handleType_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportSemaphoreWin32HandleInfoKHR const &() 
const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, features ); + return std::tie( sType, pNext, semaphore, flags, handleType, handle, name ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceFeatures2 const & ) const = default; -#else - bool operator==( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImportSemaphoreWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( features == rhs.features ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && + ( handle == rhs.handle ) && ( name == rhs.name ); +# endif } - bool operator!=( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + HANDLE handle = {}; + LPCWSTR name = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceFeatures2; + using Type = ImportSemaphoreWin32HandleInfoKHR; }; - using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct PhysicalDeviceFloatControlsProperties +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct ImportSemaphoreZirconHandleInfoFUCHSIA { - using NativeType = VkPhysicalDeviceFloatControlsProperties; + using NativeType = VkImportSemaphoreZirconHandleInfoFUCHSIA; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFloatControlsProperties; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 
shaderSignedZeroInfNanPreserveFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , denormBehaviorIndependence( denormBehaviorIndependence_ ) - , roundingModeIndependence( roundingModeIndependence_ ) - , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ) - , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ) - , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ) - , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ) - , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ) - , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ) - , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ) - , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ) - , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ) - , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ) - , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ) - , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ) - , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ) - , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ) - , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA( + VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + zx_handle_t zirconHandle_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , flags{ flags_ } + , handleType{ handleType_ } + , zirconHandle{ zirconHandle_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceFloatControlsProperties( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceFloatControlsProperties( *reinterpret_cast( &rhs ) ) + ImportSemaphoreZirconHandleInfoFUCHSIA( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : ImportSemaphoreZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - 
PhysicalDeviceFloatControlsProperties & operator=( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImportSemaphoreZirconHandleInfoFUCHSIA & operator=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceFloatControlsProperties & operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT + ImportSemaphoreZirconHandleInfoFUCHSIA & operator=( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceFloatControlsProperties const &() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceFloatControlsProperties &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + semaphore = semaphore_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setZirconHandle( zx_handle_t zirconHandle_ ) VULKAN_HPP_NOEXCEPT + { + zirconHandle = zirconHandle_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkImportSemaphoreZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportSemaphoreZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + const void * const &, + VULKAN_HPP_NAMESPACE::Semaphore const &, + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags const &, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &, + zx_handle_t const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - denormBehaviorIndependence, - roundingModeIndependence, - shaderSignedZeroInfNanPreserveFloat16, - shaderSignedZeroInfNanPreserveFloat32, - shaderSignedZeroInfNanPreserveFloat64, - shaderDenormPreserveFloat16, - shaderDenormPreserveFloat32, - shaderDenormPreserveFloat64, - shaderDenormFlushToZeroFloat16, - shaderDenormFlushToZeroFloat32, - shaderDenormFlushToZeroFloat64, - shaderRoundingModeRTEFloat16, - shaderRoundingModeRTEFloat32, - shaderRoundingModeRTEFloat64, - shaderRoundingModeRTZFloat16, - shaderRoundingModeRTZFloat32, - shaderRoundingModeRTZFloat64 ); + return std::tie( sType, pNext, semaphore, flags, handleType, zirconHandle ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceFloatControlsProperties const & ) const = default; -#else - bool operator==( 
PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && - ( roundingModeIndependence == rhs.roundingModeIndependence ) && - ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) && - ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) && - ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) && - ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) && - ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) && - ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) && - ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) && - ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) && - ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) && - ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ); + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = semaphore <=> rhs.semaphore; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = handleType <=> rhs.handleType; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &zirconHandle, &rhs.zirconHandle, sizeof( zx_handle_t ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } # endif + + bool operator==( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && + ( memcmp( &zirconHandle, &rhs.zirconHandle, sizeof( zx_handle_t ) ) == 0 ); } - bool operator!=( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsProperties; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + zx_handle_t zirconHandle = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceFloatControlsProperties; + using Type = ImportSemaphoreZirconHandleInfoFUCHSIA; }; - using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - struct PhysicalDeviceFragmentDensityMap2FeaturesEXT + struct IndirectCommandsExecutionSetTokenEXT { - using NativeType = VkPhysicalDeviceFragmentDensityMap2FeaturesEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT; + using NativeType = VkIndirectCommandsExecutionSetTokenEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fragmentDensityMapDeferred( 
fragmentDensityMapDeferred_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectCommandsExecutionSetTokenEXT( + VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type_ = VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT::ePipelines, + VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ = {} ) VULKAN_HPP_NOEXCEPT + : type{ type_ } + , shaderStages{ shaderStages_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR IndirectCommandsExecutionSetTokenEXT( IndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceFragmentDensityMap2FeaturesEXT( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceFragmentDensityMap2FeaturesEXT( *reinterpret_cast( &rhs ) ) + IndirectCommandsExecutionSetTokenEXT( VkIndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsExecutionSetTokenEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + IndirectCommandsExecutionSetTokenEXT & operator=( IndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + IndirectCommandsExecutionSetTokenEXT & operator=( VkIndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsExecutionSetTokenEXT & setType( VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + type = type_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT & - setFragmentDensityMapDeferred( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsExecutionSetTokenEXT & setShaderStages( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ ) VULKAN_HPP_NOEXCEPT { - fragmentDensityMapDeferred = fragmentDensityMapDeferred_; + shaderStages = shaderStages_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkIndirectCommandsExecutionSetTokenEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkIndirectCommandsExecutionSetTokenEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, 
fragmentDensityMapDeferred ); + return std::tie( type, shaderStages ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & ) const = default; + auto operator<=>( IndirectCommandsExecutionSetTokenEXT const & ) const = default; #else - bool operator==( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( IndirectCommandsExecutionSetTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMapDeferred == rhs.fragmentDensityMapDeferred ); + return ( type == rhs.type ) && ( shaderStages == rhs.shaderStages ); # endif } - bool operator!=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( IndirectCommandsExecutionSetTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred = {}; + VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type = VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT::ePipelines; + VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages = {}; }; - template <> - struct CppType + struct IndirectCommandsIndexBufferTokenEXT { - using Type = PhysicalDeviceFragmentDensityMap2FeaturesEXT; - }; + using NativeType = VkIndirectCommandsIndexBufferTokenEXT; - struct PhysicalDeviceFragmentDensityMap2PropertiesEXT - { - using NativeType = VkPhysicalDeviceFragmentDensityMap2PropertiesEXT; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + IndirectCommandsIndexBufferTokenEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagBitsEXT mode_ = + VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagBitsEXT::eVulkanIndexBuffer ) VULKAN_HPP_NOEXCEPT + : mode{ mode_ } + { + } - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT; + VULKAN_HPP_CONSTEXPR IndirectCommandsIndexBufferTokenEXT( IndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess_ = {}, - uint32_t maxSubsampledArrayLayers_ = {}, - uint32_t maxDescriptorSetSubsampledSamplers_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , subsampledLoads( subsampledLoads_ ) - , subsampledCoarseReconstructionEarlyAccess( subsampledCoarseReconstructionEarlyAccess_ ) - , maxSubsampledArrayLayers( maxSubsampledArrayLayers_ ) - , maxDescriptorSetSubsampledSamplers( maxDescriptorSetSubsampledSamplers_ ) + IndirectCommandsIndexBufferTokenEXT( VkIndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsIndexBufferTokenEXT( *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceFragmentDensityMap2PropertiesEXT( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + IndirectCommandsIndexBufferTokenEXT & operator=( 
IndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceFragmentDensityMap2PropertiesEXT( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceFragmentDensityMap2PropertiesEXT( *reinterpret_cast( &rhs ) ) + IndirectCommandsIndexBufferTokenEXT & operator=( VkIndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsIndexBufferTokenEXT & + setMode( VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagBitsEXT mode_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + mode = mode_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkIndirectCommandsIndexBufferTokenEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT &() VULKAN_HPP_NOEXCEPT + operator VkIndirectCommandsIndexBufferTokenEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, subsampledLoads, subsampledCoarseReconstructionEarlyAccess, maxSubsampledArrayLayers, maxDescriptorSetSubsampledSamplers ); + return std::tie( mode ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & ) const = default; + auto operator<=>( IndirectCommandsIndexBufferTokenEXT const & ) const = default; #else - bool operator==( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( IndirectCommandsIndexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subsampledLoads == rhs.subsampledLoads ) && - ( subsampledCoarseReconstructionEarlyAccess == rhs.subsampledCoarseReconstructionEarlyAccess ) && - ( maxSubsampledArrayLayers == rhs.maxSubsampledArrayLayers ) && ( maxDescriptorSetSubsampledSamplers == rhs.maxDescriptorSetSubsampledSamplers ); + return ( mode == rhs.mode ); # endif } - bool operator!=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( IndirectCommandsIndexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads = {}; - VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess = {}; - uint32_t maxSubsampledArrayLayers = {}; - uint32_t 
maxDescriptorSetSubsampledSamplers = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagBitsEXT mode = VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagBitsEXT::eVulkanIndexBuffer; }; - template <> - struct CppType - { - using Type = PhysicalDeviceFragmentDensityMap2PropertiesEXT; - }; - - struct PhysicalDeviceFragmentDensityMapFeaturesEXT + struct PushConstantRange { - using NativeType = VkPhysicalDeviceFragmentDensityMapFeaturesEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT; + using NativeType = VkPushConstantRange; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fragmentDensityMap( fragmentDensityMap_ ) - , fragmentDensityMapDynamic( fragmentDensityMapDynamic_ ) - , fragmentDensityMapNonSubsampledImages( fragmentDensityMapNonSubsampledImages_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PushConstantRange( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, uint32_t offset_ = {}, uint32_t size_ = {} ) VULKAN_HPP_NOEXCEPT + : stageFlags{ stageFlags_ } + , offset{ offset_ } + , size{ size_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceFragmentDensityMapFeaturesEXT( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + VULKAN_HPP_CONSTEXPR PushConstantRange( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT : PushConstantRange( *reinterpret_cast( &rhs ) ) {} - PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + PushConstantRange & operator=( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + PushConstantRange & operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & - setFragmentDensityMap( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT { - fragmentDensityMap = fragmentDensityMap_; + stageFlags = stageFlags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceFragmentDensityMapFeaturesEXT & - setFragmentDensityMapDynamic( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT { - fragmentDensityMapDynamic = fragmentDensityMapDynamic_; + offset = offset_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & - setFragmentDensityMapNonSubsampledImages( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT { - fragmentDensityMapNonSubsampledImages = fragmentDensityMapNonSubsampledImages_; + size = size_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPushConstantRange const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, fragmentDensityMap, fragmentDensityMapDynamic, fragmentDensityMapNonSubsampledImages ); + return std::tie( stageFlags, offset, size ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceFragmentDensityMapFeaturesEXT const & ) const = default; + auto operator<=>( PushConstantRange const & ) const = default; #else - bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMap == rhs.fragmentDensityMap ) && - ( fragmentDensityMapDynamic == rhs.fragmentDensityMapDynamic ) && - ( fragmentDensityMapNonSubsampledImages == rhs.fragmentDensityMapNonSubsampledImages ); + return ( stageFlags == rhs.stageFlags ) && ( offset == rhs.offset ) && ( size == rhs.size ); # endif } - bool operator!=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages = {}; - }; - - template <> - struct CppType - { - using Type = PhysicalDeviceFragmentDensityMapFeaturesEXT; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + uint32_t offset = {}; + uint32_t size = {}; }; - struct PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM + struct IndirectCommandsPushConstantTokenEXT { - using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; + using NativeType = VkIndirectCommandsPushConstantTokenEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fragmentDensityMapOffset( fragmentDensityMapOffset_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectCommandsPushConstantTokenEXT( VULKAN_HPP_NAMESPACE::PushConstantRange updateRange_ = {} ) VULKAN_HPP_NOEXCEPT + : updateRange{ updateRange_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR IndirectCommandsPushConstantTokenEXT( IndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( *reinterpret_cast( &rhs ) ) + IndirectCommandsPushConstantTokenEXT( VkIndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsPushConstantTokenEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & - operator=( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & operator=( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + IndirectCommandsPushConstantTokenEXT & operator=( IndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + IndirectCommandsPushConstantTokenEXT & operator=( VkIndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & - setFragmentDensityMapOffset( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsPushConstantTokenEXT & + setUpdateRange( VULKAN_HPP_NAMESPACE::PushConstantRange const & updateRange_ ) VULKAN_HPP_NOEXCEPT { - fragmentDensityMapOffset = fragmentDensityMapOffset_; + updateRange = updateRange_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + operator VkIndirectCommandsPushConstantTokenEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + operator VkIndirectCommandsPushConstantTokenEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( 
VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, fragmentDensityMapOffset ); + return std::tie( updateRange ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & ) const = default; + auto operator<=>( IndirectCommandsPushConstantTokenEXT const & ) const = default; #else - bool operator==( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( IndirectCommandsPushConstantTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMapOffset == rhs.fragmentDensityMapOffset ); + return ( updateRange == rhs.updateRange ); # endif } - bool operator!=( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( IndirectCommandsPushConstantTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset = {}; + VULKAN_HPP_NAMESPACE::PushConstantRange updateRange = {}; }; - template <> - struct CppType + struct IndirectCommandsVertexBufferTokenEXT { - using Type = PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; - }; + using NativeType = VkIndirectCommandsVertexBufferTokenEXT; - struct PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM - { - using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectCommandsVertexBufferTokenEXT( uint32_t vertexBindingUnit_ = {} ) VULKAN_HPP_NOEXCEPT : vertexBindingUnit{ vertexBindingUnit_ } + { + } - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; + VULKAN_HPP_CONSTEXPR IndirectCommandsVertexBufferTokenEXT( IndirectCommandsVertexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( VULKAN_HPP_NAMESPACE::Extent2D fragmentDensityOffsetGranularity_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fragmentDensityOffsetGranularity( fragmentDensityOffsetGranularity_ ) + IndirectCommandsVertexBufferTokenEXT( VkIndirectCommandsVertexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsVertexBufferTokenEXT( *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + IndirectCommandsVertexBufferTokenEXT & operator=( IndirectCommandsVertexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( *reinterpret_cast( &rhs ) ) + 
IndirectCommandsVertexBufferTokenEXT & operator=( VkIndirectCommandsVertexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM & - operator=( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM & operator=( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsVertexBufferTokenEXT & setVertexBindingUnit( uint32_t vertexBindingUnit_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + vertexBindingUnit = vertexBindingUnit_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT + operator VkIndirectCommandsVertexBufferTokenEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM &() VULKAN_HPP_NOEXCEPT + operator VkIndirectCommandsVertexBufferTokenEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, fragmentDensityOffsetGranularity ); + return std::tie( vertexBindingUnit ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & ) const = default; + auto operator<=>( IndirectCommandsVertexBufferTokenEXT const & ) const = default; #else - bool operator==( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( IndirectCommandsVertexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityOffsetGranularity == rhs.fragmentDensityOffsetGranularity ); + return ( vertexBindingUnit == rhs.vertexBindingUnit ); # endif } - bool operator!=( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( IndirectCommandsVertexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Extent2D fragmentDensityOffsetGranularity = {}; - }; - - template <> - struct CppType - { - using Type = PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; + uint32_t vertexBindingUnit = {}; }; - struct PhysicalDeviceFragmentDensityMapPropertiesEXT + union IndirectCommandsTokenDataEXT { - using NativeType = VkPhysicalDeviceFragmentDensityMapPropertiesEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; + using NativeType = VkIndirectCommandsTokenDataEXT; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) -#if !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , minFragmentDensityTexelSize( minFragmentDensityTexelSize_ ) - , maxFragmentDensityTexelSize( maxFragmentDensityTexelSize_ ) - , fragmentDensityInvocations( fragmentDensityInvocations_ ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsPushConstantTokenEXT * pPushConstant_ = {} ) + : pPushConstant( pPushConstant_ ) { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceFragmentDensityMapPropertiesEXT( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsVertexBufferTokenEXT * pVertexBuffer_ ) + : pVertexBuffer( pVertexBuffer_ ) + { + } - PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceFragmentDensityMapPropertiesEXT( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsIndexBufferTokenEXT * pIndexBuffer_ ) + : pIndexBuffer( pIndexBuffer_ ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsExecutionSetTokenEXT * pExecutionSet_ ) + : pExecutionSet( pExecutionSet_ ) + { + } +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT & + setPPushConstant( const VULKAN_HPP_NAMESPACE::IndirectCommandsPushConstantTokenEXT * pPushConstant_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + pPushConstant = pPushConstant_; return *this; } - operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT & + setPVertexBuffer( const VULKAN_HPP_NAMESPACE::IndirectCommandsVertexBufferTokenEXT * pVertexBuffer_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pVertexBuffer = pVertexBuffer_; + return *this; } - operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT & + setPIndexBuffer( const VULKAN_HPP_NAMESPACE::IndirectCommandsIndexBufferTokenEXT * pIndexBuffer_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pIndexBuffer = pIndexBuffer_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT & + setPExecutionSet( const VULKAN_HPP_NAMESPACE::IndirectCommandsExecutionSetTokenEXT * pExecutionSet_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, minFragmentDensityTexelSize, maxFragmentDensityTexelSize, 
fragmentDensityInvocations ); + pExecutionSet = pExecutionSet_; + return *this; } -#endif +#endif /*VULKAN_HPP_NO_SETTERS*/ -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceFragmentDensityMapPropertiesEXT const & ) const = default; -#else - bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + operator VkIndirectCommandsTokenDataEXT const &() const { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize ) && - ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize ) && ( fragmentDensityInvocations == rhs.fragmentDensityInvocations ); -# endif + return *reinterpret_cast( this ); } - bool operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + operator VkIndirectCommandsTokenDataEXT &() { - return !operator==( rhs ); + return *reinterpret_cast( this ); } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations = {}; - }; - template <> - struct CppType - { - using Type = PhysicalDeviceFragmentDensityMapPropertiesEXT; +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + const VULKAN_HPP_NAMESPACE::IndirectCommandsPushConstantTokenEXT * pPushConstant; + const VULKAN_HPP_NAMESPACE::IndirectCommandsVertexBufferTokenEXT * pVertexBuffer; + const VULKAN_HPP_NAMESPACE::IndirectCommandsIndexBufferTokenEXT * pIndexBuffer; + const VULKAN_HPP_NAMESPACE::IndirectCommandsExecutionSetTokenEXT * pExecutionSet; +#else + const VkIndirectCommandsPushConstantTokenEXT * pPushConstant; + const VkIndirectCommandsVertexBufferTokenEXT * pVertexBuffer; + const VkIndirectCommandsIndexBufferTokenEXT * pIndexBuffer; + const VkIndirectCommandsExecutionSetTokenEXT * pExecutionSet; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; - struct PhysicalDeviceFragmentShaderBarycentricFeaturesKHR + struct IndirectCommandsLayoutTokenEXT { - using NativeType = VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + using NativeType = VkIndirectCommandsLayoutTokenEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutTokenEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fragmentShaderBarycentric( fragmentShaderBarycentric_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT( + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeEXT type_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeEXT::eExecutionSet, + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenDataEXT data_ = {}, + uint32_t offset_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + , data{ data_ } + , offset{ offset_ } { } - 
VULKAN_HPP_CONSTEXPR - PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT( IndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( *reinterpret_cast( &rhs ) ) + IndirectCommandsLayoutTokenEXT( VkIndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsLayoutTokenEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & - operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + IndirectCommandsLayoutTokenEXT & operator=( IndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenEXT & operator=( VkIndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & - setFragmentShaderBarycentric( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeEXT type_ ) VULKAN_HPP_NOEXCEPT { - fragmentShaderBarycentric = fragmentShaderBarycentric_; + type = type_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setData( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenDataEXT const & data_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + data = data_; + return *this; } - operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + offset = offset_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + operator VkIndirectCommandsLayoutTokenEXT const &() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, fragmentShaderBarycentric ); + return *reinterpret_cast( this ); } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & ) const = default; -#else - bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + operator 
VkIndirectCommandsLayoutTokenEXT &() VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric ); -# endif + return *reinterpret_cast( this ); } - bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + return std::tie( sType, pNext, type, data, offset ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutTokenEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeEXT type = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeEXT::eExecutionSet; + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenDataEXT data = {}; + uint32_t offset = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + using Type = IndirectCommandsLayoutTokenEXT; }; - using PhysicalDeviceFragmentShaderBarycentricFeaturesNV = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR; - struct PhysicalDeviceFragmentShaderBarycentricPropertiesKHR + struct IndirectCommandsLayoutCreateInfoEXT { - using NativeType = VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR; + using NativeType = VkIndirectCommandsLayoutCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 triStripVertexOrderIndependentOfProvokingVertex_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , triStripVertexOrderIndependentOfProvokingVertex( triStripVertexOrderIndependentOfProvokingVertex_ ) - { - } - - VULKAN_HPP_CONSTEXPR - PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutCreateInfoEXT; - PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ = {}, + uint32_t indirectStride_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, + uint32_t tokenCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenEXT * pTokens_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , shaderStages{ shaderStages_ } + , indirectStride{ indirectStride_ } + , pipelineLayout{ pipelineLayout_ } + , 
tokenCount{ tokenCount_ } + , pTokens{ pTokens_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PhysicalDeviceFragmentShaderBarycentricPropertiesKHR & - operator=( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceFragmentShaderBarycentricPropertiesKHR & operator=( VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoEXT( IndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutCreateInfoEXT( VkIndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsLayoutCreateInfoEXT( *reinterpret_cast( &rhs ) ) { - return *reinterpret_cast( this ); } - operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutCreateInfoEXT( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_, + uint32_t indirectStride_, + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tokens_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , shaderStages( shaderStages_ ) + , indirectStride( indirectStride_ ) + , pipelineLayout( pipelineLayout_ ) + , tokenCount( static_cast( tokens_.size() ) ) + , pTokens( tokens_.data() ) { - return *reinterpret_cast( this ); } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, triStripVertexOrderIndependentOfProvokingVertex ); - } -#endif + IndirectCommandsLayoutCreateInfoEXT & operator=( IndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & ) const = default; -#else - bool operator==( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutCreateInfoEXT & operator=( VkIndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && - ( triStripVertexOrderIndependentOfProvokingVertex == rhs.triStripVertexOrderIndependentOfProvokingVertex ); -# endif + *this = *reinterpret_cast( &rhs ); + return *this; } - bool operator!=( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + pNext = pNext_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 triStripVertexOrderIndependentOfProvokingVertex = {}; - }; - - template <> - struct CppType - { 
- using Type = PhysicalDeviceFragmentShaderBarycentricPropertiesKHR; - }; - - struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT - { - using NativeType = VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ ) - , fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ ) - , fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { + flags = flags_; + return *this; } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceFragmentShaderInterlockFeaturesEXT( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceFragmentShaderInterlockFeaturesEXT( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setShaderStages( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ ) VULKAN_HPP_NOEXCEPT { + shaderStages = shaderStages_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setIndirectStride( uint32_t indirectStride_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + indirectStride = indirectStride_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + pipelineLayout = pipelineLayout_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & - setFragmentShaderSampleInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT { - fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_; + tokenCount = tokenCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & - setFragmentShaderPixelInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & + setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenEXT * pTokens_ ) 
VULKAN_HPP_NOEXCEPT { - fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_; + pTokens = pTokens_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & - setFragmentShaderShadingRateInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutCreateInfoEXT & + setTokens( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tokens_ ) VULKAN_HPP_NOEXCEPT { - fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_; + tokenCount = static_cast( tokens_.size() ); + pTokens = tokens_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkIndirectCommandsLayoutCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkIndirectCommandsLayoutCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -51908,349 +54366,494 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + const void * const &, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsEXT const &, + VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::PipelineLayout const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenEXT * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, fragmentShaderSampleInterlock, fragmentShaderPixelInterlock, fragmentShaderShadingRateInterlock ); + return std::tie( sType, pNext, flags, shaderStages, indirectStride, pipelineLayout, tokenCount, pTokens ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & ) const = default; + auto operator<=>( IndirectCommandsLayoutCreateInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( IndirectCommandsLayoutCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock ) && - ( fragmentShaderPixelInterlock == rhs.fragmentShaderPixelInterlock ) && - ( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( shaderStages == rhs.shaderStages ) && + ( indirectStride == rhs.indirectStride ) && ( pipelineLayout == rhs.pipelineLayout ) && ( tokenCount == rhs.tokenCount ) && + ( pTokens == rhs.pTokens ); # endif } - bool operator!=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( IndirectCommandsLayoutCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 
fragmentShaderSampleInterlock = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages = {}; + uint32_t indirectStride = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {}; + uint32_t tokenCount = {}; + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenEXT * pTokens = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceFragmentShaderInterlockFeaturesEXT; + using Type = IndirectCommandsLayoutCreateInfoEXT; }; - struct PhysicalDeviceFragmentShadingRateEnumsFeaturesNV + struct IndirectCommandsLayoutTokenNV { - using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + using NativeType = VkIndirectCommandsLayoutTokenNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutTokenNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fragmentShadingRateEnums( fragmentShadingRateEnums_ ) - , supersampleFragmentShadingRates( supersampleFragmentShadingRates_ ) - , noInvocationFragmentShadingRates( noInvocationFragmentShadingRates_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup, + uint32_t stream_ = {}, + uint32_t offset_ = {}, + uint32_t vertexBindingUnit_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ = {}, + uint32_t pushconstantOffset_ = {}, + uint32_t pushconstantSize_ = {}, + VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ = {}, + uint32_t indexTypeCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ = {}, + const uint32_t * pIndexTypeValues_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , tokenType{ tokenType_ } + , stream{ stream_ } + , offset{ offset_ } + , vertexBindingUnit{ vertexBindingUnit_ } + , vertexDynamicStride{ vertexDynamicStride_ } + , pushconstantPipelineLayout{ pushconstantPipelineLayout_ } + , pushconstantShaderStageFlags{ pushconstantShaderStageFlags_ } + , pushconstantOffset{ pushconstantOffset_ } + , pushconstantSize{ pushconstantSize_ } + , indirectStateFlags{ indirectStateFlags_ } + , indexTypeCount{ indexTypeCount_ } + , pIndexTypes{ pIndexTypes_ } + , pIndexTypeValues{ pIndexTypeValues_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + 
VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( *reinterpret_cast( &rhs ) ) + IndirectCommandsLayoutTokenNV( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsLayoutTokenNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & operator=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutTokenNV( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_, + uint32_t stream_, + uint32_t offset_, + uint32_t vertexBindingUnit_, + VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_, + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_, + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_, + uint32_t pushconstantOffset_, + uint32_t pushconstantSize_, + VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypes_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypeValues_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , tokenType( tokenType_ ) + , stream( stream_ ) + , offset( offset_ ) + , vertexBindingUnit( vertexBindingUnit_ ) + , vertexDynamicStride( vertexDynamicStride_ ) + , pushconstantPipelineLayout( pushconstantPipelineLayout_ ) + , pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ) + , pushconstantOffset( pushconstantOffset_ ) + , pushconstantSize( pushconstantSize_ ) + , indirectStateFlags( indirectStateFlags_ ) + , indexTypeCount( static_cast( indexTypes_.size() ) ) + , pIndexTypes( indexTypes_.data() ) + , pIndexTypeValues( indexTypeValues_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( indexTypes_.size() == indexTypeValues_.size() ); +# else + if ( indexTypes_.size() != indexTypeValues_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::IndirectCommandsLayoutTokenNV::IndirectCommandsLayoutTokenNV: indexTypes_.size() != indexTypeValues_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & operator=( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutTokenNV & operator=( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + IndirectCommandsLayoutTokenNV & operator=( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & - setFragmentShadingRateEnums( VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ ) VULKAN_HPP_NOEXCEPT + 
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ ) VULKAN_HPP_NOEXCEPT { - fragmentShadingRateEnums = fragmentShadingRateEnums_; + tokenType = tokenType_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & - setSupersampleFragmentShadingRates( VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setStream( uint32_t stream_ ) VULKAN_HPP_NOEXCEPT { - supersampleFragmentShadingRates = supersampleFragmentShadingRates_; + stream = stream_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & - setNoInvocationFragmentShadingRates( VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT { - noInvocationFragmentShadingRates = noInvocationFragmentShadingRates_; + offset = offset_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setVertexBindingUnit( uint32_t vertexBindingUnit_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + vertexBindingUnit = vertexBindingUnit_; + return *this; } - operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setVertexDynamicStride( VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + vertexDynamicStride = vertexDynamicStride_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & + setPushconstantPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, fragmentShadingRateEnums, supersampleFragmentShadingRates, noInvocationFragmentShadingRates ); + pushconstantPipelineLayout = pushconstantPipelineLayout_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & ) const = default; -#else - bool operator==( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & + setPushconstantShaderStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShadingRateEnums == rhs.fragmentShadingRateEnums ) && - ( supersampleFragmentShadingRates == rhs.supersampleFragmentShadingRates ) && - ( noInvocationFragmentShadingRates == rhs.noInvocationFragmentShadingRates ); -# endif + pushconstantShaderStageFlags = pushconstantShaderStageFlags_; + return *this; } - bool operator!=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantOffset( uint32_t pushconstantOffset_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + pushconstantOffset = 
pushconstantOffset_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums = {}; - VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates = {}; - VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates = {}; - }; - - template <> - struct CppType - { - using Type = PhysicalDeviceFragmentShadingRateEnumsFeaturesNV; - }; - - struct PhysicalDeviceFragmentShadingRateEnumsPropertiesNV - { - using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( - VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxFragmentShadingRateInvocationCount( maxFragmentShadingRateInvocationCount_ ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantSize( uint32_t pushconstantSize_ ) VULKAN_HPP_NOEXCEPT { + pushconstantSize = pushconstantSize_; + return *this; } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & + setIndirectStateFlags( VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ ) VULKAN_HPP_NOEXCEPT + { + indirectStateFlags = indirectStateFlags_; + return *this; + } - PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setIndexTypeCount( uint32_t indexTypeCount_ ) VULKAN_HPP_NOEXCEPT { + indexTypeCount = indexTypeCount_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & - operator=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPIndexTypes( const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ ) VULKAN_HPP_NOEXCEPT + { + pIndexTypes = pIndexTypes_; + return *this; + } - PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & operator=( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutTokenNV & + setIndexTypes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypes_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + indexTypeCount = static_cast( indexTypes_.size() ); + pIndexTypes = indexTypes_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPIndexTypeValues( const uint32_t * pIndexTypeValues_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + pIndexTypeValues = pIndexTypeValues_; 
return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & - setMaxFragmentShadingRateInvocationCount( VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutTokenNV & + setIndexTypeValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & indexTypeValues_ ) VULKAN_HPP_NOEXCEPT { - maxFragmentShadingRateInvocationCount = maxFragmentShadingRateInvocationCount_; + indexTypeCount = static_cast( indexTypeValues_.size() ); + pIndexTypeValues = indexTypeValues_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + operator VkIndirectCommandsLayoutTokenNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV &() VULKAN_HPP_NOEXCEPT + operator VkIndirectCommandsLayoutTokenNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, maxFragmentShadingRateInvocationCount ); + return std::tie( sType, + pNext, + tokenType, + stream, + offset, + vertexBindingUnit, + vertexDynamicStride, + pushconstantPipelineLayout, + pushconstantShaderStageFlags, + pushconstantOffset, + pushconstantSize, + indirectStateFlags, + indexTypeCount, + pIndexTypes, + pIndexTypeValues ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & ) const = default; + auto operator<=>( IndirectCommandsLayoutTokenNV const & ) const = default; #else - bool operator==( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxFragmentShadingRateInvocationCount == rhs.maxFragmentShadingRateInvocationCount ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tokenType == rhs.tokenType ) && ( stream == rhs.stream ) && ( offset == rhs.offset ) && + ( vertexBindingUnit == rhs.vertexBindingUnit ) && ( vertexDynamicStride == rhs.vertexDynamicStride ) && + ( pushconstantPipelineLayout == rhs.pushconstantPipelineLayout ) && ( pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags ) && + ( pushconstantOffset == rhs.pushconstantOffset ) && ( pushconstantSize == rhs.pushconstantSize ) && + ( indirectStateFlags == rhs.indirectStateFlags ) && ( indexTypeCount == rhs.indexTypeCount ) && ( pIndexTypes == rhs.pIndexTypes ) && + ( pIndexTypeValues == rhs.pIndexTypeValues ); # endif } - bool operator!=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV; - void * pNext = {}; - 
VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; - }; - - template <> - struct CppType - { - using Type = PhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutTokenNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup; + uint32_t stream = {}; + uint32_t offset = {}; + uint32_t vertexBindingUnit = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags = {}; + uint32_t pushconstantOffset = {}; + uint32_t pushconstantSize = {}; + VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags = {}; + uint32_t indexTypeCount = {}; + const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes = {}; + const uint32_t * pIndexTypeValues = {}; }; - struct PhysicalDeviceFragmentShadingRateFeaturesKHR + template <> + struct CppType { - using NativeType = VkPhysicalDeviceFragmentShadingRateFeaturesKHR; + using Type = IndirectCommandsLayoutTokenNV; + }; + + struct IndirectCommandsLayoutCreateInfoNV + { + using NativeType = VkIndirectCommandsLayoutCreateInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutCreateInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pipelineFragmentShadingRate( pipelineFragmentShadingRate_ ) - , primitiveFragmentShadingRate( primitiveFragmentShadingRate_ ) - , attachmentFragmentShadingRate( attachmentFragmentShadingRate_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + IndirectCommandsLayoutCreateInfoNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + uint32_t tokenCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ = {}, + uint32_t streamCount_ = {}, + const uint32_t * pStreamStrides_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , pipelineBindPoint{ pipelineBindPoint_ } + , tokenCount{ tokenCount_ } + , pTokens{ pTokens_ } + , streamCount{ streamCount_ } + , pStreamStrides{ pStreamStrides_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceFragmentShadingRateFeaturesKHR( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceFragmentShadingRateFeaturesKHR( *reinterpret_cast( &rhs ) ) + 
IndirectCommandsLayoutCreateInfoNV( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsLayoutCreateInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutCreateInfoNV( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tokens_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streamStrides_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , tokenCount( static_cast( tokens_.size() ) ) + , pTokens( tokens_.data() ) + , streamCount( static_cast( streamStrides_.size() ) ) + , pStreamStrides( streamStrides_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + IndirectCommandsLayoutCreateInfoNV & operator=( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + IndirectCommandsLayoutCreateInfoNV & operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & - setPipelineFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT { - pipelineFragmentShadingRate = pipelineFragmentShadingRate_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & - setPrimitiveFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT { - primitiveFragmentShadingRate = primitiveFragmentShadingRate_; + pipelineBindPoint = pipelineBindPoint_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & - setAttachmentFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT { - attachmentFragmentShadingRate = attachmentFragmentShadingRate_; + tokenCount = tokenCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & 
+ setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pTokens = pTokens_; + return *this; } - operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutCreateInfoNV & + setTokens( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tokens_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + tokenCount = static_cast( tokens_.size() ); + pTokens = tokens_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT + { + streamCount = streamCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPStreamStrides( const uint32_t * pStreamStrides_ ) VULKAN_HPP_NOEXCEPT + { + pStreamStrides = pStreamStrides_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutCreateInfoNV & + setStreamStrides( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & streamStrides_ ) VULKAN_HPP_NOEXCEPT + { + streamCount = static_cast( streamStrides_.size() ); + pStreamStrides = streamStrides_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkIndirectCommandsLayoutCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -52258,872 +54861,964 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + const void * const &, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV const &, + VULKAN_HPP_NAMESPACE::PipelineBindPoint const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * const &, + uint32_t const &, + const uint32_t * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pipelineFragmentShadingRate, primitiveFragmentShadingRate, attachmentFragmentShadingRate ); + return std::tie( sType, pNext, flags, pipelineBindPoint, tokenCount, pTokens, streamCount, pStreamStrides ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceFragmentShadingRateFeaturesKHR const & ) const = default; + auto operator<=>( IndirectCommandsLayoutCreateInfoNV const & ) const = default; #else - bool operator==( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineFragmentShadingRate == rhs.pipelineFragmentShadingRate ) && - ( primitiveFragmentShadingRate == rhs.primitiveFragmentShadingRate ) && ( attachmentFragmentShadingRate == rhs.attachmentFragmentShadingRate ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && + ( tokenCount == rhs.tokenCount ) && ( pTokens == rhs.pTokens ) && ( streamCount == rhs.streamCount ) && ( pStreamStrides == rhs.pStreamStrides ); # endif } - bool operator!=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool 
operator!=( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate = {}; - VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate = {}; - VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + uint32_t tokenCount = {}; + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV * pTokens = {}; + uint32_t streamCount = {}; + const uint32_t * pStreamStrides = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceFragmentShadingRateFeaturesKHR; + using Type = IndirectCommandsLayoutCreateInfoNV; }; - struct PhysicalDeviceFragmentShadingRateKHR + struct IndirectExecutionSetPipelineInfoEXT { - using NativeType = VkPhysicalDeviceFragmentShadingRateKHR; + using NativeType = VkIndirectExecutionSetPipelineInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetPipelineInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , sampleCounts( sampleCounts_ ) - , fragmentSize( fragmentSize_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectExecutionSetPipelineInfoEXT( VULKAN_HPP_NAMESPACE::Pipeline initialPipeline_ = {}, + uint32_t maxPipelineCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , initialPipeline{ initialPipeline_ } + , maxPipelineCount{ maxPipelineCount_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR IndirectExecutionSetPipelineInfoEXT( IndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceFragmentShadingRateKHR( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceFragmentShadingRateKHR( *reinterpret_cast( &rhs ) ) + IndirectExecutionSetPipelineInfoEXT( VkIndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectExecutionSetPipelineInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceFragmentShadingRateKHR & operator=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + IndirectExecutionSetPipelineInfoEXT & operator=( IndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceFragmentShadingRateKHR & operator=( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT + IndirectExecutionSetPipelineInfoEXT & operator=( 
VkIndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceFragmentShadingRateKHR const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceFragmentShadingRateKHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setInitialPipeline( VULKAN_HPP_NAMESPACE::Pipeline initialPipeline_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + initialPipeline = initialPipeline_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setMaxPipelineCount( uint32_t maxPipelineCount_ ) VULKAN_HPP_NOEXCEPT + { + maxPipelineCount = maxPipelineCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkIndirectExecutionSetPipelineInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectExecutionSetPipelineInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std:: - tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, sampleCounts, fragmentSize ); + return std::tie( sType, pNext, initialPipeline, maxPipelineCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceFragmentShadingRateKHR const & ) const = default; + auto operator<=>( IndirectExecutionSetPipelineInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( IndirectExecutionSetPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleCounts == rhs.sampleCounts ) && ( fragmentSize == rhs.fragmentSize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( initialPipeline == rhs.initialPipeline ) && ( maxPipelineCount == rhs.maxPipelineCount ); # endif } - bool operator!=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( IndirectExecutionSetPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {}; - VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetPipelineInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Pipeline initialPipeline = {}; + uint32_t maxPipelineCount = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceFragmentShadingRateKHR; + using Type = IndirectExecutionSetPipelineInfoEXT; }; - struct PhysicalDeviceFragmentShadingRatePropertiesKHR + struct IndirectExecutionSetShaderLayoutInfoEXT { - using NativeType = VkPhysicalDeviceFragmentShadingRatePropertiesKHR; + using NativeType = VkIndirectExecutionSetShaderLayoutInfoEXT; static const bool allowDuplicate = 
false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetShaderLayoutInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR( - VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize_ = {}, - uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize_ = {}, - uint32_t maxFragmentSizeAspectRatio_ = {}, - uint32_t maxFragmentShadingRateCoverageSamples_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , minFragmentShadingRateAttachmentTexelSize( minFragmentShadingRateAttachmentTexelSize_ ) - , maxFragmentShadingRateAttachmentTexelSize( maxFragmentShadingRateAttachmentTexelSize_ ) - , maxFragmentShadingRateAttachmentTexelSizeAspectRatio( maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ ) - , primitiveFragmentShadingRateWithMultipleViewports( primitiveFragmentShadingRateWithMultipleViewports_ ) - , layeredShadingRateAttachments( layeredShadingRateAttachments_ ) - , fragmentShadingRateNonTrivialCombinerOps( fragmentShadingRateNonTrivialCombinerOps_ ) - , maxFragmentSize( maxFragmentSize_ ) - , maxFragmentSizeAspectRatio( maxFragmentSizeAspectRatio_ ) - , maxFragmentShadingRateCoverageSamples( maxFragmentShadingRateCoverageSamples_ ) - , maxFragmentShadingRateRasterizationSamples( maxFragmentShadingRateRasterizationSamples_ ) - , fragmentShadingRateWithShaderDepthStencilWrites( fragmentShadingRateWithShaderDepthStencilWrites_ ) - , fragmentShadingRateWithSampleMask( fragmentShadingRateWithSampleMask_ ) - , fragmentShadingRateWithShaderSampleMask( fragmentShadingRateWithShaderSampleMask_ ) - , fragmentShadingRateWithConservativeRasterization( fragmentShadingRateWithConservativeRasterization_ ) - , fragmentShadingRateWithFragmentShaderInterlock( fragmentShadingRateWithFragmentShaderInterlock_ ) - , fragmentShadingRateWithCustomSampleLocations( fragmentShadingRateWithCustomSampleLocations_ ) - , fragmentShadingRateStrictMultiplyCombiner( fragmentShadingRateStrictMultiplyCombiner_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderLayoutInfoEXT( uint32_t setLayoutCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ 
= {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , setLayoutCount{ setLayoutCount_ } + , pSetLayouts{ pSetLayouts_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceFragmentShadingRatePropertiesKHR( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderLayoutInfoEXT( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceFragmentShadingRatePropertiesKHR( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceFragmentShadingRatePropertiesKHR( *reinterpret_cast( &rhs ) ) + IndirectExecutionSetShaderLayoutInfoEXT( VkIndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectExecutionSetShaderLayoutInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectExecutionSetShaderLayoutInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), setLayoutCount( static_cast( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + IndirectExecutionSetShaderLayoutInfoEXT & operator=( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + IndirectExecutionSetShaderLayoutInfoEXT & operator=( VkIndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + setLayoutCount = setLayoutCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & + setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pSetLayouts = pSetLayouts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectExecutionSetShaderLayoutInfoEXT & + setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_ ) VULKAN_HPP_NOEXCEPT + { + setLayoutCount = static_cast( setLayouts_.size() ); + pSetLayouts = setLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkIndirectExecutionSetShaderLayoutInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectExecutionSetShaderLayoutInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if 
defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - minFragmentShadingRateAttachmentTexelSize, - maxFragmentShadingRateAttachmentTexelSize, - maxFragmentShadingRateAttachmentTexelSizeAspectRatio, - primitiveFragmentShadingRateWithMultipleViewports, - layeredShadingRateAttachments, - fragmentShadingRateNonTrivialCombinerOps, - maxFragmentSize, - maxFragmentSizeAspectRatio, - maxFragmentShadingRateCoverageSamples, - maxFragmentShadingRateRasterizationSamples, - fragmentShadingRateWithShaderDepthStencilWrites, - fragmentShadingRateWithSampleMask, - fragmentShadingRateWithShaderSampleMask, - fragmentShadingRateWithConservativeRasterization, - fragmentShadingRateWithFragmentShaderInterlock, - fragmentShadingRateWithCustomSampleLocations, - fragmentShadingRateStrictMultiplyCombiner ); + return std::tie( sType, pNext, setLayoutCount, pSetLayouts ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceFragmentShadingRatePropertiesKHR const & ) const = default; + auto operator<=>( IndirectExecutionSetShaderLayoutInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && - ( minFragmentShadingRateAttachmentTexelSize == rhs.minFragmentShadingRateAttachmentTexelSize ) && - ( maxFragmentShadingRateAttachmentTexelSize == rhs.maxFragmentShadingRateAttachmentTexelSize ) && - ( maxFragmentShadingRateAttachmentTexelSizeAspectRatio == rhs.maxFragmentShadingRateAttachmentTexelSizeAspectRatio ) && - ( primitiveFragmentShadingRateWithMultipleViewports == rhs.primitiveFragmentShadingRateWithMultipleViewports ) && - ( layeredShadingRateAttachments == rhs.layeredShadingRateAttachments ) && - ( fragmentShadingRateNonTrivialCombinerOps == rhs.fragmentShadingRateNonTrivialCombinerOps ) && ( maxFragmentSize == rhs.maxFragmentSize ) && - ( maxFragmentSizeAspectRatio == rhs.maxFragmentSizeAspectRatio ) && - ( maxFragmentShadingRateCoverageSamples == rhs.maxFragmentShadingRateCoverageSamples ) && - ( maxFragmentShadingRateRasterizationSamples == rhs.maxFragmentShadingRateRasterizationSamples ) && - ( fragmentShadingRateWithShaderDepthStencilWrites == rhs.fragmentShadingRateWithShaderDepthStencilWrites ) && - ( fragmentShadingRateWithSampleMask == rhs.fragmentShadingRateWithSampleMask ) && - ( fragmentShadingRateWithShaderSampleMask == rhs.fragmentShadingRateWithShaderSampleMask ) && - ( fragmentShadingRateWithConservativeRasterization == rhs.fragmentShadingRateWithConservativeRasterization ) && - ( fragmentShadingRateWithFragmentShaderInterlock == rhs.fragmentShadingRateWithFragmentShaderInterlock ) && - ( fragmentShadingRateWithCustomSampleLocations == rhs.fragmentShadingRateWithCustomSampleLocations ) && - ( fragmentShadingRateStrictMultiplyCombiner == rhs.fragmentShadingRateStrictMultiplyCombiner ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( setLayoutCount == rhs.setLayoutCount ) && ( pSetLayouts == rhs.pSetLayouts ); # endif } - bool operator!=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( IndirectExecutionSetShaderLayoutInfoEXT const 
& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize = {}; - uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio = {}; - VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports = {}; - VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize = {}; - uint32_t maxFragmentSizeAspectRatio = {}; - uint32_t maxFragmentShadingRateCoverageSamples = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; - VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations = {}; - VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetShaderLayoutInfoEXT; + const void * pNext = {}; + uint32_t setLayoutCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceFragmentShadingRatePropertiesKHR; + using Type = IndirectExecutionSetShaderLayoutInfoEXT; }; - struct PhysicalDeviceGlobalPriorityQueryFeaturesKHR + struct IndirectExecutionSetShaderInfoEXT { - using NativeType = VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR; + using NativeType = VkIndirectExecutionSetShaderInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetShaderInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , globalPriorityQuery( globalPriorityQuery_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderInfoEXT( uint32_t shaderCount_ = {}, + const VULKAN_HPP_NAMESPACE::ShaderEXT * pInitialShaders_ = {}, + const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos_ = {}, + uint32_t maxShaderCount_ = {}, + uint32_t pushConstantRangeCount_ = {}, + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderCount{ shaderCount_ } + , pInitialShaders{ pInitialShaders_ } + , pSetLayoutInfos{ pSetLayoutInfos_ } + , maxShaderCount{ maxShaderCount_ } + , pushConstantRangeCount{ pushConstantRangeCount_ } + , pPushConstantRanges{ 
pPushConstantRanges_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesKHR( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderInfoEXT( IndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceGlobalPriorityQueryFeaturesKHR( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceGlobalPriorityQueryFeaturesKHR( *reinterpret_cast( &rhs ) ) + IndirectExecutionSetShaderInfoEXT( VkIndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectExecutionSetShaderInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceGlobalPriorityQueryFeaturesKHR & operator=( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceGlobalPriorityQueryFeaturesKHR & operator=( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectExecutionSetShaderInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialShaders_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayoutInfos_ = {}, + uint32_t maxShaderCount_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , shaderCount( static_cast( initialShaders_.size() ) ) + , pInitialShaders( initialShaders_.data() ) + , pSetLayoutInfos( setLayoutInfos_.data() ) + , maxShaderCount( maxShaderCount_ ) + , pushConstantRangeCount( static_cast( pushConstantRanges_.size() ) ) + , pPushConstantRanges( pushConstantRanges_.data() ) { - *this = *reinterpret_cast( &rhs ); - return *this; +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( setLayoutInfos_.empty() || ( initialShaders_.size() == setLayoutInfos_.size() ) ); +# else + if ( !setLayoutInfos_.empty() && ( initialShaders_.size() != setLayoutInfos_.size() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::IndirectExecutionSetShaderInfoEXT::IndirectExecutionSetShaderInfoEXT: !setLayoutInfos_.empty() && ( initialShaders_.size() != setLayoutInfos_.size() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + IndirectExecutionSetShaderInfoEXT & operator=( IndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeaturesKHR & - setGlobalPriorityQuery( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ ) VULKAN_HPP_NOEXCEPT + IndirectExecutionSetShaderInfoEXT & operator=( VkIndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - globalPriorityQuery = globalPriorityQuery_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator 
VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setShaderCount( uint32_t shaderCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + shaderCount = shaderCount_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & + setPInitialShaders( const VULKAN_HPP_NAMESPACE::ShaderEXT * pInitialShaders_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, globalPriorityQuery ); + pInitialShaders = pInitialShaders_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & ) const = default; -#else - bool operator==( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectExecutionSetShaderInfoEXT & + setInitialShaders( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialShaders_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriorityQuery == rhs.globalPriorityQuery ); -# endif + shaderCount = static_cast( initialShaders_.size() ); + pInitialShaders = initialShaders_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - bool operator!=( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & + setPSetLayoutInfos( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + pSetLayoutInfos = pSetLayoutInfos_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery = {}; - }; - - template <> - struct CppType - { - using Type = PhysicalDeviceGlobalPriorityQueryFeaturesKHR; - }; - using PhysicalDeviceGlobalPriorityQueryFeaturesEXT = PhysicalDeviceGlobalPriorityQueryFeaturesKHR; - - struct PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT - { - using NativeType = VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , graphicsPipelineLibrary( graphicsPipelineLibrary_ ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectExecutionSetShaderInfoEXT & setSetLayoutInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayoutInfos_ ) + VULKAN_HPP_NOEXCEPT { + shaderCount = static_cast( setLayoutInfos_.size() ); + pSetLayoutInfos = setLayoutInfos_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR - PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - 
PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setMaxShaderCount( uint32_t maxShaderCount_ ) VULKAN_HPP_NOEXCEPT { + maxShaderCount = maxShaderCount_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & operator=( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & operator=( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + pushConstantRangeCount = pushConstantRangeCount_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & + setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + pPushConstantRanges = pPushConstantRanges_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & - setGraphicsPipelineLibrary( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectExecutionSetShaderInfoEXT & setPushConstantRanges( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT { - graphicsPipelineLibrary = graphicsPipelineLibrary_; + pushConstantRangeCount = static_cast( pushConstantRanges_.size() ); + pPushConstantRanges = pushConstantRanges_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkIndirectExecutionSetShaderInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkIndirectExecutionSetShaderInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, graphicsPipelineLibrary ); + return std::tie( sType, pNext, shaderCount, pInitialShaders, pSetLayoutInfos, maxShaderCount, pushConstantRangeCount, pPushConstantRanges ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & ) const = default; + auto operator<=>( IndirectExecutionSetShaderInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( IndirectExecutionSetShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType 
) && ( pNext == rhs.pNext ) && ( graphicsPipelineLibrary == rhs.graphicsPipelineLibrary ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCount == rhs.shaderCount ) && ( pInitialShaders == rhs.pInitialShaders ) && + ( pSetLayoutInfos == rhs.pSetLayoutInfos ) && ( maxShaderCount == rhs.maxShaderCount ) && + ( pushConstantRangeCount == rhs.pushConstantRangeCount ) && ( pPushConstantRanges == rhs.pPushConstantRanges ); # endif } - bool operator!=( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( IndirectExecutionSetShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetShaderInfoEXT; + const void * pNext = {}; + uint32_t shaderCount = {}; + const VULKAN_HPP_NAMESPACE::ShaderEXT * pInitialShaders = {}; + const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos = {}; + uint32_t maxShaderCount = {}; + uint32_t pushConstantRangeCount = {}; + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; + using Type = IndirectExecutionSetShaderInfoEXT; }; - struct PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT + union IndirectExecutionSetInfoEXT { - using NativeType = VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; + using NativeType = VkIndirectExecutionSetInfoEXT; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryFastLinking_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , graphicsPipelineLibraryFastLinking( graphicsPipelineLibraryFastLinking_ ) - , graphicsPipelineLibraryIndependentInterpolationDecoration( graphicsPipelineLibraryIndependentInterpolationDecoration_ ) + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT * pPipelineInfo_ = {} ) + : pPipelineInfo( pPipelineInfo_ ) { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT * pShaderInfo_ ) + : pShaderInfo( pShaderInfo_ ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & - operator=( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs 
) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & operator=( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT & + setPPipelineInfo( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT * pPipelineInfo_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + pPipelineInfo = pPipelineInfo_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT & + setPShaderInfo( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT * pShaderInfo_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + pShaderInfo = pShaderInfo_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & - setGraphicsPipelineLibraryFastLinking( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryFastLinking_ ) VULKAN_HPP_NOEXCEPT + operator VkIndirectExecutionSetInfoEXT const &() const { - graphicsPipelineLibraryFastLinking = graphicsPipelineLibraryFastLinking_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & setGraphicsPipelineLibraryIndependentInterpolationDecoration( - VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration_ ) VULKAN_HPP_NOEXCEPT + operator VkIndirectExecutionSetInfoEXT &() { - graphicsPipelineLibraryIndependentInterpolationDecoration = graphicsPipelineLibraryIndependentInterpolationDecoration_; - return *this; + return *reinterpret_cast( this ); } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + const VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT * pPipelineInfo; + const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT * pShaderInfo; +#else + const VkIndirectExecutionSetPipelineInfoEXT * pPipelineInfo; + const VkIndirectExecutionSetShaderInfoEXT * pShaderInfo; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + struct IndirectExecutionSetCreateInfoEXT + { + using NativeType = VkIndirectExecutionSetCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT( + VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type_ = VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT::ePipelines, + VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT info_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + , info{ info_ } { - return *reinterpret_cast( this ); } - operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT( IndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectExecutionSetCreateInfoEXT( VkIndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectExecutionSetCreateInfoEXT( *reinterpret_cast( &rhs 
) ) { - return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + IndirectExecutionSetCreateInfoEXT & operator=( IndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + IndirectExecutionSetCreateInfoEXT & operator=( VkIndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, graphicsPipelineLibraryFastLinking, graphicsPipelineLibraryIndependentInterpolationDecoration ); + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & ) const = default; -#else - bool operator==( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( graphicsPipelineLibraryFastLinking == rhs.graphicsPipelineLibraryFastLinking ) && - ( graphicsPipelineLibraryIndependentInterpolationDecoration == rhs.graphicsPipelineLibraryIndependentInterpolationDecoration ); -# endif + pNext = pNext_; + return *this; } - bool operator!=( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setType( VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setInfo( VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT const & info_ ) VULKAN_HPP_NOEXCEPT + { + info = info_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkIndirectExecutionSetCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectExecutionSetCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type, info ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryFastLinking = {}; - VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type = VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT::ePipelines; + VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT info = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; + using Type = IndirectExecutionSetCreateInfoEXT; }; - struct PhysicalDeviceGroupProperties + struct InitializePerformanceApiInfoINTEL { - using NativeType = VkPhysicalDeviceGroupProperties; + using NativeType = 
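// [Editor's aside, not part of the generated patch] A minimal usage sketch for the
// IndirectExecutionSetShaderInfoEXT / IndirectExecutionSetCreateInfoEXT structures introduced above.
// `device`, `vertShader` and `fragShader` (vk::ShaderEXT handles) are assumed to exist and
// VK_EXT_device_generated_commands to be enabled; the enhanced-mode constructor derives shaderCount
// from the initialShaders proxy and asserts/throws if setLayoutInfos is non-empty but differently sized.
std::array<vk::ShaderEXT, 2>                               initialShaders = { vertShader, fragShader };
std::array<vk::IndirectExecutionSetShaderLayoutInfoEXT, 2> layoutInfos    = {};  // per-shader set layouts, filled elsewhere
vk::IndirectExecutionSetShaderInfoEXT shaderInfo( initialShaders, layoutInfos, /*maxShaderCount*/ 16 );
vk::IndirectExecutionSetCreateInfoEXT createInfo( vk::IndirectExecutionSetInfoTypeEXT::eShaderObjects,
                                                  vk::IndirectExecutionSetInfoEXT( &shaderInfo ) );
vk::IndirectExecutionSetEXT execSet = device.createIndirectExecutionSetEXT( createInfo );  // wraps vkCreateIndirectExecutionSetEXT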
VkInitializePerformanceApiInfoINTEL; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGroupProperties; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInitializePerformanceApiInfoINTEL; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 - PhysicalDeviceGroupProperties( uint32_t physicalDeviceCount_ = {}, - std::array const & physicalDevices_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , physicalDeviceCount( physicalDeviceCount_ ) - , physicalDevices( physicalDevices_ ) - , subsetAllocation( subsetAllocation_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( void * pUserData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pUserData{ pUserData_ } { } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceGroupProperties( *reinterpret_cast( &rhs ) ) + InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : InitializePerformanceApiInfoINTEL( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceGroupProperties & operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + InitializePerformanceApiInfoINTEL & operator=( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceGroupProperties & operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + InitializePerformanceApiInfoINTEL & operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceGroupProperties const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL & setPUserData( void * pUserData_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pUserData = pUserData_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkInitializePerformanceApiInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple const &, - VULKAN_HPP_NAMESPACE::Bool32 const &> + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, physicalDeviceCount, physicalDevices, subsetAllocation ); + return std::tie( sType, pNext, pUserData ); } #endif #if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceGroupProperties const & ) const = default; + auto operator<=>( InitializePerformanceApiInfoINTEL const & ) const = default; #else - bool operator==( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) && - ( physicalDevices == rhs.physicalDevices ) && ( subsetAllocation == rhs.subsetAllocation ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pUserData == rhs.pUserData ); # endif } - bool operator!=( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties; - void * pNext = {}; - uint32_t physicalDeviceCount = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D physicalDevices = {}; - VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL; + const void * pNext = {}; + void * pUserData = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceGroupProperties; + using Type = InitializePerformanceApiInfoINTEL; }; - using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties; - struct PhysicalDeviceHostQueryResetFeatures + struct InputAttachmentAspectReference { - using NativeType = VkPhysicalDeviceHostQueryResetFeatures; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostQueryResetFeatures; + using NativeType = VkInputAttachmentAspectReference; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , hostQueryReset( hostQueryReset_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( uint32_t subpass_ = {}, + uint32_t inputAttachmentIndex_ = {}, + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT + : subpass{ subpass_ } + , inputAttachmentIndex{ inputAttachmentIndex_ } + , aspectMask{ aspectMask_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceHostQueryResetFeatures( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceHostQueryResetFeatures( *reinterpret_cast( &rhs ) ) + InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT + : InputAttachmentAspectReference( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceHostQueryResetFeatures & operator=( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + InputAttachmentAspectReference & operator=( 
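// [Editor's aside, not part of the generated patch] InitializePerformanceApiInfoINTEL only carries an
// optional user-data pointer. Assuming VK_INTEL_performance_query is enabled on an existing `device`:
vk::InitializePerformanceApiInfoINTEL perfInit( /*pUserData*/ nullptr );
device.initializePerformanceApiINTEL( perfInit );       // wraps vkInitializePerformanceApiINTEL
// ... record and submit work with performance markers ...
device.uninitializePerformanceApiINTEL();               // wraps vkUninitializePerformanceApiINTEL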
InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceHostQueryResetFeatures & operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + InputAttachmentAspectReference & operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + subpass = subpass_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT { - hostQueryReset = hostQueryReset_; + inputAttachmentIndex = inputAttachmentIndex_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceHostQueryResetFeatures const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + aspectMask = aspectMask_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceHostQueryResetFeatures &() VULKAN_HPP_NOEXCEPT + operator VkInputAttachmentAspectReference const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, hostQueryReset ); + return std::tie( subpass, inputAttachmentIndex, aspectMask ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceHostQueryResetFeatures const & ) const = default; + auto operator<=>( InputAttachmentAspectReference const & ) const = default; #else - bool operator==( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hostQueryReset == rhs.hostQueryReset ); + return ( subpass == rhs.subpass ) && ( inputAttachmentIndex == rhs.inputAttachmentIndex ) && ( aspectMask == rhs.aspectMask ); # endif } - bool operator!=( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {}; + uint32_t subpass = {}; + uint32_t inputAttachmentIndex = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; }; - template <> - struct CppType 
- { - using Type = PhysicalDeviceHostQueryResetFeatures; - }; - using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures; + using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference; - struct PhysicalDeviceIDProperties + struct InstanceCreateInfo { - using NativeType = VkPhysicalDeviceIDProperties; + using NativeType = VkInstanceCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIdProperties; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInstanceCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( std::array const & deviceUUID_ = {}, - std::array const & driverUUID_ = {}, - std::array const & deviceLUID_ = {}, - uint32_t deviceNodeMask_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {}, + const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ = {}, + uint32_t enabledLayerCount_ = {}, + const char * const * ppEnabledLayerNames_ = {}, + uint32_t enabledExtensionCount_ = {}, + const char * const * ppEnabledExtensionNames_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , pApplicationInfo{ pApplicationInfo_ } + , enabledLayerCount{ enabledLayerCount_ } + , ppEnabledLayerNames{ ppEnabledLayerNames_ } + , enabledExtensionCount{ enabledExtensionCount_ } + , ppEnabledExtensionNames{ ppEnabledExtensionNames_ } + { + } + + VULKAN_HPP_CONSTEXPR InstanceCreateInfo( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : InstanceCreateInfo( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_, + const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledLayerNames_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ = {}, + const void * pNext_ = nullptr ) : pNext( pNext_ ) - , deviceUUID( deviceUUID_ ) - , driverUUID( driverUUID_ ) - , deviceLUID( deviceLUID_ ) - , deviceNodeMask( deviceNodeMask_ ) - , deviceLUIDValid( deviceLUIDValid_ ) + , flags( flags_ ) + , pApplicationInfo( pApplicationInfo_ ) + , enabledLayerCount( static_cast( pEnabledLayerNames_.size() ) ) + , ppEnabledLayerNames( pEnabledLayerNames_.data() ) + , enabledExtensionCount( static_cast( pEnabledExtensionNames_.size() ) ) + , ppEnabledExtensionNames( pEnabledExtensionNames_.data() ) { } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + InstanceCreateInfo & operator=( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceIDProperties( *reinterpret_cast( &rhs ) ) + InstanceCreateInfo & operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif 
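// [Editor's aside, not part of the generated patch] InputAttachmentAspectReference is a plain
// (sType-less) struct consumed by vk::RenderPassInputAttachmentAspectCreateInfo, which is chained
// into vk::RenderPassCreateInfo::pNext to declare which aspects of an input attachment are read.
// `renderPassCreateInfo` is an assumed vk::RenderPassCreateInfo being built elsewhere.
vk::InputAttachmentAspectReference aspectRef( /*subpass*/ 0, /*inputAttachmentIndex*/ 0, vk::ImageAspectFlagBits::eColor );
vk::RenderPassInputAttachmentAspectCreateInfo aspectInfo( 1, &aspectRef );
renderPassCreateInfo.pNext = &aspectInfo;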
/*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceIDProperties & operator=( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } - PhysicalDeviceIDProperties & operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + flags = flags_; return *this; } - operator VkPhysicalDeviceIDProperties const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pApplicationInfo = pApplicationInfo_; + return *this; } - operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + enabledLayerCount = enabledLayerCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + { + ppEnabledLayerNames = ppEnabledLayerNames_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + InstanceCreateInfo & + setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + { + enabledLayerCount = static_cast( pEnabledLayerNames_.size() ); + ppEnabledLayerNames = pEnabledLayerNames_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT + { + enabledExtensionCount = enabledExtensionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT + { + ppEnabledExtensionNames = ppEnabledExtensionNames_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + InstanceCreateInfo & + setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT + { + enabledExtensionCount = static_cast( pEnabledExtensionNames_.size() ); + ppEnabledExtensionNames = pEnabledExtensionNames_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkInstanceCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -53131,2447 +55826,24298 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + const void * const &, + VULKAN_HPP_NAMESPACE::InstanceCreateFlags const &, + const VULKAN_HPP_NAMESPACE::ApplicationInfo * const &, uint32_t const &, - VULKAN_HPP_NAMESPACE::Bool32 const &> + const char * const * const &, + uint32_t const &, + const char * const * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, 
deviceUUID, driverUUID, deviceLUID, deviceNodeMask, deviceLUIDValid ); + return std::tie( sType, pNext, flags, pApplicationInfo, enabledLayerCount, ppEnabledLayerNames, enabledExtensionCount, ppEnabledExtensionNames ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceIDProperties const & ) const = default; -#else - bool operator==( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + std::strong_ordering operator<=>( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) && ( driverUUID == rhs.driverUUID ) && - ( deviceLUID == rhs.deviceLUID ) && ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid ); -# endif + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = pApplicationInfo <=> rhs.pApplicationInfo; cmp != 0 ) + return cmp; + if ( auto cmp = enabledLayerCount <=> rhs.enabledLayerCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < enabledLayerCount; ++i ) + { + if ( ppEnabledLayerNames[i] != rhs.ppEnabledLayerNames[i] ) + if ( auto cmp = strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ); cmp != 0 ) + return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater; + } + if ( auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < enabledExtensionCount; ++i ) + { + if ( ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i] ) + if ( auto cmp = strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ); cmp != 0 ) + return cmp < 0 ? 
std::strong_ordering::less : std::strong_ordering::greater; + } + + return std::strong_ordering::equivalent; } +#endif - bool operator!=( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pApplicationInfo == rhs.pApplicationInfo ) && + ( enabledLayerCount == rhs.enabledLayerCount ) && + std::equal( ppEnabledLayerNames, + ppEnabledLayerNames + enabledLayerCount, + rhs.ppEnabledLayerNames, + []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } ) && + ( enabledExtensionCount == rhs.enabledExtensionCount ) && + std::equal( ppEnabledExtensionNames, + ppEnabledExtensionNames + enabledExtensionCount, + rhs.ppEnabledExtensionNames, + []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } ); + } + + bool operator!=( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceUUID = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverUUID = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceLUID = {}; - uint32_t deviceNodeMask = {}; - VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInstanceCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags = {}; + const VULKAN_HPP_NAMESPACE::ApplicationInfo * pApplicationInfo = {}; + uint32_t enabledLayerCount = {}; + const char * const * ppEnabledLayerNames = {}; + uint32_t enabledExtensionCount = {}; + const char * const * ppEnabledExtensionNames = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceIDProperties; + using Type = InstanceCreateInfo; }; - using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties; - struct PhysicalDeviceImage2DViewOf3DFeaturesEXT + struct LatencySleepInfoNV { - using NativeType = VkPhysicalDeviceImage2DViewOf3DFeaturesEXT; + using NativeType = VkLatencySleepInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySleepInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceImage2DViewOf3DFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , image2DViewOf3D( image2DViewOf3D_ ) - , sampler2DViewOf3D( sampler2DViewOf3D_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + LatencySleepInfoNV( VULKAN_HPP_NAMESPACE::Semaphore signalSemaphore_ = {}, uint64_t value_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , signalSemaphore{ signalSemaphore_ } + , value{ value_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceImage2DViewOf3DFeaturesEXT( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR LatencySleepInfoNV( LatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - 
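// [Editor's aside, not part of the generated patch] Note that InstanceCreateInfo's operator== and <=>
// above compare the layer and extension name arrays element-wise with strcmp rather than by pointer.
// A typical creation path with the enhanced-mode constructor (counts derived from the proxies):
vk::ApplicationInfo appInfo( "demo", 1, "noEngine", 1, VK_API_VERSION_1_3 );
std::vector<char const *> layers     = { "VK_LAYER_KHRONOS_validation" };
std::vector<char const *> extensions = { VK_KHR_SURFACE_EXTENSION_NAME };
vk::InstanceCreateInfo instanceCI( {}, &appInfo, layers, extensions );
vk::Instance           instance = vk::createInstance( instanceCI );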
PhysicalDeviceImage2DViewOf3DFeaturesEXT( VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceImage2DViewOf3DFeaturesEXT( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + LatencySleepInfoNV( VkLatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : LatencySleepInfoNV( *reinterpret_cast( &rhs ) ) {} - PhysicalDeviceImage2DViewOf3DFeaturesEXT & operator=( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + LatencySleepInfoNV & operator=( LatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceImage2DViewOf3DFeaturesEXT & operator=( VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + LatencySleepInfoNV & operator=( VkLatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & setImage2DViewOf3D( VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV & setSignalSemaphore( VULKAN_HPP_NAMESPACE::Semaphore signalSemaphore_ ) VULKAN_HPP_NOEXCEPT { - image2DViewOf3D = image2DViewOf3D_; + signalSemaphore = signalSemaphore_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & - setSampler2DViewOf3D( VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT { - sampler2DViewOf3D = sampler2DViewOf3D_; + value = value_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkLatencySleepInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkLatencySleepInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, image2DViewOf3D, sampler2DViewOf3D ); + return std::tie( sType, pNext, signalSemaphore, value ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & ) const = default; + auto operator<=>( LatencySleepInfoNV const & ) const = default; #else - bool operator==( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( LatencySleepInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image2DViewOf3D == rhs.image2DViewOf3D ) && ( sampler2DViewOf3D == rhs.sampler2DViewOf3D ); + return ( sType == rhs.sType ) && ( 
pNext == rhs.pNext ) && ( signalSemaphore == rhs.signalSemaphore ) && ( value == rhs.value ); # endif } - bool operator!=( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( LatencySleepInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D = {}; - VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencySleepInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore signalSemaphore = {}; + uint64_t value = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceImage2DViewOf3DFeaturesEXT; + using Type = LatencySleepInfoNV; }; - struct PhysicalDeviceImageCompressionControlFeaturesEXT + struct LatencySleepModeInfoNV { - using NativeType = VkPhysicalDeviceImageCompressionControlFeaturesEXT; + using NativeType = VkLatencySleepModeInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySleepModeInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControl_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageCompressionControl( imageCompressionControl_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR LatencySleepModeInfoNV( VULKAN_HPP_NAMESPACE::Bool32 lowLatencyMode_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 lowLatencyBoost_ = {}, + uint32_t minimumIntervalUs_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , lowLatencyMode{ lowLatencyMode_ } + , lowLatencyBoost{ lowLatencyBoost_ } + , minimumIntervalUs{ minimumIntervalUs_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceImageCompressionControlFeaturesEXT( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR LatencySleepModeInfoNV( LatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceImageCompressionControlFeaturesEXT( VkPhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceImageCompressionControlFeaturesEXT( *reinterpret_cast( &rhs ) ) + LatencySleepModeInfoNV( VkLatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : LatencySleepModeInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceImageCompressionControlFeaturesEXT & operator=( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + LatencySleepModeInfoNV & operator=( LatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceImageCompressionControlFeaturesEXT & operator=( VkPhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + LatencySleepModeInfoNV & operator=( VkLatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlFeaturesEXT & - setImageCompressionControl( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControl_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setLowLatencyMode( VULKAN_HPP_NAMESPACE::Bool32 lowLatencyMode_ ) VULKAN_HPP_NOEXCEPT { - imageCompressionControl = imageCompressionControl_; + lowLatencyMode = lowLatencyMode_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceImageCompressionControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setLowLatencyBoost( VULKAN_HPP_NAMESPACE::Bool32 lowLatencyBoost_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + lowLatencyBoost = lowLatencyBoost_; + return *this; } - operator VkPhysicalDeviceImageCompressionControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setMinimumIntervalUs( uint32_t minimumIntervalUs_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + minimumIntervalUs = minimumIntervalUs_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkLatencySleepModeInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLatencySleepModeInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, imageCompressionControl ); + return std::tie( sType, pNext, lowLatencyMode, lowLatencyBoost, minimumIntervalUs ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceImageCompressionControlFeaturesEXT const & ) const = default; + auto operator<=>( LatencySleepModeInfoNV const & ) const = default; #else - bool operator==( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( LatencySleepModeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCompressionControl == rhs.imageCompressionControl ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( lowLatencyMode == rhs.lowLatencyMode ) && ( lowLatencyBoost == rhs.lowLatencyBoost ) && + ( minimumIntervalUs == rhs.minimumIntervalUs ); # endif } - bool operator!=( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( LatencySleepModeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControl = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencySleepModeInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 lowLatencyMode = {}; + VULKAN_HPP_NAMESPACE::Bool32 lowLatencyBoost = {}; + uint32_t 
minimumIntervalUs = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceImageCompressionControlFeaturesEXT; + using Type = LatencySleepModeInfoNV; }; - struct PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT + struct LatencySubmissionPresentIdNV { - using NativeType = VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; + using NativeType = VkLatencySubmissionPresentIdNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySubmissionPresentIdNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControlSwapchain_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageCompressionControlSwapchain( imageCompressionControlSwapchain_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR LatencySubmissionPresentIdNV( uint64_t presentID_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentID{ presentID_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) - VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR LatencySubmissionPresentIdNV( LatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( - *reinterpret_cast( &rhs ) ) + LatencySubmissionPresentIdNV( VkLatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT + : LatencySubmissionPresentIdNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & - operator=( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + LatencySubmissionPresentIdNV & operator=( LatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & - operator=( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + LatencySubmissionPresentIdNV & operator=( VkLatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 LatencySubmissionPresentIdNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & - setImageCompressionControlSwapchain( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControlSwapchain_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 LatencySubmissionPresentIdNV & setPresentID( uint64_t presentID_ ) VULKAN_HPP_NOEXCEPT { - 
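// [Editor's aside, not part of the generated patch] With VK_NV_low_latency2, LatencySleepModeInfoNV
// configures low-latency mode on a swapchain and LatencySleepInfoNV names a timeline semaphore and
// value the application waits on after latencySleepNV before sampling input. `device`, `swapchain`,
// `timelineSemaphore` and `sleepValue` are assumed to exist.
vk::LatencySleepModeInfoNV sleepMode( /*lowLatencyMode*/ VK_TRUE, /*lowLatencyBoost*/ VK_FALSE, /*minimumIntervalUs*/ 0 );
device.setLatencySleepModeNV( swapchain, sleepMode );   // wraps vkSetLatencySleepModeNV
vk::LatencySleepInfoNV sleepInfo( timelineSemaphore, ++sleepValue );
device.latencySleepNV( swapchain, sleepInfo );          // wraps vkLatencySleepNV; then wait on the semaphore for sleepValue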
imageCompressionControlSwapchain = imageCompressionControlSwapchain_; + presentID = presentID_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkLatencySubmissionPresentIdNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkLatencySubmissionPresentIdNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, imageCompressionControlSwapchain ); + return std::tie( sType, pNext, presentID ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & ) const = default; + auto operator<=>( LatencySubmissionPresentIdNV const & ) const = default; #else - bool operator==( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( LatencySubmissionPresentIdNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCompressionControlSwapchain == rhs.imageCompressionControlSwapchain ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentID == rhs.presentID ); # endif } - bool operator!=( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( LatencySubmissionPresentIdNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControlSwapchain = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencySubmissionPresentIdNV; + const void * pNext = {}; + uint64_t presentID = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; + using Type = LatencySubmissionPresentIdNV; }; - struct PhysicalDeviceImageDrmFormatModifierInfoEXT + struct LatencySurfaceCapabilitiesNV { - using NativeType = VkPhysicalDeviceImageDrmFormatModifierInfoEXT; + using NativeType = VkLatencySurfaceCapabilitiesNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySurfaceCapabilitiesNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_ = {}, - VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, - uint32_t queueFamilyIndexCount_ = {}, - const uint32_t * pQueueFamilyIndices_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , drmFormatModifier( drmFormatModifier_ ) - , sharingMode( sharingMode_ ) - , queueFamilyIndexCount( 
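// [Editor's aside, not part of the generated patch] LatencySubmissionPresentIdNV is chained into a
// queue submission to associate that work with a present ID for the latency markers of
// VK_NV_low_latency2. `submitInfo2` (a vk::SubmitInfo2 describing the frame's work), `queue` and
// `presentId` are assumed to exist.
vk::LatencySubmissionPresentIdNV latencyId( presentId );
submitInfo2.pNext = &latencyId;
queue.submit2( submitInfo2 );      // requires Vulkan 1.3 or VK_KHR_synchronization2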
queueFamilyIndexCount_ ) - , pQueueFamilyIndices( pQueueFamilyIndices_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR LatencySurfaceCapabilitiesNV( uint32_t presentModeCount_ = {}, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentModeCount{ presentModeCount_ } + , pPresentModes{ pPresentModes_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR LatencySurfaceCapabilitiesNV( LatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceImageDrmFormatModifierInfoEXT( *reinterpret_cast( &rhs ) ) + LatencySurfaceCapabilitiesNV( VkLatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : LatencySurfaceCapabilitiesNV( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_, - VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , drmFormatModifier( drmFormatModifier_ ) - , sharingMode( sharingMode_ ) - , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) - , pQueueFamilyIndices( queueFamilyIndices_.data() ) + LatencySurfaceCapabilitiesNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentModes_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), presentModeCount( static_cast( presentModes_.size() ) ), pPresentModes( presentModes_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + LatencySurfaceCapabilitiesNV & operator=( LatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + LatencySurfaceCapabilitiesNV & operator=( VkLatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT - { - drmFormatModifier = drmFormatModifier_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT - { - sharingMode = sharingMode_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndexCount( uint32_t 
queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV & setPresentModeCount( uint32_t presentModeCount_ ) VULKAN_HPP_NOEXCEPT { - queueFamilyIndexCount = queueFamilyIndexCount_; + presentModeCount = presentModeCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV & setPPresentModes( VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT { - pQueueFamilyIndices = pQueueFamilyIndices_; + pPresentModes = pPresentModes_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PhysicalDeviceImageDrmFormatModifierInfoEXT & - setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + LatencySurfaceCapabilitiesNV & + setPresentModes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentModes_ ) VULKAN_HPP_NOEXCEPT { - queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); - pQueueFamilyIndices = queueFamilyIndices_.data(); + presentModeCount = static_cast( presentModes_.size() ); + pPresentModes = presentModes_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkLatencySurfaceCapabilitiesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkLatencySurfaceCapabilitiesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, drmFormatModifier, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices ); + return std::tie( sType, pNext, presentModeCount, pPresentModes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const & ) const = default; + auto operator<=>( LatencySurfaceCapabilitiesNV const & ) const = default; #else - bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( LatencySurfaceCapabilitiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) && ( sharingMode == rhs.sharingMode ) && - ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentModeCount == rhs.presentModeCount ) && ( pPresentModes == rhs.pPresentModes ); # endif } - bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( LatencySurfaceCapabilitiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; - const void * pNext = {}; - uint64_t drmFormatModifier = {}; - 
VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; - uint32_t queueFamilyIndexCount = {}; - const uint32_t * pQueueFamilyIndices = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLatencySurfaceCapabilitiesNV; + const void * pNext = {}; + uint32_t presentModeCount = {}; + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceImageDrmFormatModifierInfoEXT; + using Type = LatencySurfaceCapabilitiesNV; }; - struct PhysicalDeviceImageFormatInfo2 + struct LayerProperties { - using NativeType = VkPhysicalDeviceImageFormatInfo2; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageFormatInfo2; + using NativeType = VkLayerProperties; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, - VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, - VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , format( format_ ) - , type( type_ ) - , tiling( tiling_ ) - , usage( usage_ ) - , flags( flags_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 LayerProperties( std::array const & layerName_ = {}, + uint32_t specVersion_ = {}, + uint32_t implementationVersion_ = {}, + std::array const & description_ = {} ) VULKAN_HPP_NOEXCEPT + : layerName{ layerName_ } + , specVersion{ specVersion_ } + , implementationVersion{ implementationVersion_ } + , description{ description_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 LayerProperties( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceImageFormatInfo2( *reinterpret_cast( &rhs ) ) + LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT : LayerProperties( *reinterpret_cast( &rhs ) ) {} + + LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + LayerProperties & operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceImageFormatInfo2 & operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + operator VkLayerProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } - PhysicalDeviceImageFormatInfo2 & operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); - return *this; + return *reinterpret_cast( this ); } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + 
std::tuple const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return std::tie( layerName, specVersion, implementationVersion, description ); } +#endif - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - format = format_; - return *this; + if ( auto cmp = strcmp( layerName, rhs.layerName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = specVersion <=> rhs.specVersion; cmp != 0 ) + return cmp; + if ( auto cmp = implementationVersion <=> rhs.implementationVersion; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; } +#endif - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT + bool operator==( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - type = type_; - return *this; + return ( strcmp( layerName, rhs.layerName ) == 0 ) && ( specVersion == rhs.specVersion ) && ( implementationVersion == rhs.implementationVersion ) && + ( strcmp( description, rhs.description ) == 0 ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - tiling = tiling_; - return *this; + return !operator==( rhs ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D layerName = {}; + uint32_t specVersion = {}; + uint32_t implementationVersion = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + }; + + struct LayerSettingEXT + { + using NativeType = VkLayerSettingEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR LayerSettingEXT( const char * pLayerName_ = {}, + const char * pSettingName_ = {}, + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_ = VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT::eBool32, + uint32_t valueCount_ = {}, + const void * pValues_ = {} ) VULKAN_HPP_NOEXCEPT + : pLayerName{ pLayerName_ } + , pSettingName{ pSettingName_ } + , type{ type_ } + , valueCount{ valueCount_ } + , pValues{ pValues_ } { - usage = usage_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR LayerSettingEXT( LayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + LayerSettingEXT( VkLayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT : LayerSettingEXT( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + // NOTE: you need to provide the type because vk::Bool32 and uint32_t are indistinguishable! 
+ LayerSettingEXT( char const * pLayerName_, + char const * pSettingName_, + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, + vk::ArrayProxyNoTemporaries const & values_ ) + : pLayerName( pLayerName_ ) + , pSettingName( pSettingName_ ) + , type( type_ ) + , valueCount( static_cast( values_.size() ) ) + , pValues( values_.data() ) { - flags = flags_; - return *this; + VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType( type ) ); } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT + LayerSettingEXT( char const * pLayerName_, + char const * pSettingName_, + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, + vk::ArrayProxyNoTemporaries const & values_ ) + : pLayerName( pLayerName_ ) + , pSettingName( pSettingName_ ) + , type( type_ ) + , valueCount( static_cast( values_.size() ) ) + , pValues( values_.data() ) { - return *reinterpret_cast( this ); + VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType( type ) ); } - operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT + LayerSettingEXT( char const * pLayerName_, + char const * pSettingName_, + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, + vk::ArrayProxyNoTemporaries const & values_ ) + : pLayerName( pLayerName_ ) + , pSettingName( pSettingName_ ) + , type( type_ ) + , valueCount( static_cast( values_.size() ) ) + , pValues( values_.data() ) { - return *reinterpret_cast( this ); + VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType( type ) ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + LayerSettingEXT( char const * pLayerName_, + char const * pSettingName_, + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, + vk::ArrayProxyNoTemporaries const & values_ ) + : pLayerName( pLayerName_ ) + , pSettingName( pSettingName_ ) + , type( type_ ) + , valueCount( static_cast( values_.size() ) ) + , pValues( values_.data() ) { - return std::tie( sType, pNext, format, type, tiling, usage, flags ); + VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType( type ) ); } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceImageFormatInfo2 const & ) const = default; -#else - bool operator==( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + LayerSettingEXT( char const * pLayerName_, + char const * pSettingName_, + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, + vk::ArrayProxyNoTemporaries const & values_ ) + : pLayerName( pLayerName_ ) + , pSettingName( pSettingName_ ) + , type( type_ ) + , valueCount( static_cast( values_.size() ) ) + , pValues( values_.data() ) { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) && ( tiling == rhs.tiling ) && - ( usage == rhs.usage ) && ( flags == rhs.flags ); -# endif + VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType( type ) ); } - bool operator!=( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + LayerSettingEXT( char const * pLayerName_, + char const * pSettingName_, + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, + vk::ArrayProxyNoTemporaries const & values_ ) + : pLayerName( pLayerName_ ) + , pSettingName( pSettingName_ ) + , type( type_ ) + , valueCount( static_cast( values_.size() ) ) + , pValues( values_.data() ) { - return !operator==( rhs ); + VULKAN_HPP_ASSERT( 
VULKAN_HPP_NAMESPACE::isSameType( type ) ); } -#endif - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; - VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; - VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; - }; + LayerSettingEXT( char const * pLayerName_, + char const * pSettingName_, + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, + vk::ArrayProxyNoTemporaries const & values_ ) + : pLayerName( pLayerName_ ) + , pSettingName( pSettingName_ ) + , type( type_ ) + , valueCount( static_cast( values_.size() ) ) + , pValues( values_.data() ) + { + VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType( type ) ); + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - template <> - struct CppType - { - using Type = PhysicalDeviceImageFormatInfo2; - }; - using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2; + LayerSettingEXT & operator=( LayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - struct PhysicalDeviceImageProcessingFeaturesQCOM - { - using NativeType = VkPhysicalDeviceImageProcessingFeaturesQCOM; + LayerSettingEXT & operator=( VkLayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setPLayerName( const char * pLayerName_ ) VULKAN_HPP_NOEXCEPT + { + pLayerName = pLayerName_; + return *this; + } -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 textureSampleWeighted_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 textureBoxFilter_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , textureSampleWeighted( textureSampleWeighted_ ) - , textureBoxFilter( textureBoxFilter_ ) - , textureBlockMatch( textureBlockMatch_ ) + VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setPSettingName( const char * pSettingName_ ) VULKAN_HPP_NOEXCEPT { + pSettingName = pSettingName_; + return *this; } - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingFeaturesQCOM( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setType( VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } - PhysicalDeviceImageProcessingFeaturesQCOM( VkPhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceImageProcessingFeaturesQCOM( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setValueCount( uint32_t valueCount_ ) VULKAN_HPP_NOEXCEPT { + valueCount = valueCount_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceImageProcessingFeaturesQCOM & operator=( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + LayerSettingEXT & 
setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT + { + valueCount = static_cast( values_.size() ); + pValues = values_.data(); + return *this; + } - PhysicalDeviceImageProcessingFeaturesQCOM & operator=( VkPhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + valueCount = static_cast( values_.size() ); + pValues = values_.data(); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + valueCount = static_cast( values_.size() ); + pValues = values_.data(); return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & - setTextureSampleWeighted( VULKAN_HPP_NAMESPACE::Bool32 textureSampleWeighted_ ) VULKAN_HPP_NOEXCEPT + LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT { - textureSampleWeighted = textureSampleWeighted_; + valueCount = static_cast( values_.size() ); + pValues = values_.data(); return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & - setTextureBoxFilter( VULKAN_HPP_NAMESPACE::Bool32 textureBoxFilter_ ) VULKAN_HPP_NOEXCEPT + LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT { - textureBoxFilter = textureBoxFilter_; + valueCount = static_cast( values_.size() ); + pValues = values_.data(); return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & - setTextureBlockMatch( VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch_ ) VULKAN_HPP_NOEXCEPT + LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT { - textureBlockMatch = textureBlockMatch_; + valueCount = static_cast( values_.size() ); + pValues = values_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceImageProcessingFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + valueCount = static_cast( values_.size() ); + pValues = values_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceImageProcessingFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + operator VkLayerSettingEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkLayerSettingEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, textureSampleWeighted, textureBoxFilter, textureBlockMatch ); + return std::tie( pLayerName, pSettingName, type, valueCount, pValues ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceImageProcessingFeaturesQCOM const & ) const = default; -#else - bool operator==( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) const 
VULKAN_HPP_NOEXCEPT + std::strong_ordering operator<=>( LayerSettingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( textureSampleWeighted == rhs.textureSampleWeighted ) && - ( textureBoxFilter == rhs.textureBoxFilter ) && ( textureBlockMatch == rhs.textureBlockMatch ); -# endif + if ( pLayerName != rhs.pLayerName ) + if ( auto cmp = strcmp( pLayerName, rhs.pLayerName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( pSettingName != rhs.pSettingName ) + if ( auto cmp = strcmp( pSettingName, rhs.pSettingName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = type <=> rhs.type; cmp != 0 ) + return cmp; + if ( auto cmp = valueCount <=> rhs.valueCount; cmp != 0 ) + return cmp; + if ( auto cmp = pValues <=> rhs.pValues; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; } +#endif - bool operator!=( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( LayerSettingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( ( pLayerName == rhs.pLayerName ) || ( strcmp( pLayerName, rhs.pLayerName ) == 0 ) ) && + ( ( pSettingName == rhs.pSettingName ) || ( strcmp( pSettingName, rhs.pSettingName ) == 0 ) ) && ( type == rhs.type ) && + ( valueCount == rhs.valueCount ) && ( pValues == rhs.pValues ); + } + + bool operator!=( LayerSettingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 textureSampleWeighted = {}; - VULKAN_HPP_NAMESPACE::Bool32 textureBoxFilter = {}; - VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch = {}; + const char * pLayerName = {}; + const char * pSettingName = {}; + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type = VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT::eBool32; + uint32_t valueCount = {}; + const void * pValues = {}; }; - template <> - struct CppType + struct LayerSettingsCreateInfoEXT { - using Type = PhysicalDeviceImageProcessingFeaturesQCOM; - }; + using NativeType = VkLayerSettingsCreateInfoEXT; - struct PhysicalDeviceImageProcessingPropertiesQCOM - { - using NativeType = VkPhysicalDeviceImageProcessingPropertiesQCOM; + static const bool allowDuplicate = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLayerSettingsCreateInfoEXT; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR LayerSettingsCreateInfoEXT( uint32_t settingCount_ = {}, + const VULKAN_HPP_NAMESPACE::LayerSettingEXT * pSettings_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , settingCount{ settingCount_ } + , pSettings{ pSettings_ } + { + } -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingPropertiesQCOM( uint32_t maxWeightFilterPhases_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxWeightFilterDimension_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchRegion_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxBoxFilterBlockSize_ = 
{}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxWeightFilterPhases( maxWeightFilterPhases_ ) - , maxWeightFilterDimension( maxWeightFilterDimension_ ) - , maxBlockMatchRegion( maxBlockMatchRegion_ ) - , maxBoxFilterBlockSize( maxBoxFilterBlockSize_ ) + VULKAN_HPP_CONSTEXPR LayerSettingsCreateInfoEXT( LayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + LayerSettingsCreateInfoEXT( VkLayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : LayerSettingsCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingPropertiesQCOM( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + LayerSettingsCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & settings_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), settingCount( static_cast( settings_.size() ) ), pSettings( settings_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceImageProcessingPropertiesQCOM( VkPhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceImageProcessingPropertiesQCOM( *reinterpret_cast( &rhs ) ) + LayerSettingsCreateInfoEXT & operator=( LayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + LayerSettingsCreateInfoEXT & operator=( VkLayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceImageProcessingPropertiesQCOM & operator=( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 LayerSettingsCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } - PhysicalDeviceImageProcessingPropertiesQCOM & operator=( VkPhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 LayerSettingsCreateInfoEXT & setSettingCount( uint32_t settingCount_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + settingCount = settingCount_; return *this; } - operator VkPhysicalDeviceImageProcessingPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 LayerSettingsCreateInfoEXT & setPSettings( const VULKAN_HPP_NAMESPACE::LayerSettingEXT * pSettings_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pSettings = pSettings_; + return *this; } - operator VkPhysicalDeviceImageProcessingPropertiesQCOM &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + LayerSettingsCreateInfoEXT & + setSettings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & settings_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + settingCount = static_cast( settings_.size() ); + pSettings = settings_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkLayerSettingsCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkLayerSettingsCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return 
std::tie( sType, pNext, maxWeightFilterPhases, maxWeightFilterDimension, maxBlockMatchRegion, maxBoxFilterBlockSize ); + return std::tie( sType, pNext, settingCount, pSettings ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceImageProcessingPropertiesQCOM const & ) const = default; + auto operator<=>( LayerSettingsCreateInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( LayerSettingsCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxWeightFilterPhases == rhs.maxWeightFilterPhases ) && - ( maxWeightFilterDimension == rhs.maxWeightFilterDimension ) && ( maxBlockMatchRegion == rhs.maxBlockMatchRegion ) && - ( maxBoxFilterBlockSize == rhs.maxBoxFilterBlockSize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( settingCount == rhs.settingCount ) && ( pSettings == rhs.pSettings ); # endif } - bool operator!=( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( LayerSettingsCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM; - void * pNext = {}; - uint32_t maxWeightFilterPhases = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxWeightFilterDimension = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchRegion = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxBoxFilterBlockSize = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eLayerSettingsCreateInfoEXT; + const void * pNext = {}; + uint32_t settingCount = {}; + const VULKAN_HPP_NAMESPACE::LayerSettingEXT * pSettings = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceImageProcessingPropertiesQCOM; + using Type = LayerSettingsCreateInfoEXT; }; - struct PhysicalDeviceImageRobustnessFeatures +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + struct MacOSSurfaceCreateInfoMVK { - using NativeType = VkPhysicalDeviceImageRobustnessFeatures; + using NativeType = VkMacOSSurfaceCreateInfoMVK; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageRobustnessFeatures; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMacosSurfaceCreateInfoMVK; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , robustImageAccess( robustImageAccess_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {}, + const void * pView_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , pView{ pView_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceImageRobustnessFeatures( 
VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceImageRobustnessFeatures( *reinterpret_cast( &rhs ) ) + MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + : MacOSSurfaceCreateInfoMVK( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceImageRobustnessFeatures & operator=( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + MacOSSurfaceCreateInfoMVK & operator=( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceImageRobustnessFeatures & operator=( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + MacOSSurfaceCreateInfoMVK & operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures & setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT { - robustImageAccess = robustImageAccess_; + flags = flags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceImageRobustnessFeatures const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pView = pView_; + return *this; } +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceImageRobustnessFeatures &() VULKAN_HPP_NOEXCEPT + operator VkMacOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std:: + tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, robustImageAccess ); + return std::tie( sType, pNext, flags, pView ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceImageRobustnessFeatures const & ) const = default; -#else - bool operator==( PhysicalDeviceImageRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MacOSSurfaceCreateInfoMVK const & ) const = default; +# else + bool operator==( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustImageAccess == rhs.robustImageAccess ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == 
rhs.flags ) && ( pView == rhs.pView ); +# endif } - bool operator!=( PhysicalDeviceImageRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageRobustnessFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags = {}; + const void * pView = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceImageRobustnessFeatures; + using Type = MacOSSurfaceCreateInfoMVK; }; - using PhysicalDeviceImageRobustnessFeaturesEXT = PhysicalDeviceImageRobustnessFeatures; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ - struct PhysicalDeviceImageViewImageFormatInfoEXT + struct MappedMemoryRange { - using NativeType = VkPhysicalDeviceImageViewImageFormatInfoEXT; + using NativeType = VkMappedMemoryRange; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMappedMemoryRange; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - PhysicalDeviceImageViewImageFormatInfoEXT( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageViewType( imageViewType_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MappedMemoryRange( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memory{ memory_ } + , offset{ offset_ } + , size{ size_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR MappedMemoryRange( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceImageViewImageFormatInfoEXT( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT : MappedMemoryRange( *reinterpret_cast( &rhs ) ) {} - PhysicalDeviceImageViewImageFormatInfoEXT & operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + MappedMemoryRange & operator=( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceImageViewImageFormatInfoEXT & operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + MappedMemoryRange & operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if 
!defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT &
-      setImageViewType( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
     {
-      imageViewType = imageViewType_;
+      memory = memory_;
       return *this;
     }

-#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
-    operator VkPhysicalDeviceImageViewImageFormatInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceImageViewImageFormatInfoEXT *>( this );
+      offset = offset_;
+      return *this;
     }

-    operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceImageViewImageFormatInfoEXT *>( this );
+      size = size_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkMappedMemoryRange const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMappedMemoryRange *>( this );
+    }
+
+    operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMappedMemoryRange *>( this );
     }

 #if defined( VULKAN_HPP_USE_REFLECT )
 #  if 14 <= VULKAN_HPP_CPP_VERSION
     auto
 #  else
-    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::ImageViewType const &>
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::DeviceMemory const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &>
 #  endif
     reflect() const VULKAN_HPP_NOEXCEPT
     {
-      return std::tie( sType, pNext, imageViewType );
+      return std::tie( sType, pNext, memory, offset, size );
     }
 #endif

 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
-    auto operator<=>( PhysicalDeviceImageViewImageFormatInfoEXT const & ) const = default;
+    auto operator<=>( MappedMemoryRange const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
 #  if defined( VULKAN_HPP_USE_REFLECT )
       return this->reflect() == rhs.reflect();
 #  else
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageViewType == rhs.imageViewType );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( offset == rhs.offset ) && ( size == rhs.size );
 #  endif
     }

-    bool operator!=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
-    void * pNext = {};
-    VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
   };

   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT>
+  struct CppType<StructureType, StructureType::eMappedMemoryRange>
   {
-    using Type = PhysicalDeviceImageViewImageFormatInfoEXT;
+    using Type = MappedMemoryRange;
   };

-  struct PhysicalDeviceImageViewMinLodFeaturesEXT
+  struct MemoryAllocateFlagsInfo
   {
-    using NativeType = VkPhysicalDeviceImageViewMinLodFeaturesEXT;
+    using NativeType = VkMemoryAllocateFlagsInfo;

     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType 
structureType = StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateFlagsInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 minLod_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , minLod( minLod_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {}, + uint32_t deviceMask_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , deviceMask{ deviceMask_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceImageViewMinLodFeaturesEXT( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceImageViewMinLodFeaturesEXT( *reinterpret_cast( &rhs ) ) + MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryAllocateFlagsInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceImageViewMinLodFeaturesEXT & operator=( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + MemoryAllocateFlagsInfo & operator=( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceImageViewMinLodFeaturesEXT & operator=( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryAllocateFlagsInfo & operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT & setMinLod( VULKAN_HPP_NAMESPACE::Bool32 minLod_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - minLod = minLod_; + flags = flags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceImageViewMinLodFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + deviceMask = deviceMask_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceImageViewMinLodFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkMemoryAllocateFlagsInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple 
+ std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, minLod ); + return std::tie( sType, pNext, flags, deviceMask ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceImageViewMinLodFeaturesEXT const & ) const = default; + auto operator<=>( MemoryAllocateFlagsInfo const & ) const = default; #else - bool operator==( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minLod == rhs.minLod ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( deviceMask == rhs.deviceMask ); # endif } - bool operator!=( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 minLod = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateFlagsInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags = {}; + uint32_t deviceMask = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceImageViewMinLodFeaturesEXT; + using Type = MemoryAllocateFlagsInfo; }; - struct PhysicalDeviceImagelessFramebufferFeatures + using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo; + + struct MemoryAllocateInfo { - using NativeType = VkPhysicalDeviceImagelessFramebufferFeatures; + using NativeType = VkMemoryAllocateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imagelessFramebuffer( imagelessFramebuffer_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, + uint32_t memoryTypeIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , allocationSize{ allocationSize_ } + , memoryTypeIndex{ memoryTypeIndex_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceImagelessFramebufferFeatures( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryAllocateInfo( *reinterpret_cast( &rhs ) ) {} - PhysicalDeviceImagelessFramebufferFeatures & operator=( 
PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + MemoryAllocateInfo & operator=( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceImagelessFramebufferFeatures & operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryAllocateInfo & operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & - setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT { - imagelessFramebuffer = imagelessFramebuffer_; + allocationSize = allocationSize_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceImagelessFramebufferFeatures const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + memoryTypeIndex = memoryTypeIndex_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceImagelessFramebufferFeatures &() VULKAN_HPP_NOEXCEPT + operator VkMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, imagelessFramebuffer ); + return std::tie( sType, pNext, allocationSize, memoryTypeIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceImagelessFramebufferFeatures const & ) const = default; + auto operator<=>( MemoryAllocateInfo const & ) const = default; #else - bool operator==( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imagelessFramebuffer == rhs.imagelessFramebuffer ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && ( memoryTypeIndex == rhs.memoryTypeIndex ); # endif } - bool operator!=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eMemoryAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {}; + uint32_t memoryTypeIndex = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceImagelessFramebufferFeatures; + using Type = MemoryAllocateInfo; }; - using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures; - struct PhysicalDeviceIndexTypeUint8FeaturesEXT + struct MemoryBarrier { - using NativeType = VkPhysicalDeviceIndexTypeUint8FeaturesEXT; + using NativeType = VkMemoryBarrier; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , indexTypeUint8( indexTypeUint8_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcAccessMask{ srcAccessMask_ } + , dstAccessMask{ dstAccessMask_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceIndexTypeUint8FeaturesEXT( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceIndexTypeUint8FeaturesEXT( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryBarrier( *reinterpret_cast( &rhs ) ) {} - PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryBarrier & operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8FeaturesEXT & setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT { - indexTypeUint8 = indexTypeUint8_; + srcAccessMask = srcAccessMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) 
VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, indexTypeUint8 ); + return std::tie( sType, pNext, srcAccessMask, dstAccessMask ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceIndexTypeUint8FeaturesEXT const & ) const = default; + auto operator<=>( MemoryBarrier const & ) const = default; #else - bool operator==( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indexTypeUint8 == rhs.indexTypeUint8 ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ); # endif } - bool operator!=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceIndexTypeUint8FeaturesEXT; + using Type = MemoryBarrier; }; - struct PhysicalDeviceInheritedViewportScissorFeaturesNV + struct MemoryBarrierAccessFlags3KHR { - using NativeType = VkPhysicalDeviceInheritedViewportScissorFeaturesNV; + using NativeType = VkMemoryBarrierAccessFlags3KHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrierAccessFlags3KHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , inheritedViewportScissor2D( inheritedViewportScissor2D_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryBarrierAccessFlags3KHR( VULKAN_HPP_NAMESPACE::AccessFlags3KHR srcAccessMask3_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags3KHR dstAccessMask3_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcAccessMask3{ srcAccessMask3_ } + , 
dstAccessMask3{ dstAccessMask3_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceInheritedViewportScissorFeaturesNV( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR MemoryBarrierAccessFlags3KHR( MemoryBarrierAccessFlags3KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceInheritedViewportScissorFeaturesNV( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceInheritedViewportScissorFeaturesNV( *reinterpret_cast( &rhs ) ) + MemoryBarrierAccessFlags3KHR( VkMemoryBarrierAccessFlags3KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryBarrierAccessFlags3KHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceInheritedViewportScissorFeaturesNV & operator=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + MemoryBarrierAccessFlags3KHR & operator=( MemoryBarrierAccessFlags3KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceInheritedViewportScissorFeaturesNV & operator=( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryBarrierAccessFlags3KHR & operator=( VkMemoryBarrierAccessFlags3KHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryBarrierAccessFlags3KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV & - setInheritedViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryBarrierAccessFlags3KHR & setSrcAccessMask3( VULKAN_HPP_NAMESPACE::AccessFlags3KHR srcAccessMask3_ ) VULKAN_HPP_NOEXCEPT { - inheritedViewportScissor2D = inheritedViewportScissor2D_; + srcAccessMask3 = srcAccessMask3_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryBarrierAccessFlags3KHR & setDstAccessMask3( VULKAN_HPP_NAMESPACE::AccessFlags3KHR dstAccessMask3_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + dstAccessMask3 = dstAccessMask3_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT + operator VkMemoryBarrierAccessFlags3KHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkMemoryBarrierAccessFlags3KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, inheritedViewportScissor2D ); + return std::tie( sType, pNext, srcAccessMask3, dstAccessMask3 ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceInheritedViewportScissorFeaturesNV const & ) const = default; + auto operator<=>( MemoryBarrierAccessFlags3KHR const & ) const = 
default; #else - bool operator==( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryBarrierAccessFlags3KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inheritedViewportScissor2D == rhs.inheritedViewportScissor2D ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask3 == rhs.srcAccessMask3 ) && ( dstAccessMask3 == rhs.dstAccessMask3 ); # endif } - bool operator!=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryBarrierAccessFlags3KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrierAccessFlags3KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AccessFlags3KHR srcAccessMask3 = {}; + VULKAN_HPP_NAMESPACE::AccessFlags3KHR dstAccessMask3 = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceInheritedViewportScissorFeaturesNV; + using Type = MemoryBarrierAccessFlags3KHR; }; - struct PhysicalDeviceInlineUniformBlockFeatures + struct MemoryDedicatedAllocateInfo { - using NativeType = VkPhysicalDeviceInlineUniformBlockFeatures; + using NativeType = VkMemoryDedicatedAllocateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedAllocateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , inlineUniformBlock( inlineUniformBlock_ ) - , descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , image{ image_ } + , buffer{ buffer_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceInlineUniformBlockFeatures( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceInlineUniformBlockFeatures( *reinterpret_cast( &rhs ) ) + MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryDedicatedAllocateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceInlineUniformBlockFeatures & operator=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; + MemoryDedicatedAllocateInfo & operator=( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceInlineUniformBlockFeatures & operator=( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryDedicatedAllocateInfo & operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & - setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT { - inlineUniformBlock = inlineUniformBlock_; + image = image_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setDescriptorBindingInlineUniformBlockUpdateAfterBind( - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT { - descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_; + buffer = buffer_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceInlineUniformBlockFeatures const &() const VULKAN_HPP_NOEXCEPT + operator VkMemoryDedicatedAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceInlineUniformBlockFeatures &() VULKAN_HPP_NOEXCEPT + operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, inlineUniformBlock, descriptorBindingInlineUniformBlockUpdateAfterBind ); + return std::tie( sType, pNext, image, buffer ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceInlineUniformBlockFeatures const & ) const = default; + auto operator<=>( MemoryDedicatedAllocateInfo const & ) const = default; #else - bool operator==( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inlineUniformBlock == rhs.inlineUniformBlock ) && - ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer ); # endif } - bool operator!=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) 
const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceInlineUniformBlockFeatures; + using Type = MemoryDedicatedAllocateInfo; }; - using PhysicalDeviceInlineUniformBlockFeaturesEXT = PhysicalDeviceInlineUniformBlockFeatures; - struct PhysicalDeviceInlineUniformBlockProperties + using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo; + + struct MemoryDedicatedRequirements { - using NativeType = VkPhysicalDeviceInlineUniformBlockProperties; + using NativeType = VkMemoryDedicatedRequirements; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockProperties; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedRequirements; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties( uint32_t maxInlineUniformBlockSize_ = {}, - uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, - uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxInlineUniformBlockSize( maxInlineUniformBlockSize_ ) - , maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ ) - , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ ) - , maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ ) - , maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , prefersDedicatedAllocation{ prefersDedicatedAllocation_ } + , requiresDedicatedAllocation{ requiresDedicatedAllocation_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties( PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceInlineUniformBlockProperties( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceInlineUniformBlockProperties( *reinterpret_cast( &rhs ) ) + MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryDedicatedRequirements( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - 
PhysicalDeviceInlineUniformBlockProperties & operator=( PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + MemoryDedicatedRequirements & operator=( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceInlineUniformBlockProperties & operator=( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryDedicatedRequirements & operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceInlineUniformBlockProperties const &() const VULKAN_HPP_NOEXCEPT + operator VkMemoryDedicatedRequirements const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceInlineUniformBlockProperties &() VULKAN_HPP_NOEXCEPT + operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - maxInlineUniformBlockSize, - maxPerStageDescriptorInlineUniformBlocks, - maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks, - maxDescriptorSetInlineUniformBlocks, - maxDescriptorSetUpdateAfterBindInlineUniformBlocks ); + return std::tie( sType, pNext, prefersDedicatedAllocation, requiresDedicatedAllocation ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceInlineUniformBlockProperties const & ) const = default; + auto operator<=>( MemoryDedicatedRequirements const & ) const = default; #else - bool operator==( PhysicalDeviceInlineUniformBlockProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) && - ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) && - ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) && - ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) && - ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation ) && + ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation ); # endif } - bool operator!=( PhysicalDeviceInlineUniformBlockProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockProperties; - void * pNext = {}; - uint32_t maxInlineUniformBlockSize = {}; - uint32_t maxPerStageDescriptorInlineUniformBlocks = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {}; - uint32_t maxDescriptorSetInlineUniformBlocks = {}; - uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {}; + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedRequirements; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation = {}; + VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceInlineUniformBlockProperties; + using Type = MemoryDedicatedRequirements; }; - using PhysicalDeviceInlineUniformBlockPropertiesEXT = PhysicalDeviceInlineUniformBlockProperties; - struct PhysicalDeviceInvocationMaskFeaturesHUAWEI + using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements; + + struct MemoryFdPropertiesKHR { - using NativeType = VkPhysicalDeviceInvocationMaskFeaturesHUAWEI; + using NativeType = VkMemoryFdPropertiesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 invocationMask_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , invocationMask( invocationMask_ ) - { - } - - VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryFdPropertiesKHR; - PhysicalDeviceInvocationMaskFeaturesHUAWEI( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceInvocationMaskFeaturesHUAWEI( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryTypeBits{ memoryTypeBits_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceInvocationMaskFeaturesHUAWEI & operator=( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceInvocationMaskFeaturesHUAWEI & operator=( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryFdPropertiesKHR( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + MemoryFdPropertiesKHR & operator=( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI & setInvocationMask( VULKAN_HPP_NAMESPACE::Bool32 invocationMask_ ) VULKAN_HPP_NOEXCEPT + MemoryFdPropertiesKHR & operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - invocationMask = invocationMask_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT + operator VkMemoryFdPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return 
*reinterpret_cast( this ); } - operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT + operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, invocationMask ); + return std::tie( sType, pNext, memoryTypeBits ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & ) const = default; + auto operator<=>( MemoryFdPropertiesKHR const & ) const = default; #else - bool operator==( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( invocationMask == rhs.invocationMask ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); # endif } - bool operator!=( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryFdPropertiesKHR; void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 invocationMask = {}; + uint32_t memoryTypeBits = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceInvocationMaskFeaturesHUAWEI; + using Type = MemoryFdPropertiesKHR; }; - struct PhysicalDeviceLegacyDitheringFeaturesEXT +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct MemoryGetAndroidHardwareBufferInfoANDROID { - using NativeType = VkPhysicalDeviceLegacyDitheringFeaturesEXT; + using NativeType = VkMemoryGetAndroidHardwareBufferInfoANDROID; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyDitheringFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 legacyDithering_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , legacyDithering( legacyDithering_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memory{ memory_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyDitheringFeaturesEXT( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceLegacyDitheringFeaturesEXT( VkPhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceLegacyDitheringFeaturesEXT( *reinterpret_cast( &rhs ) ) + 
MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryGetAndroidHardwareBufferInfoANDROID( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceLegacyDitheringFeaturesEXT & operator=( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + MemoryGetAndroidHardwareBufferInfoANDROID & operator=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceLegacyDitheringFeaturesEXT & operator=( VkPhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryGetAndroidHardwareBufferInfoANDROID & operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT & setLegacyDithering( VULKAN_HPP_NAMESPACE::Bool32 legacyDithering_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT { - legacyDithering = legacyDithering_; + memory = memory_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceLegacyDitheringFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkMemoryGetAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceLegacyDitheringFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, legacyDithering ); + return std::tie( sType, pNext, memory ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceLegacyDitheringFeaturesEXT const & ) const = default; -#else - bool operator==( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetAndroidHardwareBufferInfoANDROID const & ) const = default; +# else + bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( legacyDithering == rhs.legacyDithering ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ); +# endif } - bool operator!=( PhysicalDeviceLegacyDitheringFeaturesEXT const 
& rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 legacyDithering = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceLegacyDitheringFeaturesEXT; + using Type = MemoryGetAndroidHardwareBufferInfoANDROID; }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - struct PhysicalDeviceLimits + struct MemoryGetFdInfoKHR { - using NativeType = VkPhysicalDeviceLimits; + using NativeType = VkMemoryGetFdInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( uint32_t maxImageDimension1D_ = {}, - uint32_t maxImageDimension2D_ = {}, - uint32_t maxImageDimension3D_ = {}, - uint32_t maxImageDimensionCube_ = {}, - uint32_t maxImageArrayLayers_ = {}, - uint32_t maxTexelBufferElements_ = {}, - uint32_t maxUniformBufferRange_ = {}, - uint32_t maxStorageBufferRange_ = {}, - uint32_t maxPushConstantsSize_ = {}, - uint32_t maxMemoryAllocationCount_ = {}, - uint32_t maxSamplerAllocationCount_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {}, - uint32_t maxBoundDescriptorSets_ = {}, - uint32_t maxPerStageDescriptorSamplers_ = {}, - uint32_t maxPerStageDescriptorUniformBuffers_ = {}, - uint32_t maxPerStageDescriptorStorageBuffers_ = {}, - uint32_t maxPerStageDescriptorSampledImages_ = {}, - uint32_t maxPerStageDescriptorStorageImages_ = {}, - uint32_t maxPerStageDescriptorInputAttachments_ = {}, - uint32_t maxPerStageResources_ = {}, - uint32_t maxDescriptorSetSamplers_ = {}, - uint32_t maxDescriptorSetUniformBuffers_ = {}, - uint32_t maxDescriptorSetUniformBuffersDynamic_ = {}, - uint32_t maxDescriptorSetStorageBuffers_ = {}, - uint32_t maxDescriptorSetStorageBuffersDynamic_ = {}, - uint32_t maxDescriptorSetSampledImages_ = {}, - uint32_t maxDescriptorSetStorageImages_ = {}, - uint32_t maxDescriptorSetInputAttachments_ = {}, - uint32_t maxVertexInputAttributes_ = {}, - uint32_t maxVertexInputBindings_ = {}, - uint32_t maxVertexInputAttributeOffset_ = {}, - uint32_t maxVertexInputBindingStride_ = {}, - uint32_t maxVertexOutputComponents_ = {}, - uint32_t maxTessellationGenerationLevel_ = {}, - uint32_t maxTessellationPatchSize_ = {}, - uint32_t maxTessellationControlPerVertexInputComponents_ = {}, - uint32_t maxTessellationControlPerVertexOutputComponents_ = {}, - uint32_t maxTessellationControlPerPatchOutputComponents_ = {}, - uint32_t maxTessellationControlTotalOutputComponents_ = {}, - uint32_t maxTessellationEvaluationInputComponents_ = {}, - uint32_t maxTessellationEvaluationOutputComponents_ = {}, - uint32_t maxGeometryShaderInvocations_ = {}, - uint32_t maxGeometryInputComponents_ = {}, - uint32_t maxGeometryOutputComponents_ = {}, - uint32_t maxGeometryOutputVertices_ = {}, - uint32_t maxGeometryTotalOutputComponents_ = {}, - uint32_t maxFragmentInputComponents_ = {}, - uint32_t maxFragmentOutputAttachments_ = {}, - uint32_t maxFragmentDualSrcAttachments_ = {}, - uint32_t maxFragmentCombinedOutputResources_ = {}, - uint32_t maxComputeSharedMemorySize_ = {}, - std::array const & 
maxComputeWorkGroupCount_ = {}, - uint32_t maxComputeWorkGroupInvocations_ = {}, - std::array const & maxComputeWorkGroupSize_ = {}, - uint32_t subPixelPrecisionBits_ = {}, - uint32_t subTexelPrecisionBits_ = {}, - uint32_t mipmapPrecisionBits_ = {}, - uint32_t maxDrawIndexedIndexValue_ = {}, - uint32_t maxDrawIndirectCount_ = {}, - float maxSamplerLodBias_ = {}, - float maxSamplerAnisotropy_ = {}, - uint32_t maxViewports_ = {}, - std::array const & maxViewportDimensions_ = {}, - std::array const & viewportBoundsRange_ = {}, - uint32_t viewportSubPixelBits_ = {}, - size_t minMemoryMapAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {}, - int32_t minTexelOffset_ = {}, - uint32_t maxTexelOffset_ = {}, - int32_t minTexelGatherOffset_ = {}, - uint32_t maxTexelGatherOffset_ = {}, - float minInterpolationOffset_ = {}, - float maxInterpolationOffset_ = {}, - uint32_t subPixelInterpolationOffsetBits_ = {}, - uint32_t maxFramebufferWidth_ = {}, - uint32_t maxFramebufferHeight_ = {}, - uint32_t maxFramebufferLayers_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {}, - uint32_t maxColorAttachments_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {}, - uint32_t maxSampleMaskWords_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {}, - float timestampPeriod_ = {}, - uint32_t maxClipDistances_ = {}, - uint32_t maxCullDistances_ = {}, - uint32_t maxCombinedClipAndCullDistances_ = {}, - uint32_t discreteQueuePriorities_ = {}, - std::array const & pointSizeRange_ = {}, - std::array const & lineWidthRange_ = {}, - float pointSizeGranularity_ = {}, - float lineWidthGranularity_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {} ) VULKAN_HPP_NOEXCEPT - : maxImageDimension1D( maxImageDimension1D_ ) - , maxImageDimension2D( maxImageDimension2D_ ) - , maxImageDimension3D( maxImageDimension3D_ ) - , maxImageDimensionCube( maxImageDimensionCube_ ) - , maxImageArrayLayers( maxImageArrayLayers_ ) - , maxTexelBufferElements( maxTexelBufferElements_ ) - , maxUniformBufferRange( maxUniformBufferRange_ ) - , maxStorageBufferRange( maxStorageBufferRange_ ) - , maxPushConstantsSize( maxPushConstantsSize_ ) - , maxMemoryAllocationCount( maxMemoryAllocationCount_ ) - , maxSamplerAllocationCount( maxSamplerAllocationCount_ ) - , bufferImageGranularity( bufferImageGranularity_ ) - , sparseAddressSpaceSize( sparseAddressSpaceSize_ ) - , maxBoundDescriptorSets( maxBoundDescriptorSets_ ) - , maxPerStageDescriptorSamplers( 
maxPerStageDescriptorSamplers_ ) - , maxPerStageDescriptorUniformBuffers( maxPerStageDescriptorUniformBuffers_ ) - , maxPerStageDescriptorStorageBuffers( maxPerStageDescriptorStorageBuffers_ ) - , maxPerStageDescriptorSampledImages( maxPerStageDescriptorSampledImages_ ) - , maxPerStageDescriptorStorageImages( maxPerStageDescriptorStorageImages_ ) - , maxPerStageDescriptorInputAttachments( maxPerStageDescriptorInputAttachments_ ) - , maxPerStageResources( maxPerStageResources_ ) - , maxDescriptorSetSamplers( maxDescriptorSetSamplers_ ) - , maxDescriptorSetUniformBuffers( maxDescriptorSetUniformBuffers_ ) - , maxDescriptorSetUniformBuffersDynamic( maxDescriptorSetUniformBuffersDynamic_ ) - , maxDescriptorSetStorageBuffers( maxDescriptorSetStorageBuffers_ ) - , maxDescriptorSetStorageBuffersDynamic( maxDescriptorSetStorageBuffersDynamic_ ) - , maxDescriptorSetSampledImages( maxDescriptorSetSampledImages_ ) - , maxDescriptorSetStorageImages( maxDescriptorSetStorageImages_ ) - , maxDescriptorSetInputAttachments( maxDescriptorSetInputAttachments_ ) - , maxVertexInputAttributes( maxVertexInputAttributes_ ) - , maxVertexInputBindings( maxVertexInputBindings_ ) - , maxVertexInputAttributeOffset( maxVertexInputAttributeOffset_ ) - , maxVertexInputBindingStride( maxVertexInputBindingStride_ ) - , maxVertexOutputComponents( maxVertexOutputComponents_ ) - , maxTessellationGenerationLevel( maxTessellationGenerationLevel_ ) - , maxTessellationPatchSize( maxTessellationPatchSize_ ) - , maxTessellationControlPerVertexInputComponents( maxTessellationControlPerVertexInputComponents_ ) - , maxTessellationControlPerVertexOutputComponents( maxTessellationControlPerVertexOutputComponents_ ) - , maxTessellationControlPerPatchOutputComponents( maxTessellationControlPerPatchOutputComponents_ ) - , maxTessellationControlTotalOutputComponents( maxTessellationControlTotalOutputComponents_ ) - , maxTessellationEvaluationInputComponents( maxTessellationEvaluationInputComponents_ ) - , maxTessellationEvaluationOutputComponents( maxTessellationEvaluationOutputComponents_ ) - , maxGeometryShaderInvocations( maxGeometryShaderInvocations_ ) - , maxGeometryInputComponents( maxGeometryInputComponents_ ) - , maxGeometryOutputComponents( maxGeometryOutputComponents_ ) - , maxGeometryOutputVertices( maxGeometryOutputVertices_ ) - , maxGeometryTotalOutputComponents( maxGeometryTotalOutputComponents_ ) - , maxFragmentInputComponents( maxFragmentInputComponents_ ) - , maxFragmentOutputAttachments( maxFragmentOutputAttachments_ ) - , maxFragmentDualSrcAttachments( maxFragmentDualSrcAttachments_ ) - , maxFragmentCombinedOutputResources( maxFragmentCombinedOutputResources_ ) - , maxComputeSharedMemorySize( maxComputeSharedMemorySize_ ) - , maxComputeWorkGroupCount( maxComputeWorkGroupCount_ ) - , maxComputeWorkGroupInvocations( maxComputeWorkGroupInvocations_ ) - , maxComputeWorkGroupSize( maxComputeWorkGroupSize_ ) - , subPixelPrecisionBits( subPixelPrecisionBits_ ) - , subTexelPrecisionBits( subTexelPrecisionBits_ ) - , mipmapPrecisionBits( mipmapPrecisionBits_ ) - , maxDrawIndexedIndexValue( maxDrawIndexedIndexValue_ ) - , maxDrawIndirectCount( maxDrawIndirectCount_ ) - , maxSamplerLodBias( maxSamplerLodBias_ ) - , maxSamplerAnisotropy( maxSamplerAnisotropy_ ) - , maxViewports( maxViewports_ ) - , maxViewportDimensions( maxViewportDimensions_ ) - , viewportBoundsRange( viewportBoundsRange_ ) - , viewportSubPixelBits( viewportSubPixelBits_ ) - , minMemoryMapAlignment( minMemoryMapAlignment_ ) - , minTexelBufferOffsetAlignment( 
minTexelBufferOffsetAlignment_ ) - , minUniformBufferOffsetAlignment( minUniformBufferOffsetAlignment_ ) - , minStorageBufferOffsetAlignment( minStorageBufferOffsetAlignment_ ) - , minTexelOffset( minTexelOffset_ ) - , maxTexelOffset( maxTexelOffset_ ) - , minTexelGatherOffset( minTexelGatherOffset_ ) - , maxTexelGatherOffset( maxTexelGatherOffset_ ) - , minInterpolationOffset( minInterpolationOffset_ ) - , maxInterpolationOffset( maxInterpolationOffset_ ) - , subPixelInterpolationOffsetBits( subPixelInterpolationOffsetBits_ ) - , maxFramebufferWidth( maxFramebufferWidth_ ) - , maxFramebufferHeight( maxFramebufferHeight_ ) - , maxFramebufferLayers( maxFramebufferLayers_ ) - , framebufferColorSampleCounts( framebufferColorSampleCounts_ ) - , framebufferDepthSampleCounts( framebufferDepthSampleCounts_ ) - , framebufferStencilSampleCounts( framebufferStencilSampleCounts_ ) - , framebufferNoAttachmentsSampleCounts( framebufferNoAttachmentsSampleCounts_ ) - , maxColorAttachments( maxColorAttachments_ ) - , sampledImageColorSampleCounts( sampledImageColorSampleCounts_ ) - , sampledImageIntegerSampleCounts( sampledImageIntegerSampleCounts_ ) - , sampledImageDepthSampleCounts( sampledImageDepthSampleCounts_ ) - , sampledImageStencilSampleCounts( sampledImageStencilSampleCounts_ ) - , storageImageSampleCounts( storageImageSampleCounts_ ) - , maxSampleMaskWords( maxSampleMaskWords_ ) - , timestampComputeAndGraphics( timestampComputeAndGraphics_ ) - , timestampPeriod( timestampPeriod_ ) - , maxClipDistances( maxClipDistances_ ) - , maxCullDistances( maxCullDistances_ ) - , maxCombinedClipAndCullDistances( maxCombinedClipAndCullDistances_ ) - , discreteQueuePriorities( discreteQueuePriorities_ ) - , pointSizeRange( pointSizeRange_ ) - , lineWidthRange( lineWidthRange_ ) - , pointSizeGranularity( pointSizeGranularity_ ) - , lineWidthGranularity( lineWidthGranularity_ ) - , strictLines( strictLines_ ) - , standardSampleLocations( standardSampleLocations_ ) - , optimalBufferCopyOffsetAlignment( optimalBufferCopyOffsetAlignment_ ) - , optimalBufferCopyRowPitchAlignment( optimalBufferCopyRowPitchAlignment_ ) - , nonCoherentAtomSize( nonCoherentAtomSize_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetFdInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memory{ memory_ } + , handleType{ handleType_ } { } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceLimits( *reinterpret_cast( &rhs ) ) + MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryGetFdInfoKHR( *reinterpret_cast( &rhs ) ) {} + + MemoryGetFdInfoKHR & operator=( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryGetFdInfoKHR & operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; 
} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceLimits & operator=( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } - PhysicalDeviceLimits & operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + memory = memory_; return *this; } - operator VkPhysicalDeviceLimits const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + handleType = handleType_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT + operator VkMemoryGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple const &, - uint32_t const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - float const &, - float const &, - uint32_t const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, - uint32_t const &, - size_t const &, - VULKAN_HPP_NAMESPACE::DeviceSize const &, - VULKAN_HPP_NAMESPACE::DeviceSize const &, - VULKAN_HPP_NAMESPACE::DeviceSize const &, - int32_t const &, - uint32_t const &, - int32_t const &, - uint32_t const &, - float const &, - float const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - VULKAN_HPP_NAMESPACE::SampleCountFlags const &, - VULKAN_HPP_NAMESPACE::SampleCountFlags const &, - VULKAN_HPP_NAMESPACE::SampleCountFlags const &, - VULKAN_HPP_NAMESPACE::SampleCountFlags const &, - uint32_t const &, - VULKAN_HPP_NAMESPACE::SampleCountFlags const &, - VULKAN_HPP_NAMESPACE::SampleCountFlags const &, - VULKAN_HPP_NAMESPACE::SampleCountFlags const &, - VULKAN_HPP_NAMESPACE::SampleCountFlags const &, - VULKAN_HPP_NAMESPACE::SampleCountFlags const &, - uint32_t const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - float const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, - float const &, - float const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::DeviceSize const &, - VULKAN_HPP_NAMESPACE::DeviceSize const &, - VULKAN_HPP_NAMESPACE::DeviceSize const &> + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( maxImageDimension1D, - maxImageDimension2D, - maxImageDimension3D, - maxImageDimensionCube, - maxImageArrayLayers, - maxTexelBufferElements, - maxUniformBufferRange, - maxStorageBufferRange, - maxPushConstantsSize, - maxMemoryAllocationCount, - maxSamplerAllocationCount, - bufferImageGranularity, - sparseAddressSpaceSize, - maxBoundDescriptorSets, - maxPerStageDescriptorSamplers, - 
maxPerStageDescriptorUniformBuffers, - maxPerStageDescriptorStorageBuffers, - maxPerStageDescriptorSampledImages, - maxPerStageDescriptorStorageImages, - maxPerStageDescriptorInputAttachments, - maxPerStageResources, - maxDescriptorSetSamplers, - maxDescriptorSetUniformBuffers, - maxDescriptorSetUniformBuffersDynamic, - maxDescriptorSetStorageBuffers, - maxDescriptorSetStorageBuffersDynamic, - maxDescriptorSetSampledImages, - maxDescriptorSetStorageImages, - maxDescriptorSetInputAttachments, - maxVertexInputAttributes, - maxVertexInputBindings, - maxVertexInputAttributeOffset, - maxVertexInputBindingStride, - maxVertexOutputComponents, - maxTessellationGenerationLevel, - maxTessellationPatchSize, - maxTessellationControlPerVertexInputComponents, - maxTessellationControlPerVertexOutputComponents, - maxTessellationControlPerPatchOutputComponents, - maxTessellationControlTotalOutputComponents, - maxTessellationEvaluationInputComponents, - maxTessellationEvaluationOutputComponents, - maxGeometryShaderInvocations, - maxGeometryInputComponents, - maxGeometryOutputComponents, - maxGeometryOutputVertices, - maxGeometryTotalOutputComponents, - maxFragmentInputComponents, - maxFragmentOutputAttachments, - maxFragmentDualSrcAttachments, - maxFragmentCombinedOutputResources, - maxComputeSharedMemorySize, - maxComputeWorkGroupCount, - maxComputeWorkGroupInvocations, - maxComputeWorkGroupSize, - subPixelPrecisionBits, - subTexelPrecisionBits, - mipmapPrecisionBits, - maxDrawIndexedIndexValue, - maxDrawIndirectCount, - maxSamplerLodBias, - maxSamplerAnisotropy, - maxViewports, - maxViewportDimensions, - viewportBoundsRange, - viewportSubPixelBits, - minMemoryMapAlignment, - minTexelBufferOffsetAlignment, - minUniformBufferOffsetAlignment, - minStorageBufferOffsetAlignment, - minTexelOffset, - maxTexelOffset, - minTexelGatherOffset, - maxTexelGatherOffset, - minInterpolationOffset, - maxInterpolationOffset, - subPixelInterpolationOffsetBits, - maxFramebufferWidth, - maxFramebufferHeight, - maxFramebufferLayers, - framebufferColorSampleCounts, - framebufferDepthSampleCounts, - framebufferStencilSampleCounts, - framebufferNoAttachmentsSampleCounts, - maxColorAttachments, - sampledImageColorSampleCounts, - sampledImageIntegerSampleCounts, - sampledImageDepthSampleCounts, - sampledImageStencilSampleCounts, - storageImageSampleCounts, - maxSampleMaskWords, - timestampComputeAndGraphics, - timestampPeriod, - maxClipDistances, - maxCullDistances, - maxCombinedClipAndCullDistances, - discreteQueuePriorities, - pointSizeRange, - lineWidthRange, - pointSizeGranularity, - lineWidthGranularity, - strictLines, - standardSampleLocations, - optimalBufferCopyOffsetAlignment, - optimalBufferCopyRowPitchAlignment, - nonCoherentAtomSize ); + return std::tie( sType, pNext, memory, handleType ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceLimits const & ) const = default; + auto operator<=>( MemoryGetFdInfoKHR const & ) const = default; #else - bool operator==( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( maxImageDimension1D == rhs.maxImageDimension1D ) && ( maxImageDimension2D == rhs.maxImageDimension2D ) && - ( maxImageDimension3D == rhs.maxImageDimension3D ) && ( maxImageDimensionCube == rhs.maxImageDimensionCube ) && - ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && ( 
maxTexelBufferElements == rhs.maxTexelBufferElements ) && - ( maxUniformBufferRange == rhs.maxUniformBufferRange ) && ( maxStorageBufferRange == rhs.maxStorageBufferRange ) && - ( maxPushConstantsSize == rhs.maxPushConstantsSize ) && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount ) && - ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount ) && ( bufferImageGranularity == rhs.bufferImageGranularity ) && - ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize ) && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets ) && - ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers ) && - ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers ) && - ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers ) && - ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages ) && - ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages ) && - ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments ) && ( maxPerStageResources == rhs.maxPerStageResources ) && - ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers ) && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers ) && - ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic ) && - ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers ) && - ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic ) && - ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages ) && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages ) && - ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments ) && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes ) && - ( maxVertexInputBindings == rhs.maxVertexInputBindings ) && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset ) && - ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride ) && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents ) && - ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel ) && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize ) && - ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents ) && - ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents ) && - ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents ) && - ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents ) && - ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents ) && - ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents ) && - ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations ) && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents ) && - ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents ) && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices ) && - ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents ) && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents ) && - ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments ) && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments ) && - ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources ) && - ( 
maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize ) && ( maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount ) && - ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations ) && ( maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize ) && - ( subPixelPrecisionBits == rhs.subPixelPrecisionBits ) && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits ) && - ( mipmapPrecisionBits == rhs.mipmapPrecisionBits ) && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue ) && - ( maxDrawIndirectCount == rhs.maxDrawIndirectCount ) && ( maxSamplerLodBias == rhs.maxSamplerLodBias ) && - ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy ) && ( maxViewports == rhs.maxViewports ) && - ( maxViewportDimensions == rhs.maxViewportDimensions ) && ( viewportBoundsRange == rhs.viewportBoundsRange ) && - ( viewportSubPixelBits == rhs.viewportSubPixelBits ) && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment ) && - ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment ) && - ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment ) && - ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment ) && ( minTexelOffset == rhs.minTexelOffset ) && - ( maxTexelOffset == rhs.maxTexelOffset ) && ( minTexelGatherOffset == rhs.minTexelGatherOffset ) && - ( maxTexelGatherOffset == rhs.maxTexelGatherOffset ) && ( minInterpolationOffset == rhs.minInterpolationOffset ) && - ( maxInterpolationOffset == rhs.maxInterpolationOffset ) && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits ) && - ( maxFramebufferWidth == rhs.maxFramebufferWidth ) && ( maxFramebufferHeight == rhs.maxFramebufferHeight ) && - ( maxFramebufferLayers == rhs.maxFramebufferLayers ) && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts ) && - ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts ) && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts ) && - ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts ) && ( maxColorAttachments == rhs.maxColorAttachments ) && - ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts ) && - ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts ) && - ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts ) && - ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts ) && ( storageImageSampleCounts == rhs.storageImageSampleCounts ) && - ( maxSampleMaskWords == rhs.maxSampleMaskWords ) && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics ) && - ( timestampPeriod == rhs.timestampPeriod ) && ( maxClipDistances == rhs.maxClipDistances ) && ( maxCullDistances == rhs.maxCullDistances ) && - ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances ) && ( discreteQueuePriorities == rhs.discreteQueuePriorities ) && - ( pointSizeRange == rhs.pointSizeRange ) && ( lineWidthRange == rhs.lineWidthRange ) && ( pointSizeGranularity == rhs.pointSizeGranularity ) && - ( lineWidthGranularity == rhs.lineWidthGranularity ) && ( strictLines == rhs.strictLines ) && - ( standardSampleLocations == rhs.standardSampleLocations ) && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment ) && - ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment ) && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType ); # 
endif } - bool operator!=( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t maxImageDimension1D = {}; - uint32_t maxImageDimension2D = {}; - uint32_t maxImageDimension3D = {}; - uint32_t maxImageDimensionCube = {}; - uint32_t maxImageArrayLayers = {}; - uint32_t maxTexelBufferElements = {}; - uint32_t maxUniformBufferRange = {}; - uint32_t maxStorageBufferRange = {}; - uint32_t maxPushConstantsSize = {}; - uint32_t maxMemoryAllocationCount = {}; - uint32_t maxSamplerAllocationCount = {}; - VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity = {}; - VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize = {}; - uint32_t maxBoundDescriptorSets = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = MemoryGetFdInfoKHR; + }; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct MemoryGetMetalHandleInfoEXT + { + using NativeType = VkMemoryGetMetalHandleInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetMetalHandleInfoEXT; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetMetalHandleInfoEXT( + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memory{ memory_ } + , handleType{ handleType_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryGetMetalHandleInfoEXT( MemoryGetMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetMetalHandleInfoEXT( VkMemoryGetMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryGetMetalHandleInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + MemoryGetMetalHandleInfoEXT & operator=( MemoryGetMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryGetMetalHandleInfoEXT & operator=( VkMemoryGetMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryGetMetalHandleInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetMetalHandleInfoEXT & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetMetalHandleInfoEXT & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryGetMetalHandleInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetMetalHandleInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + 
auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory, handleType ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetMetalHandleInfoEXT const & ) const = default; +# else + bool operator==( MemoryGetMetalHandleInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( MemoryGetMetalHandleInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetMetalHandleInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = MemoryGetMetalHandleInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + struct MemoryGetRemoteAddressInfoNV + { + using NativeType = VkMemoryGetRemoteAddressInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetRemoteAddressInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetRemoteAddressInfoNV( + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memory{ memory_ } + , handleType{ handleType_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryGetRemoteAddressInfoNV( MemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetRemoteAddressInfoNV( VkMemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryGetRemoteAddressInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + MemoryGetRemoteAddressInfoNV & operator=( MemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryGetRemoteAddressInfoNV & operator=( VkMemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryGetRemoteAddressInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetRemoteAddressInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + 
reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetRemoteAddressInfoNV const & ) const = default; +#else + bool operator==( MemoryGetRemoteAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( MemoryGetRemoteAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetRemoteAddressInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = MemoryGetRemoteAddressInfoNV; + }; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct MemoryGetWin32HandleInfoKHR + { + using NativeType = VkMemoryGetWin32HandleInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetWin32HandleInfoKHR; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memory{ memory_ } + , handleType{ handleType_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryGetWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + MemoryGetWin32HandleInfoKHR & operator=( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryGetWin32HandleInfoKHR & operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + 
return std::tie( sType, pNext, memory, handleType ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = MemoryGetWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct MemoryGetZirconHandleInfoFUCHSIA + { + using NativeType = VkMemoryGetZirconHandleInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA( + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memory{ memory_ } + , handleType{ handleType_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryGetZirconHandleInfoFUCHSIA( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryGetZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + MemoryGetZirconHandleInfoFUCHSIA & operator=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryGetZirconHandleInfoFUCHSIA & operator=( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION 
+ auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memory, handleType ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryGetZirconHandleInfoFUCHSIA const & ) const = default; +# else + bool operator==( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = MemoryGetZirconHandleInfoFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + struct MemoryHeap + { + using NativeType = VkMemoryHeap; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryHeap( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : size{ size_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryHeap( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryHeap( *reinterpret_cast( &rhs ) ) {} + + MemoryHeap & operator=( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryHeap & operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryHeap const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( size, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryHeap const & ) const = default; +#else + bool operator==( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( size == rhs.size ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {}; + }; + + struct MemoryHostPointerPropertiesEXT + { + using NativeType = VkMemoryHostPointerPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryHostPointerPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryTypeBits{ memoryTypeBits_ } + 
{ + } + + VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryHostPointerPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + MemoryHostPointerPropertiesEXT & operator=( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryHostPointerPropertiesEXT & operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryHostPointerPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryTypeBits ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryHostPointerPropertiesEXT const & ) const = default; +#else + bool operator==( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); +# endif + } + + bool operator!=( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT; + void * pNext = {}; + uint32_t memoryTypeBits = {}; + }; + + template <> + struct CppType + { + using Type = MemoryHostPointerPropertiesEXT; + }; + + struct MemoryMapInfo + { + using NativeType = VkMemoryMapInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryMapInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryMapInfo( VULKAN_HPP_NAMESPACE::MemoryMapFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , memory{ memory_ } + , offset{ offset_ } + , size{ size_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryMapInfo( MemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryMapInfo( VkMemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryMapInfo( *reinterpret_cast( &rhs ) ) {} + + MemoryMapInfo & operator=( MemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryMapInfo & operator=( VkMemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryMapFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
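// Usage sketch (not part of the generated header): MemoryHostPointerPropertiesEXT is an output
// structure; vkGetMemoryHostPointerPropertiesEXT fills memoryTypeBits with the memory types that
// can import a given host allocation (VK_EXT_external_memory_host). A minimal sketch, assuming the
// extension is enabled and `ptr` meets the implementation's minImportedHostPointerAlignment.
inline uint32_t importableMemoryTypesForHostPointer( vk::Device device, const void * ptr )
{
  vk::MemoryHostPointerPropertiesEXT props =
    device.getMemoryHostPointerPropertiesEXT( vk::ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT, ptr );
  return props.memoryTypeBits;  // bitmask of memory type indices usable with ImportMemoryHostPointerInfoEXT
}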
MemoryMapInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryMapInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryMapInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, memory, offset, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryMapInfo const & ) const = default; +#else + bool operator==( MemoryMapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( memory == rhs.memory ) && ( offset == rhs.offset ) && + ( size == rhs.size ); +# endif + } + + bool operator!=( MemoryMapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryMapInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryMapFlags flags = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + template <> + struct CppType + { + using Type = MemoryMapInfo; + }; + + using MemoryMapInfoKHR = MemoryMapInfo; + + struct MemoryMapPlacedInfoEXT + { + using NativeType = VkMemoryMapPlacedInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryMapPlacedInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryMapPlacedInfoEXT( void * pPlacedAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pPlacedAddress{ pPlacedAddress_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryMapPlacedInfoEXT( MemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryMapPlacedInfoEXT( VkMemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryMapPlacedInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + MemoryMapPlacedInfoEXT & operator=( MemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryMapPlacedInfoEXT & operator=( VkMemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryMapPlacedInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryMapPlacedInfoEXT & setPPlacedAddress( void * pPlacedAddress_ ) VULKAN_HPP_NOEXCEPT + { + pPlacedAddress = pPlacedAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryMapPlacedInfoEXT const &() const 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryMapPlacedInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pPlacedAddress ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryMapPlacedInfoEXT const & ) const = default; +#else + bool operator==( MemoryMapPlacedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pPlacedAddress == rhs.pPlacedAddress ); +# endif + } + + bool operator!=( MemoryMapPlacedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryMapPlacedInfoEXT; + const void * pNext = {}; + void * pPlacedAddress = {}; + }; + + template <> + struct CppType + { + using Type = MemoryMapPlacedInfoEXT; + }; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct MemoryMetalHandlePropertiesEXT + { + using NativeType = VkMemoryMetalHandlePropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryMetalHandlePropertiesEXT; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryMetalHandlePropertiesEXT( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryTypeBits{ memoryTypeBits_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryMetalHandlePropertiesEXT( MemoryMetalHandlePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryMetalHandlePropertiesEXT( VkMemoryMetalHandlePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryMetalHandlePropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + MemoryMetalHandlePropertiesEXT & operator=( MemoryMetalHandlePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryMetalHandlePropertiesEXT & operator=( VkMemoryMetalHandlePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryMetalHandlePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryMetalHandlePropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryTypeBits ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryMetalHandlePropertiesEXT const & ) const = default; +# else + bool operator==( MemoryMetalHandlePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); +# endif + } + + bool operator!=( MemoryMetalHandlePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
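// Usage sketch (not part of the generated header): MemoryMapInfo(KHR) drives vkMapMemory2KHR, and
// MemoryMapPlacedInfoEXT can be chained into it to request that the mapping land at a
// caller-reserved virtual address (VK_EXT_map_memory_placed); MemoryUnmapInfo(KHR), defined further
// below, undoes it. A minimal sketch, assuming both extensions are enabled and `reserved` points at
// a reserved range aligned to minPlacedMemoryMapAlignment.
inline void writeThroughPlacedMapping( vk::Device device, vk::DeviceMemory memory, vk::DeviceSize size, void * reserved )
{
  vk::MemoryMapPlacedInfoEXT placed{ reserved };
  vk::MemoryMapInfoKHR       mapInfo{ vk::MemoryMapFlagBits::ePlacedEXT, memory, 0, size, &placed };
  void * data = device.mapMemory2KHR( mapInfo );  // equals `reserved` on success
  // ... write through `data` ...
  device.unmapMemory2KHR( vk::MemoryUnmapInfoKHR{ {}, memory } );
}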
StructureType::eMemoryMetalHandlePropertiesEXT; + void * pNext = {}; + uint32_t memoryTypeBits = {}; + }; + + template <> + struct CppType + { + using Type = MemoryMetalHandlePropertiesEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + struct MemoryOpaqueCaptureAddressAllocateInfo + { + using NativeType = VkMemoryOpaqueCaptureAddressAllocateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( uint64_t opaqueCaptureAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , opaqueCaptureAddress{ opaqueCaptureAddress_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryOpaqueCaptureAddressAllocateInfo( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryOpaqueCaptureAddressAllocateInfo( *reinterpret_cast( &rhs ) ) + { + } + + MemoryOpaqueCaptureAddressAllocateInfo & operator=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryOpaqueCaptureAddressAllocateInfo & operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT + { + opaqueCaptureAddress = opaqueCaptureAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryOpaqueCaptureAddressAllocateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, opaqueCaptureAddress ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryOpaqueCaptureAddressAllocateInfo const & ) const = default; +#else + bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress ); +# endif + } + + bool operator!=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo; + const void * pNext = {}; + uint64_t opaqueCaptureAddress = {}; + }; + + template <> + struct CppType + { + using Type = MemoryOpaqueCaptureAddressAllocateInfo; + }; + + using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo; + + struct 
MemoryPriorityAllocateInfoEXT + { + using NativeType = VkMemoryPriorityAllocateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryPriorityAllocateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( float priority_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , priority{ priority_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryPriorityAllocateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + MemoryPriorityAllocateInfoEXT & operator=( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryPriorityAllocateInfoEXT & operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT & setPriority( float priority_ ) VULKAN_HPP_NOEXCEPT + { + priority = priority_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryPriorityAllocateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, priority ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryPriorityAllocateInfoEXT const & ) const = default; +#else + bool operator==( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priority == rhs.priority ); +# endif + } + + bool operator!=( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT; + const void * pNext = {}; + float priority = {}; + }; + + template <> + struct CppType + { + using Type = MemoryPriorityAllocateInfoEXT; + }; + + struct MemoryRequirements + { + using NativeType = VkMemoryRequirements; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryRequirements( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {}, + uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT + : size{ size_ } + , alignment{ alignment_ } + , memoryTypeBits{ memoryTypeBits_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryRequirements( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryRequirements( *reinterpret_cast( &rhs ) ) {} + 
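// Usage sketch (not part of the generated header): MemoryOpaqueCaptureAddressAllocateInfo and
// MemoryPriorityAllocateInfoEXT above are pNext extensions of MemoryAllocateInfo; the former
// replays a previously captured opaque address, the latter hints residency priority in [0.0, 1.0]
// (VK_EXT_memory_priority). A minimal sketch, assuming VK_EXT_memory_priority is enabled and
// `memoryTypeIndex` was selected against the allocation's memoryTypeBits (see the
// MemoryRequirements sketch further below).
inline vk::DeviceMemory allocateHighPriorityMemory( vk::Device device, vk::DeviceSize size, uint32_t memoryTypeIndex )
{
  vk::StructureChain<vk::MemoryAllocateInfo, vk::MemoryPriorityAllocateInfoEXT> chain{
    vk::MemoryAllocateInfo{ size, memoryTypeIndex },
    vk::MemoryPriorityAllocateInfoEXT{ 1.0f }  // ask the implementation to keep this allocation resident
  };
  return device.allocateMemory( chain.get<vk::MemoryAllocateInfo>() );
}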
+    MemoryRequirements & operator=( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    MemoryRequirements & operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements const *>( &rhs );
+      return *this;
+    }
+
+    operator VkMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryRequirements *>( this );
+    }
+
+    operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryRequirements *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::DeviceSize const &, VULKAN_HPP_NAMESPACE::DeviceSize const &, uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( size, alignment, memoryTypeBits );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryRequirements const & ) const = default;
+#else
+    bool operator==( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( size == rhs.size ) && ( alignment == rhs.alignment ) && ( memoryTypeBits == rhs.memoryTypeBits );
+#  endif
+    }
+
+    bool operator!=( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize size           = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize alignment      = {};
+    uint32_t                         memoryTypeBits = {};
+  };
+
+  struct MemoryRequirements2
+  {
+    using NativeType = VkMemoryRequirements2;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryRequirements2;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MemoryRequirements2( VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , memoryRequirements{ memoryRequirements_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryRequirements2( *reinterpret_cast<MemoryRequirements2 const *>( &rhs ) )
+    {
+    }
+
+    MemoryRequirements2 & operator=( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    MemoryRequirements2 & operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements2 const *>( &rhs );
+      return *this;
+    }
+
+    operator VkMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryRequirements2 *>( this );
+    }
+
+    operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryRequirements2 *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::MemoryRequirements const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, memoryRequirements );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MemoryRequirements2 const & ) const = default;
+#else
+    bool operator==( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements );
+#  endif
+    }
+
+    bool operator!=( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryRequirements2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {}; + }; + + template <> + struct CppType + { + using Type = MemoryRequirements2; + }; + + using MemoryRequirements2KHR = MemoryRequirements2; + + struct MemoryType + { + using NativeType = VkMemoryType; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryType( VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {}, uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : propertyFlags{ propertyFlags_ } + , heapIndex{ heapIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryType( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryType( *reinterpret_cast( &rhs ) ) {} + + MemoryType & operator=( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryType & operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryType const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryType &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( propertyFlags, heapIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryType const & ) const = default; +#else + bool operator==( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( propertyFlags == rhs.propertyFlags ) && ( heapIndex == rhs.heapIndex ); +# endif + } + + bool operator!=( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {}; + uint32_t heapIndex = {}; + }; + + struct MemoryUnmapInfo + { + using NativeType = VkMemoryUnmapInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryUnmapInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryUnmapInfo( VULKAN_HPP_NAMESPACE::MemoryUnmapFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , memory{ memory_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryUnmapInfo( MemoryUnmapInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryUnmapInfo( VkMemoryUnmapInfo const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryUnmapInfo( *reinterpret_cast( &rhs ) ) {} + + MemoryUnmapInfo & operator=( MemoryUnmapInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryUnmapInfo & operator=( VkMemoryUnmapInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfo & setFlags( 
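// Usage sketch (not part of the generated header): MemoryRequirements(2) report size, alignment and
// a memoryTypeBits mask, while MemoryType / MemoryHeap above (exposed through
// PhysicalDeviceMemoryProperties) describe what each memory type offers; the classic pairing is
// picking a memoryTypeIndex for vkAllocateMemory. A minimal sketch over core Vulkan 1.1 calls;
// throws std::runtime_error (from <stdexcept>) if nothing matches.
inline uint32_t chooseMemoryType( vk::PhysicalDevice physicalDevice, vk::Device device, vk::Buffer buffer, vk::MemoryPropertyFlags required )
{
  vk::MemoryRequirements2            reqs     = device.getBufferMemoryRequirements2( vk::BufferMemoryRequirementsInfo2{ buffer } );
  vk::PhysicalDeviceMemoryProperties memProps = physicalDevice.getMemoryProperties();
  for ( uint32_t i = 0; i < memProps.memoryTypeCount; ++i )
  {
    bool allowed  = ( reqs.memoryRequirements.memoryTypeBits & ( 1u << i ) ) != 0;
    bool suitable = ( memProps.memoryTypes[i].propertyFlags & required ) == required;
    if ( allowed && suitable )
      return i;  // the backing heap is memProps.memoryHeaps[ memProps.memoryTypes[i].heapIndex ]
  }
  throw std::runtime_error( "no compatible memory type" );
}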
VULKAN_HPP_NAMESPACE::MemoryUnmapFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMemoryUnmapInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryUnmapInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, memory ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryUnmapInfo const & ) const = default; +#else + bool operator==( MemoryUnmapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( memory == rhs.memory ); +# endif + } + + bool operator!=( MemoryUnmapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryUnmapInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryUnmapFlags flags = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + }; + + template <> + struct CppType + { + using Type = MemoryUnmapInfo; + }; + + using MemoryUnmapInfoKHR = MemoryUnmapInfo; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct MemoryWin32HandlePropertiesKHR + { + using NativeType = VkMemoryWin32HandlePropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryWin32HandlePropertiesKHR; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryTypeBits{ memoryTypeBits_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryWin32HandlePropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + MemoryWin32HandlePropertiesKHR & operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryWin32HandlePropertiesKHR & operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryWin32HandlePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryTypeBits ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryWin32HandlePropertiesKHR const & ) const = default; +# else + bool operator==( MemoryWin32HandlePropertiesKHR const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); +# endif + } + + bool operator!=( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR; + void * pNext = {}; + uint32_t memoryTypeBits = {}; + }; + + template <> + struct CppType + { + using Type = MemoryWin32HandlePropertiesKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct MemoryZirconHandlePropertiesFUCHSIA + { + using NativeType = VkMemoryZirconHandlePropertiesFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryZirconHandlePropertiesFUCHSIA; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryTypeBits{ memoryTypeBits_ } + { + } + + VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryZirconHandlePropertiesFUCHSIA( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryZirconHandlePropertiesFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + MemoryZirconHandlePropertiesFUCHSIA & operator=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MemoryZirconHandlePropertiesFUCHSIA & operator=( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMemoryZirconHandlePropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryZirconHandlePropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryTypeBits ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryZirconHandlePropertiesFUCHSIA const & ) const = default; +# else + bool operator==( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); +# endif + } + + bool operator!=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryZirconHandlePropertiesFUCHSIA; + void * pNext = {}; + uint32_t memoryTypeBits = {}; + }; + + template <> + struct CppType + { + using Type = MemoryZirconHandlePropertiesFUCHSIA; + }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + struct MetalSurfaceCreateInfoEXT + { + using NativeType = VkMetalSurfaceCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
StructureType structureType = StructureType::eMetalSurfaceCreateInfoEXT; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {}, + const CAMetalLayer * pLayer_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , pLayer{ pLayer_ } + { + } + + VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MetalSurfaceCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + MetalSurfaceCreateInfoEXT & operator=( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MetalSurfaceCreateInfoEXT & operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer * pLayer_ ) VULKAN_HPP_NOEXCEPT + { + pLayer = pLayer_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMetalSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, pLayer ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MetalSurfaceCreateInfoEXT const & ) const = default; +# else + bool operator==( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pLayer == rhs.pLayer ); +# endif + } + + bool operator!=( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {}; + const CAMetalLayer * pLayer = {}; + }; + + template <> + struct CppType + { + using Type = MetalSurfaceCreateInfoEXT; + }; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + struct MicromapBuildInfoEXT + { + using NativeType = VkMicromapBuildInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapBuildInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap, + 
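// Usage sketch (not part of the generated header): MetalSurfaceCreateInfoEXT wraps an existing
// CAMetalLayer so vkCreateMetalSurfaceEXT can produce a VkSurfaceKHR for it (VK_EXT_metal_surface,
// Apple platforms only). A minimal sketch, assuming `instance` was created with the extension
// enabled and `layer` is a live CAMetalLayer obtained from the windowing code.
#if defined( VK_USE_PLATFORM_METAL_EXT )
inline vk::SurfaceKHR createSurfaceForMetalLayer( vk::Instance instance, const CAMetalLayer * layer )
{
  vk::MetalSurfaceCreateInfoEXT createInfo{ {}, layer };
  return instance.createMetalSurfaceEXT( createInfo );
}
#endif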
VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT::eBuild, + VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap_ = {}, + uint32_t usageCountsCount_ = {}, + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ = {}, + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR triangleArray_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + , flags{ flags_ } + , mode{ mode_ } + , dstMicromap{ dstMicromap_ } + , usageCountsCount{ usageCountsCount_ } + , pUsageCounts{ pUsageCounts_ } + , ppUsageCounts{ ppUsageCounts_ } + , data{ data_ } + , scratchData{ scratchData_ } + , triangleArray{ triangleArray_ } + , triangleArrayStride{ triangleArrayStride_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT( MicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MicromapBuildInfoEXT( VkMicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MicromapBuildInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MicromapBuildInfoEXT( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_, + VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode_, + VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & usageCounts_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pUsageCounts_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR triangleArray_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , type( type_ ) + , flags( flags_ ) + , mode( mode_ ) + , dstMicromap( dstMicromap_ ) + , usageCountsCount( static_cast( !usageCounts_.empty() ? 
usageCounts_.size() : pUsageCounts_.size() ) ) + , pUsageCounts( usageCounts_.data() ) + , ppUsageCounts( pUsageCounts_.data() ) + , data( data_ ) + , scratchData( scratchData_ ) + , triangleArray( triangleArray_ ) + , triangleArrayStride( triangleArrayStride_ ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( ( !usageCounts_.empty() + !pUsageCounts_.empty() ) <= 1 ); +# else + if ( 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() ) ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::MicromapBuildInfoEXT::MicromapBuildInfoEXT: 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + MicromapBuildInfoEXT & operator=( MicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MicromapBuildInfoEXT & operator=( VkMicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setType( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setMode( VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setDstMicromap( VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap_ ) VULKAN_HPP_NOEXCEPT + { + dstMicromap = dstMicromap_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setUsageCountsCount( uint32_t usageCountsCount_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = usageCountsCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + pUsageCounts = pUsageCounts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MicromapBuildInfoEXT & + setUsageCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & usageCounts_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = static_cast( usageCounts_.size() ); + pUsageCounts = usageCounts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPpUsageCounts( const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + ppUsageCounts = ppUsageCounts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MicromapBuildInfoEXT & setPUsageCounts( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT + { + usageCountsCount = static_cast( pUsageCounts_.size() ); + ppUsageCounts = pUsageCounts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & 
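// Clarification sketch (not part of the generated header): the ArrayProxy constructor above and the
// setUsageCounts / setPUsageCounts overloads feed the same usageCountsCount, so a
// MicromapBuildInfoEXT may supply usage data either as a flat array (pUsageCounts) or as an array
// of pointers (ppUsageCounts), but not both - hence the assert / LogicError on
// "!usageCounts_.empty() + !pUsageCounts_.empty()". A minimal sketch, assuming <vector> is included
// and `usages` outlives every use of `buildInfo` (ArrayProxyNoTemporaries only stores a pointer).
inline void fillUsageCounts( vk::MicromapBuildInfoEXT & buildInfo, std::vector<vk::MicromapUsageEXT> const & usages )
{
  buildInfo.setType( vk::MicromapTypeEXT::eOpacityMicromap )
    .setMode( vk::BuildMicromapModeEXT::eBuild )
    .setUsageCounts( usages );  // sets usageCountsCount and pUsageCounts together, leaves ppUsageCounts null
}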
scratchData_ ) VULKAN_HPP_NOEXCEPT + { + scratchData = scratchData_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & + setTriangleArray( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & triangleArray_ ) VULKAN_HPP_NOEXCEPT + { + triangleArray = triangleArray_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setTriangleArrayStride( VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride_ ) VULKAN_HPP_NOEXCEPT + { + triangleArrayStride = triangleArrayStride_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMicromapBuildInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMicromapBuildInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, type, flags, mode, dstMicromap, usageCountsCount, pUsageCounts, ppUsageCounts, data, scratchData, triangleArray, triangleArrayStride ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapBuildInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MicromapTypeEXT type = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap; + VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT mode = VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT::eBuild; + VULKAN_HPP_NAMESPACE::MicromapEXT dstMicromap = {}; + uint32_t usageCountsCount = {}; + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * pUsageCounts = {}; + const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR triangleArray = {}; + VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride = {}; + }; + + template <> + struct CppType + { + using Type = MicromapBuildInfoEXT; + }; + + struct MicromapBuildSizesInfoEXT + { + using NativeType = VkMicromapBuildSizesInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapBuildSizesInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MicromapBuildSizesInfoEXT( VULKAN_HPP_NAMESPACE::DeviceSize micromapSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 discardable_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , micromapSize{ micromapSize_ } + , buildScratchSize{ buildScratchSize_ } + , discardable{ discardable_ } + { + } + + VULKAN_HPP_CONSTEXPR MicromapBuildSizesInfoEXT( MicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MicromapBuildSizesInfoEXT( VkMicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MicromapBuildSizesInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + MicromapBuildSizesInfoEXT & operator=( MicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MicromapBuildSizesInfoEXT & operator=( VkMicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
MicromapBuildSizesInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setMicromapSize( VULKAN_HPP_NAMESPACE::DeviceSize micromapSize_ ) VULKAN_HPP_NOEXCEPT + { + micromapSize = micromapSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setBuildScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ ) VULKAN_HPP_NOEXCEPT + { + buildScratchSize = buildScratchSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setDiscardable( VULKAN_HPP_NAMESPACE::Bool32 discardable_ ) VULKAN_HPP_NOEXCEPT + { + discardable = discardable_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMicromapBuildSizesInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMicromapBuildSizesInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, micromapSize, buildScratchSize, discardable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MicromapBuildSizesInfoEXT const & ) const = default; +#else + bool operator==( MicromapBuildSizesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( micromapSize == rhs.micromapSize ) && ( buildScratchSize == rhs.buildScratchSize ) && + ( discardable == rhs.discardable ); +# endif + } + + bool operator!=( MicromapBuildSizesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapBuildSizesInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize micromapSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 discardable = {}; + }; + + template <> + struct CppType + { + using Type = MicromapBuildSizesInfoEXT; + }; + + struct MicromapCreateInfoEXT + { + using NativeType = VkMicromapCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MicromapCreateInfoEXT( VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT createFlags_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap, + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , createFlags{ createFlags_ } + , buffer{ buffer_ } + , offset{ offset_ } + , size{ size_ } + , type{ type_ } + , deviceAddress{ deviceAddress_ } + { + } + + VULKAN_HPP_CONSTEXPR MicromapCreateInfoEXT( MicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MicromapCreateInfoEXT( VkMicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MicromapCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + MicromapCreateInfoEXT & operator=( 
MicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MicromapCreateInfoEXT & operator=( VkMicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setCreateFlags( VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT createFlags_ ) VULKAN_HPP_NOEXCEPT + { + createFlags = createFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setType( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMicromapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMicromapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, createFlags, buffer, offset, size, type, deviceAddress ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MicromapCreateInfoEXT const & ) const = default; +#else + bool operator==( MicromapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createFlags == rhs.createFlags ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && + ( size == rhs.size ) && ( type == rhs.type ) && ( deviceAddress == rhs.deviceAddress ); +# endif + } + + bool operator!=( MicromapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMicromapCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT createFlags = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::MicromapTypeEXT type = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + }; + + template <> + struct CppType + { + using Type = MicromapCreateInfoEXT; + }; + + struct MicromapTriangleEXT + { + using NativeType = VkMicromapTriangleEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
MicromapTriangleEXT( uint32_t dataOffset_ = {}, uint16_t subdivisionLevel_ = {}, uint16_t format_ = {} ) VULKAN_HPP_NOEXCEPT + : dataOffset{ dataOffset_ } + , subdivisionLevel{ subdivisionLevel_ } + , format{ format_ } + { + } + + VULKAN_HPP_CONSTEXPR MicromapTriangleEXT( MicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MicromapTriangleEXT( VkMicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MicromapTriangleEXT( *reinterpret_cast( &rhs ) ) + { + } + + MicromapTriangleEXT & operator=( MicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MicromapTriangleEXT & operator=( VkMicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setDataOffset( uint32_t dataOffset_ ) VULKAN_HPP_NOEXCEPT + { + dataOffset = dataOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setSubdivisionLevel( uint16_t subdivisionLevel_ ) VULKAN_HPP_NOEXCEPT + { + subdivisionLevel = subdivisionLevel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setFormat( uint16_t format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMicromapTriangleEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMicromapTriangleEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( dataOffset, subdivisionLevel, format ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MicromapTriangleEXT const & ) const = default; +#else + bool operator==( MicromapTriangleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( dataOffset == rhs.dataOffset ) && ( subdivisionLevel == rhs.subdivisionLevel ) && ( format == rhs.format ); +# endif + } + + bool operator!=( MicromapTriangleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t dataOffset = {}; + uint16_t subdivisionLevel = {}; + uint16_t format = {}; + }; + + struct MicromapVersionInfoEXT + { + using NativeType = VkMicromapVersionInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapVersionInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MicromapVersionInfoEXT( const uint8_t * pVersionData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pVersionData{ pVersionData_ } + { + } + + VULKAN_HPP_CONSTEXPR MicromapVersionInfoEXT( MicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MicromapVersionInfoEXT( VkMicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MicromapVersionInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + MicromapVersionInfoEXT & operator=( MicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MicromapVersionInfoEXT & operator=( VkMicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = 
*reinterpret_cast<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MicromapVersionInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MicromapVersionInfoEXT & setPVersionData( const uint8_t * pVersionData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVersionData = pVersionData_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkMicromapVersionInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMicromapVersionInfoEXT *>( this );
+    }
+
+    operator VkMicromapVersionInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMicromapVersionInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, const uint8_t * const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, pVersionData );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MicromapVersionInfoEXT const & ) const = default;
+#else
+    bool operator==( MicromapVersionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pVersionData == rhs.pVersionData );
+#  endif
+    }
+
+    bool operator!=( MicromapVersionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType        = StructureType::eMicromapVersionInfoEXT;
+    const void *                        pNext        = {};
+    const uint8_t *                     pVersionData = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eMicromapVersionInfoEXT>
+  {
+    using Type = MicromapVersionInfoEXT;
+  };
+
+  struct MultiDrawIndexedInfoEXT
+  {
+    using NativeType = VkMultiDrawIndexedInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT( uint32_t firstIndex_ = {}, uint32_t indexCount_ = {}, int32_t vertexOffset_ = {} ) VULKAN_HPP_NOEXCEPT
+      : firstIndex{ firstIndex_ }
+      , indexCount{ indexCount_ }
+      , vertexOffset{ vertexOffset_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT( MultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MultiDrawIndexedInfoEXT( VkMultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : MultiDrawIndexedInfoEXT( *reinterpret_cast<MultiDrawIndexedInfoEXT const *>( &rhs ) )
+    {
+    }
+
+    MultiDrawIndexedInfoEXT & operator=( MultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    MultiDrawIndexedInfoEXT & operator=( VkMultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      firstIndex = firstIndex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexCount = indexCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexOffset = vertexOffset_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkMultiDrawIndexedInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( this );
+    }
+
+    operator VkMultiDrawIndexedInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMultiDrawIndexedInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<uint32_t const &, uint32_t const &, int32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( firstIndex, indexCount, vertexOffset );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MultiDrawIndexedInfoEXT const & ) const = default;
+#else
+    bool operator==( MultiDrawIndexedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( firstIndex == rhs.firstIndex ) && ( indexCount == rhs.indexCount ) && ( vertexOffset == rhs.vertexOffset );
+#  endif
+    }
+
+    bool operator!=( MultiDrawIndexedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t firstIndex   = {};
+    uint32_t indexCount   = {};
+    int32_t  vertexOffset = {};
+  };
+
+  struct MultiDrawInfoEXT
+  {
+    using NativeType = VkMultiDrawInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT( uint32_t firstVertex_ = {}, uint32_t vertexCount_ = {} ) VULKAN_HPP_NOEXCEPT
+      : firstVertex{ firstVertex_ }
+      , vertexCount{ vertexCount_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT( MultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    MultiDrawInfoEXT( VkMultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MultiDrawInfoEXT( *reinterpret_cast<MultiDrawInfoEXT const *>( &rhs ) ) {}
+
+    MultiDrawInfoEXT & operator=( MultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    MultiDrawInfoEXT & operator=( VkMultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      firstVertex = firstVertex_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexCount = vertexCount_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkMultiDrawInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMultiDrawInfoEXT *>( this );
+    }
+
+    operator VkMultiDrawInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMultiDrawInfoEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<uint32_t const &, uint32_t const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( firstVertex, vertexCount );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( MultiDrawInfoEXT const & ) const = default;
+#else
+    bool operator==( MultiDrawInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( firstVertex == rhs.firstVertex ) && ( vertexCount == rhs.vertexCount );
+#  endif
+    }
+
+    bool operator!=( MultiDrawInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t firstVertex = {};
+    uint32_t vertexCount = {};
+  };
+
+  struct MultisamplePropertiesEXT
+  {
+    using NativeType = VkMultisamplePropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisamplePropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) &&
!defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxSampleLocationGridSize{ maxSampleLocationGridSize_ } + { + } + + VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MultisamplePropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + MultisamplePropertiesEXT & operator=( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MultisamplePropertiesEXT & operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkMultisamplePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxSampleLocationGridSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MultisamplePropertiesEXT const & ) const = default; +#else + bool operator==( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ); +# endif + } + + bool operator!=( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {}; + }; + + template <> + struct CppType + { + using Type = MultisamplePropertiesEXT; + }; + + struct MultisampledRenderToSingleSampledInfoEXT + { + using NativeType = VkMultisampledRenderToSingleSampledInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisampledRenderToSingleSampledInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + MultisampledRenderToSingleSampledInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampledEnable_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , multisampledRenderToSingleSampledEnable{ multisampledRenderToSingleSampledEnable_ } + , rasterizationSamples{ rasterizationSamples_ } + { + } + + VULKAN_HPP_CONSTEXPR MultisampledRenderToSingleSampledInfoEXT( MultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MultisampledRenderToSingleSampledInfoEXT( VkMultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MultisampledRenderToSingleSampledInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + MultisampledRenderToSingleSampledInfoEXT & operator=( MultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT 
= default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MultisampledRenderToSingleSampledInfoEXT & operator=( VkMultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & + setMultisampledRenderToSingleSampledEnable( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampledEnable_ ) VULKAN_HPP_NOEXCEPT + { + multisampledRenderToSingleSampledEnable = multisampledRenderToSingleSampledEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & + setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationSamples = rasterizationSamples_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMultisampledRenderToSingleSampledInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMultisampledRenderToSingleSampledInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, multisampledRenderToSingleSampledEnable, rasterizationSamples ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MultisampledRenderToSingleSampledInfoEXT const & ) const = default; +#else + bool operator==( MultisampledRenderToSingleSampledInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multisampledRenderToSingleSampledEnable == rhs.multisampledRenderToSingleSampledEnable ) && + ( rasterizationSamples == rhs.rasterizationSamples ); +# endif + } + + bool operator!=( MultisampledRenderToSingleSampledInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisampledRenderToSingleSampledInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampledEnable = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + }; + + template <> + struct CppType + { + using Type = MultisampledRenderToSingleSampledInfoEXT; + }; + + struct MultiviewPerViewAttributesInfoNVX + { + using NativeType = VkMultiviewPerViewAttributesInfoNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultiviewPerViewAttributesInfoNVX; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , perViewAttributes{ perViewAttributes_ } + , perViewAttributesPositionXOnly{ perViewAttributesPositionXOnly_ } + { + } + + VULKAN_HPP_CONSTEXPR 
MultiviewPerViewAttributesInfoNVX( MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MultiviewPerViewAttributesInfoNVX( VkMultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : MultiviewPerViewAttributesInfoNVX( *reinterpret_cast( &rhs ) ) + { + } + + MultiviewPerViewAttributesInfoNVX & operator=( MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MultiviewPerViewAttributesInfoNVX & operator=( VkMultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPerViewAttributes( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes_ ) VULKAN_HPP_NOEXCEPT + { + perViewAttributes = perViewAttributes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & + setPerViewAttributesPositionXOnly( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly_ ) VULKAN_HPP_NOEXCEPT + { + perViewAttributesPositionXOnly = perViewAttributesPositionXOnly_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMultiviewPerViewAttributesInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMultiviewPerViewAttributesInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, perViewAttributes, perViewAttributesPositionXOnly ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MultiviewPerViewAttributesInfoNVX const & ) const = default; +#else + bool operator==( MultiviewPerViewAttributesInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perViewAttributes == rhs.perViewAttributes ) && + ( perViewAttributesPositionXOnly == rhs.perViewAttributesPositionXOnly ); +# endif + } + + bool operator!=( MultiviewPerViewAttributesInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultiviewPerViewAttributesInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes = {}; + VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly = {}; + }; + + template <> + struct CppType + { + using Type = MultiviewPerViewAttributesInfoNVX; + }; + + struct MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM + { + using NativeType = VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( uint32_t perViewRenderAreaCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pPerViewRenderAreas_ = {}, + const 
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , perViewRenderAreaCount{ perViewRenderAreaCount_ } + , pPerViewRenderAreas{ pPerViewRenderAreas_ } + { + } + + VULKAN_HPP_CONSTEXPR + MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & perViewRenderAreas_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), perViewRenderAreaCount( static_cast( perViewRenderAreas_.size() ) ), pPerViewRenderAreas( perViewRenderAreas_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & + operator=( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & operator=( VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & + setPerViewRenderAreaCount( uint32_t perViewRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT + { + perViewRenderAreaCount = perViewRenderAreaCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & + setPPerViewRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D * pPerViewRenderAreas_ ) VULKAN_HPP_NOEXCEPT + { + pPerViewRenderAreas = pPerViewRenderAreas_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & + setPerViewRenderAreas( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & perViewRenderAreas_ ) VULKAN_HPP_NOEXCEPT + { + perViewRenderAreaCount = static_cast( perViewRenderAreas_.size() ); + pPerViewRenderAreas = perViewRenderAreas_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, perViewRenderAreaCount, pPerViewRenderAreas ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & ) const = default; +#else + bool operator==( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( 
pNext == rhs.pNext ) && ( perViewRenderAreaCount == rhs.perViewRenderAreaCount ) && + ( pPerViewRenderAreas == rhs.pPerViewRenderAreas ); +# endif + } + + bool operator!=( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; + const void * pNext = {}; + uint32_t perViewRenderAreaCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pPerViewRenderAreas = {}; + }; + + template <> + struct CppType + { + using Type = MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; + }; + + struct MutableDescriptorTypeListEXT + { + using NativeType = VkMutableDescriptorTypeListEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListEXT( uint32_t descriptorTypeCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : descriptorTypeCount{ descriptorTypeCount_ } + , pDescriptorTypes{ pDescriptorTypes_ } + { + } + + VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListEXT( MutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MutableDescriptorTypeListEXT( VkMutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MutableDescriptorTypeListEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MutableDescriptorTypeListEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorTypes_ ) + : descriptorTypeCount( static_cast( descriptorTypes_.size() ) ), pDescriptorTypes( descriptorTypes_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + MutableDescriptorTypeListEXT & operator=( MutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MutableDescriptorTypeListEXT & operator=( VkMutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListEXT & setDescriptorTypeCount( uint32_t descriptorTypeCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorTypeCount = descriptorTypeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListEXT & + setPDescriptorTypes( const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ ) VULKAN_HPP_NOEXCEPT + { + pDescriptorTypes = pDescriptorTypes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MutableDescriptorTypeListEXT & setDescriptorTypes( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorTypes_ ) VULKAN_HPP_NOEXCEPT + { + descriptorTypeCount = static_cast( descriptorTypes_.size() ); + pDescriptorTypes = descriptorTypes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMutableDescriptorTypeListEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMutableDescriptorTypeListEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( descriptorTypeCount, pDescriptorTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + 
auto operator<=>( MutableDescriptorTypeListEXT const & ) const = default; +#else + bool operator==( MutableDescriptorTypeListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( descriptorTypeCount == rhs.descriptorTypeCount ) && ( pDescriptorTypes == rhs.pDescriptorTypes ); +# endif + } + + bool operator!=( MutableDescriptorTypeListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t descriptorTypeCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes = {}; + }; + + using MutableDescriptorTypeListVALVE = MutableDescriptorTypeListEXT; + + struct MutableDescriptorTypeCreateInfoEXT + { + using NativeType = VkMutableDescriptorTypeCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMutableDescriptorTypeCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoEXT( uint32_t mutableDescriptorTypeListCount_ = {}, + const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , mutableDescriptorTypeListCount{ mutableDescriptorTypeListCount_ } + , pMutableDescriptorTypeLists{ pMutableDescriptorTypeLists_ } + { + } + + VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoEXT( MutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MutableDescriptorTypeCreateInfoEXT( VkMutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MutableDescriptorTypeCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MutableDescriptorTypeCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & mutableDescriptorTypeLists_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , mutableDescriptorTypeListCount( static_cast( mutableDescriptorTypeLists_.size() ) ) + , pMutableDescriptorTypeLists( mutableDescriptorTypeLists_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + MutableDescriptorTypeCreateInfoEXT & operator=( MutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + MutableDescriptorTypeCreateInfoEXT & operator=( VkMutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT & + setMutableDescriptorTypeListCount( uint32_t mutableDescriptorTypeListCount_ ) VULKAN_HPP_NOEXCEPT + { + mutableDescriptorTypeListCount = mutableDescriptorTypeListCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT & + setPMutableDescriptorTypeLists( const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT + { + pMutableDescriptorTypeLists = pMutableDescriptorTypeLists_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + MutableDescriptorTypeCreateInfoEXT & setMutableDescriptorTypeLists( + 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & mutableDescriptorTypeLists_ ) + VULKAN_HPP_NOEXCEPT + { + mutableDescriptorTypeListCount = static_cast( mutableDescriptorTypeLists_.size() ); + pMutableDescriptorTypeLists = mutableDescriptorTypeLists_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkMutableDescriptorTypeCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMutableDescriptorTypeCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, mutableDescriptorTypeListCount, pMutableDescriptorTypeLists ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MutableDescriptorTypeCreateInfoEXT const & ) const = default; +#else + bool operator==( MutableDescriptorTypeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mutableDescriptorTypeListCount == rhs.mutableDescriptorTypeListCount ) && + ( pMutableDescriptorTypeLists == rhs.pMutableDescriptorTypeLists ); +# endif + } + + bool operator!=( MutableDescriptorTypeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMutableDescriptorTypeCreateInfoEXT; + const void * pNext = {}; + uint32_t mutableDescriptorTypeListCount = {}; + const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists = {}; + }; + + template <> + struct CppType + { + using Type = MutableDescriptorTypeCreateInfoEXT; + }; + + using MutableDescriptorTypeCreateInfoVALVE = MutableDescriptorTypeCreateInfoEXT; + + struct OpaqueCaptureDescriptorDataCreateInfoEXT + { + using NativeType = VkOpaqueCaptureDescriptorDataCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpaqueCaptureDescriptorDataCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR OpaqueCaptureDescriptorDataCreateInfoEXT( const void * opaqueCaptureDescriptorData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , opaqueCaptureDescriptorData{ opaqueCaptureDescriptorData_ } + { + } + + VULKAN_HPP_CONSTEXPR OpaqueCaptureDescriptorDataCreateInfoEXT( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + OpaqueCaptureDescriptorDataCreateInfoEXT( VkOpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : OpaqueCaptureDescriptorDataCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + OpaqueCaptureDescriptorDataCreateInfoEXT & operator=( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + OpaqueCaptureDescriptorDataCreateInfoEXT & operator=( VkOpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
OpaqueCaptureDescriptorDataCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpaqueCaptureDescriptorDataCreateInfoEXT & + setOpaqueCaptureDescriptorData( const void * opaqueCaptureDescriptorData_ ) VULKAN_HPP_NOEXCEPT + { + opaqueCaptureDescriptorData = opaqueCaptureDescriptorData_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkOpaqueCaptureDescriptorDataCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpaqueCaptureDescriptorDataCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, opaqueCaptureDescriptorData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( OpaqueCaptureDescriptorDataCreateInfoEXT const & ) const = default; +#else + bool operator==( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureDescriptorData == rhs.opaqueCaptureDescriptorData ); +# endif + } + + bool operator!=( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpaqueCaptureDescriptorDataCreateInfoEXT; + const void * pNext = {}; + const void * opaqueCaptureDescriptorData = {}; + }; + + template <> + struct CppType + { + using Type = OpaqueCaptureDescriptorDataCreateInfoEXT; + }; + + struct OpticalFlowExecuteInfoNV + { + using NativeType = VkOpticalFlowExecuteInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowExecuteInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR OpticalFlowExecuteInfoNV( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags_ = {}, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pRegions_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } + { + } + + VULKAN_HPP_CONSTEXPR OpticalFlowExecuteInfoNV( OpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + OpticalFlowExecuteInfoNV( VkOpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : OpticalFlowExecuteInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + OpticalFlowExecuteInfoNV( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + void * pNext_ = nullptr ) + : pNext( pNext_ ), flags( flags_ ), regionCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + OpticalFlowExecuteInfoNV & operator=( OpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + OpticalFlowExecuteInfoNV & operator=( VkOpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( 
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setFlags( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setPRegions( const VULKAN_HPP_NAMESPACE::Rect2D * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + OpticalFlowExecuteInfoNV & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkOpticalFlowExecuteInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpticalFlowExecuteInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( OpticalFlowExecuteInfoNV const & ) const = default; +#else + bool operator==( OpticalFlowExecuteInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( OpticalFlowExecuteInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowExecuteInfoNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV flags = {}; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pRegions = {}; + }; + + template <> + struct CppType + { + using Type = OpticalFlowExecuteInfoNV; + }; + + struct OpticalFlowImageFormatInfoNV + { + using NativeType = VkOpticalFlowImageFormatInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowImageFormatInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatInfoNV( VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV usage_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , usage{ usage_ } + { + } + + VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatInfoNV( OpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + OpticalFlowImageFormatInfoNV( VkOpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : OpticalFlowImageFormatInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + OpticalFlowImageFormatInfoNV & operator=( OpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif 
/*VULKAN_HPP_NO_CONSTRUCTORS*/ + + OpticalFlowImageFormatInfoNV & operator=( VkOpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 OpticalFlowImageFormatInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowImageFormatInfoNV & setUsage( VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkOpticalFlowImageFormatInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpticalFlowImageFormatInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, usage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( OpticalFlowImageFormatInfoNV const & ) const = default; +#else + bool operator==( OpticalFlowImageFormatInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage ); +# endif + } + + bool operator!=( OpticalFlowImageFormatInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowImageFormatInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV usage = {}; + }; + + template <> + struct CppType + { + using Type = OpticalFlowImageFormatInfoNV; + }; + + struct OpticalFlowImageFormatPropertiesNV + { + using NativeType = VkOpticalFlowImageFormatPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowImageFormatPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , format{ format_ } + { + } + + VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatPropertiesNV( OpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + OpticalFlowImageFormatPropertiesNV( VkOpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : OpticalFlowImageFormatPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + OpticalFlowImageFormatPropertiesNV & operator=( OpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + OpticalFlowImageFormatPropertiesNV & operator=( VkOpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkOpticalFlowImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpticalFlowImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= 
VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, format ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( OpticalFlowImageFormatPropertiesNV const & ) const = default; +#else + bool operator==( OpticalFlowImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ); +# endif + } + + bool operator!=( OpticalFlowImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowImageFormatPropertiesNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + }; + + template <> + struct CppType + { + using Type = OpticalFlowImageFormatPropertiesNV; + }; + + struct OpticalFlowSessionCreateInfoNV + { + using NativeType = VkOpticalFlowSessionCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowSessionCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateInfoNV( + uint32_t width_ = {}, + uint32_t height_ = {}, + VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format flowVectorFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format costFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV outputGridSize_ = {}, + VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV hintGridSize_ = {}, + VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV performanceLevel_ = VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV::eUnknown, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV flags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , width{ width_ } + , height{ height_ } + , imageFormat{ imageFormat_ } + , flowVectorFormat{ flowVectorFormat_ } + , costFormat{ costFormat_ } + , outputGridSize{ outputGridSize_ } + , hintGridSize{ hintGridSize_ } + , performanceLevel{ performanceLevel_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateInfoNV( OpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + OpticalFlowSessionCreateInfoNV( VkOpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : OpticalFlowSessionCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + OpticalFlowSessionCreateInfoNV & operator=( OpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + OpticalFlowSessionCreateInfoNV & operator=( VkOpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
OpticalFlowSessionCreateInfoNV & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT + { + imageFormat = imageFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setFlowVectorFormat( VULKAN_HPP_NAMESPACE::Format flowVectorFormat_ ) VULKAN_HPP_NOEXCEPT + { + flowVectorFormat = flowVectorFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setCostFormat( VULKAN_HPP_NAMESPACE::Format costFormat_ ) VULKAN_HPP_NOEXCEPT + { + costFormat = costFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & + setOutputGridSize( VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV outputGridSize_ ) VULKAN_HPP_NOEXCEPT + { + outputGridSize = outputGridSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & + setHintGridSize( VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV hintGridSize_ ) VULKAN_HPP_NOEXCEPT + { + hintGridSize = hintGridSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & + setPerformanceLevel( VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV performanceLevel_ ) VULKAN_HPP_NOEXCEPT + { + performanceLevel = performanceLevel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkOpticalFlowSessionCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpticalFlowSessionCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, width, height, imageFormat, flowVectorFormat, costFormat, outputGridSize, hintGridSize, performanceLevel, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( OpticalFlowSessionCreateInfoNV const & ) const = default; +#else + bool operator==( OpticalFlowSessionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( width == rhs.width ) && ( height == rhs.height ) && ( imageFormat == rhs.imageFormat ) && + ( flowVectorFormat == rhs.flowVectorFormat ) && ( costFormat == rhs.costFormat ) && ( outputGridSize == rhs.outputGridSize ) && + ( hintGridSize == rhs.hintGridSize ) && ( performanceLevel == rhs.performanceLevel ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( OpticalFlowSessionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowSessionCreateInfoNV; + void * pNext = {}; + uint32_t width = {}; + uint32_t height = {}; + VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Format flowVectorFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Format costFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + 
VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV outputGridSize = {}; + VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV hintGridSize = {}; + VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV performanceLevel = VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV::eUnknown; + VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV flags = {}; + }; + + template <> + struct CppType + { + using Type = OpticalFlowSessionCreateInfoNV; + }; + + struct OpticalFlowSessionCreatePrivateDataInfoNV + { + using NativeType = VkOpticalFlowSessionCreatePrivateDataInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreatePrivateDataInfoNV( uint32_t id_ = {}, + uint32_t size_ = {}, + const void * pPrivateData_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , id{ id_ } + , size{ size_ } + , pPrivateData{ pPrivateData_ } + { + } + + VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreatePrivateDataInfoNV( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + OpticalFlowSessionCreatePrivateDataInfoNV( VkOpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : OpticalFlowSessionCreatePrivateDataInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + OpticalFlowSessionCreatePrivateDataInfoNV & operator=( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + OpticalFlowSessionCreatePrivateDataInfoNV & operator=( VkOpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setId( uint32_t id_ ) VULKAN_HPP_NOEXCEPT + { + id = id_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setPPrivateData( const void * pPrivateData_ ) VULKAN_HPP_NOEXCEPT + { + pPrivateData = pPrivateData_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkOpticalFlowSessionCreatePrivateDataInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOpticalFlowSessionCreatePrivateDataInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, id, size, pPrivateData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( OpticalFlowSessionCreatePrivateDataInfoNV const & ) const = default; +#else + bool operator==( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( id == rhs.id ) && ( 
size == rhs.size ) && ( pPrivateData == rhs.pPrivateData ); +# endif + } + + bool operator!=( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV; + void * pNext = {}; + uint32_t id = {}; + uint32_t size = {}; + const void * pPrivateData = {}; + }; + + template <> + struct CppType + { + using Type = OpticalFlowSessionCreatePrivateDataInfoNV; + }; + + struct OutOfBandQueueTypeInfoNV + { + using NativeType = VkOutOfBandQueueTypeInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOutOfBandQueueTypeInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR OutOfBandQueueTypeInfoNV( VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV queueType_ = VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV::eRender, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , queueType{ queueType_ } + { + } + + VULKAN_HPP_CONSTEXPR OutOfBandQueueTypeInfoNV( OutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + OutOfBandQueueTypeInfoNV( VkOutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : OutOfBandQueueTypeInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + OutOfBandQueueTypeInfoNV & operator=( OutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + OutOfBandQueueTypeInfoNV & operator=( VkOutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 OutOfBandQueueTypeInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 OutOfBandQueueTypeInfoNV & setQueueType( VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV queueType_ ) VULKAN_HPP_NOEXCEPT + { + queueType = queueType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkOutOfBandQueueTypeInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOutOfBandQueueTypeInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, queueType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( OutOfBandQueueTypeInfoNV const & ) const = default; +#else + bool operator==( OutOfBandQueueTypeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueType == rhs.queueType ); +# endif + } + + bool operator!=( OutOfBandQueueTypeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOutOfBandQueueTypeInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV queueType = VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV::eRender; + }; + + template <> + struct CppType + { + using Type = OutOfBandQueueTypeInfoNV; + }; + + struct 
PartitionedAccelerationStructureFlagsNV + { + using NativeType = VkPartitionedAccelerationStructureFlagsNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePartitionedAccelerationStructureFlagsNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PartitionedAccelerationStructureFlagsNV( VULKAN_HPP_NAMESPACE::Bool32 enablePartitionTranslation_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , enablePartitionTranslation{ enablePartitionTranslation_ } + { + } + + VULKAN_HPP_CONSTEXPR PartitionedAccelerationStructureFlagsNV( PartitionedAccelerationStructureFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PartitionedAccelerationStructureFlagsNV( VkPartitionedAccelerationStructureFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PartitionedAccelerationStructureFlagsNV( *reinterpret_cast( &rhs ) ) + { + } + + PartitionedAccelerationStructureFlagsNV & operator=( PartitionedAccelerationStructureFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PartitionedAccelerationStructureFlagsNV & operator=( VkPartitionedAccelerationStructureFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureFlagsNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureFlagsNV & + setEnablePartitionTranslation( VULKAN_HPP_NAMESPACE::Bool32 enablePartitionTranslation_ ) VULKAN_HPP_NOEXCEPT + { + enablePartitionTranslation = enablePartitionTranslation_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPartitionedAccelerationStructureFlagsNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPartitionedAccelerationStructureFlagsNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, enablePartitionTranslation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PartitionedAccelerationStructureFlagsNV const & ) const = default; +#else + bool operator==( PartitionedAccelerationStructureFlagsNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( enablePartitionTranslation == rhs.enablePartitionTranslation ); +# endif + } + + bool operator!=( PartitionedAccelerationStructureFlagsNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePartitionedAccelerationStructureFlagsNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 enablePartitionTranslation = {}; + }; + + template <> + struct CppType + { + using Type = PartitionedAccelerationStructureFlagsNV; + }; + + struct PartitionedAccelerationStructureUpdateInstanceDataNV + { + using NativeType = VkPartitionedAccelerationStructureUpdateInstanceDataNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PartitionedAccelerationStructureUpdateInstanceDataNV( uint32_t instanceIndex_ = {}, + uint32_t instanceContributionToHitGroupIndex_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT + : instanceIndex{ instanceIndex_ } + , instanceContributionToHitGroupIndex{ instanceContributionToHitGroupIndex_ } + , accelerationStructure{ accelerationStructure_ } + { + } + + VULKAN_HPP_CONSTEXPR + PartitionedAccelerationStructureUpdateInstanceDataNV( PartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PartitionedAccelerationStructureUpdateInstanceDataNV( VkPartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PartitionedAccelerationStructureUpdateInstanceDataNV( *reinterpret_cast( &rhs ) ) + { + } + + PartitionedAccelerationStructureUpdateInstanceDataNV & + operator=( PartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PartitionedAccelerationStructureUpdateInstanceDataNV & operator=( VkPartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureUpdateInstanceDataNV & setInstanceIndex( uint32_t instanceIndex_ ) VULKAN_HPP_NOEXCEPT + { + instanceIndex = instanceIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureUpdateInstanceDataNV & + setInstanceContributionToHitGroupIndex( uint32_t instanceContributionToHitGroupIndex_ ) VULKAN_HPP_NOEXCEPT + { + instanceContributionToHitGroupIndex = instanceContributionToHitGroupIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureUpdateInstanceDataNV & + setAccelerationStructure( VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPartitionedAccelerationStructureUpdateInstanceDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPartitionedAccelerationStructureUpdateInstanceDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( instanceIndex, instanceContributionToHitGroupIndex, accelerationStructure ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PartitionedAccelerationStructureUpdateInstanceDataNV const & ) const = default; +#else + bool operator==( PartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( instanceIndex == rhs.instanceIndex ) && ( instanceContributionToHitGroupIndex == rhs.instanceContributionToHitGroupIndex ) && + ( accelerationStructure == rhs.accelerationStructure ); +# endif + } + + bool operator!=( PartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t instanceIndex = {}; + uint32_t 
instanceContributionToHitGroupIndex = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure = {}; + }; + + struct PartitionedAccelerationStructureWriteInstanceDataNV + { + using NativeType = VkPartitionedAccelerationStructureWriteInstanceDataNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PartitionedAccelerationStructureWriteInstanceDataNV( VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform_ = {}, + std::array const & explicitAABB_ = {}, + uint32_t instanceID_ = {}, + uint32_t instanceMask_ = {}, + uint32_t instanceContributionToHitGroupIndex_ = {}, + VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstanceFlagsNV instanceFlags_ = {}, + uint32_t instanceIndex_ = {}, + uint32_t partitionIndex_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT + : transform{ transform_ } + , explicitAABB{ explicitAABB_ } + , instanceID{ instanceID_ } + , instanceMask{ instanceMask_ } + , instanceContributionToHitGroupIndex{ instanceContributionToHitGroupIndex_ } + , instanceFlags{ instanceFlags_ } + , instanceIndex{ instanceIndex_ } + , partitionIndex{ partitionIndex_ } + , accelerationStructure{ accelerationStructure_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 + PartitionedAccelerationStructureWriteInstanceDataNV( PartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PartitionedAccelerationStructureWriteInstanceDataNV( VkPartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PartitionedAccelerationStructureWriteInstanceDataNV( *reinterpret_cast( &rhs ) ) + { + } + + PartitionedAccelerationStructureWriteInstanceDataNV & + operator=( PartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PartitionedAccelerationStructureWriteInstanceDataNV & operator=( VkPartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & + setTransform( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setExplicitAABB( std::array explicitAABB_ ) VULKAN_HPP_NOEXCEPT + { + explicitAABB = explicitAABB_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setInstanceID( uint32_t instanceID_ ) VULKAN_HPP_NOEXCEPT + { + instanceID = instanceID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setInstanceMask( uint32_t instanceMask_ ) VULKAN_HPP_NOEXCEPT + { + instanceMask = instanceMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & + setInstanceContributionToHitGroupIndex( uint32_t instanceContributionToHitGroupIndex_ ) VULKAN_HPP_NOEXCEPT + { + instanceContributionToHitGroupIndex = instanceContributionToHitGroupIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & + setInstanceFlags( VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstanceFlagsNV instanceFlags_ ) VULKAN_HPP_NOEXCEPT + { + instanceFlags = 
instanceFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setInstanceIndex( uint32_t instanceIndex_ ) VULKAN_HPP_NOEXCEPT + { + instanceIndex = instanceIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setPartitionIndex( uint32_t partitionIndex_ ) VULKAN_HPP_NOEXCEPT + { + partitionIndex = partitionIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & + setAccelerationStructure( VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPartitionedAccelerationStructureWriteInstanceDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPartitionedAccelerationStructureWriteInstanceDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstanceFlagsNV const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::DeviceAddress const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( transform, + explicitAABB, + instanceID, + instanceMask, + instanceContributionToHitGroupIndex, + instanceFlags, + instanceIndex, + partitionIndex, + accelerationStructure ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PartitionedAccelerationStructureWriteInstanceDataNV const & ) const = default; +#else + bool operator==( PartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( transform == rhs.transform ) && ( explicitAABB == rhs.explicitAABB ) && ( instanceID == rhs.instanceID ) && + ( instanceMask == rhs.instanceMask ) && ( instanceContributionToHitGroupIndex == rhs.instanceContributionToHitGroupIndex ) && + ( instanceFlags == rhs.instanceFlags ) && ( instanceIndex == rhs.instanceIndex ) && ( partitionIndex == rhs.partitionIndex ) && + ( accelerationStructure == rhs.accelerationStructure ); +# endif + } + + bool operator!=( PartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D explicitAABB = {}; + uint32_t instanceID = {}; + uint32_t instanceMask = {}; + uint32_t instanceContributionToHitGroupIndex = {}; + VULKAN_HPP_NAMESPACE::PartitionedAccelerationStructureInstanceFlagsNV instanceFlags = {}; + uint32_t instanceIndex = {}; + uint32_t partitionIndex = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress accelerationStructure = {}; + }; + + struct PartitionedAccelerationStructureWritePartitionTranslationDataNV + { + using NativeType = VkPartitionedAccelerationStructureWritePartitionTranslationDataNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PartitionedAccelerationStructureWritePartitionTranslationDataNV( uint32_t partitionIndex_ = {}, + std::array const & partitionTranslation_ = {} ) VULKAN_HPP_NOEXCEPT + : partitionIndex{ partitionIndex_ 
} + , partitionTranslation{ partitionTranslation_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWritePartitionTranslationDataNV( + PartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PartitionedAccelerationStructureWritePartitionTranslationDataNV( VkPartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) + VULKAN_HPP_NOEXCEPT + : PartitionedAccelerationStructureWritePartitionTranslationDataNV( + *reinterpret_cast( &rhs ) ) + { + } + + PartitionedAccelerationStructureWritePartitionTranslationDataNV & + operator=( PartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PartitionedAccelerationStructureWritePartitionTranslationDataNV & + operator=( VkPartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWritePartitionTranslationDataNV & setPartitionIndex( uint32_t partitionIndex_ ) VULKAN_HPP_NOEXCEPT + { + partitionIndex = partitionIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWritePartitionTranslationDataNV & + setPartitionTranslation( std::array partitionTranslation_ ) VULKAN_HPP_NOEXCEPT + { + partitionTranslation = partitionTranslation_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPartitionedAccelerationStructureWritePartitionTranslationDataNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPartitionedAccelerationStructureWritePartitionTranslationDataNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( partitionIndex, partitionTranslation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PartitionedAccelerationStructureWritePartitionTranslationDataNV const & ) const = default; +#else + bool operator==( PartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( partitionIndex == rhs.partitionIndex ) && ( partitionTranslation == rhs.partitionTranslation ); +# endif + } + + bool operator!=( PartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t partitionIndex = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D partitionTranslation = {}; + }; + + struct PastPresentationTimingGOOGLE + { + using NativeType = VkPastPresentationTimingGOOGLE; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( uint32_t presentID_ = {}, + uint64_t desiredPresentTime_ = {}, + uint64_t actualPresentTime_ = {}, + uint64_t earliestPresentTime_ = {}, + uint64_t presentMargin_ = {} ) VULKAN_HPP_NOEXCEPT + : presentID{ presentID_ } + , desiredPresentTime{ desiredPresentTime_ } + , actualPresentTime{ actualPresentTime_ } + , earliestPresentTime{ earliestPresentTime_ } + , 
presentMargin{ presentMargin_ } + { + } + + VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + : PastPresentationTimingGOOGLE( *reinterpret_cast( &rhs ) ) + { + } + + PastPresentationTimingGOOGLE & operator=( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PastPresentationTimingGOOGLE & operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPastPresentationTimingGOOGLE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( presentID, desiredPresentTime, actualPresentTime, earliestPresentTime, presentMargin ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PastPresentationTimingGOOGLE const & ) const = default; +#else + bool operator==( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( presentID == rhs.presentID ) && ( desiredPresentTime == rhs.desiredPresentTime ) && ( actualPresentTime == rhs.actualPresentTime ) && + ( earliestPresentTime == rhs.earliestPresentTime ) && ( presentMargin == rhs.presentMargin ); +# endif + } + + bool operator!=( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t presentID = {}; + uint64_t desiredPresentTime = {}; + uint64_t actualPresentTime = {}; + uint64_t earliestPresentTime = {}; + uint64_t presentMargin = {}; + }; + + struct PerformanceConfigurationAcquireInfoINTEL + { + using NativeType = VkPerformanceConfigurationAcquireInfoINTEL; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceConfigurationAcquireInfoINTEL; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PerformanceConfigurationAcquireInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = + VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + { + } + + VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceConfigurationAcquireInfoINTEL( *reinterpret_cast( &rhs ) ) + { + } + + PerformanceConfigurationAcquireInfoINTEL & operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PerformanceConfigurationAcquireInfoINTEL & operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( 
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & + setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPerformanceConfigurationAcquireInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceConfigurationAcquireInfoINTEL const & ) const = default; +#else + bool operator==( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ); +# endif + } + + bool operator!=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = + VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated; + }; + + template <> + struct CppType + { + using Type = PerformanceConfigurationAcquireInfoINTEL; + }; + + struct PerformanceCounterDescriptionKHR + { + using NativeType = VkPerformanceCounterDescriptionKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterDescriptionKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {}, + std::array const & name_ = {}, + std::array const & category_ = {}, + std::array const & description_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , name{ name_ } + , category{ category_ } + , description{ description_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceCounterDescriptionKHR( *reinterpret_cast( &rhs ) ) + { + } + + PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PerformanceCounterDescriptionKHR & operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPerformanceCounterDescriptionKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> const &>
+#  endif
+    reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, flags, name, category, description );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    std::strong_ordering operator<=>( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
+        return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
+        return cmp;
+      if ( auto cmp = flags <=> rhs.flags; cmp != 0 )
+        return cmp;
+      if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 )
+        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+      if ( auto cmp = strcmp( category, rhs.category ); cmp != 0 )
+        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+      if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 )
+        return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( strcmp( name, rhs.name ) == 0 ) &&
+             ( strcmp( category, rhs.category ) == 0 ) && ( strcmp( description, rhs.description ) == 0 );
+    }
+
+    bool operator!=( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType                                 sType       = StructureType::ePerformanceCounterDescriptionKHR;
+    void *                                                              pNext       = {};
+    VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR         flags       = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name        = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> category    = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceCounterDescriptionKHR>
+  {
+    using Type = PerformanceCounterDescriptionKHR;
+  };
+
+  struct PerformanceCounterKHR
+  {
+    using NativeType = VkPerformanceCounterKHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterKHR;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14
+      PerformanceCounterKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR    unit_    = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric,
+                             VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR   scope_   = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer,
+                             VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32,
+                             std::array<uint8_t, VK_UUID_SIZE> const &          uuid_    = {},
+                             void *                                             pNext_   = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , unit{ unit_ }
+      , scope{ scope_ }
+      , storage{ storage_ }
+      , uuid{ uuid_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PerformanceCounterKHR( *reinterpret_cast<PerformanceCounterKHR const *>( &rhs ) )
+    {
+    }
+
+    PerformanceCounterKHR & operator=( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif
/*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PerformanceCounterKHR & operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPerformanceCounterKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, unit, scope, storage, uuid ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceCounterKHR const & ) const = default; +#else + bool operator==( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( unit == rhs.unit ) && ( scope == rhs.scope ) && ( storage == rhs.storage ) && + ( uuid == rhs.uuid ); +# endif + } + + bool operator!=( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric; + VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer; + VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D uuid = {}; + }; + + template <> + struct CppType + { + using Type = PerformanceCounterKHR; + }; + + union PerformanceCounterResultKHR + { + using NativeType = VkPerformanceCounterResultKHR; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( int32_t int32_ = {} ) : int32( int32_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( int64_t int64_ ) : int64( int64_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( uint32_t uint32_ ) : uint32( uint32_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( uint64_t uint64_ ) : uint64( uint64_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( float float32_ ) : float32( float32_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( double float64_ ) : float64( float64_ ) {} +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setInt32( int32_t int32_ ) VULKAN_HPP_NOEXCEPT + { + int32 = int32_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setInt64( int64_t int64_ ) VULKAN_HPP_NOEXCEPT + { + int64 = int64_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setUint32( uint32_t uint32_ ) VULKAN_HPP_NOEXCEPT + { + uint32 = uint32_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setUint64( uint64_t uint64_ ) VULKAN_HPP_NOEXCEPT + { + uint64 = uint64_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setFloat32( float float32_ ) VULKAN_HPP_NOEXCEPT + { + float32 = float32_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PerformanceCounterResultKHR & setFloat64( double float64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      float64 = float64_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPerformanceCounterResultKHR const &() const
+    {
+      return *reinterpret_cast<const VkPerformanceCounterResultKHR *>( this );
+    }
+
+    operator VkPerformanceCounterResultKHR &()
+    {
+      return *reinterpret_cast<VkPerformanceCounterResultKHR *>( this );
+    }
+
+    int32_t  int32;
+    int64_t  int64;
+    uint32_t uint32;
+    uint64_t uint64;
+    float    float32;
+    double   float64;
+  };
+
+  struct PerformanceMarkerInfoINTEL
+  {
+    using NativeType = VkPerformanceMarkerInfoINTEL;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceMarkerInfoINTEL;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( uint64_t marker_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , marker{ marker_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PerformanceMarkerInfoINTEL( *reinterpret_cast<PerformanceMarkerInfoINTEL const *>( &rhs ) )
+    {
+    }
+
+    PerformanceMarkerInfoINTEL & operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PerformanceMarkerInfoINTEL & operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT
+    {
+      marker = marker_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPerformanceMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( this );
+    }
+
+    operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceMarkerInfoINTEL *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint64_t const &>
+#  endif
+    reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, marker );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PerformanceMarkerInfoINTEL const & ) const = default;
+#else
+    bool operator==( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( marker == rhs.marker );
+#  endif
+    }
+
+    bool operator!=( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType  = StructureType::ePerformanceMarkerInfoINTEL;
+    const void *                        pNext  = {};
+    uint64_t                            marker = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePerformanceMarkerInfoINTEL>
+  {
+    using Type = PerformanceMarkerInfoINTEL;
+  };
+
+  struct PerformanceOverrideInfoINTEL
+  {
+    using NativeType = VkPerformanceOverrideInfoINTEL;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceOverrideInfoINTEL;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS
) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( + VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware, + VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, + uint64_t parameter_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + , enable{ enable_ } + , parameter{ parameter_ } + { + } + + VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceOverrideInfoINTEL( *reinterpret_cast( &rhs ) ) + { + } + + PerformanceOverrideInfoINTEL & operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PerformanceOverrideInfoINTEL & operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT + { + enable = enable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT + { + parameter = parameter_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPerformanceOverrideInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type, enable, parameter ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceOverrideInfoINTEL const & ) const = default; +#else + bool operator==( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( enable == rhs.enable ) && ( parameter == rhs.parameter ); +# endif + } + + bool operator!=( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware; + VULKAN_HPP_NAMESPACE::Bool32 enable = {}; + uint64_t parameter = {}; + }; + + template <> + struct CppType + { + using Type = PerformanceOverrideInfoINTEL; + }; + + struct PerformanceQuerySubmitInfoKHR + { + using NativeType = VkPerformanceQuerySubmitInfoKHR; + + static const bool allowDuplicate = false; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceQuerySubmitInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( uint32_t counterPassIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , counterPassIndex{ counterPassIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceQuerySubmitInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + PerformanceQuerySubmitInfoKHR & operator=( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PerformanceQuerySubmitInfoKHR & operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & setCounterPassIndex( uint32_t counterPassIndex_ ) VULKAN_HPP_NOEXCEPT + { + counterPassIndex = counterPassIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPerformanceQuerySubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, counterPassIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceQuerySubmitInfoKHR const & ) const = default; +#else + bool operator==( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( counterPassIndex == rhs.counterPassIndex ); +# endif + } + + bool operator!=( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceQuerySubmitInfoKHR; + const void * pNext = {}; + uint32_t counterPassIndex = {}; + }; + + template <> + struct CppType + { + using Type = PerformanceQuerySubmitInfoKHR; + }; + + struct PerformanceStreamMarkerInfoINTEL + { + using NativeType = VkPerformanceStreamMarkerInfoINTEL; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceStreamMarkerInfoINTEL; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( uint32_t marker_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , marker{ marker_ } + { + } + + VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceStreamMarkerInfoINTEL( 
VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceStreamMarkerInfoINTEL( *reinterpret_cast( &rhs ) ) + { + } + + PerformanceStreamMarkerInfoINTEL & operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PerformanceStreamMarkerInfoINTEL & operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT + { + marker = marker_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPerformanceStreamMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, marker ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PerformanceStreamMarkerInfoINTEL const & ) const = default; +#else + bool operator==( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( marker == rhs.marker ); +# endif + } + + bool operator!=( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL; + const void * pNext = {}; + uint32_t marker = {}; + }; + + template <> + struct CppType + { + using Type = PerformanceStreamMarkerInfoINTEL; + }; + + union PerformanceValueDataINTEL + { + using NativeType = VkPerformanceValueDataINTEL; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( uint32_t value32_ = {} ) : value32( value32_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( uint64_t value64_ ) : value64( value64_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( float valueFloat_ ) : valueFloat( valueFloat_ ) {} + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( const char * valueString_ ) : valueString( valueString_ ) {} +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValue32( uint32_t value32_ ) VULKAN_HPP_NOEXCEPT + { + value32 = value32_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValue64( uint64_t value64_ ) VULKAN_HPP_NOEXCEPT + { + value64 = value64_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueFloat( float valueFloat_ ) VULKAN_HPP_NOEXCEPT + { + valueFloat = valueFloat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueBool( VULKAN_HPP_NAMESPACE::Bool32 valueBool_ ) VULKAN_HPP_NOEXCEPT + { + valueBool = 
valueBool_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueString( const char * valueString_ ) VULKAN_HPP_NOEXCEPT + { + valueString = valueString_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPerformanceValueDataINTEL const &() const + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceValueDataINTEL &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + uint32_t value32; + uint64_t value64; + float valueFloat; + VULKAN_HPP_NAMESPACE::Bool32 valueBool; + const char * valueString; +#else + uint32_t value32; + uint64_t value64; + float valueFloat; + VkBool32 valueBool; + const char * valueString; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + struct PerformanceValueINTEL + { + using NativeType = VkPerformanceValueINTEL; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PerformanceValueINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32, + VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {} ) VULKAN_HPP_NOEXCEPT + : type{ type_ } + , data{ data_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PerformanceValueINTEL( *reinterpret_cast( &rhs ) ) + { + } + + PerformanceValueINTEL & operator=( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PerformanceValueINTEL & operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPerformanceValueINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( type, data ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32; + VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data = {}; + }; + + struct PhysicalDevice16BitStorageFeatures + { + using NativeType = VkPhysicalDevice16BitStorageFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice16BitStorageFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , storageBuffer16BitAccess{ storageBuffer16BitAccess_ } + , uniformAndStorageBuffer16BitAccess{ uniformAndStorageBuffer16BitAccess_ } + , storagePushConstant16{ storagePushConstant16_ } + , storageInputOutput16{ storageInputOutput16_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT 
= default; + + PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevice16BitStorageFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevice16BitStorageFeatures & operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevice16BitStorageFeatures & operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & + setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + storageBuffer16BitAccess = storageBuffer16BitAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & + setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & + setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT + { + storagePushConstant16 = storagePushConstant16_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & + setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT + { + storageInputOutput16 = storageInputOutput16_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevice16BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, storageBuffer16BitAccess, uniformAndStorageBuffer16BitAccess, storagePushConstant16, storageInputOutput16 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevice16BitStorageFeatures const & ) const = default; +#else + bool operator==( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) && + ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) && ( storagePushConstant16 == rhs.storagePushConstant16 ) && + ( storageInputOutput16 == rhs.storageInputOutput16 ); +# endif + } + + bool operator!=( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevice16BitStorageFeatures; + }; + + using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures; + + struct PhysicalDevice4444FormatsFeaturesEXT + { + using NativeType = VkPhysicalDevice4444FormatsFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , formatA4R4G4B4{ formatA4R4G4B4_ } + , formatA4B4G4R4{ formatA4B4G4R4_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevice4444FormatsFeaturesEXT( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevice4444FormatsFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevice4444FormatsFeaturesEXT & operator=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevice4444FormatsFeaturesEXT & operator=( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setFormatA4R4G4B4( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ ) VULKAN_HPP_NOEXCEPT + { + formatA4R4G4B4 = formatA4R4G4B4_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setFormatA4B4G4R4( VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ ) VULKAN_HPP_NOEXCEPT + { + formatA4B4G4R4 = formatA4B4G4R4_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevice4444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevice4444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, formatA4R4G4B4, formatA4B4G4R4 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevice4444FormatsFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatA4R4G4B4 == rhs.formatA4R4G4B4 ) && ( formatA4B4G4R4 == rhs.formatA4B4G4R4 ); +# endif + } + + bool operator!=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDevice4444FormatsFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4 = {}; + VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevice4444FormatsFeaturesEXT; + }; + + struct PhysicalDevice8BitStorageFeatures + { + using NativeType = VkPhysicalDevice8BitStorageFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice8BitStorageFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , storageBuffer8BitAccess{ storageBuffer8BitAccess_ } + , uniformAndStorageBuffer8BitAccess{ uniformAndStorageBuffer8BitAccess_ } + , storagePushConstant8{ storagePushConstant8_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevice8BitStorageFeatures( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevice8BitStorageFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevice8BitStorageFeatures & operator=( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevice8BitStorageFeatures & operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & + setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + storageBuffer8BitAccess = storageBuffer8BitAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & + setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & + setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT + { + storagePushConstant8 = storagePushConstant8_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevice8BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevice8BitStorageFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, storageBuffer8BitAccess, uniformAndStorageBuffer8BitAccess, storagePushConstant8 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevice8BitStorageFeatures const & ) const = default; +#else + bool 
operator==( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) && + ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) && ( storagePushConstant8 == rhs.storagePushConstant8 ); +# endif + } + + bool operator!=( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevice8BitStorageFeatures; + }; + + using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures; + + struct PhysicalDeviceASTCDecodeFeaturesEXT + { + using NativeType = VkPhysicalDeviceASTCDecodeFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , decodeModeSharedExponent{ decodeModeSharedExponent_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceASTCDecodeFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceASTCDecodeFeaturesEXT & operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceASTCDecodeFeaturesEXT & operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & + setDecodeModeSharedExponent( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT + { + decodeModeSharedExponent = decodeModeSharedExponent_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceASTCDecodeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, decodeModeSharedExponent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceASTCDecodeFeaturesEXT const & ) const = default; 
+#else + bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decodeModeSharedExponent == rhs.decodeModeSharedExponent ); +# endif + } + + bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceASTCDecodeFeaturesEXT; + }; + + struct PhysicalDeviceAccelerationStructureFeaturesKHR + { + using NativeType = VkPhysicalDeviceAccelerationStructureFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceAccelerationStructureFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , accelerationStructure{ accelerationStructure_ } + , accelerationStructureCaptureReplay{ accelerationStructureCaptureReplay_ } + , accelerationStructureIndirectBuild{ accelerationStructureIndirectBuild_ } + , accelerationStructureHostCommands{ accelerationStructureHostCommands_ } + , descriptorBindingAccelerationStructureUpdateAfterBind{ descriptorBindingAccelerationStructureUpdateAfterBind_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceAccelerationStructureFeaturesKHR( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAccelerationStructureFeaturesKHR( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceAccelerationStructureFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceAccelerationStructureFeaturesKHR & operator=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceAccelerationStructureFeaturesKHR & operator=( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & + setAccelerationStructure( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & + setAccelerationStructureCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 
accelerationStructureCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureCaptureReplay = accelerationStructureCaptureReplay_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & + setAccelerationStructureIndirectBuild( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureIndirectBuild = accelerationStructureIndirectBuild_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & + setAccelerationStructureHostCommands( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureHostCommands = accelerationStructureHostCommands_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setDescriptorBindingAccelerationStructureUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingAccelerationStructureUpdateAfterBind = descriptorBindingAccelerationStructureUpdateAfterBind_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceAccelerationStructureFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAccelerationStructureFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + accelerationStructure, + accelerationStructureCaptureReplay, + accelerationStructureIndirectBuild, + accelerationStructureHostCommands, + descriptorBindingAccelerationStructureUpdateAfterBind ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceAccelerationStructureFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ) && + ( accelerationStructureCaptureReplay == rhs.accelerationStructureCaptureReplay ) && + ( accelerationStructureIndirectBuild == rhs.accelerationStructureIndirectBuild ) && + ( accelerationStructureHostCommands == rhs.accelerationStructureHostCommands ) && + ( descriptorBindingAccelerationStructureUpdateAfterBind == rhs.descriptorBindingAccelerationStructureUpdateAfterBind ); +# endif + } + + bool operator!=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure = {}; + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild = {}; + VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceAccelerationStructureFeaturesKHR; + }; + + struct PhysicalDeviceAccelerationStructurePropertiesKHR + { + 
using NativeType = VkPhysicalDeviceAccelerationStructurePropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR( uint64_t maxGeometryCount_ = {}, + uint64_t maxInstanceCount_ = {}, + uint64_t maxPrimitiveCount_ = {}, + uint32_t maxPerStageDescriptorAccelerationStructures_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ = {}, + uint32_t maxDescriptorSetAccelerationStructures_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures_ = {}, + uint32_t minAccelerationStructureScratchOffsetAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxGeometryCount{ maxGeometryCount_ } + , maxInstanceCount{ maxInstanceCount_ } + , maxPrimitiveCount{ maxPrimitiveCount_ } + , maxPerStageDescriptorAccelerationStructures{ maxPerStageDescriptorAccelerationStructures_ } + , maxPerStageDescriptorUpdateAfterBindAccelerationStructures{ maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ } + , maxDescriptorSetAccelerationStructures{ maxDescriptorSetAccelerationStructures_ } + , maxDescriptorSetUpdateAfterBindAccelerationStructures{ maxDescriptorSetUpdateAfterBindAccelerationStructures_ } + , minAccelerationStructureScratchOffsetAlignment{ minAccelerationStructureScratchOffsetAlignment_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceAccelerationStructurePropertiesKHR( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAccelerationStructurePropertiesKHR( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceAccelerationStructurePropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceAccelerationStructurePropertiesKHR & operator=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceAccelerationStructurePropertiesKHR & operator=( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceAccelerationStructurePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAccelerationStructurePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxGeometryCount, + maxInstanceCount, + maxPrimitiveCount, + maxPerStageDescriptorAccelerationStructures, + maxPerStageDescriptorUpdateAfterBindAccelerationStructures, + maxDescriptorSetAccelerationStructures, + maxDescriptorSetUpdateAfterBindAccelerationStructures, + minAccelerationStructureScratchOffsetAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceAccelerationStructurePropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else 
+ return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxGeometryCount == rhs.maxGeometryCount ) && ( maxInstanceCount == rhs.maxInstanceCount ) && + ( maxPrimitiveCount == rhs.maxPrimitiveCount ) && + ( maxPerStageDescriptorAccelerationStructures == rhs.maxPerStageDescriptorAccelerationStructures ) && + ( maxPerStageDescriptorUpdateAfterBindAccelerationStructures == rhs.maxPerStageDescriptorUpdateAfterBindAccelerationStructures ) && + ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures ) && + ( maxDescriptorSetUpdateAfterBindAccelerationStructures == rhs.maxDescriptorSetUpdateAfterBindAccelerationStructures ) && + ( minAccelerationStructureScratchOffsetAlignment == rhs.minAccelerationStructureScratchOffsetAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR; + void * pNext = {}; + uint64_t maxGeometryCount = {}; + uint64_t maxInstanceCount = {}; + uint64_t maxPrimitiveCount = {}; + uint32_t maxPerStageDescriptorAccelerationStructures = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures = {}; + uint32_t maxDescriptorSetAccelerationStructures = {}; + uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures = {}; + uint32_t minAccelerationStructureScratchOffsetAlignment = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceAccelerationStructurePropertiesKHR; + }; + + struct PhysicalDeviceAddressBindingReportFeaturesEXT + { + using NativeType = VkPhysicalDeviceAddressBindingReportFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceAddressBindingReportFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 reportAddressBinding_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , reportAddressBinding{ reportAddressBinding_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceAddressBindingReportFeaturesEXT( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAddressBindingReportFeaturesEXT( VkPhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceAddressBindingReportFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceAddressBindingReportFeaturesEXT & operator=( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceAddressBindingReportFeaturesEXT & operator=( VkPhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAddressBindingReportFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAddressBindingReportFeaturesEXT & + setReportAddressBinding( VULKAN_HPP_NAMESPACE::Bool32 reportAddressBinding_ ) VULKAN_HPP_NOEXCEPT + { + reportAddressBinding = reportAddressBinding_; + return 
*this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceAddressBindingReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAddressBindingReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, reportAddressBinding ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceAddressBindingReportFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( reportAddressBinding == rhs.reportAddressBinding ); +# endif + } + + bool operator!=( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 reportAddressBinding = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceAddressBindingReportFeaturesEXT; + }; + + struct PhysicalDeviceAmigoProfilingFeaturesSEC + { + using NativeType = VkPhysicalDeviceAmigoProfilingFeaturesSEC; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceAmigoProfilingFeaturesSEC( VULKAN_HPP_NAMESPACE::Bool32 amigoProfiling_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , amigoProfiling{ amigoProfiling_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceAmigoProfilingFeaturesSEC( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAmigoProfilingFeaturesSEC( VkPhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceAmigoProfilingFeaturesSEC( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceAmigoProfilingFeaturesSEC & operator=( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceAmigoProfilingFeaturesSEC & operator=( VkPhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAmigoProfilingFeaturesSEC & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAmigoProfilingFeaturesSEC & setAmigoProfiling( VULKAN_HPP_NAMESPACE::Bool32 amigoProfiling_ ) VULKAN_HPP_NOEXCEPT + { + amigoProfiling = amigoProfiling_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceAmigoProfilingFeaturesSEC const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAmigoProfilingFeaturesSEC &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, amigoProfiling ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceAmigoProfilingFeaturesSEC const & ) const = default; +#else + bool operator==( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( amigoProfiling == rhs.amigoProfiling ); +# endif + } + + bool operator!=( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 amigoProfiling = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceAmigoProfilingFeaturesSEC; + }; + + struct PhysicalDeviceAntiLagFeaturesAMD + { + using NativeType = VkPhysicalDeviceAntiLagFeaturesAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAntiLagFeaturesAMD; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceAntiLagFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 antiLag_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , antiLag{ antiLag_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceAntiLagFeaturesAMD( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAntiLagFeaturesAMD( VkPhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceAntiLagFeaturesAMD( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceAntiLagFeaturesAMD & operator=( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceAntiLagFeaturesAMD & operator=( VkPhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAntiLagFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAntiLagFeaturesAMD & setAntiLag( VULKAN_HPP_NAMESPACE::Bool32 antiLag_ ) VULKAN_HPP_NOEXCEPT + { + antiLag = antiLag_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceAntiLagFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAntiLagFeaturesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, antiLag ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceAntiLagFeaturesAMD const & ) const = default; +#else + bool operator==( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + 
return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( antiLag == rhs.antiLag ); +# endif + } + + bool operator!=( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAntiLagFeaturesAMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 antiLag = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceAntiLagFeaturesAMD; + }; + + struct PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT + { + using NativeType = VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopDynamicState_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , attachmentFeedbackLoopDynamicState{ attachmentFeedbackLoopDynamicState_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT( VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT( + *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT & + operator=( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT & + operator=( VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT & + setAttachmentFeedbackLoopDynamicState( VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopDynamicState_ ) VULKAN_HPP_NOEXCEPT + { + attachmentFeedbackLoopDynamicState = attachmentFeedbackLoopDynamicState_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, attachmentFeedbackLoopDynamicState ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & ) const = default; +#else + bool 
operator==( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentFeedbackLoopDynamicState == rhs.attachmentFeedbackLoopDynamicState ); +# endif + } + + bool operator!=( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopDynamicState = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; + }; + + struct PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT + { + using NativeType = VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopLayout_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , attachmentFeedbackLoopLayout{ attachmentFeedbackLoopLayout_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & + operator=( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & operator=( VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & + setAttachmentFeedbackLoopLayout( VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopLayout_ ) VULKAN_HPP_NOEXCEPT + { + attachmentFeedbackLoopLayout = attachmentFeedbackLoopLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, attachmentFeedbackLoopLayout ); + } +#endif + +#if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentFeedbackLoopLayout == rhs.attachmentFeedbackLoopLayout ); +# endif + } + + bool operator!=( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopLayout = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; + }; + + struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT + { + using NativeType = VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , advancedBlendCoherentOperations{ advancedBlendCoherentOperations_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceBlendOperationAdvancedFeaturesEXT( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceBlendOperationAdvancedFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & + setAdvancedBlendCoherentOperations( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT + { + advancedBlendCoherentOperations = advancedBlendCoherentOperations_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, 
advancedBlendCoherentOperations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations ); +# endif + } + + bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceBlendOperationAdvancedFeaturesEXT; + }; + + struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT + { + using NativeType = VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( uint32_t advancedBlendMaxColorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , advancedBlendMaxColorAttachments{ advancedBlendMaxColorAttachments_ } + , advancedBlendIndependentBlend{ advancedBlendIndependentBlend_ } + , advancedBlendNonPremultipliedSrcColor{ advancedBlendNonPremultipliedSrcColor_ } + , advancedBlendNonPremultipliedDstColor{ advancedBlendNonPremultipliedDstColor_ } + , advancedBlendCorrelatedOverlap{ advancedBlendCorrelatedOverlap_ } + , advancedBlendAllOperations{ advancedBlendAllOperations_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceBlendOperationAdvancedPropertiesEXT( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceBlendOperationAdvancedPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceBlendOperationAdvancedPropertiesEXT & + operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if 
defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + advancedBlendMaxColorAttachments, + advancedBlendIndependentBlend, + advancedBlendNonPremultipliedSrcColor, + advancedBlendNonPremultipliedDstColor, + advancedBlendCorrelatedOverlap, + advancedBlendAllOperations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments ) && + ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend ) && + ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor ) && + ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor ) && + ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap ) && ( advancedBlendAllOperations == rhs.advancedBlendAllOperations ); +# endif + } + + bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT; + void * pNext = {}; + uint32_t advancedBlendMaxColorAttachments = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap = {}; + VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceBlendOperationAdvancedPropertiesEXT; + }; + + struct PhysicalDeviceBorderColorSwizzleFeaturesEXT + { + using NativeType = VkPhysicalDeviceBorderColorSwizzleFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , borderColorSwizzle{ borderColorSwizzle_ } + , borderColorSwizzleFromImage{ borderColorSwizzleFromImage_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBorderColorSwizzleFeaturesEXT( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceBorderColorSwizzleFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceBorderColorSwizzleFeaturesEXT & operator=( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceBorderColorSwizzleFeaturesEXT & operator=( 
VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & + setBorderColorSwizzle( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle_ ) VULKAN_HPP_NOEXCEPT + { + borderColorSwizzle = borderColorSwizzle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & + setBorderColorSwizzleFromImage( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage_ ) VULKAN_HPP_NOEXCEPT + { + borderColorSwizzleFromImage = borderColorSwizzleFromImage_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, borderColorSwizzle, borderColorSwizzleFromImage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( borderColorSwizzle == rhs.borderColorSwizzle ) && + ( borderColorSwizzleFromImage == rhs.borderColorSwizzleFromImage ); +# endif + } + + bool operator!=( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle = {}; + VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceBorderColorSwizzleFeaturesEXT; + }; + + struct PhysicalDeviceBufferDeviceAddressFeatures + { + using NativeType = VkPhysicalDeviceBufferDeviceAddressFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , bufferDeviceAddress{ bufferDeviceAddress_ } + , bufferDeviceAddressCaptureReplay{ bufferDeviceAddressCaptureReplay_ } + , bufferDeviceAddressMultiDevice{ bufferDeviceAddressMultiDevice_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( PhysicalDeviceBufferDeviceAddressFeatures const & rhs 
) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceBufferDeviceAddressFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceBufferDeviceAddressFeatures & operator=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceBufferDeviceAddressFeatures & operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & + setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddress = bufferDeviceAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & + setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & + setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceBufferDeviceAddressFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceBufferDeviceAddressFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceBufferDeviceAddressFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) && + ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) && + ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ); +# endif + } + + bool operator!=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceBufferDeviceAddressFeatures; + }; + + using PhysicalDeviceBufferDeviceAddressFeaturesKHR = 
PhysicalDeviceBufferDeviceAddressFeatures; + + struct PhysicalDeviceBufferDeviceAddressFeaturesEXT + { + using NativeType = VkPhysicalDeviceBufferDeviceAddressFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , bufferDeviceAddress{ bufferDeviceAddress_ } + , bufferDeviceAddressCaptureReplay{ bufferDeviceAddressCaptureReplay_ } + , bufferDeviceAddressMultiDevice{ bufferDeviceAddressMultiDevice_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceBufferDeviceAddressFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & + setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddress = bufferDeviceAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & + setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & + setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT + { + bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & ) const = 
default; +#else + bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) && + ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) && + ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ); +# endif + } + + bool operator!=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceBufferDeviceAddressFeaturesEXT; + }; + + using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT; + + struct PhysicalDeviceClusterAccelerationStructureFeaturesNV + { + using NativeType = VkPhysicalDeviceClusterAccelerationStructureFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterAccelerationStructureFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterAccelerationStructureFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 clusterAccelerationStructure_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , clusterAccelerationStructure{ clusterAccelerationStructure_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceClusterAccelerationStructureFeaturesNV( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceClusterAccelerationStructureFeaturesNV( VkPhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceClusterAccelerationStructureFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceClusterAccelerationStructureFeaturesNV & + operator=( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceClusterAccelerationStructureFeaturesNV & operator=( VkPhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterAccelerationStructureFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterAccelerationStructureFeaturesNV & + setClusterAccelerationStructure( VULKAN_HPP_NAMESPACE::Bool32 clusterAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + clusterAccelerationStructure = clusterAccelerationStructure_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceClusterAccelerationStructureFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceClusterAccelerationStructureFeaturesNV &() 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, clusterAccelerationStructure ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( clusterAccelerationStructure == rhs.clusterAccelerationStructure ); +# endif + } + + bool operator!=( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceClusterAccelerationStructureFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 clusterAccelerationStructure = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceClusterAccelerationStructureFeaturesNV; + }; + + struct PhysicalDeviceClusterAccelerationStructurePropertiesNV + { + using NativeType = VkPhysicalDeviceClusterAccelerationStructurePropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterAccelerationStructurePropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterAccelerationStructurePropertiesNV( uint32_t maxVerticesPerCluster_ = {}, + uint32_t maxTrianglesPerCluster_ = {}, + uint32_t clusterScratchByteAlignment_ = {}, + uint32_t clusterByteAlignment_ = {}, + uint32_t clusterTemplateByteAlignment_ = {}, + uint32_t clusterBottomLevelByteAlignment_ = {}, + uint32_t clusterTemplateBoundsByteAlignment_ = {}, + uint32_t maxClusterGeometryIndex_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxVerticesPerCluster{ maxVerticesPerCluster_ } + , maxTrianglesPerCluster{ maxTrianglesPerCluster_ } + , clusterScratchByteAlignment{ clusterScratchByteAlignment_ } + , clusterByteAlignment{ clusterByteAlignment_ } + , clusterTemplateByteAlignment{ clusterTemplateByteAlignment_ } + , clusterBottomLevelByteAlignment{ clusterBottomLevelByteAlignment_ } + , clusterTemplateBoundsByteAlignment{ clusterTemplateBoundsByteAlignment_ } + , maxClusterGeometryIndex{ maxClusterGeometryIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterAccelerationStructurePropertiesNV( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceClusterAccelerationStructurePropertiesNV( VkPhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceClusterAccelerationStructurePropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceClusterAccelerationStructurePropertiesNV & + operator=( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceClusterAccelerationStructurePropertiesNV & + operator=( VkPhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = 
*reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceClusterAccelerationStructurePropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceClusterAccelerationStructurePropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxVerticesPerCluster, + maxTrianglesPerCluster, + clusterScratchByteAlignment, + clusterByteAlignment, + clusterTemplateByteAlignment, + clusterBottomLevelByteAlignment, + clusterTemplateBoundsByteAlignment, + maxClusterGeometryIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVerticesPerCluster == rhs.maxVerticesPerCluster ) && + ( maxTrianglesPerCluster == rhs.maxTrianglesPerCluster ) && ( clusterScratchByteAlignment == rhs.clusterScratchByteAlignment ) && + ( clusterByteAlignment == rhs.clusterByteAlignment ) && ( clusterTemplateByteAlignment == rhs.clusterTemplateByteAlignment ) && + ( clusterBottomLevelByteAlignment == rhs.clusterBottomLevelByteAlignment ) && + ( clusterTemplateBoundsByteAlignment == rhs.clusterTemplateBoundsByteAlignment ) && ( maxClusterGeometryIndex == rhs.maxClusterGeometryIndex ); +# endif + } + + bool operator!=( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceClusterAccelerationStructurePropertiesNV; + void * pNext = {}; + uint32_t maxVerticesPerCluster = {}; + uint32_t maxTrianglesPerCluster = {}; + uint32_t clusterScratchByteAlignment = {}; + uint32_t clusterByteAlignment = {}; + uint32_t clusterTemplateByteAlignment = {}; + uint32_t clusterBottomLevelByteAlignment = {}; + uint32_t clusterTemplateBoundsByteAlignment = {}; + uint32_t maxClusterGeometryIndex = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceClusterAccelerationStructurePropertiesNV; + }; + + struct PhysicalDeviceClusterCullingShaderFeaturesHUAWEI + { + using NativeType = VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 clustercullingShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewClusterCullingShader_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , clustercullingShader{ clustercullingShader_ } + , multiviewClusterCullingShader{ multiviewClusterCullingShader_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & operator=( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & operator=( VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & + setClustercullingShader( VULKAN_HPP_NAMESPACE::Bool32 clustercullingShader_ ) VULKAN_HPP_NOEXCEPT + { + clustercullingShader = clustercullingShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & + setMultiviewClusterCullingShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewClusterCullingShader_ ) VULKAN_HPP_NOEXCEPT + { + multiviewClusterCullingShader = multiviewClusterCullingShader_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, clustercullingShader, multiviewClusterCullingShader ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & ) const = default; +#else + bool operator==( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( clustercullingShader == rhs.clustercullingShader ) && + ( multiviewClusterCullingShader == rhs.multiviewClusterCullingShader ); +# endif + } + + bool operator!=( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 clustercullingShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewClusterCullingShader = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceClusterCullingShaderFeaturesHUAWEI; + }; + + struct PhysicalDeviceClusterCullingShaderPropertiesHUAWEI + { + using NativeType = VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( std::array const & maxWorkGroupCount_ = {}, + std::array const & maxWorkGroupSize_ = {}, + uint32_t maxOutputClusterCount_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize indirectBufferOffsetAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxWorkGroupCount{ maxWorkGroupCount_ } + , maxWorkGroupSize{ maxWorkGroupSize_ } + , maxOutputClusterCount{ maxOutputClusterCount_ } + , indirectBufferOffsetAlignment{ indirectBufferOffsetAlignment_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceClusterCullingShaderPropertiesHUAWEI & + operator=( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceClusterCullingShaderPropertiesHUAWEI & operator=( VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxWorkGroupCount, maxWorkGroupSize, maxOutputClusterCount, indirectBufferOffsetAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & ) const = default; +#else + bool operator==( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxWorkGroupCount == rhs.maxWorkGroupCount ) && + ( maxWorkGroupSize == rhs.maxWorkGroupSize ) && ( maxOutputClusterCount == rhs.maxOutputClusterCount ) && + ( indirectBufferOffsetAlignment == rhs.indirectBufferOffsetAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxWorkGroupCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxWorkGroupSize = {}; + uint32_t maxOutputClusterCount = {}; + VULKAN_HPP_NAMESPACE::DeviceSize indirectBufferOffsetAlignment = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceClusterCullingShaderPropertiesHUAWEI; + }; + + struct PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI + { + using NativeType = VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI; + + static const bool 
allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 clusterShadingRate_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , clusterShadingRate{ clusterShadingRate_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI & + operator=( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI & operator=( VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI & + setClusterShadingRate( VULKAN_HPP_NAMESPACE::Bool32 clusterShadingRate_ ) VULKAN_HPP_NOEXCEPT + { + clusterShadingRate = clusterShadingRate_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, clusterShadingRate ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & ) const = default; +#else + bool operator==( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( clusterShadingRate == rhs.clusterShadingRate ); +# endif + } + + bool operator!=( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 clusterShadingRate = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI; + }; + + struct PhysicalDeviceCoherentMemoryFeaturesAMD + { + using NativeType = VkPhysicalDeviceCoherentMemoryFeaturesAMD; + + static const bool allowDuplicate = false; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceCoherentMemory{ deviceCoherentMemory_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCoherentMemoryFeaturesAMD( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD & + setDeviceCoherentMemory( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT + { + deviceCoherentMemory = deviceCoherentMemory_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceCoherentMemory ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCoherentMemoryFeaturesAMD const & ) const = default; +#else + bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceCoherentMemory == rhs.deviceCoherentMemory ); +# endif + } + + bool operator!=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCoherentMemoryFeaturesAMD; + }; + + struct PhysicalDeviceColorWriteEnableFeaturesEXT + { + using NativeType = VkPhysicalDeviceColorWriteEnableFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( 
VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , colorWriteEnable{ colorWriteEnable_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceColorWriteEnableFeaturesEXT( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceColorWriteEnableFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceColorWriteEnableFeaturesEXT & operator=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceColorWriteEnableFeaturesEXT & operator=( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT & + setColorWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ ) VULKAN_HPP_NOEXCEPT + { + colorWriteEnable = colorWriteEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceColorWriteEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceColorWriteEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, colorWriteEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceColorWriteEnableFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorWriteEnable == rhs.colorWriteEnable ); +# endif + } + + bool operator!=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceColorWriteEnableFeaturesEXT; + }; + + struct PhysicalDeviceCommandBufferInheritanceFeaturesNV + { + using NativeType = VkPhysicalDeviceCommandBufferInheritanceFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCommandBufferInheritanceFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCommandBufferInheritanceFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 commandBufferInheritance_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , commandBufferInheritance{ commandBufferInheritance_ } + { + } + + VULKAN_HPP_CONSTEXPR + 
PhysicalDeviceCommandBufferInheritanceFeaturesNV( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCommandBufferInheritanceFeaturesNV( VkPhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCommandBufferInheritanceFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCommandBufferInheritanceFeaturesNV & operator=( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCommandBufferInheritanceFeaturesNV & operator=( VkPhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCommandBufferInheritanceFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCommandBufferInheritanceFeaturesNV & + setCommandBufferInheritance( VULKAN_HPP_NAMESPACE::Bool32 commandBufferInheritance_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferInheritance = commandBufferInheritance_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCommandBufferInheritanceFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCommandBufferInheritanceFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, commandBufferInheritance ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandBufferInheritance == rhs.commandBufferInheritance ); +# endif + } + + bool operator!=( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCommandBufferInheritanceFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 commandBufferInheritance = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCommandBufferInheritanceFeaturesNV; + }; + + struct PhysicalDeviceComputeShaderDerivativesFeaturesKHR + { + using NativeType = VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , computeDerivativeGroupQuads{ 
computeDerivativeGroupQuads_ }
+      , computeDerivativeGroupLinear{ computeDerivativeGroupLinear_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceComputeShaderDerivativesFeaturesKHR( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceComputeShaderDerivativesFeaturesKHR( VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceComputeShaderDerivativesFeaturesKHR( *reinterpret_cast<PhysicalDeviceComputeShaderDerivativesFeaturesKHR const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceComputeShaderDerivativesFeaturesKHR &
+      operator=( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceComputeShaderDerivativesFeaturesKHR & operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesKHR const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesKHR &
+      setComputeDerivativeGroupQuads( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT
+    {
+      computeDerivativeGroupQuads = computeDerivativeGroupQuads_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesKHR &
+      setComputeDerivativeGroupLinear( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT
+    {
+      computeDerivativeGroupLinear = computeDerivativeGroupLinear_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, computeDerivativeGroupQuads, computeDerivativeGroupLinear );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads ) &&
+             ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType                        = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesKHR;
+    void *                              pNext                        = {};
+    VULKAN_HPP_NAMESPACE::Bool32        computeDerivativeGroupQuads  = {};
+    VULKAN_HPP_NAMESPACE::Bool32        computeDerivativeGroupLinear = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesKHR>
+  {
+    using Type = PhysicalDeviceComputeShaderDerivativesFeaturesKHR;
+  };
+
+  using PhysicalDeviceComputeShaderDerivativesFeaturesNV = PhysicalDeviceComputeShaderDerivativesFeaturesKHR;
+
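  // The feature structs above and the property structs below are consumed through a pNext
  // chain rooted at VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 / PhysicalDeviceProperties2.
  // A minimal usage sketch, assuming an existing VULKAN_HPP_NAMESPACE::PhysicalDevice handle
  // named `physicalDevice` (an assumption, not something defined in this header):
  //
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesKHR derivativeFeatures{};
  //   VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2                           features2{};
  //   features2.pNext = &derivativeFeatures;          // default sType values let the driver walk the chain
  //   physicalDevice.getFeatures2( &features2 );      // fills every struct linked into the chain
  //   bool quadDerivatives = ( derivativeFeatures.computeDerivativeGroupQuads == VK_TRUE );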
+  struct PhysicalDeviceComputeShaderDerivativesPropertiesKHR
+  {
+    using NativeType = VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceComputeShaderDerivativesPropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 meshAndTaskShaderDerivatives_ = {},
+                                                                              void *                       pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , meshAndTaskShaderDerivatives{ meshAndTaskShaderDerivatives_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceComputeShaderDerivativesPropertiesKHR( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceComputeShaderDerivativesPropertiesKHR( VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceComputeShaderDerivativesPropertiesKHR( *reinterpret_cast<PhysicalDeviceComputeShaderDerivativesPropertiesKHR const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceComputeShaderDerivativesPropertiesKHR &
+      operator=( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceComputeShaderDerivativesPropertiesKHR & operator=( VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR *>( this );
+    }
+
+    operator VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, meshAndTaskShaderDerivatives );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( meshAndTaskShaderDerivatives == rhs.meshAndTaskShaderDerivatives );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType                        = StructureType::ePhysicalDeviceComputeShaderDerivativesPropertiesKHR;
+    void *                              pNext                        = {};
+    VULKAN_HPP_NAMESPACE::Bool32        meshAndTaskShaderDerivatives = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceComputeShaderDerivativesPropertiesKHR>
+  {
+    using Type = PhysicalDeviceComputeShaderDerivativesPropertiesKHR;
+  };
+
+  struct PhysicalDeviceConditionalRenderingFeaturesEXT
+  {
+    using NativeType = VkPhysicalDeviceConditionalRenderingFeaturesEXT;
+
+    static const bool                    allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32
conditionalRendering_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , conditionalRendering{ conditionalRendering_ } + , inheritedConditionalRendering{ inheritedConditionalRendering_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceConditionalRenderingFeaturesEXT( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceConditionalRenderingFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & + setConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT + { + conditionalRendering = conditionalRendering_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & + setInheritedConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT + { + inheritedConditionalRendering = inheritedConditionalRendering_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, conditionalRendering, inheritedConditionalRendering ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceConditionalRenderingFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conditionalRendering == rhs.conditionalRendering ) && + ( inheritedConditionalRendering == rhs.inheritedConditionalRendering ); +# endif + } + + bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering = {}; + VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceConditionalRenderingFeaturesEXT; + }; + + struct 
PhysicalDeviceConservativeRasterizationPropertiesEXT + { + using NativeType = VkPhysicalDeviceConservativeRasterizationPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( float primitiveOverestimationSize_ = {}, + float maxExtraPrimitiveOverestimationSize_ = {}, + float extraPrimitiveOverestimationSizeGranularity_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , primitiveOverestimationSize{ primitiveOverestimationSize_ } + , maxExtraPrimitiveOverestimationSize{ maxExtraPrimitiveOverestimationSize_ } + , extraPrimitiveOverestimationSizeGranularity{ extraPrimitiveOverestimationSizeGranularity_ } + , primitiveUnderestimation{ primitiveUnderestimation_ } + , conservativePointAndLineRasterization{ conservativePointAndLineRasterization_ } + , degenerateTrianglesRasterized{ degenerateTrianglesRasterized_ } + , degenerateLinesRasterized{ degenerateLinesRasterized_ } + , fullyCoveredFragmentShaderInputVariable{ fullyCoveredFragmentShaderInputVariable_ } + , conservativeRasterizationPostDepthCoverage{ conservativeRasterizationPostDepthCoverage_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceConservativeRasterizationPropertiesEXT( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceConservativeRasterizationPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceConservativeRasterizationPropertiesEXT & + operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + primitiveOverestimationSize, + maxExtraPrimitiveOverestimationSize, + extraPrimitiveOverestimationSizeGranularity, + primitiveUnderestimation, + conservativePointAndLineRasterization, + degenerateTrianglesRasterized, + degenerateLinesRasterized, + fullyCoveredFragmentShaderInputVariable, + conservativeRasterizationPostDepthCoverage ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceConservativeRasterizationPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( primitiveOverestimationSize == rhs.primitiveOverestimationSize ) && + ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize ) && + ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity ) && + ( primitiveUnderestimation == rhs.primitiveUnderestimation ) && + ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization ) && + ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized ) && ( degenerateLinesRasterized == rhs.degenerateLinesRasterized ) && + ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable ) && + ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage ); +# endif + } + + bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; + void * pNext = {}; + float primitiveOverestimationSize = {}; + float maxExtraPrimitiveOverestimationSize = {}; + float extraPrimitiveOverestimationSizeGranularity = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation = {}; + VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization = {}; + VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized = {}; + VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized = {}; + VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable = {}; + VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT; + }; + + struct PhysicalDeviceCooperativeMatrix2FeaturesNV + { + using NativeType = VkPhysicalDeviceCooperativeMatrix2FeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrix2FeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2FeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixWorkgroupScope_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixFlexibleDimensions_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixReductions_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixConversions_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixPerElementOperations_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixTensorAddressing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixBlockLoads_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cooperativeMatrixWorkgroupScope{ cooperativeMatrixWorkgroupScope_ } + , cooperativeMatrixFlexibleDimensions{ cooperativeMatrixFlexibleDimensions_ } + , cooperativeMatrixReductions{ cooperativeMatrixReductions_ } + , cooperativeMatrixConversions{ cooperativeMatrixConversions_ } + , cooperativeMatrixPerElementOperations{ 
cooperativeMatrixPerElementOperations_ } + , cooperativeMatrixTensorAddressing{ cooperativeMatrixTensorAddressing_ } + , cooperativeMatrixBlockLoads{ cooperativeMatrixBlockLoads_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2FeaturesNV( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrix2FeaturesNV( VkPhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrix2FeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeMatrix2FeaturesNV & operator=( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeMatrix2FeaturesNV & operator=( VkPhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixWorkgroupScope( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixWorkgroupScope_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixWorkgroupScope = cooperativeMatrixWorkgroupScope_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixFlexibleDimensions( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixFlexibleDimensions_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixFlexibleDimensions = cooperativeMatrixFlexibleDimensions_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixReductions( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixReductions_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixReductions = cooperativeMatrixReductions_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixConversions( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixConversions_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixConversions = cooperativeMatrixConversions_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixPerElementOperations( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixPerElementOperations_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixPerElementOperations = cooperativeMatrixPerElementOperations_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixTensorAddressing( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixTensorAddressing_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixTensorAddressing = cooperativeMatrixTensorAddressing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixBlockLoads( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixBlockLoads_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixBlockLoads = cooperativeMatrixBlockLoads_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCooperativeMatrix2FeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrix2FeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) 
+# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + cooperativeMatrixWorkgroupScope, + cooperativeMatrixFlexibleDimensions, + cooperativeMatrixReductions, + cooperativeMatrixConversions, + cooperativeMatrixPerElementOperations, + cooperativeMatrixTensorAddressing, + cooperativeMatrixBlockLoads ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrix2FeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrixWorkgroupScope == rhs.cooperativeMatrixWorkgroupScope ) && + ( cooperativeMatrixFlexibleDimensions == rhs.cooperativeMatrixFlexibleDimensions ) && + ( cooperativeMatrixReductions == rhs.cooperativeMatrixReductions ) && ( cooperativeMatrixConversions == rhs.cooperativeMatrixConversions ) && + ( cooperativeMatrixPerElementOperations == rhs.cooperativeMatrixPerElementOperations ) && + ( cooperativeMatrixTensorAddressing == rhs.cooperativeMatrixTensorAddressing ) && + ( cooperativeMatrixBlockLoads == rhs.cooperativeMatrixBlockLoads ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrix2FeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixWorkgroupScope = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixFlexibleDimensions = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixReductions = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixConversions = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixPerElementOperations = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixTensorAddressing = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixBlockLoads = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrix2FeaturesNV; + }; + + struct PhysicalDeviceCooperativeMatrix2PropertiesNV + { + using NativeType = VkPhysicalDeviceCooperativeMatrix2PropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2PropertiesNV( uint32_t cooperativeMatrixWorkgroupScopeMaxWorkgroupSize_ = {}, + uint32_t cooperativeMatrixFlexibleDimensionsMaxDimension_ = {}, + uint32_t cooperativeMatrixWorkgroupScopeReservedSharedMemory_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cooperativeMatrixWorkgroupScopeMaxWorkgroupSize{ cooperativeMatrixWorkgroupScopeMaxWorkgroupSize_ } + , cooperativeMatrixFlexibleDimensionsMaxDimension{ cooperativeMatrixFlexibleDimensionsMaxDimension_ } + , cooperativeMatrixWorkgroupScopeReservedSharedMemory{ cooperativeMatrixWorkgroupScopeReservedSharedMemory_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2PropertiesNV( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrix2PropertiesNV( 
VkPhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrix2PropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeMatrix2PropertiesNV & operator=( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeMatrix2PropertiesNV & operator=( VkPhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceCooperativeMatrix2PropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrix2PropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + cooperativeMatrixWorkgroupScopeMaxWorkgroupSize, + cooperativeMatrixFlexibleDimensionsMaxDimension, + cooperativeMatrixWorkgroupScopeReservedSharedMemory ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrix2PropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( cooperativeMatrixWorkgroupScopeMaxWorkgroupSize == rhs.cooperativeMatrixWorkgroupScopeMaxWorkgroupSize ) && + ( cooperativeMatrixFlexibleDimensionsMaxDimension == rhs.cooperativeMatrixFlexibleDimensionsMaxDimension ) && + ( cooperativeMatrixWorkgroupScopeReservedSharedMemory == rhs.cooperativeMatrixWorkgroupScopeReservedSharedMemory ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV; + void * pNext = {}; + uint32_t cooperativeMatrixWorkgroupScopeMaxWorkgroupSize = {}; + uint32_t cooperativeMatrixFlexibleDimensionsMaxDimension = {}; + uint32_t cooperativeMatrixWorkgroupScopeReservedSharedMemory = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrix2PropertiesNV; + }; + + struct PhysicalDeviceCooperativeMatrixFeaturesKHR + { + using NativeType = VkPhysicalDeviceCooperativeMatrixFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cooperativeMatrix{ cooperativeMatrix_ } + , cooperativeMatrixRobustBufferAccess{ cooperativeMatrixRobustBufferAccess_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesKHR( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixFeaturesKHR( 
VkPhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrixFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeMatrixFeaturesKHR & operator=( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeMatrixFeaturesKHR & operator=( VkPhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesKHR & + setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrix = cooperativeMatrix_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesKHR & + setCooperativeMatrixRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCooperativeMatrixFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cooperativeMatrix, cooperativeMatrixRobustBufferAccess ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrix == rhs.cooperativeMatrix ) && + ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrixFeaturesKHR; + }; + + struct PhysicalDeviceCooperativeMatrixFeaturesNV + { + using NativeType = VkPhysicalDeviceCooperativeMatrixFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {}, + 
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cooperativeMatrix{ cooperativeMatrix_ } + , cooperativeMatrixRobustBufferAccess{ cooperativeMatrixRobustBufferAccess_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrixFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & + setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrix = cooperativeMatrix_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & + setCooperativeMatrixRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cooperativeMatrix, cooperativeMatrixRobustBufferAccess ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrix == rhs.cooperativeMatrix ) && + ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrixFeaturesNV; + }; + + struct PhysicalDeviceCooperativeMatrixPropertiesKHR + { + using NativeType = VkPhysicalDeviceCooperativeMatrixPropertiesKHR; + + static const bool allowDuplicate = false; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cooperativeMatrixSupportedStages{ cooperativeMatrixSupportedStages_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesKHR( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixPropertiesKHR( VkPhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrixPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeMatrixPropertiesKHR & operator=( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeMatrixPropertiesKHR & operator=( VkPhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceCooperativeMatrixPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cooperativeMatrixSupportedStages ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrixPropertiesKHR; + }; + + struct PhysicalDeviceCooperativeMatrixPropertiesNV + { + using NativeType = VkPhysicalDeviceCooperativeMatrixPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cooperativeMatrixSupportedStages{ cooperativeMatrixSupportedStages_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( 
PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrixPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cooperativeMatrixSupportedStages ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrixPropertiesNV; + }; + + struct PhysicalDeviceCooperativeVectorFeaturesNV + { + using NativeType = VkPhysicalDeviceCooperativeVectorFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeVectorFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeVectorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cooperativeVector_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeVectorTraining_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cooperativeVector{ cooperativeVector_ } + , cooperativeVectorTraining{ cooperativeVectorTraining_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeVectorFeaturesNV( PhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeVectorFeaturesNV( VkPhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeVectorFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeVectorFeaturesNV & operator=( PhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeVectorFeaturesNV & operator=( 
VkPhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeVectorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeVectorFeaturesNV & + setCooperativeVector( VULKAN_HPP_NAMESPACE::Bool32 cooperativeVector_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeVector = cooperativeVector_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeVectorFeaturesNV & + setCooperativeVectorTraining( VULKAN_HPP_NAMESPACE::Bool32 cooperativeVectorTraining_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeVectorTraining = cooperativeVectorTraining_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCooperativeVectorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeVectorFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cooperativeVector, cooperativeVectorTraining ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeVectorFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeVector == rhs.cooperativeVector ) && + ( cooperativeVectorTraining == rhs.cooperativeVectorTraining ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeVectorFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeVector = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeVectorTraining = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeVectorFeaturesNV; + }; + + struct PhysicalDeviceCooperativeVectorPropertiesNV + { + using NativeType = VkPhysicalDeviceCooperativeVectorPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeVectorPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeVectorPropertiesNV( VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeVectorSupportedStages_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeVectorTrainingFloat16Accumulation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeVectorTrainingFloat32Accumulation_ = {}, + uint32_t maxCooperativeVectorComponents_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cooperativeVectorSupportedStages{ cooperativeVectorSupportedStages_ } + , cooperativeVectorTrainingFloat16Accumulation{ cooperativeVectorTrainingFloat16Accumulation_ } + , cooperativeVectorTrainingFloat32Accumulation{ 
cooperativeVectorTrainingFloat32Accumulation_ } + , maxCooperativeVectorComponents{ maxCooperativeVectorComponents_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeVectorPropertiesNV( PhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeVectorPropertiesNV( VkPhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeVectorPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeVectorPropertiesNV & operator=( PhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeVectorPropertiesNV & operator=( VkPhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceCooperativeVectorPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeVectorPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + cooperativeVectorSupportedStages, + cooperativeVectorTrainingFloat16Accumulation, + cooperativeVectorTrainingFloat32Accumulation, + maxCooperativeVectorComponents ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeVectorPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeVectorSupportedStages == rhs.cooperativeVectorSupportedStages ) && + ( cooperativeVectorTrainingFloat16Accumulation == rhs.cooperativeVectorTrainingFloat16Accumulation ) && + ( cooperativeVectorTrainingFloat32Accumulation == rhs.cooperativeVectorTrainingFloat32Accumulation ) && + ( maxCooperativeVectorComponents == rhs.maxCooperativeVectorComponents ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeVectorPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeVectorSupportedStages = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeVectorTrainingFloat16Accumulation = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeVectorTrainingFloat32Accumulation = {}; + uint32_t maxCooperativeVectorComponents = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeVectorPropertiesNV; + }; + + struct PhysicalDeviceCopyMemoryIndirectFeaturesNV + { + using NativeType = VkPhysicalDeviceCopyMemoryIndirectFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 indirectCopy_ = {}, + void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , indirectCopy{ indirectCopy_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectFeaturesNV( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCopyMemoryIndirectFeaturesNV( VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCopyMemoryIndirectFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCopyMemoryIndirectFeaturesNV & operator=( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCopyMemoryIndirectFeaturesNV & operator=( VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesNV & setIndirectCopy( VULKAN_HPP_NAMESPACE::Bool32 indirectCopy_ ) VULKAN_HPP_NOEXCEPT + { + indirectCopy = indirectCopy_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, indirectCopy ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indirectCopy == rhs.indirectCopy ); +# endif + } + + bool operator!=( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 indirectCopy = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCopyMemoryIndirectFeaturesNV; + }; + + struct PhysicalDeviceCopyMemoryIndirectPropertiesNV + { + using NativeType = VkPhysicalDeviceCopyMemoryIndirectPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectPropertiesNV( VULKAN_HPP_NAMESPACE::QueueFlags supportedQueues_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , supportedQueues{ supportedQueues_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectPropertiesNV( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
PhysicalDeviceCopyMemoryIndirectPropertiesNV( VkPhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCopyMemoryIndirectPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCopyMemoryIndirectPropertiesNV & operator=( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCopyMemoryIndirectPropertiesNV & operator=( VkPhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceCopyMemoryIndirectPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCopyMemoryIndirectPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, supportedQueues ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedQueues == rhs.supportedQueues ); +# endif + } + + bool operator!=( PhysicalDeviceCopyMemoryIndirectPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueueFlags supportedQueues = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCopyMemoryIndirectPropertiesNV; + }; + + struct PhysicalDeviceCornerSampledImageFeaturesNV + { + using NativeType = VkPhysicalDeviceCornerSampledImageFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cornerSampledImage{ cornerSampledImage_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCornerSampledImageFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCornerSampledImageFeaturesNV & operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCornerSampledImageFeaturesNV & operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceCornerSampledImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV & + setCornerSampledImage( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT + { + cornerSampledImage = cornerSampledImage_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCornerSampledImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cornerSampledImage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cornerSampledImage == rhs.cornerSampledImage ); +# endif + } + + bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCornerSampledImageFeaturesNV; + }; + + struct PhysicalDeviceCoverageReductionModeFeaturesNV + { + using NativeType = VkPhysicalDeviceCoverageReductionModeFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , coverageReductionMode{ coverageReductionMode_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceCoverageReductionModeFeaturesNV( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCoverageReductionModeFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceCoverageReductionModeFeaturesNV & + setCoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT + { + coverageReductionMode = coverageReductionMode_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, coverageReductionMode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCoverageReductionModeFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( coverageReductionMode == rhs.coverageReductionMode ); +# endif + } + + bool operator!=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCoverageReductionModeFeaturesNV; + }; + + struct PhysicalDeviceCubicClampFeaturesQCOM + { + using NativeType = VkPhysicalDeviceCubicClampFeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCubicClampFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicClampFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 cubicRangeClamp_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cubicRangeClamp{ cubicRangeClamp_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicClampFeaturesQCOM( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCubicClampFeaturesQCOM( VkPhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCubicClampFeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCubicClampFeaturesQCOM & operator=( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCubicClampFeaturesQCOM & operator=( VkPhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicClampFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicClampFeaturesQCOM & setCubicRangeClamp( VULKAN_HPP_NAMESPACE::Bool32 cubicRangeClamp_ ) VULKAN_HPP_NOEXCEPT + { + cubicRangeClamp = cubicRangeClamp_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCubicClampFeaturesQCOM const &() 
const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCubicClampFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cubicRangeClamp ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCubicClampFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cubicRangeClamp == rhs.cubicRangeClamp ); +# endif + } + + bool operator!=( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCubicClampFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cubicRangeClamp = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCubicClampFeaturesQCOM; + }; + + struct PhysicalDeviceCubicWeightsFeaturesQCOM + { + using NativeType = VkPhysicalDeviceCubicWeightsFeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCubicWeightsFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicWeightsFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 selectableCubicWeights_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , selectableCubicWeights{ selectableCubicWeights_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicWeightsFeaturesQCOM( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCubicWeightsFeaturesQCOM( VkPhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCubicWeightsFeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCubicWeightsFeaturesQCOM & operator=( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCubicWeightsFeaturesQCOM & operator=( VkPhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicWeightsFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicWeightsFeaturesQCOM & + setSelectableCubicWeights( VULKAN_HPP_NAMESPACE::Bool32 selectableCubicWeights_ ) VULKAN_HPP_NOEXCEPT + { + selectableCubicWeights = selectableCubicWeights_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCubicWeightsFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCubicWeightsFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const 
VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, selectableCubicWeights ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCubicWeightsFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( selectableCubicWeights == rhs.selectableCubicWeights ); +# endif + } + + bool operator!=( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCubicWeightsFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 selectableCubicWeights = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCubicWeightsFeaturesQCOM; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDeviceCudaKernelLaunchFeaturesNV + { + using NativeType = VkPhysicalDeviceCudaKernelLaunchFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCudaKernelLaunchFeaturesNV; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cudaKernelLaunchFeatures_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cudaKernelLaunchFeatures{ cudaKernelLaunchFeatures_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchFeaturesNV( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCudaKernelLaunchFeaturesNV( VkPhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCudaKernelLaunchFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCudaKernelLaunchFeaturesNV & operator=( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCudaKernelLaunchFeaturesNV & operator=( VkPhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCudaKernelLaunchFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCudaKernelLaunchFeaturesNV & + setCudaKernelLaunchFeatures( VULKAN_HPP_NAMESPACE::Bool32 cudaKernelLaunchFeatures_ ) VULKAN_HPP_NOEXCEPT + { + cudaKernelLaunchFeatures = cudaKernelLaunchFeatures_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, cudaKernelLaunchFeatures ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PhysicalDeviceCudaKernelLaunchFeaturesNV const & ) const = default; +# else + bool operator==( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cudaKernelLaunchFeatures == rhs.cudaKernelLaunchFeatures ); +# endif + } + + bool operator!=( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCudaKernelLaunchFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cudaKernelLaunchFeatures = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCudaKernelLaunchFeaturesNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDeviceCudaKernelLaunchPropertiesNV + { + using NativeType = VkPhysicalDeviceCudaKernelLaunchPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCudaKernelLaunchPropertiesNV; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchPropertiesNV( uint32_t computeCapabilityMinor_ = {}, + uint32_t computeCapabilityMajor_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , computeCapabilityMinor{ computeCapabilityMinor_ } + , computeCapabilityMajor{ computeCapabilityMajor_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchPropertiesNV( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCudaKernelLaunchPropertiesNV( VkPhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCudaKernelLaunchPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCudaKernelLaunchPropertiesNV & operator=( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCudaKernelLaunchPropertiesNV & operator=( VkPhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceCudaKernelLaunchPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCudaKernelLaunchPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, computeCapabilityMinor, computeCapabilityMajor ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCudaKernelLaunchPropertiesNV const & ) const = default; +# else + bool operator==( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( computeCapabilityMinor == rhs.computeCapabilityMinor ) && + ( computeCapabilityMajor == rhs.computeCapabilityMajor ); +# endif + } + + bool operator!=( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs 
) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCudaKernelLaunchPropertiesNV; + void * pNext = {}; + uint32_t computeCapabilityMinor = {}; + uint32_t computeCapabilityMajor = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCudaKernelLaunchPropertiesNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct PhysicalDeviceCustomBorderColorFeaturesEXT + { + using NativeType = VkPhysicalDeviceCustomBorderColorFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , customBorderColors{ customBorderColors_ } + , customBorderColorWithoutFormat{ customBorderColorWithoutFormat_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCustomBorderColorFeaturesEXT( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCustomBorderColorFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & + setCustomBorderColors( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ ) VULKAN_HPP_NOEXCEPT + { + customBorderColors = customBorderColors_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & + setCustomBorderColorWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ ) VULKAN_HPP_NOEXCEPT + { + customBorderColorWithoutFormat = customBorderColorWithoutFormat_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCustomBorderColorFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, customBorderColors, customBorderColorWithoutFormat ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCustomBorderColorFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if 
defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( customBorderColors == rhs.customBorderColors ) && + ( customBorderColorWithoutFormat == rhs.customBorderColorWithoutFormat ); +# endif + } + + bool operator!=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 customBorderColors = {}; + VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCustomBorderColorFeaturesEXT; + }; + + struct PhysicalDeviceCustomBorderColorPropertiesEXT + { + using NativeType = VkPhysicalDeviceCustomBorderColorPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( uint32_t maxCustomBorderColorSamplers_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxCustomBorderColorSamplers{ maxCustomBorderColorSamplers_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCustomBorderColorPropertiesEXT( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCustomBorderColorPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCustomBorderColorPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxCustomBorderColorSamplers ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCustomBorderColorPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxCustomBorderColorSamplers == rhs.maxCustomBorderColorSamplers ); +# endif + } + + bool operator!=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT; + void * pNext = {}; + uint32_t 
maxCustomBorderColorSamplers = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCustomBorderColorPropertiesEXT; + }; + + struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV + { + using NativeType = VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dedicatedAllocationImageAliasing{ dedicatedAllocationImageAliasing_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & + operator=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & + operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & + setDedicatedAllocationImageAliasing( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ ) VULKAN_HPP_NOEXCEPT + { + dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dedicatedAllocationImageAliasing ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing ); +# endif + } + + bool operator!=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return 
!operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + }; + + struct PhysicalDeviceDepthBiasControlFeaturesEXT + { + using NativeType = VkPhysicalDeviceDepthBiasControlFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthBiasControlFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthBiasControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasControl_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 leastRepresentableValueForceUnormRepresentation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 floatRepresentation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , depthBiasControl{ depthBiasControl_ } + , leastRepresentableValueForceUnormRepresentation{ leastRepresentableValueForceUnormRepresentation_ } + , floatRepresentation{ floatRepresentation_ } + , depthBiasExact{ depthBiasExact_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthBiasControlFeaturesEXT( PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthBiasControlFeaturesEXT( VkPhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthBiasControlFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDepthBiasControlFeaturesEXT & operator=( PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDepthBiasControlFeaturesEXT & operator=( VkPhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & + setDepthBiasControl( VULKAN_HPP_NAMESPACE::Bool32 depthBiasControl_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasControl = depthBiasControl_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & + setLeastRepresentableValueForceUnormRepresentation( VULKAN_HPP_NAMESPACE::Bool32 leastRepresentableValueForceUnormRepresentation_ ) VULKAN_HPP_NOEXCEPT + { + leastRepresentableValueForceUnormRepresentation = leastRepresentableValueForceUnormRepresentation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & + setFloatRepresentation( VULKAN_HPP_NAMESPACE::Bool32 floatRepresentation_ ) VULKAN_HPP_NOEXCEPT + { + floatRepresentation = floatRepresentation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & setDepthBiasExact( VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact_ ) VULKAN_HPP_NOEXCEPT + { + depthBiasExact = depthBiasExact_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDepthBiasControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + 
return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthBiasControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthBiasControl, leastRepresentableValueForceUnormRepresentation, floatRepresentation, depthBiasExact ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDepthBiasControlFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthBiasControl == rhs.depthBiasControl ) && + ( leastRepresentableValueForceUnormRepresentation == rhs.leastRepresentableValueForceUnormRepresentation ) && + ( floatRepresentation == rhs.floatRepresentation ) && ( depthBiasExact == rhs.depthBiasExact ); +# endif + } + + bool operator!=( PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthBiasControlFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthBiasControl = {}; + VULKAN_HPP_NAMESPACE::Bool32 leastRepresentableValueForceUnormRepresentation = {}; + VULKAN_HPP_NAMESPACE::Bool32 floatRepresentation = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthBiasControlFeaturesEXT; + }; + + struct PhysicalDeviceDepthClampControlFeaturesEXT + { + using NativeType = VkPhysicalDeviceDepthClampControlFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClampControlFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampControl_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , depthClampControl{ depthClampControl_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampControlFeaturesEXT( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthClampControlFeaturesEXT( VkPhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthClampControlFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDepthClampControlFeaturesEXT & operator=( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDepthClampControlFeaturesEXT & operator=( VkPhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampControlFeaturesEXT & + setDepthClampControl( VULKAN_HPP_NAMESPACE::Bool32 
depthClampControl_ ) VULKAN_HPP_NOEXCEPT + { + depthClampControl = depthClampControl_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDepthClampControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthClampControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthClampControl ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDepthClampControlFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClampControl == rhs.depthClampControl ); +# endif + } + + bool operator!=( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClampControlFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClampControl = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthClampControlFeaturesEXT; + }; + + struct PhysicalDeviceDepthClampZeroOneFeaturesKHR + { + using NativeType = VkPhysicalDeviceDepthClampZeroOneFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampZeroOneFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 depthClampZeroOne_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , depthClampZeroOne{ depthClampZeroOne_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampZeroOneFeaturesKHR( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthClampZeroOneFeaturesKHR( VkPhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthClampZeroOneFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDepthClampZeroOneFeaturesKHR & operator=( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDepthClampZeroOneFeaturesKHR & operator=( VkPhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampZeroOneFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampZeroOneFeaturesKHR & + setDepthClampZeroOne( VULKAN_HPP_NAMESPACE::Bool32 depthClampZeroOne_ ) VULKAN_HPP_NOEXCEPT + { + depthClampZeroOne = depthClampZeroOne_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDepthClampZeroOneFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthClampZeroOneFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthClampZeroOne ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClampZeroOne == rhs.depthClampZeroOne ); +# endif + } + + bool operator!=( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClampZeroOne = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthClampZeroOneFeaturesKHR; + }; + + using PhysicalDeviceDepthClampZeroOneFeaturesEXT = PhysicalDeviceDepthClampZeroOneFeaturesKHR; + + struct PhysicalDeviceDepthClipControlFeaturesEXT + { + using NativeType = VkPhysicalDeviceDepthClipControlFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipControl_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , depthClipControl{ depthClipControl_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthClipControlFeaturesEXT( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthClipControlFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDepthClipControlFeaturesEXT & operator=( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDepthClipControlFeaturesEXT & operator=( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT & + setDepthClipControl( VULKAN_HPP_NAMESPACE::Bool32 depthClipControl_ ) VULKAN_HPP_NOEXCEPT + { + depthClipControl = depthClipControl_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDepthClipControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthClipControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( 
VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthClipControl ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDepthClipControlFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClipControl == rhs.depthClipControl ); +# endif + } + + bool operator!=( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClipControl = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthClipControlFeaturesEXT; + }; + + struct PhysicalDeviceDepthClipEnableFeaturesEXT + { + using NativeType = VkPhysicalDeviceDepthClipEnableFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , depthClipEnable{ depthClipEnable_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthClipEnableFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthClipEnable = depthClipEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthClipEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PhysicalDeviceDepthClipEnableFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClipEnable == rhs.depthClipEnable ); +# endif + } + + bool operator!=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthClipEnableFeaturesEXT; + }; + + struct PhysicalDeviceDepthStencilResolveProperties + { + using NativeType = VkPhysicalDeviceDepthStencilResolveProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthStencilResolveProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , supportedDepthResolveModes{ supportedDepthResolveModes_ } + , supportedStencilResolveModes{ supportedStencilResolveModes_ } + , independentResolveNone{ independentResolveNone_ } + , independentResolve{ independentResolve_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthStencilResolveProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDepthStencilResolveProperties & operator=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDepthStencilResolveProperties & operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDepthStencilResolveProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthStencilResolveProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, supportedDepthResolveModes, supportedStencilResolveModes, independentResolveNone, independentResolve ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDepthStencilResolveProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + 
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) && + ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) && ( independentResolveNone == rhs.independentResolveNone ) && + ( independentResolve == rhs.independentResolve ); +# endif + } + + bool operator!=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolveProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthStencilResolveProperties; + }; + + using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties; + + struct PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT + { + using NativeType = VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( size_t combinedImageSamplerDensityMapDescriptorSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , combinedImageSamplerDensityMapDescriptorSize{ combinedImageSamplerDensityMapDescriptorSize_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT & + operator=( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT & operator=( VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, combinedImageSamplerDensityMapDescriptorSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return 
this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( combinedImageSamplerDensityMapDescriptorSize == rhs.combinedImageSamplerDensityMapDescriptorSize ); +# endif + } + + bool operator!=( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; + void * pNext = {}; + size_t combinedImageSamplerDensityMapDescriptorSize = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; + }; + + struct PhysicalDeviceDescriptorBufferFeaturesEXT + { + using NativeType = VkPhysicalDeviceDescriptorBufferFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorBufferFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 descriptorBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferImageLayoutIgnored_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferPushDescriptors_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , descriptorBuffer{ descriptorBuffer_ } + , descriptorBufferCaptureReplay{ descriptorBufferCaptureReplay_ } + , descriptorBufferImageLayoutIgnored{ descriptorBufferImageLayoutIgnored_ } + , descriptorBufferPushDescriptors{ descriptorBufferPushDescriptors_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferFeaturesEXT( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorBufferFeaturesEXT( VkPhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorBufferFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDescriptorBufferFeaturesEXT & operator=( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDescriptorBufferFeaturesEXT & operator=( VkPhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & + setDescriptorBuffer( VULKAN_HPP_NAMESPACE::Bool32 descriptorBuffer_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBuffer = descriptorBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & + setDescriptorBufferCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBufferCaptureReplay = descriptorBufferCaptureReplay_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & + setDescriptorBufferImageLayoutIgnored( VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferImageLayoutIgnored_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBufferImageLayoutIgnored = descriptorBufferImageLayoutIgnored_; + return 
*this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & + setDescriptorBufferPushDescriptors( VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferPushDescriptors_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBufferPushDescriptors = descriptorBufferPushDescriptors_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDescriptorBufferFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorBufferFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, descriptorBuffer, descriptorBufferCaptureReplay, descriptorBufferImageLayoutIgnored, descriptorBufferPushDescriptors ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorBufferFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorBuffer == rhs.descriptorBuffer ) && + ( descriptorBufferCaptureReplay == rhs.descriptorBufferCaptureReplay ) && + ( descriptorBufferImageLayoutIgnored == rhs.descriptorBufferImageLayoutIgnored ) && + ( descriptorBufferPushDescriptors == rhs.descriptorBufferPushDescriptors ); +# endif + } + + bool operator!=( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorBufferFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferImageLayoutIgnored = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferPushDescriptors = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorBufferFeaturesEXT; + }; + + struct PhysicalDeviceDescriptorBufferPropertiesEXT + { + using NativeType = VkPhysicalDeviceDescriptorBufferPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorBufferPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 combinedImageSamplerDescriptorSingleArray_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferlessPushDescriptors_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 allowSamplerImageViewPostSubmitCreation_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize descriptorBufferOffsetAlignment_ = {}, + uint32_t maxDescriptorBufferBindings_ = {}, + uint32_t maxResourceDescriptorBufferBindings_ = {}, + uint32_t maxSamplerDescriptorBufferBindings_ = {}, + uint32_t maxEmbeddedImmutableSamplerBindings_ = {}, + uint32_t maxEmbeddedImmutableSamplers_ = {}, + size_t bufferCaptureReplayDescriptorDataSize_ = {}, + size_t imageCaptureReplayDescriptorDataSize_ = {}, + size_t imageViewCaptureReplayDescriptorDataSize_ = {}, + size_t samplerCaptureReplayDescriptorDataSize_ = {}, + size_t 
accelerationStructureCaptureReplayDescriptorDataSize_ = {}, + size_t samplerDescriptorSize_ = {}, + size_t combinedImageSamplerDescriptorSize_ = {}, + size_t sampledImageDescriptorSize_ = {}, + size_t storageImageDescriptorSize_ = {}, + size_t uniformTexelBufferDescriptorSize_ = {}, + size_t robustUniformTexelBufferDescriptorSize_ = {}, + size_t storageTexelBufferDescriptorSize_ = {}, + size_t robustStorageTexelBufferDescriptorSize_ = {}, + size_t uniformBufferDescriptorSize_ = {}, + size_t robustUniformBufferDescriptorSize_ = {}, + size_t storageBufferDescriptorSize_ = {}, + size_t robustStorageBufferDescriptorSize_ = {}, + size_t inputAttachmentDescriptorSize_ = {}, + size_t accelerationStructureDescriptorSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxSamplerDescriptorBufferRange_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxResourceDescriptorBufferRange_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize samplerDescriptorBufferAddressSpaceSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize resourceDescriptorBufferAddressSpaceSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize descriptorBufferAddressSpaceSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , combinedImageSamplerDescriptorSingleArray{ combinedImageSamplerDescriptorSingleArray_ } + , bufferlessPushDescriptors{ bufferlessPushDescriptors_ } + , allowSamplerImageViewPostSubmitCreation{ allowSamplerImageViewPostSubmitCreation_ } + , descriptorBufferOffsetAlignment{ descriptorBufferOffsetAlignment_ } + , maxDescriptorBufferBindings{ maxDescriptorBufferBindings_ } + , maxResourceDescriptorBufferBindings{ maxResourceDescriptorBufferBindings_ } + , maxSamplerDescriptorBufferBindings{ maxSamplerDescriptorBufferBindings_ } + , maxEmbeddedImmutableSamplerBindings{ maxEmbeddedImmutableSamplerBindings_ } + , maxEmbeddedImmutableSamplers{ maxEmbeddedImmutableSamplers_ } + , bufferCaptureReplayDescriptorDataSize{ bufferCaptureReplayDescriptorDataSize_ } + , imageCaptureReplayDescriptorDataSize{ imageCaptureReplayDescriptorDataSize_ } + , imageViewCaptureReplayDescriptorDataSize{ imageViewCaptureReplayDescriptorDataSize_ } + , samplerCaptureReplayDescriptorDataSize{ samplerCaptureReplayDescriptorDataSize_ } + , accelerationStructureCaptureReplayDescriptorDataSize{ accelerationStructureCaptureReplayDescriptorDataSize_ } + , samplerDescriptorSize{ samplerDescriptorSize_ } + , combinedImageSamplerDescriptorSize{ combinedImageSamplerDescriptorSize_ } + , sampledImageDescriptorSize{ sampledImageDescriptorSize_ } + , storageImageDescriptorSize{ storageImageDescriptorSize_ } + , uniformTexelBufferDescriptorSize{ uniformTexelBufferDescriptorSize_ } + , robustUniformTexelBufferDescriptorSize{ robustUniformTexelBufferDescriptorSize_ } + , storageTexelBufferDescriptorSize{ storageTexelBufferDescriptorSize_ } + , robustStorageTexelBufferDescriptorSize{ robustStorageTexelBufferDescriptorSize_ } + , uniformBufferDescriptorSize{ uniformBufferDescriptorSize_ } + , robustUniformBufferDescriptorSize{ robustUniformBufferDescriptorSize_ } + , storageBufferDescriptorSize{ storageBufferDescriptorSize_ } + , robustStorageBufferDescriptorSize{ robustStorageBufferDescriptorSize_ } + , inputAttachmentDescriptorSize{ inputAttachmentDescriptorSize_ } + , accelerationStructureDescriptorSize{ accelerationStructureDescriptorSize_ } + , maxSamplerDescriptorBufferRange{ maxSamplerDescriptorBufferRange_ } + , maxResourceDescriptorBufferRange{ maxResourceDescriptorBufferRange_ } + , samplerDescriptorBufferAddressSpaceSize{ 
samplerDescriptorBufferAddressSpaceSize_ } + , resourceDescriptorBufferAddressSpaceSize{ resourceDescriptorBufferAddressSpaceSize_ } + , descriptorBufferAddressSpaceSize{ descriptorBufferAddressSpaceSize_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferPropertiesEXT( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorBufferPropertiesEXT( VkPhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorBufferPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDescriptorBufferPropertiesEXT & operator=( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDescriptorBufferPropertiesEXT & operator=( VkPhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDescriptorBufferPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorBufferPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + combinedImageSamplerDescriptorSingleArray, + bufferlessPushDescriptors, + allowSamplerImageViewPostSubmitCreation, + descriptorBufferOffsetAlignment, + maxDescriptorBufferBindings, + maxResourceDescriptorBufferBindings, + maxSamplerDescriptorBufferBindings, + maxEmbeddedImmutableSamplerBindings, + maxEmbeddedImmutableSamplers, + bufferCaptureReplayDescriptorDataSize, + imageCaptureReplayDescriptorDataSize, + imageViewCaptureReplayDescriptorDataSize, + samplerCaptureReplayDescriptorDataSize, + accelerationStructureCaptureReplayDescriptorDataSize, + samplerDescriptorSize, + combinedImageSamplerDescriptorSize, + sampledImageDescriptorSize, + storageImageDescriptorSize, + uniformTexelBufferDescriptorSize, + robustUniformTexelBufferDescriptorSize, + storageTexelBufferDescriptorSize, + robustStorageTexelBufferDescriptorSize, + uniformBufferDescriptorSize, + robustUniformBufferDescriptorSize, + storageBufferDescriptorSize, + robustStorageBufferDescriptorSize, + inputAttachmentDescriptorSize, + accelerationStructureDescriptorSize, + maxSamplerDescriptorBufferRange, + maxResourceDescriptorBufferRange, + samplerDescriptorBufferAddressSpaceSize, + resourceDescriptorBufferAddressSpaceSize, + descriptorBufferAddressSpaceSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorBufferPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( combinedImageSamplerDescriptorSingleArray == rhs.combinedImageSamplerDescriptorSingleArray ) && + ( bufferlessPushDescriptors == rhs.bufferlessPushDescriptors ) && + ( allowSamplerImageViewPostSubmitCreation == rhs.allowSamplerImageViewPostSubmitCreation ) && + ( descriptorBufferOffsetAlignment == rhs.descriptorBufferOffsetAlignment ) && ( maxDescriptorBufferBindings == rhs.maxDescriptorBufferBindings ) && + ( maxResourceDescriptorBufferBindings == 
rhs.maxResourceDescriptorBufferBindings ) && + ( maxSamplerDescriptorBufferBindings == rhs.maxSamplerDescriptorBufferBindings ) && + ( maxEmbeddedImmutableSamplerBindings == rhs.maxEmbeddedImmutableSamplerBindings ) && + ( maxEmbeddedImmutableSamplers == rhs.maxEmbeddedImmutableSamplers ) && + ( bufferCaptureReplayDescriptorDataSize == rhs.bufferCaptureReplayDescriptorDataSize ) && + ( imageCaptureReplayDescriptorDataSize == rhs.imageCaptureReplayDescriptorDataSize ) && + ( imageViewCaptureReplayDescriptorDataSize == rhs.imageViewCaptureReplayDescriptorDataSize ) && + ( samplerCaptureReplayDescriptorDataSize == rhs.samplerCaptureReplayDescriptorDataSize ) && + ( accelerationStructureCaptureReplayDescriptorDataSize == rhs.accelerationStructureCaptureReplayDescriptorDataSize ) && + ( samplerDescriptorSize == rhs.samplerDescriptorSize ) && ( combinedImageSamplerDescriptorSize == rhs.combinedImageSamplerDescriptorSize ) && + ( sampledImageDescriptorSize == rhs.sampledImageDescriptorSize ) && ( storageImageDescriptorSize == rhs.storageImageDescriptorSize ) && + ( uniformTexelBufferDescriptorSize == rhs.uniformTexelBufferDescriptorSize ) && + ( robustUniformTexelBufferDescriptorSize == rhs.robustUniformTexelBufferDescriptorSize ) && + ( storageTexelBufferDescriptorSize == rhs.storageTexelBufferDescriptorSize ) && + ( robustStorageTexelBufferDescriptorSize == rhs.robustStorageTexelBufferDescriptorSize ) && + ( uniformBufferDescriptorSize == rhs.uniformBufferDescriptorSize ) && + ( robustUniformBufferDescriptorSize == rhs.robustUniformBufferDescriptorSize ) && + ( storageBufferDescriptorSize == rhs.storageBufferDescriptorSize ) && + ( robustStorageBufferDescriptorSize == rhs.robustStorageBufferDescriptorSize ) && + ( inputAttachmentDescriptorSize == rhs.inputAttachmentDescriptorSize ) && + ( accelerationStructureDescriptorSize == rhs.accelerationStructureDescriptorSize ) && + ( maxSamplerDescriptorBufferRange == rhs.maxSamplerDescriptorBufferRange ) && + ( maxResourceDescriptorBufferRange == rhs.maxResourceDescriptorBufferRange ) && + ( samplerDescriptorBufferAddressSpaceSize == rhs.samplerDescriptorBufferAddressSpaceSize ) && + ( resourceDescriptorBufferAddressSpaceSize == rhs.resourceDescriptorBufferAddressSpaceSize ) && + ( descriptorBufferAddressSpaceSize == rhs.descriptorBufferAddressSpaceSize ); +# endif + } + + bool operator!=( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorBufferPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 combinedImageSamplerDescriptorSingleArray = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferlessPushDescriptors = {}; + VULKAN_HPP_NAMESPACE::Bool32 allowSamplerImageViewPostSubmitCreation = {}; + VULKAN_HPP_NAMESPACE::DeviceSize descriptorBufferOffsetAlignment = {}; + uint32_t maxDescriptorBufferBindings = {}; + uint32_t maxResourceDescriptorBufferBindings = {}; + uint32_t maxSamplerDescriptorBufferBindings = {}; + uint32_t maxEmbeddedImmutableSamplerBindings = {}; + uint32_t maxEmbeddedImmutableSamplers = {}; + size_t bufferCaptureReplayDescriptorDataSize = {}; + size_t imageCaptureReplayDescriptorDataSize = {}; + size_t imageViewCaptureReplayDescriptorDataSize = {}; + size_t samplerCaptureReplayDescriptorDataSize = {}; + size_t accelerationStructureCaptureReplayDescriptorDataSize = {}; + size_t samplerDescriptorSize = {}; + size_t combinedImageSamplerDescriptorSize = {}; + 
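// Illustrative usage sketch (editorial addition, not part of the generated header):
// how the per-descriptor sizes and alignment reported by
// PhysicalDeviceDescriptorBufferPropertiesEXT are typically queried. Assumes a valid
// vk::PhysicalDevice named `gpu` (hypothetical) and the default dispatcher.
//
//   auto chain = gpu.getProperties2<vk::PhysicalDeviceProperties2,
//                                   vk::PhysicalDeviceDescriptorBufferPropertiesEXT>();
//   const auto & dbProps = chain.get<vk::PhysicalDeviceDescriptorBufferPropertiesEXT>();
//   // Descriptors written into a descriptor buffer are laid out using the per-type
//   // sizes below (e.g. dbProps.uniformBufferDescriptorSize bytes per uniform-buffer
//   // descriptor) and placed at multiples of dbProps.descriptorBufferOffsetAlignment.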
size_t sampledImageDescriptorSize = {}; + size_t storageImageDescriptorSize = {}; + size_t uniformTexelBufferDescriptorSize = {}; + size_t robustUniformTexelBufferDescriptorSize = {}; + size_t storageTexelBufferDescriptorSize = {}; + size_t robustStorageTexelBufferDescriptorSize = {}; + size_t uniformBufferDescriptorSize = {}; + size_t robustUniformBufferDescriptorSize = {}; + size_t storageBufferDescriptorSize = {}; + size_t robustStorageBufferDescriptorSize = {}; + size_t inputAttachmentDescriptorSize = {}; + size_t accelerationStructureDescriptorSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxSamplerDescriptorBufferRange = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxResourceDescriptorBufferRange = {}; + VULKAN_HPP_NAMESPACE::DeviceSize samplerDescriptorBufferAddressSpaceSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize resourceDescriptorBufferAddressSpaceSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize descriptorBufferAddressSpaceSize = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorBufferPropertiesEXT; + }; + + struct PhysicalDeviceDescriptorIndexingFeatures + { + using NativeType = VkPhysicalDeviceDescriptorIndexingFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderInputAttachmentArrayDynamicIndexing{ shaderInputAttachmentArrayDynamicIndexing_ } + , shaderUniformTexelBufferArrayDynamicIndexing{ shaderUniformTexelBufferArrayDynamicIndexing_ } + , shaderStorageTexelBufferArrayDynamicIndexing{ shaderStorageTexelBufferArrayDynamicIndexing_ } + , shaderUniformBufferArrayNonUniformIndexing{ 
shaderUniformBufferArrayNonUniformIndexing_ } + , shaderSampledImageArrayNonUniformIndexing{ shaderSampledImageArrayNonUniformIndexing_ } + , shaderStorageBufferArrayNonUniformIndexing{ shaderStorageBufferArrayNonUniformIndexing_ } + , shaderStorageImageArrayNonUniformIndexing{ shaderStorageImageArrayNonUniformIndexing_ } + , shaderInputAttachmentArrayNonUniformIndexing{ shaderInputAttachmentArrayNonUniformIndexing_ } + , shaderUniformTexelBufferArrayNonUniformIndexing{ shaderUniformTexelBufferArrayNonUniformIndexing_ } + , shaderStorageTexelBufferArrayNonUniformIndexing{ shaderStorageTexelBufferArrayNonUniformIndexing_ } + , descriptorBindingUniformBufferUpdateAfterBind{ descriptorBindingUniformBufferUpdateAfterBind_ } + , descriptorBindingSampledImageUpdateAfterBind{ descriptorBindingSampledImageUpdateAfterBind_ } + , descriptorBindingStorageImageUpdateAfterBind{ descriptorBindingStorageImageUpdateAfterBind_ } + , descriptorBindingStorageBufferUpdateAfterBind{ descriptorBindingStorageBufferUpdateAfterBind_ } + , descriptorBindingUniformTexelBufferUpdateAfterBind{ descriptorBindingUniformTexelBufferUpdateAfterBind_ } + , descriptorBindingStorageTexelBufferUpdateAfterBind{ descriptorBindingStorageTexelBufferUpdateAfterBind_ } + , descriptorBindingUpdateUnusedWhilePending{ descriptorBindingUpdateUnusedWhilePending_ } + , descriptorBindingPartiallyBound{ descriptorBindingPartiallyBound_ } + , descriptorBindingVariableDescriptorCount{ descriptorBindingVariableDescriptorCount_ } + , runtimeDescriptorArray{ runtimeDescriptorArray_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorIndexingFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDescriptorIndexingFeatures & operator=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDescriptorIndexingFeatures & operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_; + return *this; + } + + 
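// Illustrative usage sketch (editorial addition, not part of the generated header):
// support for the descriptor-indexing features configured by these setters is
// normally queried through the pNext chain first. Assumes a valid
// vk::PhysicalDevice named `gpu` (hypothetical).
//
//   auto features = gpu.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                    vk::PhysicalDeviceDescriptorIndexingFeatures>();
//   const auto & indexing = features.get<vk::PhysicalDeviceDescriptorIndexingFeatures>();
//   if ( indexing.descriptorBindingPartiallyBound && indexing.runtimeDescriptorArray )
//   {
//     // bindless-style descriptor arrays can be used on this device
//   }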
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 
descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & + setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT + { + runtimeDescriptorArray = runtimeDescriptorArray_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDescriptorIndexingFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorIndexingFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + shaderInputAttachmentArrayDynamicIndexing, + shaderUniformTexelBufferArrayDynamicIndexing, + shaderStorageTexelBufferArrayDynamicIndexing, + shaderUniformBufferArrayNonUniformIndexing, + shaderSampledImageArrayNonUniformIndexing, + shaderStorageBufferArrayNonUniformIndexing, + shaderStorageImageArrayNonUniformIndexing, + shaderInputAttachmentArrayNonUniformIndexing, + shaderUniformTexelBufferArrayNonUniformIndexing, + shaderStorageTexelBufferArrayNonUniformIndexing, + descriptorBindingUniformBufferUpdateAfterBind, + descriptorBindingSampledImageUpdateAfterBind, + descriptorBindingStorageImageUpdateAfterBind, + descriptorBindingStorageBufferUpdateAfterBind, + descriptorBindingUniformTexelBufferUpdateAfterBind, + descriptorBindingStorageTexelBufferUpdateAfterBind, + descriptorBindingUpdateUnusedWhilePending, + descriptorBindingPartiallyBound, + descriptorBindingVariableDescriptorCount, + runtimeDescriptorArray ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorIndexingFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) && + ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) && + ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) && + ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) && + ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) && + ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) && + ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) && + ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) && + ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) && + ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) && + ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) && + ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) && + ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) && + ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) && + ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) && + ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) && + ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) && + ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) && + ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) && + ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ); +# endif + } + + bool operator!=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {}; + 
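// Illustrative usage sketch (editorial addition, not part of the generated header):
// to enable any of these features, the filled struct is chained into the device
// creation info via pNext. The variable names are hypothetical.
//
//   vk::PhysicalDeviceDescriptorIndexingFeatures indexingFeatures{};
//   indexingFeatures.setRuntimeDescriptorArray( VK_TRUE )
//                   .setDescriptorBindingPartiallyBound( VK_TRUE );
//   vk::DeviceCreateInfo deviceCreateInfo{};
//   deviceCreateInfo.setPNext( &indexingFeatures );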
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorIndexingFeatures; + }; + + using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures; + + struct PhysicalDeviceDescriptorIndexingProperties + { + using NativeType = VkPhysicalDeviceDescriptorIndexingProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, + uint32_t maxPerStageUpdateAfterBindResources_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxUpdateAfterBindDescriptorsInAllPools{ maxUpdateAfterBindDescriptorsInAllPools_ } + , shaderUniformBufferArrayNonUniformIndexingNative{ shaderUniformBufferArrayNonUniformIndexingNative_ } + , shaderSampledImageArrayNonUniformIndexingNative{ shaderSampledImageArrayNonUniformIndexingNative_ } + , shaderStorageBufferArrayNonUniformIndexingNative{ shaderStorageBufferArrayNonUniformIndexingNative_ } + , 
shaderStorageImageArrayNonUniformIndexingNative{ shaderStorageImageArrayNonUniformIndexingNative_ } + , shaderInputAttachmentArrayNonUniformIndexingNative{ shaderInputAttachmentArrayNonUniformIndexingNative_ } + , robustBufferAccessUpdateAfterBind{ robustBufferAccessUpdateAfterBind_ } + , quadDivergentImplicitLod{ quadDivergentImplicitLod_ } + , maxPerStageDescriptorUpdateAfterBindSamplers{ maxPerStageDescriptorUpdateAfterBindSamplers_ } + , maxPerStageDescriptorUpdateAfterBindUniformBuffers{ maxPerStageDescriptorUpdateAfterBindUniformBuffers_ } + , maxPerStageDescriptorUpdateAfterBindStorageBuffers{ maxPerStageDescriptorUpdateAfterBindStorageBuffers_ } + , maxPerStageDescriptorUpdateAfterBindSampledImages{ maxPerStageDescriptorUpdateAfterBindSampledImages_ } + , maxPerStageDescriptorUpdateAfterBindStorageImages{ maxPerStageDescriptorUpdateAfterBindStorageImages_ } + , maxPerStageDescriptorUpdateAfterBindInputAttachments{ maxPerStageDescriptorUpdateAfterBindInputAttachments_ } + , maxPerStageUpdateAfterBindResources{ maxPerStageUpdateAfterBindResources_ } + , maxDescriptorSetUpdateAfterBindSamplers{ maxDescriptorSetUpdateAfterBindSamplers_ } + , maxDescriptorSetUpdateAfterBindUniformBuffers{ maxDescriptorSetUpdateAfterBindUniformBuffers_ } + , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic{ maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ } + , maxDescriptorSetUpdateAfterBindStorageBuffers{ maxDescriptorSetUpdateAfterBindStorageBuffers_ } + , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic{ maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ } + , maxDescriptorSetUpdateAfterBindSampledImages{ maxDescriptorSetUpdateAfterBindSampledImages_ } + , maxDescriptorSetUpdateAfterBindStorageImages{ maxDescriptorSetUpdateAfterBindStorageImages_ } + , maxDescriptorSetUpdateAfterBindInputAttachments{ maxDescriptorSetUpdateAfterBindInputAttachments_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorIndexingProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDescriptorIndexingProperties & operator=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDescriptorIndexingProperties & operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDescriptorIndexingProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorIndexingProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxUpdateAfterBindDescriptorsInAllPools, + shaderUniformBufferArrayNonUniformIndexingNative, + shaderSampledImageArrayNonUniformIndexingNative, + shaderStorageBufferArrayNonUniformIndexingNative, + shaderStorageImageArrayNonUniformIndexingNative, + shaderInputAttachmentArrayNonUniformIndexingNative, + robustBufferAccessUpdateAfterBind, + quadDivergentImplicitLod, + maxPerStageDescriptorUpdateAfterBindSamplers, + 
maxPerStageDescriptorUpdateAfterBindUniformBuffers, + maxPerStageDescriptorUpdateAfterBindStorageBuffers, + maxPerStageDescriptorUpdateAfterBindSampledImages, + maxPerStageDescriptorUpdateAfterBindStorageImages, + maxPerStageDescriptorUpdateAfterBindInputAttachments, + maxPerStageUpdateAfterBindResources, + maxDescriptorSetUpdateAfterBindSamplers, + maxDescriptorSetUpdateAfterBindUniformBuffers, + maxDescriptorSetUpdateAfterBindUniformBuffersDynamic, + maxDescriptorSetUpdateAfterBindStorageBuffers, + maxDescriptorSetUpdateAfterBindStorageBuffersDynamic, + maxDescriptorSetUpdateAfterBindSampledImages, + maxDescriptorSetUpdateAfterBindStorageImages, + maxDescriptorSetUpdateAfterBindInputAttachments ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorIndexingProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) && + ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative ) && + ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative ) && + ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative ) && + ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative ) && + ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) && + ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) && + ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) && + ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) && + ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) && + ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) && + ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) && + ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) && + ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) && + ( maxDescriptorSetUpdateAfterBindInputAttachments == 
rhs.maxDescriptorSetUpdateAfterBindInputAttachments ); +# endif + } + + bool operator!=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingProperties; + void * pNext = {}; + uint32_t maxUpdateAfterBindDescriptorsInAllPools = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {}; + uint32_t maxPerStageUpdateAfterBindResources = {}; + uint32_t maxDescriptorSetUpdateAfterBindSamplers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorIndexingProperties; + }; + + using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties; + + struct PhysicalDeviceDescriptorPoolOverallocationFeaturesNV + { + using NativeType = VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 descriptorPoolOverallocation_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , descriptorPoolOverallocation{ descriptorPoolOverallocation_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDescriptorPoolOverallocationFeaturesNV & + operator=( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDescriptorPoolOverallocationFeaturesNV & 
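// Illustrative usage sketch, not part of the generated header: one way an application might
// query the descriptor indexing feature and limit structs defined above. Assumes the default
// `vk` namespace, a valid vk::PhysicalDevice `physicalDevice`, and the usual vulkan.hpp
// StructureChain helpers; adjust to your dispatch setup.
//
//   auto features = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                               vk::PhysicalDeviceDescriptorIndexingFeatures>();
//   auto const & indexing = features.get<vk::PhysicalDeviceDescriptorIndexingFeatures>();
//   if ( indexing.descriptorBindingPartiallyBound && indexing.runtimeDescriptorArray )
//   {
//     auto props = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                                vk::PhysicalDeviceDescriptorIndexingProperties>();
//     uint32_t maxBindlessImages =
//       props.get<vk::PhysicalDeviceDescriptorIndexingProperties>().maxDescriptorSetUpdateAfterBindSampledImages;
//     // size update-after-bind descriptor sets against maxBindlessImages
//   }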
operator=( VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorPoolOverallocationFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorPoolOverallocationFeaturesNV & + setDescriptorPoolOverallocation( VULKAN_HPP_NAMESPACE::Bool32 descriptorPoolOverallocation_ ) VULKAN_HPP_NOEXCEPT + { + descriptorPoolOverallocation = descriptorPoolOverallocation_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, descriptorPoolOverallocation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorPoolOverallocation == rhs.descriptorPoolOverallocation ); +# endif + } + + bool operator!=( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorPoolOverallocation = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorPoolOverallocationFeaturesNV; + }; + + struct PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE + { + using NativeType = VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , descriptorSetHostMapping{ descriptorSetHostMapping_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & + operator=( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif 
/*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & operator=( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & + setDescriptorSetHostMapping( VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSetHostMapping = descriptorSetHostMapping_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, descriptorSetHostMapping ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & ) const = default; +#else + bool operator==( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetHostMapping == rhs.descriptorSetHostMapping ); +# endif + } + + bool operator!=( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; + }; + + struct PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV + { + using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCompute_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputePipelines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputeCaptureReplay_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceGeneratedCompute{ deviceGeneratedCompute_ } + , deviceGeneratedComputePipelines{ deviceGeneratedComputePipelines_ } + , deviceGeneratedComputeCaptureReplay{ deviceGeneratedComputeCaptureReplay_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( 
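// Illustrative usage sketch, not part of the generated header: the single-Bool32 feature structs
// above (descriptor pool overallocation, descriptor set host mapping) can be queried together by
// chaining them in one getFeatures2 call. Assumes the default `vk` namespace and dispatcher.
//
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV,
//                                            vk::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>();
//   bool canOverallocate = chain.get<vk::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV>().descriptorPoolOverallocation;
//   bool canMapSets      = chain.get<vk::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>().descriptorSetHostMapping;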
VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & + operator=( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & + operator=( VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & + setDeviceGeneratedCompute( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCompute_ ) VULKAN_HPP_NOEXCEPT + { + deviceGeneratedCompute = deviceGeneratedCompute_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & + setDeviceGeneratedComputePipelines( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputePipelines_ ) VULKAN_HPP_NOEXCEPT + { + deviceGeneratedComputePipelines = deviceGeneratedComputePipelines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & + setDeviceGeneratedComputeCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputeCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + deviceGeneratedComputeCaptureReplay = deviceGeneratedComputeCaptureReplay_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceGeneratedCompute, deviceGeneratedComputePipelines, deviceGeneratedComputeCaptureReplay ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceGeneratedCompute == rhs.deviceGeneratedCompute ) && + ( deviceGeneratedComputePipelines == rhs.deviceGeneratedComputePipelines ) && + ( deviceGeneratedComputeCaptureReplay == rhs.deviceGeneratedComputeCaptureReplay ); +# endif + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCompute = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputePipelines = {}; + VULKAN_HPP_NAMESPACE::Bool32 
deviceGeneratedComputeCaptureReplay = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + }; + + struct PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT + { + using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dynamicGeneratedPipelineLayout_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceGeneratedCommands{ deviceGeneratedCommands_ } + , dynamicGeneratedPipelineLayout{ dynamicGeneratedPipelineLayout_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & + setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT + { + deviceGeneratedCommands = deviceGeneratedCommands_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & + setDynamicGeneratedPipelineLayout( VULKAN_HPP_NAMESPACE::Bool32 dynamicGeneratedPipelineLayout_ ) VULKAN_HPP_NOEXCEPT + { + dynamicGeneratedPipelineLayout = dynamicGeneratedPipelineLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceGeneratedCommands, dynamicGeneratedPipelineLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && 
( pNext == rhs.pNext ) && ( deviceGeneratedCommands == rhs.deviceGeneratedCommands ) && + ( dynamicGeneratedPipelineLayout == rhs.dynamicGeneratedPipelineLayout ); +# endif + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicGeneratedPipelineLayout = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; + }; + + struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV + { + using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceGeneratedCommands{ deviceGeneratedCommands_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & + setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT + { + deviceGeneratedCommands = deviceGeneratedCommands_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceGeneratedCommands ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return 
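// Illustrative usage sketch, not part of the generated header: after querying
// PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT (defined above), the same struct is typically
// chained into vk::DeviceCreateInfo::pNext to enable the feature at device creation. Assumes the
// default `vk` namespace; queue and extension setup are elided for brevity.
//
//   vk::PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT dgcFeatures;
//   dgcFeatures.setDeviceGeneratedCommands( VK_TRUE );
//   vk::DeviceCreateInfo createInfo;
//   createInfo.setPNext( &dgcFeatures );  // plus pQueueCreateInfos, ppEnabledExtensionNames, ...
//   vk::Device device = physicalDevice.createDevice( createInfo );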
this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceGeneratedCommands == rhs.deviceGeneratedCommands ); +# endif + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + }; + + struct PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT + { + using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT( uint32_t maxIndirectPipelineCount_ = {}, + uint32_t maxIndirectShaderObjectCount_ = {}, + uint32_t maxIndirectSequenceCount_ = {}, + uint32_t maxIndirectCommandsTokenCount_ = {}, + uint32_t maxIndirectCommandsTokenOffset_ = {}, + uint32_t maxIndirectCommandsIndirectStride_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagsEXT supportedIndirectCommandsInputModes_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStages_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStagesPipelineBinding_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStagesShaderBinding_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommandsTransformFeedback_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommandsMultiDrawIndirectCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxIndirectPipelineCount{ maxIndirectPipelineCount_ } + , maxIndirectShaderObjectCount{ maxIndirectShaderObjectCount_ } + , maxIndirectSequenceCount{ maxIndirectSequenceCount_ } + , maxIndirectCommandsTokenCount{ maxIndirectCommandsTokenCount_ } + , maxIndirectCommandsTokenOffset{ maxIndirectCommandsTokenOffset_ } + , maxIndirectCommandsIndirectStride{ maxIndirectCommandsIndirectStride_ } + , supportedIndirectCommandsInputModes{ supportedIndirectCommandsInputModes_ } + , supportedIndirectCommandsShaderStages{ supportedIndirectCommandsShaderStages_ } + , supportedIndirectCommandsShaderStagesPipelineBinding{ supportedIndirectCommandsShaderStagesPipelineBinding_ } + , supportedIndirectCommandsShaderStagesShaderBinding{ supportedIndirectCommandsShaderStagesShaderBinding_ } + , deviceGeneratedCommandsTransformFeedback{ deviceGeneratedCommandsTransformFeedback_ } + , deviceGeneratedCommandsMultiDrawIndirectCount{ deviceGeneratedCommandsMultiDrawIndirectCount_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT & + operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT 
const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT & operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxIndirectPipelineCount, + maxIndirectShaderObjectCount, + maxIndirectSequenceCount, + maxIndirectCommandsTokenCount, + maxIndirectCommandsTokenOffset, + maxIndirectCommandsIndirectStride, + supportedIndirectCommandsInputModes, + supportedIndirectCommandsShaderStages, + supportedIndirectCommandsShaderStagesPipelineBinding, + supportedIndirectCommandsShaderStagesShaderBinding, + deviceGeneratedCommandsTransformFeedback, + deviceGeneratedCommandsMultiDrawIndirectCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxIndirectPipelineCount == rhs.maxIndirectPipelineCount ) && + ( maxIndirectShaderObjectCount == rhs.maxIndirectShaderObjectCount ) && ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount ) && + ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount ) && + ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset ) && + ( maxIndirectCommandsIndirectStride == rhs.maxIndirectCommandsIndirectStride ) && + ( supportedIndirectCommandsInputModes == rhs.supportedIndirectCommandsInputModes ) && + ( supportedIndirectCommandsShaderStages == rhs.supportedIndirectCommandsShaderStages ) && + ( supportedIndirectCommandsShaderStagesPipelineBinding == rhs.supportedIndirectCommandsShaderStagesPipelineBinding ) && + ( supportedIndirectCommandsShaderStagesShaderBinding == rhs.supportedIndirectCommandsShaderStagesShaderBinding ) && + ( deviceGeneratedCommandsTransformFeedback == rhs.deviceGeneratedCommandsTransformFeedback ) && + ( deviceGeneratedCommandsMultiDrawIndirectCount == rhs.deviceGeneratedCommandsMultiDrawIndirectCount ); +# endif + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; + void * pNext = {}; + uint32_t maxIndirectPipelineCount = {}; + uint32_t maxIndirectShaderObjectCount = {}; + uint32_t maxIndirectSequenceCount = {}; + uint32_t maxIndirectCommandsTokenCount = {}; + uint32_t maxIndirectCommandsTokenOffset = {}; + uint32_t maxIndirectCommandsIndirectStride = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagsEXT supportedIndirectCommandsInputModes = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStages = {}; + 
VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStagesPipelineBinding = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStagesShaderBinding = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommandsTransformFeedback = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommandsMultiDrawIndirectCount = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; + }; + + struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV + { + using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( uint32_t maxGraphicsShaderGroupCount_ = {}, + uint32_t maxIndirectSequenceCount_ = {}, + uint32_t maxIndirectCommandsTokenCount_ = {}, + uint32_t maxIndirectCommandsStreamCount_ = {}, + uint32_t maxIndirectCommandsTokenOffset_ = {}, + uint32_t maxIndirectCommandsStreamStride_ = {}, + uint32_t minSequencesCountBufferOffsetAlignment_ = {}, + uint32_t minSequencesIndexBufferOffsetAlignment_ = {}, + uint32_t minIndirectCommandsBufferOffsetAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxGraphicsShaderGroupCount{ maxGraphicsShaderGroupCount_ } + , maxIndirectSequenceCount{ maxIndirectSequenceCount_ } + , maxIndirectCommandsTokenCount{ maxIndirectCommandsTokenCount_ } + , maxIndirectCommandsStreamCount{ maxIndirectCommandsStreamCount_ } + , maxIndirectCommandsTokenOffset{ maxIndirectCommandsTokenOffset_ } + , maxIndirectCommandsStreamStride{ maxIndirectCommandsStreamStride_ } + , minSequencesCountBufferOffsetAlignment{ minSequencesCountBufferOffsetAlignment_ } + , minSequencesIndexBufferOffsetAlignment{ minSequencesIndexBufferOffsetAlignment_ } + , minIndirectCommandsBufferOffsetAlignment{ minIndirectCommandsBufferOffsetAlignment_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & + operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxGraphicsShaderGroupCount, + maxIndirectSequenceCount, + maxIndirectCommandsTokenCount, + 
maxIndirectCommandsStreamCount, + maxIndirectCommandsTokenOffset, + maxIndirectCommandsStreamStride, + minSequencesCountBufferOffsetAlignment, + minSequencesIndexBufferOffsetAlignment, + minIndirectCommandsBufferOffsetAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount ) && + ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount ) && ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount ) && + ( maxIndirectCommandsStreamCount == rhs.maxIndirectCommandsStreamCount ) && + ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset ) && + ( maxIndirectCommandsStreamStride == rhs.maxIndirectCommandsStreamStride ) && + ( minSequencesCountBufferOffsetAlignment == rhs.minSequencesCountBufferOffsetAlignment ) && + ( minSequencesIndexBufferOffsetAlignment == rhs.minSequencesIndexBufferOffsetAlignment ) && + ( minIndirectCommandsBufferOffsetAlignment == rhs.minIndirectCommandsBufferOffsetAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + void * pNext = {}; + uint32_t maxGraphicsShaderGroupCount = {}; + uint32_t maxIndirectSequenceCount = {}; + uint32_t maxIndirectCommandsTokenCount = {}; + uint32_t maxIndirectCommandsStreamCount = {}; + uint32_t maxIndirectCommandsTokenOffset = {}; + uint32_t maxIndirectCommandsStreamStride = {}; + uint32_t minSequencesCountBufferOffsetAlignment = {}; + uint32_t minSequencesIndexBufferOffsetAlignment = {}; + uint32_t minIndirectCommandsBufferOffsetAlignment = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + }; + + struct PhysicalDeviceDeviceMemoryReportFeaturesEXT + { + using NativeType = VkPhysicalDeviceDeviceMemoryReportFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceMemoryReport{ deviceMemoryReport_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceMemoryReportFeaturesEXT( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceMemoryReportFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( 
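// Illustrative usage sketch, not part of the generated header: the limits in
// PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT / ...PropertiesNV (defined above) are read
// through the properties2 chain before building indirect command layouts. Assumes the default
// `vk` namespace and dispatcher; `myTokenCount` is a hypothetical application value.
//
//   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                              vk::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>();
//   auto const & dgcProps = chain.get<vk::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>();
//   if ( myTokenCount > dgcProps.maxIndirectCommandsTokenCount )
//   {
//     // fall back to a smaller token layout
//   }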
VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT & + setDeviceMemoryReport( VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ ) VULKAN_HPP_NOEXCEPT + { + deviceMemoryReport = deviceMemoryReport_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceMemoryReport ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMemoryReport == rhs.deviceMemoryReport ); +# endif + } + + bool operator!=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceMemoryReportFeaturesEXT; + }; + + struct PhysicalDeviceDiagnosticsConfigFeaturesNV + { + using NativeType = VkPhysicalDeviceDiagnosticsConfigFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , diagnosticsConfig{ diagnosticsConfig_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDiagnosticsConfigFeaturesNV( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDiagnosticsConfigFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & + setDiagnosticsConfig( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ ) VULKAN_HPP_NOEXCEPT + { + diagnosticsConfig = diagnosticsConfig_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, diagnosticsConfig ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDiagnosticsConfigFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( diagnosticsConfig == rhs.diagnosticsConfig ); +# endif + } + + bool operator!=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDiagnosticsConfigFeaturesNV; + }; + + struct PhysicalDeviceDiscardRectanglePropertiesEXT + { + using NativeType = VkPhysicalDeviceDiscardRectanglePropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxDiscardRectangles{ maxDiscardRectangles_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDiscardRectanglePropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + 
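// Illustrative usage sketch, not part of the generated header: the device memory report and
// diagnostics config feature structs defined above follow the same query pattern. Assumes the
// default `vk` namespace and dispatcher.
//
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDeviceDeviceMemoryReportFeaturesEXT,
//                                            vk::PhysicalDeviceDiagnosticsConfigFeaturesNV>();
//   bool memoryReport = chain.get<vk::PhysicalDeviceDeviceMemoryReportFeaturesEXT>().deviceMemoryReport;
//   bool diagnostics  = chain.get<vk::PhysicalDeviceDiagnosticsConfigFeaturesNV>().diagnosticsConfig;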
auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxDiscardRectangles ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDiscardRectangles == rhs.maxDiscardRectangles ); +# endif + } + + bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; + void * pNext = {}; + uint32_t maxDiscardRectangles = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDiscardRectanglePropertiesEXT; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDeviceDisplacementMicromapFeaturesNV + { + using NativeType = VkPhysicalDeviceDisplacementMicromapFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDisplacementMicromapFeaturesNV; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDisplacementMicromapFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 displacementMicromap_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , displacementMicromap{ displacementMicromap_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDisplacementMicromapFeaturesNV( PhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDisplacementMicromapFeaturesNV( VkPhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDisplacementMicromapFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDisplacementMicromapFeaturesNV & operator=( PhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDisplacementMicromapFeaturesNV & operator=( VkPhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDisplacementMicromapFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDisplacementMicromapFeaturesNV & + setDisplacementMicromap( VULKAN_HPP_NAMESPACE::Bool32 displacementMicromap_ ) VULKAN_HPP_NOEXCEPT + { + displacementMicromap = displacementMicromap_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDisplacementMicromapFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDisplacementMicromapFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, displacementMicromap ); + } +# endif + +# 
if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDisplacementMicromapFeaturesNV const & ) const = default; +# else + bool operator==( PhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displacementMicromap == rhs.displacementMicromap ); +# endif + } + + bool operator!=( PhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDisplacementMicromapFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 displacementMicromap = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDisplacementMicromapFeaturesNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDeviceDisplacementMicromapPropertiesNV + { + using NativeType = VkPhysicalDeviceDisplacementMicromapPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDisplacementMicromapPropertiesNV; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDisplacementMicromapPropertiesNV( uint32_t maxDisplacementMicromapSubdivisionLevel_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxDisplacementMicromapSubdivisionLevel{ maxDisplacementMicromapSubdivisionLevel_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDisplacementMicromapPropertiesNV( PhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDisplacementMicromapPropertiesNV( VkPhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDisplacementMicromapPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDisplacementMicromapPropertiesNV & operator=( PhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDisplacementMicromapPropertiesNV & operator=( VkPhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDisplacementMicromapPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDisplacementMicromapPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxDisplacementMicromapSubdivisionLevel ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDisplacementMicromapPropertiesNV const & ) const = default; +# else + bool operator==( PhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDisplacementMicromapSubdivisionLevel == rhs.maxDisplacementMicromapSubdivisionLevel ); +# endif + } + + bool 
operator!=( PhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDisplacementMicromapPropertiesNV; + void * pNext = {}; + uint32_t maxDisplacementMicromapSubdivisionLevel = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDisplacementMicromapPropertiesNV; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct PhysicalDeviceDriverProperties + { + using NativeType = VkPhysicalDeviceDriverProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDriverProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, + std::array const & driverName_ = {}, + std::array const & driverInfo_ = {}, + VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , driverID{ driverID_ } + , driverName{ driverName_ } + , driverInfo{ driverInfo_ } + , conformanceVersion{ conformanceVersion_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDriverProperties( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDriverProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDriverProperties & operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDriverProperties & operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDriverProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ConformanceVersion const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, driverID, driverName, driverInfo, conformanceVersion ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = driverID <=> rhs.driverID; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( driverName, rhs.driverName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( driverInfo, rhs.driverInfo ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = conformanceVersion <=> rhs.conformanceVersion; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( strcmp( driverName, rhs.driverName ) == 0 ) && + ( strcmp( driverInfo, rhs.driverInfo ) == 0 ) && ( conformanceVersion == rhs.conformanceVersion ); + } + + bool operator!=( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverInfo = {}; + VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDriverProperties; + }; + + using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties; + + struct PhysicalDeviceDrmPropertiesEXT + { + using NativeType = VkPhysicalDeviceDrmPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDrmPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDrmPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 hasPrimary_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 hasRender_ = {}, + int64_t primaryMajor_ = {}, + int64_t primaryMinor_ = {}, + int64_t renderMajor_ = {}, + int64_t renderMinor_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , hasPrimary{ hasPrimary_ } + , hasRender{ hasRender_ } + , primaryMajor{ primaryMajor_ } + , primaryMinor{ primaryMinor_ } + , renderMajor{ renderMajor_ } + , renderMinor{ renderMinor_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDrmPropertiesEXT( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDrmPropertiesEXT( VkPhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDrmPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDrmPropertiesEXT & operator=( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDrmPropertiesEXT & operator=( VkPhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDrmPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDrmPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hasPrimary, hasRender, primaryMajor, primaryMinor, renderMajor, renderMinor ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDrmPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDrmPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if 
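// Illustrative usage sketch (not generated code): PhysicalDeviceDriverProperties
// is a query-only output struct, typically linked into the pNext chain of
// vk::PhysicalDeviceProperties2 and filled by getProperties2. A minimal sketch,
// assuming <vulkan/vulkan.hpp>, a Vulkan 1.1+ instance, a valid
// vk::PhysicalDevice, and that `vk` is the default VULKAN_HPP_NAMESPACE; the
// function name is a placeholder.
inline std::string describeDriverSketch( vk::PhysicalDevice physicalDevice )
{
  // getProperties2 with a StructureChain wires the driver-properties struct
  // behind PhysicalDeviceProperties2 and fills both structs in one call.
  auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceDriverProperties>();
  vk::PhysicalDeviceDriverProperties const & driverProps = chain.get<vk::PhysicalDeviceDriverProperties>();

  // driverName / driverInfo are fixed-size, null-terminated char arrays
  // (ArrayWrapper1D), which is why the comparisons above fall back to strcmp.
  return std::string( driverProps.driverName.data() ) + " (" + driverProps.driverInfo.data() + ")";
}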
defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hasPrimary == rhs.hasPrimary ) && ( hasRender == rhs.hasRender ) && + ( primaryMajor == rhs.primaryMajor ) && ( primaryMinor == rhs.primaryMinor ) && ( renderMajor == rhs.renderMajor ) && + ( renderMinor == rhs.renderMinor ); +# endif + } + + bool operator!=( PhysicalDeviceDrmPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDrmPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasPrimary = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasRender = {}; + int64_t primaryMajor = {}; + int64_t primaryMinor = {}; + int64_t renderMajor = {}; + int64_t renderMinor = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDrmPropertiesEXT; + }; + + struct PhysicalDeviceDynamicRenderingFeatures + { + using NativeType = VkPhysicalDeviceDynamicRenderingFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDynamicRenderingFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dynamicRendering{ dynamicRendering_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures( PhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDynamicRenderingFeatures( VkPhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDynamicRenderingFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDynamicRenderingFeatures & operator=( PhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDynamicRenderingFeatures & operator=( VkPhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures & setDynamicRendering( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT + { + dynamicRendering = dynamicRendering_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDynamicRenderingFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDynamicRenderingFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dynamicRendering ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDynamicRenderingFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceDynamicRenderingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + 
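// Illustrative usage sketch (not generated code): PhysicalDeviceDrmPropertiesEXT
// (VK_EXT_physical_device_drm) reports the Linux DRM major/minor device numbers
// of the primary and render nodes backing the physical device. Sketch
// assumptions: the extension is advertised, <cstdio> is available, and
// `physicalDevice` is valid; the function name is a placeholder.
inline void reportDrmRenderNodeSketch( vk::PhysicalDevice physicalDevice )
{
  auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceDrmPropertiesEXT>();
  vk::PhysicalDeviceDrmPropertiesEXT const & drm = chain.get<vk::PhysicalDeviceDrmPropertiesEXT>();
  if ( drm.hasRender )
  {
    // renderMajor / renderMinor can be matched against the dev_t of a
    // /dev/dri/renderD* node (e.g. via stat()) to pair the GPU with its DRM device.
    std::printf( "render node device numbers: %lld:%lld\n",
                 static_cast<long long>( drm.renderMajor ),
                 static_cast<long long>( drm.renderMinor ) );
  }
}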
return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicRendering == rhs.dynamicRendering ); +# endif + } + + bool operator!=( PhysicalDeviceDynamicRenderingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDynamicRenderingFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDynamicRenderingFeatures; + }; + + using PhysicalDeviceDynamicRenderingFeaturesKHR = PhysicalDeviceDynamicRenderingFeatures; + + struct PhysicalDeviceDynamicRenderingLocalReadFeatures + { + using NativeType = VkPhysicalDeviceDynamicRenderingLocalReadFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDynamicRenderingLocalReadFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingLocalReadFeatures( VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dynamicRenderingLocalRead{ dynamicRenderingLocalRead_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDynamicRenderingLocalReadFeatures( PhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDynamicRenderingLocalReadFeatures( VkPhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDynamicRenderingLocalReadFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDynamicRenderingLocalReadFeatures & operator=( PhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDynamicRenderingLocalReadFeatures & operator=( VkPhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingLocalReadFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingLocalReadFeatures & + setDynamicRenderingLocalRead( VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead_ ) VULKAN_HPP_NOEXCEPT + { + dynamicRenderingLocalRead = dynamicRenderingLocalRead_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDynamicRenderingLocalReadFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDynamicRenderingLocalReadFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dynamicRenderingLocalRead ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDynamicRenderingLocalReadFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return 
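// Illustrative usage sketch (not generated code): PhysicalDeviceDynamicRenderingFeatures
// doubles as an output struct (chained into getFeatures2) and an input struct
// (chained into vk::DeviceCreateInfo::pNext to enable the feature). A minimal
// enable-path sketch, assuming dynamic rendering is supported (Vulkan 1.3, or
// VK_KHR_dynamic_rendering enabled separately); the single-queue setup and the
// function name are placeholders, not a recommended configuration.
inline vk::Device createDeviceWithDynamicRenderingSketch( vk::PhysicalDevice physicalDevice, uint32_t queueFamilyIndex )
{
  float queuePriority = 1.0f;
  vk::DeviceQueueCreateInfo queueInfo( {}, queueFamilyIndex, 1, &queuePriority );

  // The generated setters make the request explicit; the struct is then hung
  // off DeviceCreateInfo::pNext.
  vk::PhysicalDeviceDynamicRenderingFeatures dynamicRenderingFeatures;
  dynamicRenderingFeatures.setDynamicRendering( VK_TRUE );

  vk::DeviceCreateInfo deviceInfo( {}, queueInfo );
  deviceInfo.setPNext( &dynamicRenderingFeatures );
  return physicalDevice.createDevice( deviceInfo );
}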
this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicRenderingLocalRead == rhs.dynamicRenderingLocalRead ); +# endif + } + + bool operator!=( PhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDynamicRenderingLocalReadFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDynamicRenderingLocalReadFeatures; + }; + + using PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR = PhysicalDeviceDynamicRenderingLocalReadFeatures; + + struct PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT + { + using NativeType = VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingUnusedAttachments_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dynamicRenderingUnusedAttachments{ dynamicRenderingUnusedAttachments_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( + *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & + operator=( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & + operator=( VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & + setDynamicRenderingUnusedAttachments( VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingUnusedAttachments_ ) VULKAN_HPP_NOEXCEPT + { + dynamicRenderingUnusedAttachments = dynamicRenderingUnusedAttachments_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dynamicRenderingUnusedAttachments ); + } 
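// Illustrative usage sketch (not generated code): related Bool32-only feature
// structs such as PhysicalDeviceDynamicRenderingFeatures and
// PhysicalDeviceDynamicRenderingLocalReadFeatures are usually queried together
// through a single vk::PhysicalDeviceFeatures2 chain. Assumes a Vulkan 1.1+
// instance, a valid vk::PhysicalDevice, and that the corresponding extensions
// or core versions are advertised; the function name is a placeholder.
inline bool supportsDynamicRenderingLocalReadSketch( vk::PhysicalDevice physicalDevice )
{
  auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                           vk::PhysicalDeviceDynamicRenderingFeatures,
                                           vk::PhysicalDeviceDynamicRenderingLocalReadFeatures>();
  // Both Bool32 members must be reported as supported before the features may
  // be enabled at device creation.
  return chain.get<vk::PhysicalDeviceDynamicRenderingFeatures>().dynamicRendering &&
         chain.get<vk::PhysicalDeviceDynamicRenderingLocalReadFeatures>().dynamicRenderingLocalRead;
}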
+#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicRenderingUnusedAttachments == rhs.dynamicRenderingUnusedAttachments ); +# endif + } + + bool operator!=( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingUnusedAttachments = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; + }; + + struct PhysicalDeviceExclusiveScissorFeaturesNV + { + using NativeType = VkPhysicalDeviceExclusiveScissorFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , exclusiveScissor{ exclusiveScissor_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExclusiveScissorFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExclusiveScissorFeaturesNV & operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExclusiveScissorFeaturesNV & operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT + { + exclusiveScissor = exclusiveScissor_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExclusiveScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, exclusiveScissor ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExclusiveScissorFeaturesNV const & ) const = default; +#else + bool 
operator==( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exclusiveScissor == rhs.exclusiveScissor ); +# endif + } + + bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExclusiveScissorFeaturesNV; + }; + + struct PhysicalDeviceExtendedDynamicState2FeaturesEXT + { + using NativeType = VkPhysicalDeviceExtendedDynamicState2FeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , extendedDynamicState2{ extendedDynamicState2_ } + , extendedDynamicState2LogicOp{ extendedDynamicState2LogicOp_ } + , extendedDynamicState2PatchControlPoints{ extendedDynamicState2PatchControlPoints_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExtendedDynamicState2FeaturesEXT( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExtendedDynamicState2FeaturesEXT( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExtendedDynamicState2FeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExtendedDynamicState2FeaturesEXT & operator=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExtendedDynamicState2FeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & + setExtendedDynamicState2( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState2 = extendedDynamicState2_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & + setExtendedDynamicState2LogicOp( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState2LogicOp = extendedDynamicState2LogicOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & + setExtendedDynamicState2PatchControlPoints( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState2PatchControlPoints = 
extendedDynamicState2PatchControlPoints_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, extendedDynamicState2, extendedDynamicState2LogicOp, extendedDynamicState2PatchControlPoints ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedDynamicState2 == rhs.extendedDynamicState2 ) && + ( extendedDynamicState2LogicOp == rhs.extendedDynamicState2LogicOp ) && + ( extendedDynamicState2PatchControlPoints == rhs.extendedDynamicState2PatchControlPoints ); +# endif + } + + bool operator!=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExtendedDynamicState2FeaturesEXT; + }; + + struct PhysicalDeviceExtendedDynamicState3FeaturesEXT + { + using NativeType = VkPhysicalDeviceExtendedDynamicState3FeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExtendedDynamicState3FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3TessellationDomainOrigin_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClampEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3PolygonMode_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationSamples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToCoverageEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToOneEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LogicOpEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEquation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorWriteMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationStream_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ConservativeRasterizationMode_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 
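// Illustrative usage sketch (not generated code): because every generated
// setter returns *this, request structs can be filled by chaining. A minimal
// sketch for VK_EXT_extended_dynamic_state2; it assumes the features were
// first confirmed via getFeatures2, and the function name is a placeholder.
inline vk::PhysicalDeviceExtendedDynamicState2FeaturesEXT makeExtendedDynamicState2RequestSketch()
{
  // Request the base feature plus dynamic logic-op state, leaving the
  // patch-control-points feature disabled; chain the result into
  // vk::DeviceCreateInfo::pNext to enable it.
  vk::PhysicalDeviceExtendedDynamicState2FeaturesEXT eds2Features;
  eds2Features.setExtendedDynamicState2( VK_TRUE ).setExtendedDynamicState2LogicOp( VK_TRUE );
  return eds2Features;
}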
extendedDynamicState3DepthClipEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleLocationsEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendAdvanced_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ProvokingVertexMode_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineRasterizationMode_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineStippleEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipNegativeOneToOne_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportWScalingEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportSwizzle_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorLocation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationMode_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTableEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageReductionMode_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RepresentativeFragmentTestEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ShadingRateImageEnable_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , extendedDynamicState3TessellationDomainOrigin{ extendedDynamicState3TessellationDomainOrigin_ } + , extendedDynamicState3DepthClampEnable{ extendedDynamicState3DepthClampEnable_ } + , extendedDynamicState3PolygonMode{ extendedDynamicState3PolygonMode_ } + , extendedDynamicState3RasterizationSamples{ extendedDynamicState3RasterizationSamples_ } + , extendedDynamicState3SampleMask{ extendedDynamicState3SampleMask_ } + , extendedDynamicState3AlphaToCoverageEnable{ extendedDynamicState3AlphaToCoverageEnable_ } + , extendedDynamicState3AlphaToOneEnable{ extendedDynamicState3AlphaToOneEnable_ } + , extendedDynamicState3LogicOpEnable{ extendedDynamicState3LogicOpEnable_ } + , extendedDynamicState3ColorBlendEnable{ extendedDynamicState3ColorBlendEnable_ } + , extendedDynamicState3ColorBlendEquation{ extendedDynamicState3ColorBlendEquation_ } + , extendedDynamicState3ColorWriteMask{ extendedDynamicState3ColorWriteMask_ } + , extendedDynamicState3RasterizationStream{ extendedDynamicState3RasterizationStream_ } + , extendedDynamicState3ConservativeRasterizationMode{ extendedDynamicState3ConservativeRasterizationMode_ } + , extendedDynamicState3ExtraPrimitiveOverestimationSize{ extendedDynamicState3ExtraPrimitiveOverestimationSize_ } + , extendedDynamicState3DepthClipEnable{ extendedDynamicState3DepthClipEnable_ } + , extendedDynamicState3SampleLocationsEnable{ extendedDynamicState3SampleLocationsEnable_ } + , extendedDynamicState3ColorBlendAdvanced{ extendedDynamicState3ColorBlendAdvanced_ } + , extendedDynamicState3ProvokingVertexMode{ extendedDynamicState3ProvokingVertexMode_ } + , extendedDynamicState3LineRasterizationMode{ extendedDynamicState3LineRasterizationMode_ } + , extendedDynamicState3LineStippleEnable{ extendedDynamicState3LineStippleEnable_ } + , extendedDynamicState3DepthClipNegativeOneToOne{ extendedDynamicState3DepthClipNegativeOneToOne_ } + , extendedDynamicState3ViewportWScalingEnable{ extendedDynamicState3ViewportWScalingEnable_ } + , extendedDynamicState3ViewportSwizzle{ extendedDynamicState3ViewportSwizzle_ } + , extendedDynamicState3CoverageToColorEnable{ 
extendedDynamicState3CoverageToColorEnable_ } + , extendedDynamicState3CoverageToColorLocation{ extendedDynamicState3CoverageToColorLocation_ } + , extendedDynamicState3CoverageModulationMode{ extendedDynamicState3CoverageModulationMode_ } + , extendedDynamicState3CoverageModulationTableEnable{ extendedDynamicState3CoverageModulationTableEnable_ } + , extendedDynamicState3CoverageModulationTable{ extendedDynamicState3CoverageModulationTable_ } + , extendedDynamicState3CoverageReductionMode{ extendedDynamicState3CoverageReductionMode_ } + , extendedDynamicState3RepresentativeFragmentTestEnable{ extendedDynamicState3RepresentativeFragmentTestEnable_ } + , extendedDynamicState3ShadingRateImageEnable{ extendedDynamicState3ShadingRateImageEnable_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExtendedDynamicState3FeaturesEXT( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExtendedDynamicState3FeaturesEXT( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExtendedDynamicState3FeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExtendedDynamicState3FeaturesEXT & operator=( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExtendedDynamicState3FeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3TessellationDomainOrigin( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3TessellationDomainOrigin_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3TessellationDomainOrigin = extendedDynamicState3TessellationDomainOrigin_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3DepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClampEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3DepthClampEnable = extendedDynamicState3DepthClampEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3PolygonMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3PolygonMode_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3PolygonMode = extendedDynamicState3PolygonMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3RasterizationSamples( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationSamples_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3RasterizationSamples = extendedDynamicState3RasterizationSamples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3SampleMask( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleMask_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3SampleMask = extendedDynamicState3SampleMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3AlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 
extendedDynamicState3AlphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3AlphaToCoverageEnable = extendedDynamicState3AlphaToCoverageEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3AlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3AlphaToOneEnable = extendedDynamicState3AlphaToOneEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3LogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LogicOpEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3LogicOpEnable = extendedDynamicState3LogicOpEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3ColorBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3ColorBlendEnable = extendedDynamicState3ColorBlendEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3ColorBlendEquation( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEquation_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3ColorBlendEquation = extendedDynamicState3ColorBlendEquation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3ColorWriteMask( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorWriteMask_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3ColorWriteMask = extendedDynamicState3ColorWriteMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3RasterizationStream( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationStream_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3RasterizationStream = extendedDynamicState3RasterizationStream_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ConservativeRasterizationMode( + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ConservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3ConservativeRasterizationMode = extendedDynamicState3ConservativeRasterizationMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ExtraPrimitiveOverestimationSize( + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3ExtraPrimitiveOverestimationSize = extendedDynamicState3ExtraPrimitiveOverestimationSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3DepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3DepthClipEnable = extendedDynamicState3DepthClipEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3SampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3SampleLocationsEnable = extendedDynamicState3SampleLocationsEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3ColorBlendAdvanced( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendAdvanced_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3ColorBlendAdvanced = extendedDynamicState3ColorBlendAdvanced_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3ProvokingVertexMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ProvokingVertexMode_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3ProvokingVertexMode = extendedDynamicState3ProvokingVertexMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3LineRasterizationMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3LineRasterizationMode = extendedDynamicState3LineRasterizationMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3LineStippleEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineStippleEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3LineStippleEnable = extendedDynamicState3LineStippleEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3DepthClipNegativeOneToOne( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipNegativeOneToOne_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3DepthClipNegativeOneToOne = extendedDynamicState3DepthClipNegativeOneToOne_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3ViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3ViewportWScalingEnable = extendedDynamicState3ViewportWScalingEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3ViewportSwizzle( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportSwizzle_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3ViewportSwizzle = extendedDynamicState3ViewportSwizzle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3CoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3CoverageToColorEnable = extendedDynamicState3CoverageToColorEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3CoverageToColorLocation( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3CoverageToColorLocation = extendedDynamicState3CoverageToColorLocation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3CoverageModulationMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationMode_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3CoverageModulationMode = extendedDynamicState3CoverageModulationMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageModulationTableEnable( + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT + { 
+ extendedDynamicState3CoverageModulationTableEnable = extendedDynamicState3CoverageModulationTableEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3CoverageModulationTable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3CoverageModulationTable = extendedDynamicState3CoverageModulationTable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3CoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageReductionMode_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3CoverageReductionMode = extendedDynamicState3CoverageReductionMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3RepresentativeFragmentTestEnable( + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RepresentativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3RepresentativeFragmentTestEnable = extendedDynamicState3RepresentativeFragmentTestEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & + setExtendedDynamicState3ShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ShadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState3ShadingRateImageEnable = extendedDynamicState3ShadingRateImageEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + extendedDynamicState3TessellationDomainOrigin, + extendedDynamicState3DepthClampEnable, + extendedDynamicState3PolygonMode, + extendedDynamicState3RasterizationSamples, + extendedDynamicState3SampleMask, + extendedDynamicState3AlphaToCoverageEnable, + extendedDynamicState3AlphaToOneEnable, + extendedDynamicState3LogicOpEnable, + extendedDynamicState3ColorBlendEnable, + extendedDynamicState3ColorBlendEquation, + extendedDynamicState3ColorWriteMask, + extendedDynamicState3RasterizationStream, + extendedDynamicState3ConservativeRasterizationMode, + extendedDynamicState3ExtraPrimitiveOverestimationSize, + extendedDynamicState3DepthClipEnable, + extendedDynamicState3SampleLocationsEnable, + extendedDynamicState3ColorBlendAdvanced, + extendedDynamicState3ProvokingVertexMode, + extendedDynamicState3LineRasterizationMode, + extendedDynamicState3LineStippleEnable, + extendedDynamicState3DepthClipNegativeOneToOne, + extendedDynamicState3ViewportWScalingEnable, + extendedDynamicState3ViewportSwizzle, + extendedDynamicState3CoverageToColorEnable, + extendedDynamicState3CoverageToColorLocation, + extendedDynamicState3CoverageModulationMode, + extendedDynamicState3CoverageModulationTableEnable, + extendedDynamicState3CoverageModulationTable, + extendedDynamicState3CoverageReductionMode, + extendedDynamicState3RepresentativeFragmentTestEnable, + extendedDynamicState3ShadingRateImageEnable ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PhysicalDeviceExtendedDynamicState3FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( extendedDynamicState3TessellationDomainOrigin == rhs.extendedDynamicState3TessellationDomainOrigin ) && + ( extendedDynamicState3DepthClampEnable == rhs.extendedDynamicState3DepthClampEnable ) && + ( extendedDynamicState3PolygonMode == rhs.extendedDynamicState3PolygonMode ) && + ( extendedDynamicState3RasterizationSamples == rhs.extendedDynamicState3RasterizationSamples ) && + ( extendedDynamicState3SampleMask == rhs.extendedDynamicState3SampleMask ) && + ( extendedDynamicState3AlphaToCoverageEnable == rhs.extendedDynamicState3AlphaToCoverageEnable ) && + ( extendedDynamicState3AlphaToOneEnable == rhs.extendedDynamicState3AlphaToOneEnable ) && + ( extendedDynamicState3LogicOpEnable == rhs.extendedDynamicState3LogicOpEnable ) && + ( extendedDynamicState3ColorBlendEnable == rhs.extendedDynamicState3ColorBlendEnable ) && + ( extendedDynamicState3ColorBlendEquation == rhs.extendedDynamicState3ColorBlendEquation ) && + ( extendedDynamicState3ColorWriteMask == rhs.extendedDynamicState3ColorWriteMask ) && + ( extendedDynamicState3RasterizationStream == rhs.extendedDynamicState3RasterizationStream ) && + ( extendedDynamicState3ConservativeRasterizationMode == rhs.extendedDynamicState3ConservativeRasterizationMode ) && + ( extendedDynamicState3ExtraPrimitiveOverestimationSize == rhs.extendedDynamicState3ExtraPrimitiveOverestimationSize ) && + ( extendedDynamicState3DepthClipEnable == rhs.extendedDynamicState3DepthClipEnable ) && + ( extendedDynamicState3SampleLocationsEnable == rhs.extendedDynamicState3SampleLocationsEnable ) && + ( extendedDynamicState3ColorBlendAdvanced == rhs.extendedDynamicState3ColorBlendAdvanced ) && + ( extendedDynamicState3ProvokingVertexMode == rhs.extendedDynamicState3ProvokingVertexMode ) && + ( extendedDynamicState3LineRasterizationMode == rhs.extendedDynamicState3LineRasterizationMode ) && + ( extendedDynamicState3LineStippleEnable == rhs.extendedDynamicState3LineStippleEnable ) && + ( extendedDynamicState3DepthClipNegativeOneToOne == rhs.extendedDynamicState3DepthClipNegativeOneToOne ) && + ( extendedDynamicState3ViewportWScalingEnable == rhs.extendedDynamicState3ViewportWScalingEnable ) && + ( extendedDynamicState3ViewportSwizzle == rhs.extendedDynamicState3ViewportSwizzle ) && + ( extendedDynamicState3CoverageToColorEnable == rhs.extendedDynamicState3CoverageToColorEnable ) && + ( extendedDynamicState3CoverageToColorLocation == rhs.extendedDynamicState3CoverageToColorLocation ) && + ( extendedDynamicState3CoverageModulationMode == rhs.extendedDynamicState3CoverageModulationMode ) && + ( extendedDynamicState3CoverageModulationTableEnable == rhs.extendedDynamicState3CoverageModulationTableEnable ) && + ( extendedDynamicState3CoverageModulationTable == rhs.extendedDynamicState3CoverageModulationTable ) && + ( extendedDynamicState3CoverageReductionMode == rhs.extendedDynamicState3CoverageReductionMode ) && + ( extendedDynamicState3RepresentativeFragmentTestEnable == rhs.extendedDynamicState3RepresentativeFragmentTestEnable ) && + ( extendedDynamicState3ShadingRateImageEnable == rhs.extendedDynamicState3ShadingRateImageEnable ); +# endif + } + + bool operator!=( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3TessellationDomainOrigin = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClampEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3PolygonMode = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationSamples = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToCoverageEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToOneEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LogicOpEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendEquation = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorWriteMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationStream = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ConservativeRasterizationMode = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3SampleLocationsEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendAdvanced = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ProvokingVertexMode = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineRasterizationMode = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3LineStippleEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipNegativeOneToOne = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportWScalingEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ViewportSwizzle = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageToColorLocation = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationMode = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTableEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageModulationTable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3CoverageReductionMode = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RepresentativeFragmentTestEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ShadingRateImageEnable = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExtendedDynamicState3FeaturesEXT; + }; + + struct PhysicalDeviceExtendedDynamicState3PropertiesEXT + { + using NativeType = VkPhysicalDeviceExtendedDynamicState3PropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState3PropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 dynamicPrimitiveTopologyUnrestricted_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dynamicPrimitiveTopologyUnrestricted{ dynamicPrimitiveTopologyUnrestricted_ } + { + } + + VULKAN_HPP_CONSTEXPR + 
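// Illustrative usage sketch (not generated code): VK_EXT_extended_dynamic_state3
// exposes one Bool32 per dynamic state, so a capability check normally inspects
// only the members it intends to use; each member gates one vkCmdSet*EXT entry
// point (polygon mode, color-blend enable and color-write mask below). Assumes
// the extension is advertised and `physicalDevice` is valid; the function name
// is a placeholder.
inline bool supportsDynamicPolygonModeAndBlendSketch( vk::PhysicalDevice physicalDevice )
{
  auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceExtendedDynamicState3FeaturesEXT>();
  vk::PhysicalDeviceExtendedDynamicState3FeaturesEXT const & eds3 = chain.get<vk::PhysicalDeviceExtendedDynamicState3FeaturesEXT>();
  return eds3.extendedDynamicState3PolygonMode && eds3.extendedDynamicState3ColorBlendEnable && eds3.extendedDynamicState3ColorWriteMask;
}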
PhysicalDeviceExtendedDynamicState3PropertiesEXT( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExtendedDynamicState3PropertiesEXT( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExtendedDynamicState3PropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExtendedDynamicState3PropertiesEXT & operator=( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExtendedDynamicState3PropertiesEXT & operator=( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3PropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3PropertiesEXT & + setDynamicPrimitiveTopologyUnrestricted( VULKAN_HPP_NAMESPACE::Bool32 dynamicPrimitiveTopologyUnrestricted_ ) VULKAN_HPP_NOEXCEPT + { + dynamicPrimitiveTopologyUnrestricted = dynamicPrimitiveTopologyUnrestricted_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dynamicPrimitiveTopologyUnrestricted ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicPrimitiveTopologyUnrestricted == rhs.dynamicPrimitiveTopologyUnrestricted ); +# endif + } + + bool operator!=( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicPrimitiveTopologyUnrestricted = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExtendedDynamicState3PropertiesEXT; + }; + + struct PhysicalDeviceExtendedDynamicStateFeaturesEXT + { + using NativeType = VkPhysicalDeviceExtendedDynamicStateFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , extendedDynamicState{ extendedDynamicState_ } + 
{ + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExtendedDynamicStateFeaturesEXT( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExtendedDynamicStateFeaturesEXT( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExtendedDynamicStateFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & + setExtendedDynamicState( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ ) VULKAN_HPP_NOEXCEPT + { + extendedDynamicState = extendedDynamicState_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, extendedDynamicState ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedDynamicState == rhs.extendedDynamicState ); +# endif + } + + bool operator!=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExtendedDynamicStateFeaturesEXT; + }; + + struct PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV + { + using NativeType = VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 extendedSparseAddressSpace_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , extendedSparseAddressSpace{ extendedSparseAddressSpace_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( 
PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV & + operator=( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV & operator=( VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV & + setExtendedSparseAddressSpace( VULKAN_HPP_NAMESPACE::Bool32 extendedSparseAddressSpace_ ) VULKAN_HPP_NOEXCEPT + { + extendedSparseAddressSpace = extendedSparseAddressSpace_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, extendedSparseAddressSpace ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedSparseAddressSpace == rhs.extendedSparseAddressSpace ); +# endif + } + + bool operator!=( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 extendedSparseAddressSpace = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV; + }; + + struct PhysicalDeviceExtendedSparseAddressSpacePropertiesNV + { + using NativeType = VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedSparseAddressSpacePropertiesNV( VULKAN_HPP_NAMESPACE::DeviceSize extendedSparseAddressSpaceSize_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags extendedSparseImageUsageFlags_ = {}, + VULKAN_HPP_NAMESPACE::BufferUsageFlags extendedSparseBufferUsageFlags_ = {}, + void * pNext_ 
= nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , extendedSparseAddressSpaceSize{ extendedSparseAddressSpaceSize_ } + , extendedSparseImageUsageFlags{ extendedSparseImageUsageFlags_ } + , extendedSparseBufferUsageFlags{ extendedSparseBufferUsageFlags_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExtendedSparseAddressSpacePropertiesNV( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExtendedSparseAddressSpacePropertiesNV( VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExtendedSparseAddressSpacePropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExtendedSparseAddressSpacePropertiesNV & + operator=( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExtendedSparseAddressSpacePropertiesNV & operator=( VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, extendedSparseAddressSpaceSize, extendedSparseImageUsageFlags, extendedSparseBufferUsageFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedSparseAddressSpaceSize == rhs.extendedSparseAddressSpaceSize ) && + ( extendedSparseImageUsageFlags == rhs.extendedSparseImageUsageFlags ) && ( extendedSparseBufferUsageFlags == rhs.extendedSparseBufferUsageFlags ); +# endif + } + + bool operator!=( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize extendedSparseAddressSpaceSize = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags extendedSparseImageUsageFlags = {}; + VULKAN_HPP_NAMESPACE::BufferUsageFlags extendedSparseBufferUsageFlags = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExtendedSparseAddressSpacePropertiesNV; + }; + + struct PhysicalDeviceExternalBufferInfo + { + using NativeType = VkPhysicalDeviceExternalBufferInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalBufferInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( + VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, + 
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , usage{ usage_ } + , handleType{ handleType_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalBufferInfo( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExternalBufferInfo & operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExternalBufferInfo & operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExternalBufferInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, usage, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalBufferInfo const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalBufferInfo; + }; + + using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo; + + struct PhysicalDeviceExternalFenceInfo + { + using NativeType = 
VkPhysicalDeviceExternalFenceInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFenceInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleType{ handleType_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalFenceInfo( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExternalFenceInfo & operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExternalFenceInfo & operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExternalFenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalFenceInfo const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalFenceInfo; + }; + + using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo; + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct PhysicalDeviceExternalFormatResolveFeaturesANDROID + { + using NativeType = VkPhysicalDeviceExternalFormatResolveFeaturesANDROID; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDeviceExternalFormatResolveFeaturesANDROID; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFormatResolveFeaturesANDROID( VULKAN_HPP_NAMESPACE::Bool32 externalFormatResolve_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , externalFormatResolve{ externalFormatResolve_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExternalFormatResolveFeaturesANDROID( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalFormatResolveFeaturesANDROID( VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalFormatResolveFeaturesANDROID( *reinterpret_cast<PhysicalDeviceExternalFormatResolveFeaturesANDROID const *>( &rhs ) ) + { + } + + PhysicalDeviceExternalFormatResolveFeaturesANDROID & + operator=( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExternalFormatResolveFeaturesANDROID & operator=( VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolveFeaturesANDROID const *>( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFormatResolveFeaturesANDROID & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFormatResolveFeaturesANDROID & + setExternalFormatResolve( VULKAN_HPP_NAMESPACE::Bool32 externalFormatResolve_ ) VULKAN_HPP_NOEXCEPT + { + externalFormatResolve = externalFormatResolve_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceExternalFormatResolveFeaturesANDROID *>( this ); + } + + operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceExternalFormatResolveFeaturesANDROID *>( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, externalFormatResolve ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & ) const = default; +# else + bool operator==( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormatResolve == rhs.externalFormatResolve ); +# endif + } + + bool operator!=( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFormatResolveFeaturesANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 externalFormatResolve = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceExternalFormatResolveFeaturesANDROID> + { + using Type = PhysicalDeviceExternalFormatResolveFeaturesANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + struct PhysicalDeviceExternalFormatResolvePropertiesANDROID + { + using NativeType = VkPhysicalDeviceExternalFormatResolvePropertiesANDROID; + + static const bool allowDuplicate =
false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFormatResolvePropertiesANDROID; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFormatResolvePropertiesANDROID( + VULKAN_HPP_NAMESPACE::Bool32 nullColorAttachmentWithExternalFormatResolve_ = {}, + VULKAN_HPP_NAMESPACE::ChromaLocation externalFormatResolveChromaOffsetX_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::ChromaLocation externalFormatResolveChromaOffsetY_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , nullColorAttachmentWithExternalFormatResolve{ nullColorAttachmentWithExternalFormatResolve_ } + , externalFormatResolveChromaOffsetX{ externalFormatResolveChromaOffsetX_ } + , externalFormatResolveChromaOffsetY{ externalFormatResolveChromaOffsetY_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExternalFormatResolvePropertiesANDROID( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalFormatResolvePropertiesANDROID( VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalFormatResolvePropertiesANDROID( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExternalFormatResolvePropertiesANDROID & + operator=( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExternalFormatResolvePropertiesANDROID & operator=( VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalFormatResolvePropertiesANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, nullColorAttachmentWithExternalFormatResolve, externalFormatResolveChromaOffsetX, externalFormatResolveChromaOffsetY ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & ) const = default; +# else + bool operator==( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( nullColorAttachmentWithExternalFormatResolve == rhs.nullColorAttachmentWithExternalFormatResolve ) && + ( externalFormatResolveChromaOffsetX == rhs.externalFormatResolveChromaOffsetX ) && + ( externalFormatResolveChromaOffsetY == rhs.externalFormatResolveChromaOffsetY ); +# endif + } + + bool operator!=( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFormatResolvePropertiesANDROID; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 nullColorAttachmentWithExternalFormatResolve = {}; 
+ VULKAN_HPP_NAMESPACE::ChromaLocation externalFormatResolveChromaOffsetX = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::ChromaLocation externalFormatResolveChromaOffsetY = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalFormatResolvePropertiesANDROID; + }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + struct PhysicalDeviceExternalImageFormatInfo + { + using NativeType = VkPhysicalDeviceExternalImageFormatInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalImageFormatInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleType{ handleType_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalImageFormatInfo( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExternalImageFormatInfo & operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExternalImageFormatInfo & operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExternalImageFormatInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalImageFormatInfo const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo; + const void * pNext = {}; + 
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalImageFormatInfo; + }; + + using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo; + + struct PhysicalDeviceExternalMemoryHostPropertiesEXT + { + using NativeType = VkPhysicalDeviceExternalMemoryHostPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minImportedHostPointerAlignment{ minImportedHostPointerAlignment_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExternalMemoryHostPropertiesEXT( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalMemoryHostPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minImportedHostPointerAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalMemoryHostPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalMemoryHostPropertiesEXT; + }; + + struct PhysicalDeviceExternalMemoryRDMAFeaturesNV + { + using NativeType = VkPhysicalDeviceExternalMemoryRDMAFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , externalMemoryRDMA{ externalMemoryRDMA_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalMemoryRDMAFeaturesNV( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalMemoryRDMAFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExternalMemoryRDMAFeaturesNV & operator=( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExternalMemoryRDMAFeaturesNV & operator=( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV & + setExternalMemoryRDMA( VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA_ ) VULKAN_HPP_NOEXCEPT + { + externalMemoryRDMA = externalMemoryRDMA_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, externalMemoryRDMA ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryRDMA == rhs.externalMemoryRDMA ); +# endif + } + + bool operator!=( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalMemoryRDMAFeaturesNV; + }; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + struct PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX + { + using NativeType = VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( VULKAN_HPP_NAMESPACE::Bool32 screenBufferImport_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , screenBufferImport{ screenBufferImport_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & + operator=( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & operator=( VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & + setScreenBufferImport( VULKAN_HPP_NAMESPACE::Bool32 screenBufferImport_ ) VULKAN_HPP_NOEXCEPT + { + screenBufferImport = screenBufferImport_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, screenBufferImport ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & ) const = default; +# else + bool operator==( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( screenBufferImport == rhs.screenBufferImport ); +# endif + } + + bool operator!=( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 screenBufferImport = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; + }; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + struct PhysicalDeviceExternalSemaphoreInfo + { + using NativeType = VkPhysicalDeviceExternalSemaphoreInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
PhysicalDeviceExternalSemaphoreInfo( + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , handleType{ handleType_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceExternalSemaphoreInfo( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceExternalSemaphoreInfo & operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceExternalSemaphoreInfo & operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceExternalSemaphoreInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, handleType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceExternalSemaphoreInfo const & ) const = default; +#else + bool operator==( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceExternalSemaphoreInfo; + }; + + using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo; + + struct PhysicalDeviceFaultFeaturesEXT + { + using NativeType = VkPhysicalDeviceFaultFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFaultFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFaultFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 deviceFault_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceFaultVendorBinary_ = {}, + void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceFault{ deviceFault_ } + , deviceFaultVendorBinary{ deviceFaultVendorBinary_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFaultFeaturesEXT( PhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFaultFeaturesEXT( VkPhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFaultFeaturesEXT( *reinterpret_cast<PhysicalDeviceFaultFeaturesEXT const *>( &rhs ) ) + { + } + + PhysicalDeviceFaultFeaturesEXT & operator=( PhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFaultFeaturesEXT & operator=( VkPhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & setDeviceFault( VULKAN_HPP_NAMESPACE::Bool32 deviceFault_ ) VULKAN_HPP_NOEXCEPT + { + deviceFault = deviceFault_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & + setDeviceFaultVendorBinary( VULKAN_HPP_NAMESPACE::Bool32 deviceFaultVendorBinary_ ) VULKAN_HPP_NOEXCEPT + { + deviceFaultVendorBinary = deviceFaultVendorBinary_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceFaultFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceFaultFeaturesEXT *>( this ); + } + + operator VkPhysicalDeviceFaultFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceFaultFeaturesEXT *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceFault, deviceFaultVendorBinary ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFaultFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFaultFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceFault == rhs.deviceFault ) && + ( deviceFaultVendorBinary == rhs.deviceFaultVendorBinary ); +# endif + } + + bool operator!=( PhysicalDeviceFaultFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFaultFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceFault = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceFaultVendorBinary = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceFaultFeaturesEXT> + { + using Type = PhysicalDeviceFaultFeaturesEXT; + }; + + struct PhysicalDeviceFeatures2 + { + using NativeType = VkPhysicalDeviceFeatures2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFeatures2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , features{ features_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( PhysicalDeviceFeatures2
const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFeatures2( *reinterpret_cast<PhysicalDeviceFeatures2 const *>( &rhs ) ) + { + } + + PhysicalDeviceFeatures2 & operator=( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFeatures2 & operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & features_ ) VULKAN_HPP_NOEXCEPT + { + features = features_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceFeatures2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceFeatures2 *>( this ); + } + + operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceFeatures2 *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, features ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFeatures2 const & ) const = default; +#else + bool operator==( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( features == rhs.features ); +# endif + } + + bool operator!=( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceFeatures2> + { + using Type = PhysicalDeviceFeatures2; + }; + + using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2; + + struct PhysicalDeviceFloatControlsProperties + { + using NativeType = VkPhysicalDeviceFloatControlsProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFloatControlsProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32
shaderDenormFlushToZeroFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , denormBehaviorIndependence{ denormBehaviorIndependence_ } + , roundingModeIndependence{ roundingModeIndependence_ } + , shaderSignedZeroInfNanPreserveFloat16{ shaderSignedZeroInfNanPreserveFloat16_ } + , shaderSignedZeroInfNanPreserveFloat32{ shaderSignedZeroInfNanPreserveFloat32_ } + , shaderSignedZeroInfNanPreserveFloat64{ shaderSignedZeroInfNanPreserveFloat64_ } + , shaderDenormPreserveFloat16{ shaderDenormPreserveFloat16_ } + , shaderDenormPreserveFloat32{ shaderDenormPreserveFloat32_ } + , shaderDenormPreserveFloat64{ shaderDenormPreserveFloat64_ } + , shaderDenormFlushToZeroFloat16{ shaderDenormFlushToZeroFloat16_ } + , shaderDenormFlushToZeroFloat32{ shaderDenormFlushToZeroFloat32_ } + , shaderDenormFlushToZeroFloat64{ shaderDenormFlushToZeroFloat64_ } + , shaderRoundingModeRTEFloat16{ shaderRoundingModeRTEFloat16_ } + , shaderRoundingModeRTEFloat32{ shaderRoundingModeRTEFloat32_ } + , shaderRoundingModeRTEFloat64{ shaderRoundingModeRTEFloat64_ } + , shaderRoundingModeRTZFloat16{ shaderRoundingModeRTZFloat16_ } + , shaderRoundingModeRTZFloat32{ shaderRoundingModeRTZFloat32_ } + , shaderRoundingModeRTZFloat64{ shaderRoundingModeRTZFloat64_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFloatControlsProperties( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFloatControlsProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFloatControlsProperties & operator=( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFloatControlsProperties & operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFloatControlsProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFloatControlsProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + denormBehaviorIndependence, + roundingModeIndependence, + shaderSignedZeroInfNanPreserveFloat16, + shaderSignedZeroInfNanPreserveFloat32, + shaderSignedZeroInfNanPreserveFloat64, + shaderDenormPreserveFloat16, + shaderDenormPreserveFloat32, + shaderDenormPreserveFloat64, + shaderDenormFlushToZeroFloat16, + shaderDenormFlushToZeroFloat32, + shaderDenormFlushToZeroFloat64, + shaderRoundingModeRTEFloat16, + shaderRoundingModeRTEFloat32, + shaderRoundingModeRTEFloat64, + shaderRoundingModeRTZFloat16, + shaderRoundingModeRTZFloat32, + shaderRoundingModeRTZFloat64 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto 
operator<=>( PhysicalDeviceFloatControlsProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && + ( roundingModeIndependence == rhs.roundingModeIndependence ) && + ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) && + ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) && + ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) && + ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) && + ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) && + ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) && + ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) && + ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) && + ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) && + ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ); +# endif + } + + bool operator!=( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFloatControlsProperties; + }; + + using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties; + + struct PhysicalDeviceFragmentDensityMap2FeaturesEXT + { + using NativeType = 
VkPhysicalDeviceFragmentDensityMap2FeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fragmentDensityMapDeferred{ fragmentDensityMapDeferred_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMap2FeaturesEXT( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMap2FeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT & + setFragmentDensityMapDeferred( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapDeferred = fragmentDensityMapDeferred_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentDensityMapDeferred ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMapDeferred == rhs.fragmentDensityMapDeferred ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMap2FeaturesEXT; + }; + + struct PhysicalDeviceFragmentDensityMap2PropertiesEXT + { + using NativeType = VkPhysicalDeviceFragmentDensityMap2PropertiesEXT; + + static const bool allowDuplicate = false; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess_ = {}, + uint32_t maxSubsampledArrayLayers_ = {}, + uint32_t maxDescriptorSetSubsampledSamplers_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , subsampledLoads{ subsampledLoads_ } + , subsampledCoarseReconstructionEarlyAccess{ subsampledCoarseReconstructionEarlyAccess_ } + , maxSubsampledArrayLayers{ maxSubsampledArrayLayers_ } + , maxDescriptorSetSubsampledSamplers{ maxDescriptorSetSubsampledSamplers_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentDensityMap2PropertiesEXT( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMap2PropertiesEXT( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMap2PropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, subsampledLoads, subsampledCoarseReconstructionEarlyAccess, maxSubsampledArrayLayers, maxDescriptorSetSubsampledSamplers ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subsampledLoads == rhs.subsampledLoads ) && + ( subsampledCoarseReconstructionEarlyAccess == rhs.subsampledCoarseReconstructionEarlyAccess ) && + ( maxSubsampledArrayLayers == rhs.maxSubsampledArrayLayers ) && ( maxDescriptorSetSubsampledSamplers == rhs.maxDescriptorSetSubsampledSamplers ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads = {}; + VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess = {}; + uint32_t maxSubsampledArrayLayers = {}; + uint32_t maxDescriptorSetSubsampledSamplers = {}; + }; + + template <> + struct CppType + { + using Type = 
PhysicalDeviceFragmentDensityMap2PropertiesEXT; + }; + + struct PhysicalDeviceFragmentDensityMapFeaturesEXT + { + using NativeType = VkPhysicalDeviceFragmentDensityMapFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fragmentDensityMap{ fragmentDensityMap_ } + , fragmentDensityMapDynamic{ fragmentDensityMapDynamic_ } + , fragmentDensityMapNonSubsampledImages{ fragmentDensityMapNonSubsampledImages_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMapFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & + setFragmentDensityMap( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMap = fragmentDensityMap_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & + setFragmentDensityMapDynamic( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapDynamic = fragmentDensityMapDynamic_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & + setFragmentDensityMapNonSubsampledImages( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapNonSubsampledImages = fragmentDensityMapNonSubsampledImages_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentDensityMap, fragmentDensityMapDynamic, fragmentDensityMapNonSubsampledImages ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMapFeaturesEXT const & ) const = default; +#else + bool 
operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMap == rhs.fragmentDensityMap ) && + ( fragmentDensityMapDynamic == rhs.fragmentDensityMapDynamic ) && + ( fragmentDensityMapNonSubsampledImages == rhs.fragmentDensityMapNonSubsampledImages ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMapFeaturesEXT; + }; + + struct PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM + { + using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fragmentDensityMapOffset{ fragmentDensityMapOffset_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & + operator=( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & operator=( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & + setFragmentDensityMapOffset( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset_ ) VULKAN_HPP_NOEXCEPT + { + fragmentDensityMapOffset = fragmentDensityMapOffset_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const 
VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentDensityMapOffset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMapOffset == rhs.fragmentDensityMapOffset ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; + }; + + struct PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM + { + using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( VULKAN_HPP_NAMESPACE::Extent2D fragmentDensityOffsetGranularity_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fragmentDensityOffsetGranularity{ fragmentDensityOffsetGranularity_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM & + operator=( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM & operator=( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentDensityOffsetGranularity ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == 
rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityOffsetGranularity == rhs.fragmentDensityOffsetGranularity ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D fragmentDensityOffsetGranularity = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; + }; + + struct PhysicalDeviceFragmentDensityMapPropertiesEXT + { + using NativeType = VkPhysicalDeviceFragmentDensityMapPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minFragmentDensityTexelSize{ minFragmentDensityTexelSize_ } + , maxFragmentDensityTexelSize{ maxFragmentDensityTexelSize_ } + , fragmentDensityInvocations{ fragmentDensityInvocations_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentDensityMapPropertiesEXT( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentDensityMapPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minFragmentDensityTexelSize, maxFragmentDensityTexelSize, fragmentDensityInvocations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentDensityMapPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize ) && + ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize ) && ( fragmentDensityInvocations == rhs.fragmentDensityInvocations ); +# endif + } + + bool 
operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentDensityMapPropertiesEXT; + }; + + struct PhysicalDeviceFragmentShaderBarycentricFeaturesKHR + { + using NativeType = VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fragmentShaderBarycentric{ fragmentShaderBarycentric_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & + operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & + setFragmentShaderBarycentric( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderBarycentric = fragmentShaderBarycentric_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentShaderBarycentric ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return 
( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + }; + + using PhysicalDeviceFragmentShaderBarycentricFeaturesNV = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + + struct PhysicalDeviceFragmentShaderBarycentricPropertiesKHR + { + using NativeType = VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 triStripVertexOrderIndependentOfProvokingVertex_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , triStripVertexOrderIndependentOfProvokingVertex{ triStripVertexOrderIndependentOfProvokingVertex_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShaderBarycentricPropertiesKHR & + operator=( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShaderBarycentricPropertiesKHR & operator=( VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, triStripVertexOrderIndependentOfProvokingVertex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( triStripVertexOrderIndependentOfProvokingVertex == rhs.triStripVertexOrderIndependentOfProvokingVertex ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return 
!operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 triStripVertexOrderIndependentOfProvokingVertex = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShaderBarycentricPropertiesKHR; + }; + + struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT + { + using NativeType = VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fragmentShaderSampleInterlock{ fragmentShaderSampleInterlock_ } + , fragmentShaderPixelInterlock{ fragmentShaderPixelInterlock_ } + , fragmentShaderShadingRateInterlock{ fragmentShaderShadingRateInterlock_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShaderInterlockFeaturesEXT( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShaderInterlockFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & + setFragmentShaderSampleInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & + setFragmentShaderPixelInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & + setFragmentShaderShadingRateInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentShaderSampleInterlock, fragmentShaderPixelInterlock, fragmentShaderShadingRateInterlock ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock ) && + ( fragmentShaderPixelInterlock == rhs.fragmentShaderPixelInterlock ) && + ( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShaderInterlockFeaturesEXT; + }; + + struct PhysicalDeviceFragmentShadingRateEnumsFeaturesNV + { + using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fragmentShadingRateEnums{ fragmentShadingRateEnums_ } + , supersampleFragmentShadingRates{ supersampleFragmentShadingRates_ } + , noInvocationFragmentShadingRates{ noInvocationFragmentShadingRates_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & operator=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & operator=( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & + setFragmentShadingRateEnums( VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ ) VULKAN_HPP_NOEXCEPT + { + fragmentShadingRateEnums = fragmentShadingRateEnums_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & + setSupersampleFragmentShadingRates( VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT + { + supersampleFragmentShadingRates = supersampleFragmentShadingRates_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & + setNoInvocationFragmentShadingRates( VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT + { + noInvocationFragmentShadingRates = noInvocationFragmentShadingRates_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, fragmentShadingRateEnums, supersampleFragmentShadingRates, noInvocationFragmentShadingRates ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShadingRateEnums == rhs.fragmentShadingRateEnums ) && + ( supersampleFragmentShadingRates == rhs.supersampleFragmentShadingRates ) && + ( noInvocationFragmentShadingRates == rhs.noInvocationFragmentShadingRates ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums = {}; + VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates = {}; + VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + }; + + struct PhysicalDeviceFragmentShadingRateEnumsPropertiesNV + { + using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : 
pNext{ pNext_ } + , maxFragmentShadingRateInvocationCount{ maxFragmentShadingRateInvocationCount_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & + operator=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & operator=( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & + setMaxFragmentShadingRateInvocationCount( VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ ) VULKAN_HPP_NOEXCEPT + { + maxFragmentShadingRateInvocationCount = maxFragmentShadingRateInvocationCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxFragmentShadingRateInvocationCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxFragmentShadingRateInvocationCount == rhs.maxFragmentShadingRateInvocationCount ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + }; + + struct PhysicalDeviceFragmentShadingRateFeaturesKHR + { + using NativeType = VkPhysicalDeviceFragmentShadingRateFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) 
+ VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineFragmentShadingRate{ pipelineFragmentShadingRate_ } + , primitiveFragmentShadingRate{ primitiveFragmentShadingRate_ } + , attachmentFragmentShadingRate{ attachmentFragmentShadingRate_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateFeaturesKHR( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRateFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & + setPipelineFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT + { + pipelineFragmentShadingRate = pipelineFragmentShadingRate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & + setPrimitiveFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT + { + primitiveFragmentShadingRate = primitiveFragmentShadingRate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & + setAttachmentFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT + { + attachmentFragmentShadingRate = attachmentFragmentShadingRate_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineFragmentShadingRate, primitiveFragmentShadingRate, attachmentFragmentShadingRate ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRateFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineFragmentShadingRate == rhs.pipelineFragmentShadingRate ) && + ( primitiveFragmentShadingRate == rhs.primitiveFragmentShadingRate ) && ( 
attachmentFragmentShadingRate == rhs.attachmentFragmentShadingRate ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate = {}; + VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRateFeaturesKHR; + }; + + struct PhysicalDeviceFragmentShadingRateKHR + { + using NativeType = VkPhysicalDeviceFragmentShadingRateKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , sampleCounts{ sampleCounts_ } + , fragmentSize{ fragmentSize_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFragmentShadingRateKHR( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRateKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShadingRateKHR & operator=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShadingRateKHR & operator=( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentShadingRateKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRateKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, sampleCounts, fragmentSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRateKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleCounts == rhs.sampleCounts ) && ( fragmentSize == rhs.fragmentSize ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {}; + VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRateKHR; + }; + + struct 
PhysicalDeviceFragmentShadingRatePropertiesKHR + { + using NativeType = VkPhysicalDeviceFragmentShadingRatePropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR( + VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize_ = {}, + uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize_ = {}, + uint32_t maxFragmentSizeAspectRatio_ = {}, + uint32_t maxFragmentShadingRateCoverageSamples_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minFragmentShadingRateAttachmentTexelSize{ minFragmentShadingRateAttachmentTexelSize_ } + , maxFragmentShadingRateAttachmentTexelSize{ maxFragmentShadingRateAttachmentTexelSize_ } + , maxFragmentShadingRateAttachmentTexelSizeAspectRatio{ maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ } + , primitiveFragmentShadingRateWithMultipleViewports{ primitiveFragmentShadingRateWithMultipleViewports_ } + , layeredShadingRateAttachments{ layeredShadingRateAttachments_ } + , fragmentShadingRateNonTrivialCombinerOps{ fragmentShadingRateNonTrivialCombinerOps_ } + , maxFragmentSize{ maxFragmentSize_ } + , maxFragmentSizeAspectRatio{ maxFragmentSizeAspectRatio_ } + , maxFragmentShadingRateCoverageSamples{ maxFragmentShadingRateCoverageSamples_ } + , maxFragmentShadingRateRasterizationSamples{ maxFragmentShadingRateRasterizationSamples_ } + , fragmentShadingRateWithShaderDepthStencilWrites{ fragmentShadingRateWithShaderDepthStencilWrites_ } + , fragmentShadingRateWithSampleMask{ fragmentShadingRateWithSampleMask_ } + , fragmentShadingRateWithShaderSampleMask{ fragmentShadingRateWithShaderSampleMask_ } + , fragmentShadingRateWithConservativeRasterization{ fragmentShadingRateWithConservativeRasterization_ } + , fragmentShadingRateWithFragmentShaderInterlock{ fragmentShadingRateWithFragmentShaderInterlock_ } + , fragmentShadingRateWithCustomSampleLocations{ fragmentShadingRateWithCustomSampleLocations_ } + , fragmentShadingRateStrictMultiplyCombiner{ fragmentShadingRateStrictMultiplyCombiner_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceFragmentShadingRatePropertiesKHR( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; 
+ + PhysicalDeviceFragmentShadingRatePropertiesKHR( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFragmentShadingRatePropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + minFragmentShadingRateAttachmentTexelSize, + maxFragmentShadingRateAttachmentTexelSize, + maxFragmentShadingRateAttachmentTexelSizeAspectRatio, + primitiveFragmentShadingRateWithMultipleViewports, + layeredShadingRateAttachments, + fragmentShadingRateNonTrivialCombinerOps, + maxFragmentSize, + maxFragmentSizeAspectRatio, + maxFragmentShadingRateCoverageSamples, + maxFragmentShadingRateRasterizationSamples, + fragmentShadingRateWithShaderDepthStencilWrites, + fragmentShadingRateWithSampleMask, + fragmentShadingRateWithShaderSampleMask, + fragmentShadingRateWithConservativeRasterization, + fragmentShadingRateWithFragmentShaderInterlock, + fragmentShadingRateWithCustomSampleLocations, + fragmentShadingRateStrictMultiplyCombiner ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFragmentShadingRatePropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( minFragmentShadingRateAttachmentTexelSize == rhs.minFragmentShadingRateAttachmentTexelSize ) && + ( maxFragmentShadingRateAttachmentTexelSize == rhs.maxFragmentShadingRateAttachmentTexelSize ) && + ( maxFragmentShadingRateAttachmentTexelSizeAspectRatio == rhs.maxFragmentShadingRateAttachmentTexelSizeAspectRatio ) && + ( primitiveFragmentShadingRateWithMultipleViewports == rhs.primitiveFragmentShadingRateWithMultipleViewports ) && + ( layeredShadingRateAttachments == rhs.layeredShadingRateAttachments ) && + ( fragmentShadingRateNonTrivialCombinerOps == rhs.fragmentShadingRateNonTrivialCombinerOps ) && ( maxFragmentSize == rhs.maxFragmentSize ) && + ( maxFragmentSizeAspectRatio == rhs.maxFragmentSizeAspectRatio ) && + ( maxFragmentShadingRateCoverageSamples == rhs.maxFragmentShadingRateCoverageSamples ) && + ( maxFragmentShadingRateRasterizationSamples == rhs.maxFragmentShadingRateRasterizationSamples ) && + ( fragmentShadingRateWithShaderDepthStencilWrites == rhs.fragmentShadingRateWithShaderDepthStencilWrites ) && + ( fragmentShadingRateWithSampleMask == rhs.fragmentShadingRateWithSampleMask ) && + ( fragmentShadingRateWithShaderSampleMask == rhs.fragmentShadingRateWithShaderSampleMask ) && + ( fragmentShadingRateWithConservativeRasterization == 
rhs.fragmentShadingRateWithConservativeRasterization ) && + ( fragmentShadingRateWithFragmentShaderInterlock == rhs.fragmentShadingRateWithFragmentShaderInterlock ) && + ( fragmentShadingRateWithCustomSampleLocations == rhs.fragmentShadingRateWithCustomSampleLocations ) && + ( fragmentShadingRateStrictMultiplyCombiner == rhs.fragmentShadingRateStrictMultiplyCombiner ); +# endif + } + + bool operator!=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize = {}; + uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports = {}; + VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize = {}; + uint32_t maxFragmentSizeAspectRatio = {}; + uint32_t maxFragmentShadingRateCoverageSamples = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFragmentShadingRatePropertiesKHR; + }; + + struct PhysicalDeviceFrameBoundaryFeaturesEXT + { + using NativeType = VkPhysicalDeviceFrameBoundaryFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFrameBoundaryFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceFrameBoundaryFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 frameBoundary_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , frameBoundary{ frameBoundary_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFrameBoundaryFeaturesEXT( PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceFrameBoundaryFeaturesEXT( VkPhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceFrameBoundaryFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceFrameBoundaryFeaturesEXT & operator=( PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceFrameBoundaryFeaturesEXT & operator=( VkPhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceFrameBoundaryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFrameBoundaryFeaturesEXT & setFrameBoundary( VULKAN_HPP_NAMESPACE::Bool32 frameBoundary_ ) VULKAN_HPP_NOEXCEPT + { + frameBoundary = frameBoundary_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceFrameBoundaryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceFrameBoundaryFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, frameBoundary ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceFrameBoundaryFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( frameBoundary == rhs.frameBoundary ); +# endif + } + + bool operator!=( PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFrameBoundaryFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 frameBoundary = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceFrameBoundaryFeaturesEXT; + }; + + struct PhysicalDeviceGlobalPriorityQueryFeatures + { + using NativeType = VkPhysicalDeviceGlobalPriorityQueryFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeatures( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , globalPriorityQuery{ globalPriorityQuery_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeatures( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceGlobalPriorityQueryFeatures( VkPhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceGlobalPriorityQueryFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceGlobalPriorityQueryFeatures & operator=( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceGlobalPriorityQueryFeatures & operator=( VkPhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeatures & + setGlobalPriorityQuery( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ ) VULKAN_HPP_NOEXCEPT + { + globalPriorityQuery 
= globalPriorityQuery_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceGlobalPriorityQueryFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceGlobalPriorityQueryFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, globalPriorityQuery ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceGlobalPriorityQueryFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriorityQuery == rhs.globalPriorityQuery ); +# endif + } + + bool operator!=( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceGlobalPriorityQueryFeatures; + }; + + using PhysicalDeviceGlobalPriorityQueryFeaturesEXT = PhysicalDeviceGlobalPriorityQueryFeatures; + using PhysicalDeviceGlobalPriorityQueryFeaturesKHR = PhysicalDeviceGlobalPriorityQueryFeatures; + + struct PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT + { + using NativeType = VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , graphicsPipelineLibrary{ graphicsPipelineLibrary_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & operator=( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & operator=( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & + setGraphicsPipelineLibrary( 
VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary_ ) VULKAN_HPP_NOEXCEPT + { + graphicsPipelineLibrary = graphicsPipelineLibrary_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, graphicsPipelineLibrary ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( graphicsPipelineLibrary == rhs.graphicsPipelineLibrary ); +# endif + } + + bool operator!=( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; + }; + + struct PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT + { + using NativeType = VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryFastLinking_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , graphicsPipelineLibraryFastLinking{ graphicsPipelineLibraryFastLinking_ } + , graphicsPipelineLibraryIndependentInterpolationDecoration{ graphicsPipelineLibraryIndependentInterpolationDecoration_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & + operator=( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & operator=( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT &
+      setGraphicsPipelineLibraryFastLinking( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryFastLinking_ ) VULKAN_HPP_NOEXCEPT
+    {
+      graphicsPipelineLibraryFastLinking = graphicsPipelineLibraryFastLinking_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & setGraphicsPipelineLibraryIndependentInterpolationDecoration(
+      VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration_ ) VULKAN_HPP_NOEXCEPT
+    {
+      graphicsPipelineLibraryIndependentInterpolationDecoration = graphicsPipelineLibraryIndependentInterpolationDecoration_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, graphicsPipelineLibraryFastLinking, graphicsPipelineLibraryIndependentInterpolationDecoration );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( graphicsPipelineLibraryFastLinking == rhs.graphicsPipelineLibraryFastLinking ) &&
+             ( graphicsPipelineLibraryIndependentInterpolationDecoration == rhs.graphicsPipelineLibraryIndependentInterpolationDecoration );
+#  endif
+    }
+
+    bool operator!=( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryFastLinking = {};
+    VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT>
+  {
+    using Type = PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
+  };
+
+  struct PhysicalDeviceGroupProperties
+  {
+    using NativeType = VkPhysicalDeviceGroupProperties;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGroupProperties;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14
+      PhysicalDeviceGroupProperties( uint32_t physicalDeviceCount_ = {},
+                                     std::array<VULKAN_HPP_NAMESPACE::PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> const & physicalDevices_ = {},
+                                     VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {},
+                                     void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , physicalDeviceCount{ physicalDeviceCount_ }
+      , physicalDevices{ physicalDevices_ }
+      , subsetAllocation{ subsetAllocation_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( PhysicalDeviceGroupProperties const & rhs 
) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceGroupProperties( *reinterpret_cast<PhysicalDeviceGroupProperties const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceGroupProperties & operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceGroupProperties & operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceGroupProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceGroupProperties *>( this );
+    }
+
+    operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceGroupProperties *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               void * const &,
+               uint32_t const &,
+               VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, physicalDeviceCount, physicalDevices, subsetAllocation );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    std::strong_ordering operator<=>( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      if ( auto cmp = sType <=> rhs.sType; cmp != 0 )
+        return cmp;
+      if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 )
+        return cmp;
+      if ( auto cmp = physicalDeviceCount <=> rhs.physicalDeviceCount; cmp != 0 )
+        return cmp;
+      for ( size_t i = 0; i < physicalDeviceCount; ++i )
+      {
+        if ( auto cmp = physicalDevices[i] <=> rhs.physicalDevices[i]; cmp != 0 )
+          return cmp;
+      }
+      if ( auto cmp = subsetAllocation <=> rhs.subsetAllocation; cmp != 0 )
+        return cmp;
+
+      return std::strong_ordering::equivalent;
+    }
+#endif
+
+    bool operator==( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) &&
+             ( memcmp( physicalDevices, rhs.physicalDevices, physicalDeviceCount * sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) ) == 0 ) &&
+             ( subsetAllocation == rhs.subsetAllocation );
+    }
+
+    bool operator!=( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties;
+    void * pNext = {};
+    uint32_t physicalDeviceCount = {};
+    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> physicalDevices = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceGroupProperties>
+  {
+    using Type = PhysicalDeviceGroupProperties;
+  };
+
+  using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties;
+
+  struct PhysicalDeviceHdrVividFeaturesHUAWEI
+  {
+    using NativeType = VkPhysicalDeviceHdrVividFeaturesHUAWEI;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHdrVividFeaturesHUAWEI;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceHdrVividFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 hdrVivid_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , hdrVivid{ hdrVivid_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceHdrVividFeaturesHUAWEI( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceHdrVividFeaturesHUAWEI( VkPhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) 
VULKAN_HPP_NOEXCEPT + : PhysicalDeviceHdrVividFeaturesHUAWEI( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceHdrVividFeaturesHUAWEI & operator=( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceHdrVividFeaturesHUAWEI & operator=( VkPhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHdrVividFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHdrVividFeaturesHUAWEI & setHdrVivid( VULKAN_HPP_NAMESPACE::Bool32 hdrVivid_ ) VULKAN_HPP_NOEXCEPT + { + hdrVivid = hdrVivid_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceHdrVividFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHdrVividFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hdrVivid ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceHdrVividFeaturesHUAWEI const & ) const = default; +#else + bool operator==( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hdrVivid == rhs.hdrVivid ); +# endif + } + + bool operator!=( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHdrVividFeaturesHUAWEI; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hdrVivid = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceHdrVividFeaturesHUAWEI; + }; + + struct PhysicalDeviceHostImageCopyFeatures + { + using NativeType = VkPhysicalDeviceHostImageCopyFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostImageCopyFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceHostImageCopyFeatures( VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , hostImageCopy{ hostImageCopy_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceHostImageCopyFeatures( PhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceHostImageCopyFeatures( VkPhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceHostImageCopyFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceHostImageCopyFeatures & operator=( PhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceHostImageCopyFeatures & operator=( VkPhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( 
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyFeatures & setHostImageCopy( VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ ) VULKAN_HPP_NOEXCEPT + { + hostImageCopy = hostImageCopy_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceHostImageCopyFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHostImageCopyFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hostImageCopy ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceHostImageCopyFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceHostImageCopyFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hostImageCopy == rhs.hostImageCopy ); +# endif + } + + bool operator!=( PhysicalDeviceHostImageCopyFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostImageCopyFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceHostImageCopyFeatures; + }; + + using PhysicalDeviceHostImageCopyFeaturesEXT = PhysicalDeviceHostImageCopyFeatures; + + struct PhysicalDeviceHostImageCopyProperties + { + using NativeType = VkPhysicalDeviceHostImageCopyProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostImageCopyProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties( uint32_t copySrcLayoutCount_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts_ = {}, + uint32_t copyDstLayoutCount_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts_ = {}, + std::array const & optimalTilingLayoutUUID_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , copySrcLayoutCount{ copySrcLayoutCount_ } + , pCopySrcLayouts{ pCopySrcLayouts_ } + , copyDstLayoutCount{ copyDstLayoutCount_ } + , pCopyDstLayouts{ pCopyDstLayouts_ } + , optimalTilingLayoutUUID{ optimalTilingLayoutUUID_ } + , identicalMemoryTypeRequirements{ identicalMemoryTypeRequirements_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties( PhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceHostImageCopyProperties( VkPhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceHostImageCopyProperties( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceHostImageCopyProperties( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & copySrcLayouts_, + 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & copyDstLayouts_ = {}, + std::array const & optimalTilingLayoutUUID_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ = {}, + void * pNext_ = nullptr ) + : pNext( pNext_ ) + , copySrcLayoutCount( static_cast( copySrcLayouts_.size() ) ) + , pCopySrcLayouts( copySrcLayouts_.data() ) + , copyDstLayoutCount( static_cast( copyDstLayouts_.size() ) ) + , pCopyDstLayouts( copyDstLayouts_.data() ) + , optimalTilingLayoutUUID( optimalTilingLayoutUUID_ ) + , identicalMemoryTypeRequirements( identicalMemoryTypeRequirements_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PhysicalDeviceHostImageCopyProperties & operator=( PhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceHostImageCopyProperties & operator=( VkPhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setCopySrcLayoutCount( uint32_t copySrcLayoutCount_ ) VULKAN_HPP_NOEXCEPT + { + copySrcLayoutCount = copySrcLayoutCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & + setPCopySrcLayouts( VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pCopySrcLayouts = pCopySrcLayouts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceHostImageCopyProperties & + setCopySrcLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & copySrcLayouts_ ) VULKAN_HPP_NOEXCEPT + { + copySrcLayoutCount = static_cast( copySrcLayouts_.size() ); + pCopySrcLayouts = copySrcLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setCopyDstLayoutCount( uint32_t copyDstLayoutCount_ ) VULKAN_HPP_NOEXCEPT + { + copyDstLayoutCount = copyDstLayoutCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & + setPCopyDstLayouts( VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pCopyDstLayouts = pCopyDstLayouts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceHostImageCopyProperties & + setCopyDstLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & copyDstLayouts_ ) VULKAN_HPP_NOEXCEPT + { + copyDstLayoutCount = static_cast( copyDstLayouts_.size() ); + pCopyDstLayouts = copyDstLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & + setOptimalTilingLayoutUUID( std::array optimalTilingLayoutUUID_ ) VULKAN_HPP_NOEXCEPT + { + optimalTilingLayoutUUID = optimalTilingLayoutUUID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & + setIdenticalMemoryTypeRequirements( VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ ) VULKAN_HPP_NOEXCEPT + { + identicalMemoryTypeRequirements = identicalMemoryTypeRequirements_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceHostImageCopyProperties const &() const VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHostImageCopyProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, copySrcLayoutCount, pCopySrcLayouts, copyDstLayoutCount, pCopyDstLayouts, optimalTilingLayoutUUID, identicalMemoryTypeRequirements ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceHostImageCopyProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceHostImageCopyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( copySrcLayoutCount == rhs.copySrcLayoutCount ) && + ( pCopySrcLayouts == rhs.pCopySrcLayouts ) && ( copyDstLayoutCount == rhs.copyDstLayoutCount ) && ( pCopyDstLayouts == rhs.pCopyDstLayouts ) && + ( optimalTilingLayoutUUID == rhs.optimalTilingLayoutUUID ) && ( identicalMemoryTypeRequirements == rhs.identicalMemoryTypeRequirements ); +# endif + } + + bool operator!=( PhysicalDeviceHostImageCopyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostImageCopyProperties; + void * pNext = {}; + uint32_t copySrcLayoutCount = {}; + VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts = {}; + uint32_t copyDstLayoutCount = {}; + VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D optimalTilingLayoutUUID = {}; + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceHostImageCopyProperties; + }; + + using PhysicalDeviceHostImageCopyPropertiesEXT = PhysicalDeviceHostImageCopyProperties; + + struct PhysicalDeviceHostQueryResetFeatures + { + using NativeType = VkPhysicalDeviceHostQueryResetFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostQueryResetFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , hostQueryReset{ hostQueryReset_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceHostQueryResetFeatures( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceHostQueryResetFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceHostQueryResetFeatures & operator=( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceHostQueryResetFeatures & operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & setPNext( void * 
pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT + { + hostQueryReset = hostQueryReset_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceHostQueryResetFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHostQueryResetFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hostQueryReset ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceHostQueryResetFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hostQueryReset == rhs.hostQueryReset ); +# endif + } + + bool operator!=( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceHostQueryResetFeatures; + }; + + using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures; + + struct PhysicalDeviceIDProperties + { + using NativeType = VkPhysicalDeviceIDProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIdProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( std::array const & deviceUUID_ = {}, + std::array const & driverUUID_ = {}, + std::array const & deviceLUID_ = {}, + uint32_t deviceNodeMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceUUID{ deviceUUID_ } + , driverUUID{ driverUUID_ } + , deviceLUID{ deviceLUID_ } + , deviceNodeMask{ deviceNodeMask_ } + , deviceLUIDValid{ deviceLUIDValid_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceIDProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceIDProperties & operator=( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceIDProperties & operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceIDProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple 
const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceUUID, driverUUID, deviceLUID, deviceNodeMask, deviceLUIDValid ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceIDProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) && ( driverUUID == rhs.driverUUID ) && + ( deviceLUID == rhs.deviceLUID ) && ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid ); +# endif + } + + bool operator!=( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceLUID = {}; + uint32_t deviceNodeMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceIDProperties; + }; + + using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties; + + struct PhysicalDeviceImage2DViewOf3DFeaturesEXT + { + using NativeType = VkPhysicalDeviceImage2DViewOf3DFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImage2DViewOf3DFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , image2DViewOf3D{ image2DViewOf3D_ } + , sampler2DViewOf3D{ sampler2DViewOf3D_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImage2DViewOf3DFeaturesEXT( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImage2DViewOf3DFeaturesEXT( VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImage2DViewOf3DFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImage2DViewOf3DFeaturesEXT & operator=( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImage2DViewOf3DFeaturesEXT & operator=( VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & setImage2DViewOf3D( VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D_ ) VULKAN_HPP_NOEXCEPT + { + image2DViewOf3D = image2DViewOf3D_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceImage2DViewOf3DFeaturesEXT & + setSampler2DViewOf3D( VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D_ ) VULKAN_HPP_NOEXCEPT + { + sampler2DViewOf3D = sampler2DViewOf3D_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image2DViewOf3D, sampler2DViewOf3D ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image2DViewOf3D == rhs.image2DViewOf3D ) && ( sampler2DViewOf3D == rhs.sampler2DViewOf3D ); +# endif + } + + bool operator!=( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D = {}; + VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImage2DViewOf3DFeaturesEXT; + }; + + struct PhysicalDeviceImageAlignmentControlFeaturesMESA + { + using NativeType = VkPhysicalDeviceImageAlignmentControlFeaturesMESA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageAlignmentControlFeaturesMESA; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageAlignmentControlFeaturesMESA( VULKAN_HPP_NAMESPACE::Bool32 imageAlignmentControl_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageAlignmentControl{ imageAlignmentControl_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageAlignmentControlFeaturesMESA( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageAlignmentControlFeaturesMESA( VkPhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageAlignmentControlFeaturesMESA( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageAlignmentControlFeaturesMESA & operator=( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageAlignmentControlFeaturesMESA & operator=( VkPhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageAlignmentControlFeaturesMESA & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageAlignmentControlFeaturesMESA & + 
setImageAlignmentControl( VULKAN_HPP_NAMESPACE::Bool32 imageAlignmentControl_ ) VULKAN_HPP_NOEXCEPT + { + imageAlignmentControl = imageAlignmentControl_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImageAlignmentControlFeaturesMESA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageAlignmentControlFeaturesMESA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageAlignmentControl ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageAlignmentControlFeaturesMESA const & ) const = default; +#else + bool operator==( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageAlignmentControl == rhs.imageAlignmentControl ); +# endif + } + + bool operator!=( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageAlignmentControlFeaturesMESA; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageAlignmentControl = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageAlignmentControlFeaturesMESA; + }; + + struct PhysicalDeviceImageAlignmentControlPropertiesMESA + { + using NativeType = VkPhysicalDeviceImageAlignmentControlPropertiesMESA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageAlignmentControlPropertiesMESA; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageAlignmentControlPropertiesMESA( uint32_t supportedImageAlignmentMask_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , supportedImageAlignmentMask{ supportedImageAlignmentMask_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageAlignmentControlPropertiesMESA( PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageAlignmentControlPropertiesMESA( VkPhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageAlignmentControlPropertiesMESA( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageAlignmentControlPropertiesMESA & + operator=( PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageAlignmentControlPropertiesMESA & operator=( VkPhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageAlignmentControlPropertiesMESA & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageAlignmentControlPropertiesMESA & + setSupportedImageAlignmentMask( uint32_t 
supportedImageAlignmentMask_ ) VULKAN_HPP_NOEXCEPT + { + supportedImageAlignmentMask = supportedImageAlignmentMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImageAlignmentControlPropertiesMESA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageAlignmentControlPropertiesMESA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, supportedImageAlignmentMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageAlignmentControlPropertiesMESA const & ) const = default; +#else + bool operator==( PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedImageAlignmentMask == rhs.supportedImageAlignmentMask ); +# endif + } + + bool operator!=( PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageAlignmentControlPropertiesMESA; + void * pNext = {}; + uint32_t supportedImageAlignmentMask = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageAlignmentControlPropertiesMESA; + }; + + struct PhysicalDeviceImageCompressionControlFeaturesEXT + { + using NativeType = VkPhysicalDeviceImageCompressionControlFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControl_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageCompressionControl{ imageCompressionControl_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageCompressionControlFeaturesEXT( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageCompressionControlFeaturesEXT( VkPhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageCompressionControlFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageCompressionControlFeaturesEXT & operator=( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageCompressionControlFeaturesEXT & operator=( VkPhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlFeaturesEXT & + setImageCompressionControl( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControl_ ) 
VULKAN_HPP_NOEXCEPT + { + imageCompressionControl = imageCompressionControl_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImageCompressionControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageCompressionControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageCompressionControl ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageCompressionControlFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCompressionControl == rhs.imageCompressionControl ); +# endif + } + + bool operator!=( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControl = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageCompressionControlFeaturesEXT; + }; + + struct PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT + { + using NativeType = VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControlSwapchain_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageCompressionControlSwapchain{ imageCompressionControlSwapchain_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( + *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & + operator=( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & + operator=( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & + setImageCompressionControlSwapchain( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControlSwapchain_ ) VULKAN_HPP_NOEXCEPT + { + imageCompressionControlSwapchain = imageCompressionControlSwapchain_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageCompressionControlSwapchain ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCompressionControlSwapchain == rhs.imageCompressionControlSwapchain ); +# endif + } + + bool operator!=( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControlSwapchain = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; + }; + + struct PhysicalDeviceImageDrmFormatModifierInfoEXT + { + using NativeType = VkPhysicalDeviceImageDrmFormatModifierInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , drmFormatModifier{ drmFormatModifier_ } + , sharingMode{ sharingMode_ } + , queueFamilyIndexCount{ queueFamilyIndexCount_ } + , pQueueFamilyIndices{ pQueueFamilyIndices_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageDrmFormatModifierInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , drmFormatModifier( drmFormatModifier_ ) + 
, sharingMode( sharingMode_ ) + , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) + , pQueueFamilyIndices( queueFamilyIndices_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifier = drmFormatModifier_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT + { + sharingMode = sharingMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceImageDrmFormatModifierInfoEXT & + setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); + pQueueFamilyIndices = queueFamilyIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, drmFormatModifier, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) && ( sharingMode == rhs.sharingMode ) && + ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); +# endif + } + + bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; + const void * pNext = {}; + uint64_t drmFormatModifier = {}; + VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * pQueueFamilyIndices = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageDrmFormatModifierInfoEXT; + }; + + struct PhysicalDeviceImageFormatInfo2 + { + using NativeType = VkPhysicalDeviceImageFormatInfo2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageFormatInfo2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , format{ format_ } + , type{ type_ } + , tiling{ tiling_ } + , usage{ usage_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageFormatInfo2( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageFormatInfo2 & operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageFormatInfo2 & operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT + { + tiling = tiling_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const 
VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, format, type, tiling, usage, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageFormatInfo2 const & ) const = default; +#else + bool operator==( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) && ( tiling == rhs.tiling ) && + ( usage == rhs.usage ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; + VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageFormatInfo2; + }; + + using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2; + + struct PhysicalDeviceImageProcessing2FeaturesQCOM + { + using NativeType = VkPhysicalDeviceImageProcessing2FeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessing2FeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2FeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch2_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , textureBlockMatch2{ textureBlockMatch2_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2FeaturesQCOM( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageProcessing2FeaturesQCOM( VkPhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageProcessing2FeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageProcessing2FeaturesQCOM & operator=( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageProcessing2FeaturesQCOM & operator=( VkPhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessing2FeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessing2FeaturesQCOM & + setTextureBlockMatch2( VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch2_ ) VULKAN_HPP_NOEXCEPT + { + textureBlockMatch2 = textureBlockMatch2_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImageProcessing2FeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageProcessing2FeaturesQCOM &() 
VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImageProcessing2FeaturesQCOM *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, textureBlockMatch2 );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceImageProcessing2FeaturesQCOM const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( textureBlockMatch2 == rhs.textureBlockMatch2 );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageProcessing2FeaturesQCOM;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch2 = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceImageProcessing2FeaturesQCOM>
+  {
+    using Type = PhysicalDeviceImageProcessing2FeaturesQCOM;
+  };
+
+  struct PhysicalDeviceImageProcessing2PropertiesQCOM
+  {
+    using NativeType = VkPhysicalDeviceImageProcessing2PropertiesQCOM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessing2PropertiesQCOM;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2PropertiesQCOM( VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchWindow_ = {},
+                                                                       void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , maxBlockMatchWindow{ maxBlockMatchWindow_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2PropertiesQCOM( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceImageProcessing2PropertiesQCOM( VkPhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceImageProcessing2PropertiesQCOM( *reinterpret_cast<PhysicalDeviceImageProcessing2PropertiesQCOM const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceImageProcessing2PropertiesQCOM & operator=( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceImageProcessing2PropertiesQCOM & operator=( VkPhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2PropertiesQCOM const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceImageProcessing2PropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImageProcessing2PropertiesQCOM *>( this );
+    }
+
+    operator VkPhysicalDeviceImageProcessing2PropertiesQCOM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImageProcessing2PropertiesQCOM *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Extent2D const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxBlockMatchWindow );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceImageProcessing2PropertiesQCOM const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxBlockMatchWindow == rhs.maxBlockMatchWindow );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageProcessing2PropertiesQCOM;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchWindow = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceImageProcessing2PropertiesQCOM>
+  {
+    using Type = PhysicalDeviceImageProcessing2PropertiesQCOM;
+  };
+
+  struct PhysicalDeviceImageProcessingFeaturesQCOM
+  {
+    using NativeType = VkPhysicalDeviceImageProcessingFeaturesQCOM;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 textureSampleWeighted_ = {},
+                                                                    VULKAN_HPP_NAMESPACE::Bool32 textureBoxFilter_ = {},
+                                                                    VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch_ = {},
+                                                                    void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , textureSampleWeighted{ textureSampleWeighted_ }
+      , textureBoxFilter{ textureBoxFilter_ }
+      , textureBlockMatch{ textureBlockMatch_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingFeaturesQCOM( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceImageProcessingFeaturesQCOM( VkPhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceImageProcessingFeaturesQCOM( *reinterpret_cast<PhysicalDeviceImageProcessingFeaturesQCOM const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceImageProcessingFeaturesQCOM & operator=( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceImageProcessingFeaturesQCOM & operator=( VkPhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM &
+      setTextureSampleWeighted( VULKAN_HPP_NAMESPACE::Bool32 textureSampleWeighted_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureSampleWeighted = textureSampleWeighted_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM &
+      setTextureBoxFilter( VULKAN_HPP_NAMESPACE::Bool32 textureBoxFilter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureBoxFilter = textureBoxFilter_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM &
+      setTextureBlockMatch( VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch_ ) VULKAN_HPP_NOEXCEPT
+    {
+      textureBlockMatch = textureBlockMatch_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPhysicalDeviceImageProcessingFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImageProcessingFeaturesQCOM *>( this );
+    }
+
+    operator VkPhysicalDeviceImageProcessingFeaturesQCOM &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImageProcessingFeaturesQCOM *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, textureSampleWeighted,
textureBoxFilter, textureBlockMatch ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageProcessingFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( textureSampleWeighted == rhs.textureSampleWeighted ) && + ( textureBoxFilter == rhs.textureBoxFilter ) && ( textureBlockMatch == rhs.textureBlockMatch ); +# endif + } + + bool operator!=( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureSampleWeighted = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureBoxFilter = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageProcessingFeaturesQCOM; + }; + + struct PhysicalDeviceImageProcessingPropertiesQCOM + { + using NativeType = VkPhysicalDeviceImageProcessingPropertiesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingPropertiesQCOM( uint32_t maxWeightFilterPhases_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxWeightFilterDimension_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchRegion_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxBoxFilterBlockSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxWeightFilterPhases{ maxWeightFilterPhases_ } + , maxWeightFilterDimension{ maxWeightFilterDimension_ } + , maxBlockMatchRegion{ maxBlockMatchRegion_ } + , maxBoxFilterBlockSize{ maxBoxFilterBlockSize_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingPropertiesQCOM( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageProcessingPropertiesQCOM( VkPhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageProcessingPropertiesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageProcessingPropertiesQCOM & operator=( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageProcessingPropertiesQCOM & operator=( VkPhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceImageProcessingPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageProcessingPropertiesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxWeightFilterPhases, maxWeightFilterDimension, maxBlockMatchRegion, maxBoxFilterBlockSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto 
operator<=>( PhysicalDeviceImageProcessingPropertiesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxWeightFilterPhases == rhs.maxWeightFilterPhases ) && + ( maxWeightFilterDimension == rhs.maxWeightFilterDimension ) && ( maxBlockMatchRegion == rhs.maxBlockMatchRegion ) && + ( maxBoxFilterBlockSize == rhs.maxBoxFilterBlockSize ); +# endif + } + + bool operator!=( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM; + void * pNext = {}; + uint32_t maxWeightFilterPhases = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxWeightFilterDimension = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchRegion = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxBoxFilterBlockSize = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageProcessingPropertiesQCOM; + }; + + struct PhysicalDeviceImageRobustnessFeatures + { + using NativeType = VkPhysicalDeviceImageRobustnessFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageRobustnessFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , robustImageAccess{ robustImageAccess_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageRobustnessFeatures( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageRobustnessFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageRobustnessFeatures & operator=( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageRobustnessFeatures & operator=( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures & setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT + { + robustImageAccess = robustImageAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImageRobustnessFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageRobustnessFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, robustImageAccess ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageRobustnessFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceImageRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustImageAccess == rhs.robustImageAccess ); +# endif + } + + bool operator!=( PhysicalDeviceImageRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageRobustnessFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageRobustnessFeatures; + }; + + using PhysicalDeviceImageRobustnessFeaturesEXT = PhysicalDeviceImageRobustnessFeatures; + + struct PhysicalDeviceImageSlicedViewOf3DFeaturesEXT + { + using NativeType = VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 imageSlicedViewOf3D_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageSlicedViewOf3D{ imageSlicedViewOf3D_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & operator=( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & operator=( VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & + setImageSlicedViewOf3D( VULKAN_HPP_NAMESPACE::Bool32 imageSlicedViewOf3D_ ) VULKAN_HPP_NOEXCEPT + { + imageSlicedViewOf3D = imageSlicedViewOf3D_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageSlicedViewOf3D ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & ) const = default; 
+#else + bool operator==( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageSlicedViewOf3D == rhs.imageSlicedViewOf3D ); +# endif + } + + bool operator!=( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageSlicedViewOf3D = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageSlicedViewOf3DFeaturesEXT; + }; + + struct PhysicalDeviceImageViewImageFormatInfoEXT + { + using NativeType = VkPhysicalDeviceImageViewImageFormatInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageViewImageFormatInfoEXT( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageViewType{ imageViewType_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageViewImageFormatInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageViewImageFormatInfoEXT & operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageViewImageFormatInfoEXT & operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT & + setImageViewType( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ ) VULKAN_HPP_NOEXCEPT + { + imageViewType = imageViewType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImageViewImageFormatInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageViewType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageViewImageFormatInfoEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType 
) && ( pNext == rhs.pNext ) && ( imageViewType == rhs.imageViewType ); +# endif + } + + bool operator!=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageViewImageFormatInfoEXT; + }; + + struct PhysicalDeviceImageViewMinLodFeaturesEXT + { + using NativeType = VkPhysicalDeviceImageViewMinLodFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 minLod_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minLod{ minLod_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageViewMinLodFeaturesEXT( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageViewMinLodFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageViewMinLodFeaturesEXT & operator=( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImageViewMinLodFeaturesEXT & operator=( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT & setMinLod( VULKAN_HPP_NAMESPACE::Bool32 minLod_ ) VULKAN_HPP_NOEXCEPT + { + minLod = minLod_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImageViewMinLodFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageViewMinLodFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minLod ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageViewMinLodFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minLod == rhs.minLod ); +# endif + } + + bool operator!=( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT; + void * pNext = 
{}; + VULKAN_HPP_NAMESPACE::Bool32 minLod = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageViewMinLodFeaturesEXT; + }; + + struct PhysicalDeviceImagelessFramebufferFeatures + { + using NativeType = VkPhysicalDeviceImagelessFramebufferFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imagelessFramebuffer{ imagelessFramebuffer_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImagelessFramebufferFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImagelessFramebufferFeatures & operator=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceImagelessFramebufferFeatures & operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & + setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT + { + imagelessFramebuffer = imagelessFramebuffer_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceImagelessFramebufferFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImagelessFramebufferFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imagelessFramebuffer ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImagelessFramebufferFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imagelessFramebuffer == rhs.imagelessFramebuffer ); +# endif + } + + bool operator!=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImagelessFramebufferFeatures; + }; + + using PhysicalDeviceImagelessFramebufferFeaturesKHR = 
PhysicalDeviceImagelessFramebufferFeatures; + + struct PhysicalDeviceIndexTypeUint8Features + { + using NativeType = VkPhysicalDeviceIndexTypeUint8Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIndexTypeUint8Features; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8Features( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , indexTypeUint8{ indexTypeUint8_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8Features( PhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceIndexTypeUint8Features( VkPhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceIndexTypeUint8Features( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceIndexTypeUint8Features & operator=( PhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceIndexTypeUint8Features & operator=( VkPhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8Features & setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT + { + indexTypeUint8 = indexTypeUint8_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceIndexTypeUint8Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceIndexTypeUint8Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, indexTypeUint8 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceIndexTypeUint8Features const & ) const = default; +#else + bool operator==( PhysicalDeviceIndexTypeUint8Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indexTypeUint8 == rhs.indexTypeUint8 ); +# endif + } + + bool operator!=( PhysicalDeviceIndexTypeUint8Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceIndexTypeUint8Features; + }; + + using PhysicalDeviceIndexTypeUint8FeaturesEXT = PhysicalDeviceIndexTypeUint8Features; + using PhysicalDeviceIndexTypeUint8FeaturesKHR = PhysicalDeviceIndexTypeUint8Features; + + struct PhysicalDeviceInheritedViewportScissorFeaturesNV + { + using NativeType = VkPhysicalDeviceInheritedViewportScissorFeaturesNV; + + static const bool allowDuplicate = false; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , inheritedViewportScissor2D{ inheritedViewportScissor2D_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceInheritedViewportScissorFeaturesNV( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInheritedViewportScissorFeaturesNV( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceInheritedViewportScissorFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceInheritedViewportScissorFeaturesNV & operator=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceInheritedViewportScissorFeaturesNV & operator=( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV & + setInheritedViewportScissor2D( VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ ) VULKAN_HPP_NOEXCEPT + { + inheritedViewportScissor2D = inheritedViewportScissor2D_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, inheritedViewportScissor2D ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceInheritedViewportScissorFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inheritedViewportScissor2D == rhs.inheritedViewportScissor2D ); +# endif + } + + bool operator!=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceInheritedViewportScissorFeaturesNV; + }; + + struct PhysicalDeviceInlineUniformBlockFeatures + { + using NativeType = VkPhysicalDeviceInlineUniformBlockFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , inlineUniformBlock{ inlineUniformBlock_ } + , descriptorBindingInlineUniformBlockUpdateAfterBind{ descriptorBindingInlineUniformBlockUpdateAfterBind_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInlineUniformBlockFeatures( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceInlineUniformBlockFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceInlineUniformBlockFeatures & operator=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceInlineUniformBlockFeatures & operator=( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & + setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT + { + inlineUniformBlock = inlineUniformBlock_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setDescriptorBindingInlineUniformBlockUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + { + descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceInlineUniformBlockFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceInlineUniformBlockFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, inlineUniformBlock, descriptorBindingInlineUniformBlockUpdateAfterBind ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceInlineUniformBlockFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inlineUniformBlock == rhs.inlineUniformBlock ) && + ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind ); +# endif + } + + bool operator!=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType 
sType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceInlineUniformBlockFeatures; + }; + + using PhysicalDeviceInlineUniformBlockFeaturesEXT = PhysicalDeviceInlineUniformBlockFeatures; + + struct PhysicalDeviceInlineUniformBlockProperties + { + using NativeType = VkPhysicalDeviceInlineUniformBlockProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties( uint32_t maxInlineUniformBlockSize_ = {}, + uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, + uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxInlineUniformBlockSize{ maxInlineUniformBlockSize_ } + , maxPerStageDescriptorInlineUniformBlocks{ maxPerStageDescriptorInlineUniformBlocks_ } + , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks{ maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ } + , maxDescriptorSetInlineUniformBlocks{ maxDescriptorSetInlineUniformBlocks_ } + , maxDescriptorSetUpdateAfterBindInlineUniformBlocks{ maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties( PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInlineUniformBlockProperties( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceInlineUniformBlockProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceInlineUniformBlockProperties & operator=( PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceInlineUniformBlockProperties & operator=( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceInlineUniformBlockProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceInlineUniformBlockProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxInlineUniformBlockSize, + maxPerStageDescriptorInlineUniformBlocks, + maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks, + maxDescriptorSetInlineUniformBlocks, + maxDescriptorSetUpdateAfterBindInlineUniformBlocks ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceInlineUniformBlockProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceInlineUniformBlockProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) 
&& ( pNext == rhs.pNext ) && ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) && + ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) && + ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) && + ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) && + ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ); +# endif + } + + bool operator!=( PhysicalDeviceInlineUniformBlockProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockProperties; + void * pNext = {}; + uint32_t maxInlineUniformBlockSize = {}; + uint32_t maxPerStageDescriptorInlineUniformBlocks = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {}; + uint32_t maxDescriptorSetInlineUniformBlocks = {}; + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceInlineUniformBlockProperties; + }; + + using PhysicalDeviceInlineUniformBlockPropertiesEXT = PhysicalDeviceInlineUniformBlockProperties; + + struct PhysicalDeviceInvocationMaskFeaturesHUAWEI + { + using NativeType = VkPhysicalDeviceInvocationMaskFeaturesHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 invocationMask_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , invocationMask{ invocationMask_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceInvocationMaskFeaturesHUAWEI( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceInvocationMaskFeaturesHUAWEI( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceInvocationMaskFeaturesHUAWEI & operator=( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceInvocationMaskFeaturesHUAWEI & operator=( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI & setInvocationMask( VULKAN_HPP_NAMESPACE::Bool32 invocationMask_ ) VULKAN_HPP_NOEXCEPT + { + invocationMask = invocationMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto 
+# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, invocationMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & ) const = default; +#else + bool operator==( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( invocationMask == rhs.invocationMask ); +# endif + } + + bool operator!=( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 invocationMask = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceInvocationMaskFeaturesHUAWEI; + }; + + struct PhysicalDeviceLayeredApiPropertiesKHR + { + using NativeType = VkPhysicalDeviceLayeredApiPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLayeredApiPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesKHR( + uint32_t vendorID_ = {}, + uint32_t deviceID_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiKHR layeredAPI_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiKHR::eVulkan, + std::array const & deviceName_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , vendorID{ vendorID_ } + , deviceID{ deviceID_ } + , layeredAPI{ layeredAPI_ } + , deviceName{ deviceName_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesKHR( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLayeredApiPropertiesKHR( VkPhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLayeredApiPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLayeredApiPropertiesKHR & operator=( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceLayeredApiPropertiesKHR & operator=( VkPhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceLayeredApiPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLayeredApiPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, vendorID, deviceID, layeredAPI, deviceName ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLayeredApiPropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vendorID == rhs.vendorID ) && ( 
deviceID == rhs.deviceID ) && + ( layeredAPI == rhs.layeredAPI ) && ( deviceName == rhs.deviceName ); +# endif + } + + bool operator!=( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLayeredApiPropertiesKHR; + void * pNext = {}; + uint32_t vendorID = {}; + uint32_t deviceID = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiKHR layeredAPI = VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiKHR::eVulkan; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceName = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLayeredApiPropertiesKHR; + }; + + struct PhysicalDeviceLayeredApiPropertiesListKHR + { + using NativeType = VkPhysicalDeviceLayeredApiPropertiesListKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLayeredApiPropertiesListKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR( uint32_t layeredApiCount_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR * pLayeredApis_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , layeredApiCount{ layeredApiCount_ } + , pLayeredApis{ pLayeredApis_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLayeredApiPropertiesListKHR( VkPhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLayeredApiPropertiesListKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceLayeredApiPropertiesListKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & layeredApis_, void * pNext_ = nullptr ) + : pNext( pNext_ ), layeredApiCount( static_cast( layeredApis_.size() ) ), pLayeredApis( layeredApis_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PhysicalDeviceLayeredApiPropertiesListKHR & operator=( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceLayeredApiPropertiesListKHR & operator=( VkPhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR & setLayeredApiCount( uint32_t layeredApiCount_ ) VULKAN_HPP_NOEXCEPT + { + layeredApiCount = layeredApiCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR & + setPLayeredApis( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR * pLayeredApis_ ) VULKAN_HPP_NOEXCEPT + { + pLayeredApis = pLayeredApis_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceLayeredApiPropertiesListKHR & setLayeredApis( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & layeredApis_ ) VULKAN_HPP_NOEXCEPT + { + layeredApiCount = static_cast( layeredApis_.size() ); + pLayeredApis = 
layeredApis_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceLayeredApiPropertiesListKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLayeredApiPropertiesListKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, layeredApiCount, pLayeredApis ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLayeredApiPropertiesListKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( layeredApiCount == rhs.layeredApiCount ) && ( pLayeredApis == rhs.pLayeredApis ); +# endif + } + + bool operator!=( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLayeredApiPropertiesListKHR; + void * pNext = {}; + uint32_t layeredApiCount = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR * pLayeredApis = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLayeredApiPropertiesListKHR; + }; + + struct PhysicalDeviceLimits + { + using NativeType = VkPhysicalDeviceLimits; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( uint32_t maxImageDimension1D_ = {}, + uint32_t maxImageDimension2D_ = {}, + uint32_t maxImageDimension3D_ = {}, + uint32_t maxImageDimensionCube_ = {}, + uint32_t maxImageArrayLayers_ = {}, + uint32_t maxTexelBufferElements_ = {}, + uint32_t maxUniformBufferRange_ = {}, + uint32_t maxStorageBufferRange_ = {}, + uint32_t maxPushConstantsSize_ = {}, + uint32_t maxMemoryAllocationCount_ = {}, + uint32_t maxSamplerAllocationCount_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {}, + uint32_t maxBoundDescriptorSets_ = {}, + uint32_t maxPerStageDescriptorSamplers_ = {}, + uint32_t maxPerStageDescriptorUniformBuffers_ = {}, + uint32_t maxPerStageDescriptorStorageBuffers_ = {}, + uint32_t maxPerStageDescriptorSampledImages_ = {}, + uint32_t maxPerStageDescriptorStorageImages_ = {}, + uint32_t maxPerStageDescriptorInputAttachments_ = {}, + uint32_t maxPerStageResources_ = {}, + uint32_t maxDescriptorSetSamplers_ = {}, + uint32_t maxDescriptorSetUniformBuffers_ = {}, + uint32_t maxDescriptorSetUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetStorageBuffers_ = {}, + uint32_t maxDescriptorSetStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetSampledImages_ = {}, + uint32_t maxDescriptorSetStorageImages_ = {}, + uint32_t maxDescriptorSetInputAttachments_ = {}, + uint32_t maxVertexInputAttributes_ = {}, + uint32_t maxVertexInputBindings_ = {}, + uint32_t maxVertexInputAttributeOffset_ = {}, + uint32_t maxVertexInputBindingStride_ = {}, + uint32_t maxVertexOutputComponents_ = {}, + uint32_t maxTessellationGenerationLevel_ = {}, + uint32_t maxTessellationPatchSize_ = {}, + uint32_t 
maxTessellationControlPerVertexInputComponents_ = {}, + uint32_t maxTessellationControlPerVertexOutputComponents_ = {}, + uint32_t maxTessellationControlPerPatchOutputComponents_ = {}, + uint32_t maxTessellationControlTotalOutputComponents_ = {}, + uint32_t maxTessellationEvaluationInputComponents_ = {}, + uint32_t maxTessellationEvaluationOutputComponents_ = {}, + uint32_t maxGeometryShaderInvocations_ = {}, + uint32_t maxGeometryInputComponents_ = {}, + uint32_t maxGeometryOutputComponents_ = {}, + uint32_t maxGeometryOutputVertices_ = {}, + uint32_t maxGeometryTotalOutputComponents_ = {}, + uint32_t maxFragmentInputComponents_ = {}, + uint32_t maxFragmentOutputAttachments_ = {}, + uint32_t maxFragmentDualSrcAttachments_ = {}, + uint32_t maxFragmentCombinedOutputResources_ = {}, + uint32_t maxComputeSharedMemorySize_ = {}, + std::array const & maxComputeWorkGroupCount_ = {}, + uint32_t maxComputeWorkGroupInvocations_ = {}, + std::array const & maxComputeWorkGroupSize_ = {}, + uint32_t subPixelPrecisionBits_ = {}, + uint32_t subTexelPrecisionBits_ = {}, + uint32_t mipmapPrecisionBits_ = {}, + uint32_t maxDrawIndexedIndexValue_ = {}, + uint32_t maxDrawIndirectCount_ = {}, + float maxSamplerLodBias_ = {}, + float maxSamplerAnisotropy_ = {}, + uint32_t maxViewports_ = {}, + std::array const & maxViewportDimensions_ = {}, + std::array const & viewportBoundsRange_ = {}, + uint32_t viewportSubPixelBits_ = {}, + size_t minMemoryMapAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {}, + int32_t minTexelOffset_ = {}, + uint32_t maxTexelOffset_ = {}, + int32_t minTexelGatherOffset_ = {}, + uint32_t maxTexelGatherOffset_ = {}, + float minInterpolationOffset_ = {}, + float maxInterpolationOffset_ = {}, + uint32_t subPixelInterpolationOffsetBits_ = {}, + uint32_t maxFramebufferWidth_ = {}, + uint32_t maxFramebufferHeight_ = {}, + uint32_t maxFramebufferLayers_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {}, + uint32_t maxColorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {}, + uint32_t maxSampleMaskWords_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {}, + float timestampPeriod_ = {}, + uint32_t maxClipDistances_ = {}, + uint32_t maxCullDistances_ = {}, + uint32_t maxCombinedClipAndCullDistances_ = {}, + uint32_t discreteQueuePriorities_ = {}, + std::array const & pointSizeRange_ = {}, + std::array const & lineWidthRange_ = {}, + float pointSizeGranularity_ = {}, + float lineWidthGranularity_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {}, + 
VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {} ) VULKAN_HPP_NOEXCEPT + : maxImageDimension1D{ maxImageDimension1D_ } + , maxImageDimension2D{ maxImageDimension2D_ } + , maxImageDimension3D{ maxImageDimension3D_ } + , maxImageDimensionCube{ maxImageDimensionCube_ } + , maxImageArrayLayers{ maxImageArrayLayers_ } + , maxTexelBufferElements{ maxTexelBufferElements_ } + , maxUniformBufferRange{ maxUniformBufferRange_ } + , maxStorageBufferRange{ maxStorageBufferRange_ } + , maxPushConstantsSize{ maxPushConstantsSize_ } + , maxMemoryAllocationCount{ maxMemoryAllocationCount_ } + , maxSamplerAllocationCount{ maxSamplerAllocationCount_ } + , bufferImageGranularity{ bufferImageGranularity_ } + , sparseAddressSpaceSize{ sparseAddressSpaceSize_ } + , maxBoundDescriptorSets{ maxBoundDescriptorSets_ } + , maxPerStageDescriptorSamplers{ maxPerStageDescriptorSamplers_ } + , maxPerStageDescriptorUniformBuffers{ maxPerStageDescriptorUniformBuffers_ } + , maxPerStageDescriptorStorageBuffers{ maxPerStageDescriptorStorageBuffers_ } + , maxPerStageDescriptorSampledImages{ maxPerStageDescriptorSampledImages_ } + , maxPerStageDescriptorStorageImages{ maxPerStageDescriptorStorageImages_ } + , maxPerStageDescriptorInputAttachments{ maxPerStageDescriptorInputAttachments_ } + , maxPerStageResources{ maxPerStageResources_ } + , maxDescriptorSetSamplers{ maxDescriptorSetSamplers_ } + , maxDescriptorSetUniformBuffers{ maxDescriptorSetUniformBuffers_ } + , maxDescriptorSetUniformBuffersDynamic{ maxDescriptorSetUniformBuffersDynamic_ } + , maxDescriptorSetStorageBuffers{ maxDescriptorSetStorageBuffers_ } + , maxDescriptorSetStorageBuffersDynamic{ maxDescriptorSetStorageBuffersDynamic_ } + , maxDescriptorSetSampledImages{ maxDescriptorSetSampledImages_ } + , maxDescriptorSetStorageImages{ maxDescriptorSetStorageImages_ } + , maxDescriptorSetInputAttachments{ maxDescriptorSetInputAttachments_ } + , maxVertexInputAttributes{ maxVertexInputAttributes_ } + , maxVertexInputBindings{ maxVertexInputBindings_ } + , maxVertexInputAttributeOffset{ maxVertexInputAttributeOffset_ } + , maxVertexInputBindingStride{ maxVertexInputBindingStride_ } + , maxVertexOutputComponents{ maxVertexOutputComponents_ } + , maxTessellationGenerationLevel{ maxTessellationGenerationLevel_ } + , maxTessellationPatchSize{ maxTessellationPatchSize_ } + , maxTessellationControlPerVertexInputComponents{ maxTessellationControlPerVertexInputComponents_ } + , maxTessellationControlPerVertexOutputComponents{ maxTessellationControlPerVertexOutputComponents_ } + , maxTessellationControlPerPatchOutputComponents{ maxTessellationControlPerPatchOutputComponents_ } + , maxTessellationControlTotalOutputComponents{ maxTessellationControlTotalOutputComponents_ } + , maxTessellationEvaluationInputComponents{ maxTessellationEvaluationInputComponents_ } + , maxTessellationEvaluationOutputComponents{ maxTessellationEvaluationOutputComponents_ } + , maxGeometryShaderInvocations{ maxGeometryShaderInvocations_ } + , maxGeometryInputComponents{ maxGeometryInputComponents_ } + , maxGeometryOutputComponents{ maxGeometryOutputComponents_ } + , maxGeometryOutputVertices{ maxGeometryOutputVertices_ } + , maxGeometryTotalOutputComponents{ maxGeometryTotalOutputComponents_ } + , maxFragmentInputComponents{ maxFragmentInputComponents_ } + , maxFragmentOutputAttachments{ maxFragmentOutputAttachments_ } + , maxFragmentDualSrcAttachments{ maxFragmentDualSrcAttachments_ } + , maxFragmentCombinedOutputResources{ maxFragmentCombinedOutputResources_ } + , 
maxComputeSharedMemorySize{ maxComputeSharedMemorySize_ } + , maxComputeWorkGroupCount{ maxComputeWorkGroupCount_ } + , maxComputeWorkGroupInvocations{ maxComputeWorkGroupInvocations_ } + , maxComputeWorkGroupSize{ maxComputeWorkGroupSize_ } + , subPixelPrecisionBits{ subPixelPrecisionBits_ } + , subTexelPrecisionBits{ subTexelPrecisionBits_ } + , mipmapPrecisionBits{ mipmapPrecisionBits_ } + , maxDrawIndexedIndexValue{ maxDrawIndexedIndexValue_ } + , maxDrawIndirectCount{ maxDrawIndirectCount_ } + , maxSamplerLodBias{ maxSamplerLodBias_ } + , maxSamplerAnisotropy{ maxSamplerAnisotropy_ } + , maxViewports{ maxViewports_ } + , maxViewportDimensions{ maxViewportDimensions_ } + , viewportBoundsRange{ viewportBoundsRange_ } + , viewportSubPixelBits{ viewportSubPixelBits_ } + , minMemoryMapAlignment{ minMemoryMapAlignment_ } + , minTexelBufferOffsetAlignment{ minTexelBufferOffsetAlignment_ } + , minUniformBufferOffsetAlignment{ minUniformBufferOffsetAlignment_ } + , minStorageBufferOffsetAlignment{ minStorageBufferOffsetAlignment_ } + , minTexelOffset{ minTexelOffset_ } + , maxTexelOffset{ maxTexelOffset_ } + , minTexelGatherOffset{ minTexelGatherOffset_ } + , maxTexelGatherOffset{ maxTexelGatherOffset_ } + , minInterpolationOffset{ minInterpolationOffset_ } + , maxInterpolationOffset{ maxInterpolationOffset_ } + , subPixelInterpolationOffsetBits{ subPixelInterpolationOffsetBits_ } + , maxFramebufferWidth{ maxFramebufferWidth_ } + , maxFramebufferHeight{ maxFramebufferHeight_ } + , maxFramebufferLayers{ maxFramebufferLayers_ } + , framebufferColorSampleCounts{ framebufferColorSampleCounts_ } + , framebufferDepthSampleCounts{ framebufferDepthSampleCounts_ } + , framebufferStencilSampleCounts{ framebufferStencilSampleCounts_ } + , framebufferNoAttachmentsSampleCounts{ framebufferNoAttachmentsSampleCounts_ } + , maxColorAttachments{ maxColorAttachments_ } + , sampledImageColorSampleCounts{ sampledImageColorSampleCounts_ } + , sampledImageIntegerSampleCounts{ sampledImageIntegerSampleCounts_ } + , sampledImageDepthSampleCounts{ sampledImageDepthSampleCounts_ } + , sampledImageStencilSampleCounts{ sampledImageStencilSampleCounts_ } + , storageImageSampleCounts{ storageImageSampleCounts_ } + , maxSampleMaskWords{ maxSampleMaskWords_ } + , timestampComputeAndGraphics{ timestampComputeAndGraphics_ } + , timestampPeriod{ timestampPeriod_ } + , maxClipDistances{ maxClipDistances_ } + , maxCullDistances{ maxCullDistances_ } + , maxCombinedClipAndCullDistances{ maxCombinedClipAndCullDistances_ } + , discreteQueuePriorities{ discreteQueuePriorities_ } + , pointSizeRange{ pointSizeRange_ } + , lineWidthRange{ lineWidthRange_ } + , pointSizeGranularity{ pointSizeGranularity_ } + , lineWidthGranularity{ lineWidthGranularity_ } + , strictLines{ strictLines_ } + , standardSampleLocations{ standardSampleLocations_ } + , optimalBufferCopyOffsetAlignment{ optimalBufferCopyOffsetAlignment_ } + , optimalBufferCopyRowPitchAlignment{ optimalBufferCopyRowPitchAlignment_ } + , nonCoherentAtomSize{ nonCoherentAtomSize_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLimits( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLimits & operator=( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceLimits & operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT + { 
+ *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceLimits const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + float const &, + float const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + size_t const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + int32_t const &, + uint32_t const &, + int32_t const &, + uint32_t const &, + float const &, + float const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + float const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + float const &, + float const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( maxImageDimension1D, + maxImageDimension2D, + maxImageDimension3D, + maxImageDimensionCube, + maxImageArrayLayers, + maxTexelBufferElements, + maxUniformBufferRange, + maxStorageBufferRange, + maxPushConstantsSize, + maxMemoryAllocationCount, + maxSamplerAllocationCount, + bufferImageGranularity, + sparseAddressSpaceSize, + maxBoundDescriptorSets, + maxPerStageDescriptorSamplers, + maxPerStageDescriptorUniformBuffers, + maxPerStageDescriptorStorageBuffers, + maxPerStageDescriptorSampledImages, + maxPerStageDescriptorStorageImages, + maxPerStageDescriptorInputAttachments, + maxPerStageResources, + maxDescriptorSetSamplers, + maxDescriptorSetUniformBuffers, + maxDescriptorSetUniformBuffersDynamic, + maxDescriptorSetStorageBuffers, + maxDescriptorSetStorageBuffersDynamic, + maxDescriptorSetSampledImages, + maxDescriptorSetStorageImages, + maxDescriptorSetInputAttachments, + maxVertexInputAttributes, + maxVertexInputBindings, + maxVertexInputAttributeOffset, + maxVertexInputBindingStride, + maxVertexOutputComponents, + maxTessellationGenerationLevel, + maxTessellationPatchSize, + maxTessellationControlPerVertexInputComponents, + maxTessellationControlPerVertexOutputComponents, + maxTessellationControlPerPatchOutputComponents, + maxTessellationControlTotalOutputComponents, + maxTessellationEvaluationInputComponents, + maxTessellationEvaluationOutputComponents, + maxGeometryShaderInvocations, + maxGeometryInputComponents, + 
maxGeometryOutputComponents, + maxGeometryOutputVertices, + maxGeometryTotalOutputComponents, + maxFragmentInputComponents, + maxFragmentOutputAttachments, + maxFragmentDualSrcAttachments, + maxFragmentCombinedOutputResources, + maxComputeSharedMemorySize, + maxComputeWorkGroupCount, + maxComputeWorkGroupInvocations, + maxComputeWorkGroupSize, + subPixelPrecisionBits, + subTexelPrecisionBits, + mipmapPrecisionBits, + maxDrawIndexedIndexValue, + maxDrawIndirectCount, + maxSamplerLodBias, + maxSamplerAnisotropy, + maxViewports, + maxViewportDimensions, + viewportBoundsRange, + viewportSubPixelBits, + minMemoryMapAlignment, + minTexelBufferOffsetAlignment, + minUniformBufferOffsetAlignment, + minStorageBufferOffsetAlignment, + minTexelOffset, + maxTexelOffset, + minTexelGatherOffset, + maxTexelGatherOffset, + minInterpolationOffset, + maxInterpolationOffset, + subPixelInterpolationOffsetBits, + maxFramebufferWidth, + maxFramebufferHeight, + maxFramebufferLayers, + framebufferColorSampleCounts, + framebufferDepthSampleCounts, + framebufferStencilSampleCounts, + framebufferNoAttachmentsSampleCounts, + maxColorAttachments, + sampledImageColorSampleCounts, + sampledImageIntegerSampleCounts, + sampledImageDepthSampleCounts, + sampledImageStencilSampleCounts, + storageImageSampleCounts, + maxSampleMaskWords, + timestampComputeAndGraphics, + timestampPeriod, + maxClipDistances, + maxCullDistances, + maxCombinedClipAndCullDistances, + discreteQueuePriorities, + pointSizeRange, + lineWidthRange, + pointSizeGranularity, + lineWidthGranularity, + strictLines, + standardSampleLocations, + optimalBufferCopyOffsetAlignment, + optimalBufferCopyRowPitchAlignment, + nonCoherentAtomSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLimits const & ) const = default; +#else + bool operator==( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( maxImageDimension1D == rhs.maxImageDimension1D ) && ( maxImageDimension2D == rhs.maxImageDimension2D ) && + ( maxImageDimension3D == rhs.maxImageDimension3D ) && ( maxImageDimensionCube == rhs.maxImageDimensionCube ) && + ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && ( maxTexelBufferElements == rhs.maxTexelBufferElements ) && + ( maxUniformBufferRange == rhs.maxUniformBufferRange ) && ( maxStorageBufferRange == rhs.maxStorageBufferRange ) && + ( maxPushConstantsSize == rhs.maxPushConstantsSize ) && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount ) && + ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount ) && ( bufferImageGranularity == rhs.bufferImageGranularity ) && + ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize ) && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets ) && + ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers ) && + ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers ) && + ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers ) && + ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages ) && + ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages ) && + ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments ) && ( maxPerStageResources == rhs.maxPerStageResources ) && + ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers ) && ( maxDescriptorSetUniformBuffers == 
rhs.maxDescriptorSetUniformBuffers ) && + ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic ) && + ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers ) && + ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic ) && + ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages ) && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages ) && + ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments ) && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes ) && + ( maxVertexInputBindings == rhs.maxVertexInputBindings ) && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset ) && + ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride ) && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents ) && + ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel ) && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize ) && + ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents ) && + ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents ) && + ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents ) && + ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents ) && + ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents ) && + ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents ) && + ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations ) && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents ) && + ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents ) && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices ) && + ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents ) && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents ) && + ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments ) && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments ) && + ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources ) && + ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize ) && ( maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount ) && + ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations ) && ( maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize ) && + ( subPixelPrecisionBits == rhs.subPixelPrecisionBits ) && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits ) && + ( mipmapPrecisionBits == rhs.mipmapPrecisionBits ) && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue ) && + ( maxDrawIndirectCount == rhs.maxDrawIndirectCount ) && ( maxSamplerLodBias == rhs.maxSamplerLodBias ) && + ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy ) && ( maxViewports == rhs.maxViewports ) && + ( maxViewportDimensions == rhs.maxViewportDimensions ) && ( viewportBoundsRange == rhs.viewportBoundsRange ) && + ( viewportSubPixelBits == rhs.viewportSubPixelBits ) && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment ) && + ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment ) && + ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment ) && + ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment ) && ( minTexelOffset == rhs.minTexelOffset ) && + ( 
maxTexelOffset == rhs.maxTexelOffset ) && ( minTexelGatherOffset == rhs.minTexelGatherOffset ) && + ( maxTexelGatherOffset == rhs.maxTexelGatherOffset ) && ( minInterpolationOffset == rhs.minInterpolationOffset ) && + ( maxInterpolationOffset == rhs.maxInterpolationOffset ) && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits ) && + ( maxFramebufferWidth == rhs.maxFramebufferWidth ) && ( maxFramebufferHeight == rhs.maxFramebufferHeight ) && + ( maxFramebufferLayers == rhs.maxFramebufferLayers ) && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts ) && + ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts ) && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts ) && + ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts ) && ( maxColorAttachments == rhs.maxColorAttachments ) && + ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts ) && + ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts ) && + ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts ) && + ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts ) && ( storageImageSampleCounts == rhs.storageImageSampleCounts ) && + ( maxSampleMaskWords == rhs.maxSampleMaskWords ) && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics ) && + ( timestampPeriod == rhs.timestampPeriod ) && ( maxClipDistances == rhs.maxClipDistances ) && ( maxCullDistances == rhs.maxCullDistances ) && + ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances ) && ( discreteQueuePriorities == rhs.discreteQueuePriorities ) && + ( pointSizeRange == rhs.pointSizeRange ) && ( lineWidthRange == rhs.lineWidthRange ) && ( pointSizeGranularity == rhs.pointSizeGranularity ) && + ( lineWidthGranularity == rhs.lineWidthGranularity ) && ( strictLines == rhs.strictLines ) && + ( standardSampleLocations == rhs.standardSampleLocations ) && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment ) && + ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment ) && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize ); +# endif + } + + bool operator!=( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t maxImageDimension1D = {}; + uint32_t maxImageDimension2D = {}; + uint32_t maxImageDimension3D = {}; + uint32_t maxImageDimensionCube = {}; + uint32_t maxImageArrayLayers = {}; + uint32_t maxTexelBufferElements = {}; + uint32_t maxUniformBufferRange = {}; + uint32_t maxStorageBufferRange = {}; + uint32_t maxPushConstantsSize = {}; + uint32_t maxMemoryAllocationCount = {}; + uint32_t maxSamplerAllocationCount = {}; + VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize = {}; + uint32_t maxBoundDescriptorSets = {}; uint32_t maxPerStageDescriptorSamplers = {}; uint32_t maxPerStageDescriptorUniformBuffers = {}; uint32_t maxPerStageDescriptorStorageBuffers = {}; @@ -55666,103 +80212,15733 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {}; }; - struct PhysicalDeviceLineRasterizationFeaturesEXT + struct PhysicalDeviceSparseProperties + { + using NativeType = VkPhysicalDeviceSparseProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( VULKAN_HPP_NAMESPACE::Bool32 
residencyStandard2DBlockShape_ = {},
+                                                          VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {},
+                                                          VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {},
+                                                          VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {},
+                                                          VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {} ) VULKAN_HPP_NOEXCEPT
+      : residencyStandard2DBlockShape{ residencyStandard2DBlockShape_ }
+      , residencyStandard2DMultisampleBlockShape{ residencyStandard2DMultisampleBlockShape_ }
+      , residencyStandard3DBlockShape{ residencyStandard3DBlockShape_ }
+      , residencyAlignedMipSize{ residencyAlignedMipSize_ }
+      , residencyNonResidentStrict{ residencyNonResidentStrict_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSparseProperties( *reinterpret_cast<PhysicalDeviceSparseProperties const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceSparseProperties & operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceSparseProperties & operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceSparseProperties const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSparseProperties *>( this );
+    }
+
+    operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSparseProperties *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &,
+               VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( residencyStandard2DBlockShape,
+                       residencyStandard2DMultisampleBlockShape,
+                       residencyStandard3DBlockShape,
+                       residencyAlignedMipSize,
+                       residencyNonResidentStrict );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceSparseProperties const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape ) &&
+             ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape ) &&
+             ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape ) && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize ) &&
+             ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {};
+  };
+
+  struct PhysicalDeviceProperties
+  {
+    using NativeType = VkPhysicalDeviceProperties;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( uint32_t apiVersion_ = {},
+                                                      uint32_t driverVersion_ = {},
+                                                      uint32_t vendorID_ = {},
+                                                      uint32_t deviceID_ = {},
+                                                      VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ =
VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther, + std::array const & deviceName_ = {}, + std::array const & pipelineCacheUUID_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : apiVersion{ apiVersion_ } + , driverVersion{ driverVersion_ } + , vendorID{ vendorID_ } + , deviceID{ deviceID_ } + , deviceType{ deviceType_ } + , deviceName{ deviceName_ } + , pipelineCacheUUID{ pipelineCacheUUID_ } + , limits{ limits_ } + , sparseProperties{ sparseProperties_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceProperties & operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceProperties & operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const &, + VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( apiVersion, driverVersion, vendorID, deviceID, deviceType, deviceName, pipelineCacheUUID, limits, sparseProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::partial_ordering operator<=>( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = apiVersion <=> rhs.apiVersion; cmp != 0 ) + return cmp; + if ( auto cmp = driverVersion <=> rhs.driverVersion; cmp != 0 ) + return cmp; + if ( auto cmp = vendorID <=> rhs.vendorID; cmp != 0 ) + return cmp; + if ( auto cmp = deviceID <=> rhs.deviceID; cmp != 0 ) + return cmp; + if ( auto cmp = deviceType <=> rhs.deviceType; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( deviceName, rhs.deviceName ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::partial_ordering::less : std::partial_ordering::greater; + if ( auto cmp = pipelineCacheUUID <=> rhs.pipelineCacheUUID; cmp != 0 ) + return cmp; + if ( auto cmp = limits <=> rhs.limits; cmp != 0 ) + return cmp; + if ( auto cmp = sparseProperties <=> rhs.sparseProperties; cmp != 0 ) + return cmp; + + return std::partial_ordering::equivalent; + } +#endif + + bool operator==( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( apiVersion == rhs.apiVersion ) && ( driverVersion == rhs.driverVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && + ( deviceType == rhs.deviceType ) && ( strcmp( deviceName, rhs.deviceName ) == 0 ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) && + ( limits == rhs.limits ) && ( sparseProperties == rhs.sparseProperties ); + } + + bool operator!=( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t apiVersion = {}; + uint32_t driverVersion = {}; + uint32_t vendorID = {}; + uint32_t deviceID = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D pipelineCacheUUID = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {}; + }; + + struct PhysicalDeviceProperties2 + { + using NativeType = VkPhysicalDeviceProperties2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProperties2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , properties{ properties_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProperties2( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceProperties2 & operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceProperties2 & operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceProperties2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, properties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProperties2 const & ) const = default; +#else + bool operator==( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties ); +# endif + } + + bool operator!=( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT 
+ { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceProperties2; + }; + + using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2; + + struct PhysicalDeviceLayeredApiVulkanPropertiesKHR + { + using NativeType = VkPhysicalDeviceLayeredApiVulkanPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLayeredApiVulkanPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiVulkanPropertiesKHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , properties{ properties_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceLayeredApiVulkanPropertiesKHR( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLayeredApiVulkanPropertiesKHR( VkPhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLayeredApiVulkanPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLayeredApiVulkanPropertiesKHR & operator=( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceLayeredApiVulkanPropertiesKHR & operator=( VkPhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceLayeredApiVulkanPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLayeredApiVulkanPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, properties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties ); +# endif + } + + bool operator!=( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLayeredApiVulkanPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLayeredApiVulkanPropertiesKHR; + }; + + struct PhysicalDeviceLayeredDriverPropertiesMSFT + { + using NativeType = VkPhysicalDeviceLayeredDriverPropertiesMSFT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLayeredDriverPropertiesMSFT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && 
!defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLayeredDriverPropertiesMSFT( + VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT underlyingAPI_ = VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT::eNone, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , underlyingAPI{ underlyingAPI_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceLayeredDriverPropertiesMSFT( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLayeredDriverPropertiesMSFT( VkPhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLayeredDriverPropertiesMSFT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLayeredDriverPropertiesMSFT & operator=( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceLayeredDriverPropertiesMSFT & operator=( VkPhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceLayeredDriverPropertiesMSFT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLayeredDriverPropertiesMSFT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, underlyingAPI ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLayeredDriverPropertiesMSFT const & ) const = default; +#else + bool operator==( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( underlyingAPI == rhs.underlyingAPI ); +# endif + } + + bool operator!=( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLayeredDriverPropertiesMSFT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT underlyingAPI = VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT::eNone; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLayeredDriverPropertiesMSFT; + }; + + struct PhysicalDeviceLegacyDitheringFeaturesEXT + { + using NativeType = VkPhysicalDeviceLegacyDitheringFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyDitheringFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 legacyDithering_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , legacyDithering{ legacyDithering_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyDitheringFeaturesEXT( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLegacyDitheringFeaturesEXT( VkPhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLegacyDitheringFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + 
PhysicalDeviceLegacyDitheringFeaturesEXT & operator=( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceLegacyDitheringFeaturesEXT & operator=( VkPhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT & setLegacyDithering( VULKAN_HPP_NAMESPACE::Bool32 legacyDithering_ ) VULKAN_HPP_NOEXCEPT + { + legacyDithering = legacyDithering_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceLegacyDitheringFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLegacyDitheringFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, legacyDithering ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLegacyDitheringFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( legacyDithering == rhs.legacyDithering ); +# endif + } + + bool operator!=( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 legacyDithering = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLegacyDitheringFeaturesEXT; + }; + + struct PhysicalDeviceLegacyVertexAttributesFeaturesEXT + { + using NativeType = VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLegacyVertexAttributesFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyVertexAttributesFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 legacyVertexAttributes_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , legacyVertexAttributes{ legacyVertexAttributes_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceLegacyVertexAttributesFeaturesEXT( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLegacyVertexAttributesFeaturesEXT( VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLegacyVertexAttributesFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLegacyVertexAttributesFeaturesEXT & operator=( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + 
PhysicalDeviceLegacyVertexAttributesFeaturesEXT & operator=( VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyVertexAttributesFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyVertexAttributesFeaturesEXT & + setLegacyVertexAttributes( VULKAN_HPP_NAMESPACE::Bool32 legacyVertexAttributes_ ) VULKAN_HPP_NOEXCEPT + { + legacyVertexAttributes = legacyVertexAttributes_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, legacyVertexAttributes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( legacyVertexAttributes == rhs.legacyVertexAttributes ); +# endif + } + + bool operator!=( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLegacyVertexAttributesFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 legacyVertexAttributes = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLegacyVertexAttributesFeaturesEXT; + }; + + struct PhysicalDeviceLegacyVertexAttributesPropertiesEXT + { + using NativeType = VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLegacyVertexAttributesPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyVertexAttributesPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 nativeUnalignedPerformance_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , nativeUnalignedPerformance{ nativeUnalignedPerformance_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceLegacyVertexAttributesPropertiesEXT( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLegacyVertexAttributesPropertiesEXT( VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLegacyVertexAttributesPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLegacyVertexAttributesPropertiesEXT & + operator=( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + 
PhysicalDeviceLegacyVertexAttributesPropertiesEXT & operator=( VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyVertexAttributesPropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyVertexAttributesPropertiesEXT & + setNativeUnalignedPerformance( VULKAN_HPP_NAMESPACE::Bool32 nativeUnalignedPerformance_ ) VULKAN_HPP_NOEXCEPT + { + nativeUnalignedPerformance = nativeUnalignedPerformance_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, nativeUnalignedPerformance ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( nativeUnalignedPerformance == rhs.nativeUnalignedPerformance ); +# endif + } + + bool operator!=( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLegacyVertexAttributesPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 nativeUnalignedPerformance = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLegacyVertexAttributesPropertiesEXT; + }; + + struct PhysicalDeviceLineRasterizationFeatures + { + using NativeType = VkPhysicalDeviceLineRasterizationFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeatures( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , rectangularLines{ rectangularLines_ } + , bresenhamLines{ bresenhamLines_ } + , smoothLines{ smoothLines_ } + , stippledRectangularLines{ stippledRectangularLines_ } + , stippledBresenhamLines{ stippledBresenhamLines_ } + , stippledSmoothLines{ stippledSmoothLines_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeatures( PhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
PhysicalDeviceLineRasterizationFeatures( VkPhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLineRasterizationFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLineRasterizationFeatures & operator=( PhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceLineRasterizationFeatures & operator=( VkPhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT + { + rectangularLines = rectangularLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT + { + bresenhamLines = bresenhamLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT + { + smoothLines = smoothLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & + setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT + { + stippledRectangularLines = stippledRectangularLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & + setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT + { + stippledBresenhamLines = stippledBresenhamLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & + setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT + { + stippledSmoothLines = stippledSmoothLines_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceLineRasterizationFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLineRasterizationFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rectangularLines, bresenhamLines, smoothLines, stippledRectangularLines, stippledBresenhamLines, stippledSmoothLines ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLineRasterizationFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceLineRasterizationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rectangularLines == rhs.rectangularLines ) && ( bresenhamLines == rhs.bresenhamLines ) && + ( smoothLines == rhs.smoothLines ) && ( stippledRectangularLines == rhs.stippledRectangularLines ) && + ( stippledBresenhamLines == rhs.stippledBresenhamLines ) && ( stippledSmoothLines == rhs.stippledSmoothLines ); +# endif + } + + bool 
operator!=( PhysicalDeviceLineRasterizationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLineRasterizationFeatures; + }; + + using PhysicalDeviceLineRasterizationFeaturesEXT = PhysicalDeviceLineRasterizationFeatures; + using PhysicalDeviceLineRasterizationFeaturesKHR = PhysicalDeviceLineRasterizationFeatures; + + struct PhysicalDeviceLineRasterizationProperties + { + using NativeType = VkPhysicalDeviceLineRasterizationProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationProperties( uint32_t lineSubPixelPrecisionBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , lineSubPixelPrecisionBits{ lineSubPixelPrecisionBits_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationProperties( PhysicalDeviceLineRasterizationProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLineRasterizationProperties( VkPhysicalDeviceLineRasterizationProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLineRasterizationProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLineRasterizationProperties & operator=( PhysicalDeviceLineRasterizationProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceLineRasterizationProperties & operator=( VkPhysicalDeviceLineRasterizationProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceLineRasterizationProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLineRasterizationProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, lineSubPixelPrecisionBits ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLineRasterizationProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceLineRasterizationProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits ); +# endif + } + + bool operator!=( PhysicalDeviceLineRasterizationProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationProperties; + void * pNext = {}; + uint32_t 
lineSubPixelPrecisionBits = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLineRasterizationProperties; + }; + + using PhysicalDeviceLineRasterizationPropertiesEXT = PhysicalDeviceLineRasterizationProperties; + using PhysicalDeviceLineRasterizationPropertiesKHR = PhysicalDeviceLineRasterizationProperties; + + struct PhysicalDeviceLinearColorAttachmentFeaturesNV + { + using NativeType = VkPhysicalDeviceLinearColorAttachmentFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLinearColorAttachmentFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , linearColorAttachment{ linearColorAttachment_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceLinearColorAttachmentFeaturesNV( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLinearColorAttachmentFeaturesNV( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLinearColorAttachmentFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLinearColorAttachmentFeaturesNV & operator=( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceLinearColorAttachmentFeaturesNV & operator=( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV & + setLinearColorAttachment( VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment_ ) VULKAN_HPP_NOEXCEPT + { + linearColorAttachment = linearColorAttachment_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, linearColorAttachment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLinearColorAttachmentFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( linearColorAttachment == rhs.linearColorAttachment ); +# endif + } + + bool operator!=( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLinearColorAttachmentFeaturesNV; + }; + + struct PhysicalDeviceMaintenance3Properties + { + using NativeType = VkPhysicalDeviceMaintenance3Properties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance3Properties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( uint32_t maxPerSetDescriptors_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxPerSetDescriptors{ maxPerSetDescriptors_ } + , maxMemoryAllocationSize{ maxMemoryAllocationSize_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance3Properties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance3Properties & operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance3Properties & operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMaintenance3Properties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxPerSetDescriptors, maxMemoryAllocationSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance3Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) && + ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties; + void * pNext = {}; + uint32_t maxPerSetDescriptors = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance3Properties; + }; + + using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties; + + struct PhysicalDeviceMaintenance4Features + { + using NativeType = VkPhysicalDeviceMaintenance4Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance4Features; + +#if 
!defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maintenance4{ maintenance4_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features( PhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance4Features( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance4Features( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance4Features & operator=( PhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance4Features & operator=( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features & setMaintenance4( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT + { + maintenance4 = maintenance4_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMaintenance4Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance4Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maintenance4 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance4Features const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance4Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance4 == rhs.maintenance4 ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance4Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance4Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 maintenance4 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance4Features; + }; + + using PhysicalDeviceMaintenance4FeaturesKHR = PhysicalDeviceMaintenance4Features; + + struct PhysicalDeviceMaintenance4Properties + { + using NativeType = VkPhysicalDeviceMaintenance4Properties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance4Properties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties( VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxBufferSize{ maxBufferSize_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties( 
PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance4Properties( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance4Properties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance4Properties & operator=( PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance4Properties & operator=( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMaintenance4Properties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance4Properties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxBufferSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance4Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance4Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxBufferSize == rhs.maxBufferSize ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance4Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance4Properties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance4Properties; + }; + + using PhysicalDeviceMaintenance4PropertiesKHR = PhysicalDeviceMaintenance4Properties; + + struct PhysicalDeviceMaintenance5Features + { + using NativeType = VkPhysicalDeviceMaintenance5Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance5Features; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Features( VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maintenance5{ maintenance5_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Features( PhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance5Features( VkPhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance5Features( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance5Features & operator=( PhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance5Features & operator=( VkPhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance5Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = 
pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance5Features & setMaintenance5( VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ ) VULKAN_HPP_NOEXCEPT + { + maintenance5 = maintenance5_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMaintenance5Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance5Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maintenance5 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance5Features const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance5Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance5 == rhs.maintenance5 ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance5Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance5Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 maintenance5 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance5Features; + }; + + using PhysicalDeviceMaintenance5FeaturesKHR = PhysicalDeviceMaintenance5Features; + + struct PhysicalDeviceMaintenance5Properties + { + using NativeType = VkPhysicalDeviceMaintenance5Properties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance5Properties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Properties( VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthStencilSwizzleOneSupport_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 polygonModePointSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nonStrictSinglePixelWideLinesUseParallelogram_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nonStrictWideLinesUseParallelogram_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , earlyFragmentMultisampleCoverageAfterSampleCounting{ earlyFragmentMultisampleCoverageAfterSampleCounting_ } + , earlyFragmentSampleMaskTestBeforeSampleCounting{ earlyFragmentSampleMaskTestBeforeSampleCounting_ } + , depthStencilSwizzleOneSupport{ depthStencilSwizzleOneSupport_ } + , polygonModePointSize{ polygonModePointSize_ } + , nonStrictSinglePixelWideLinesUseParallelogram{ nonStrictSinglePixelWideLinesUseParallelogram_ } + , nonStrictWideLinesUseParallelogram{ nonStrictWideLinesUseParallelogram_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Properties( PhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance5Properties( VkPhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance5Properties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance5Properties & operator=( 
PhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance5Properties & operator=( VkPhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMaintenance5Properties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance5Properties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + earlyFragmentMultisampleCoverageAfterSampleCounting, + earlyFragmentSampleMaskTestBeforeSampleCounting, + depthStencilSwizzleOneSupport, + polygonModePointSize, + nonStrictSinglePixelWideLinesUseParallelogram, + nonStrictWideLinesUseParallelogram ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance5Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance5Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( earlyFragmentMultisampleCoverageAfterSampleCounting == rhs.earlyFragmentMultisampleCoverageAfterSampleCounting ) && + ( earlyFragmentSampleMaskTestBeforeSampleCounting == rhs.earlyFragmentSampleMaskTestBeforeSampleCounting ) && + ( depthStencilSwizzleOneSupport == rhs.depthStencilSwizzleOneSupport ) && ( polygonModePointSize == rhs.polygonModePointSize ) && + ( nonStrictSinglePixelWideLinesUseParallelogram == rhs.nonStrictSinglePixelWideLinesUseParallelogram ) && + ( nonStrictWideLinesUseParallelogram == rhs.nonStrictWideLinesUseParallelogram ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance5Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance5Properties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting = {}; + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthStencilSwizzleOneSupport = {}; + VULKAN_HPP_NAMESPACE::Bool32 polygonModePointSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 nonStrictSinglePixelWideLinesUseParallelogram = {}; + VULKAN_HPP_NAMESPACE::Bool32 nonStrictWideLinesUseParallelogram = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance5Properties; + }; + + using PhysicalDeviceMaintenance5PropertiesKHR = PhysicalDeviceMaintenance5Properties; + + struct PhysicalDeviceMaintenance6Features + { + using NativeType = VkPhysicalDeviceMaintenance6Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance6Features; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Features( VULKAN_HPP_NAMESPACE::Bool32 maintenance6_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maintenance6{ maintenance6_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Features( 
PhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance6Features( VkPhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance6Features( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance6Features & operator=( PhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance6Features & operator=( VkPhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance6Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance6Features & setMaintenance6( VULKAN_HPP_NAMESPACE::Bool32 maintenance6_ ) VULKAN_HPP_NOEXCEPT + { + maintenance6 = maintenance6_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMaintenance6Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance6Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maintenance6 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance6Features const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance6Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance6 == rhs.maintenance6 ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance6Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance6Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 maintenance6 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance6Features; + }; + + using PhysicalDeviceMaintenance6FeaturesKHR = PhysicalDeviceMaintenance6Features; + + struct PhysicalDeviceMaintenance6Properties + { + using NativeType = VkPhysicalDeviceMaintenance6Properties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance6Properties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Properties( VULKAN_HPP_NAMESPACE::Bool32 blockTexelViewCompatibleMultipleLayers_ = {}, + uint32_t maxCombinedImageSamplerDescriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateClampCombinerInputs_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , blockTexelViewCompatibleMultipleLayers{ blockTexelViewCompatibleMultipleLayers_ } + , maxCombinedImageSamplerDescriptorCount{ maxCombinedImageSamplerDescriptorCount_ } + , fragmentShadingRateClampCombinerInputs{ fragmentShadingRateClampCombinerInputs_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Properties( 
PhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance6Properties( VkPhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance6Properties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance6Properties & operator=( PhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance6Properties & operator=( VkPhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMaintenance6Properties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance6Properties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, blockTexelViewCompatibleMultipleLayers, maxCombinedImageSamplerDescriptorCount, fragmentShadingRateClampCombinerInputs ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance6Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance6Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( blockTexelViewCompatibleMultipleLayers == rhs.blockTexelViewCompatibleMultipleLayers ) && + ( maxCombinedImageSamplerDescriptorCount == rhs.maxCombinedImageSamplerDescriptorCount ) && + ( fragmentShadingRateClampCombinerInputs == rhs.fragmentShadingRateClampCombinerInputs ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance6Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance6Properties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 blockTexelViewCompatibleMultipleLayers = {}; + uint32_t maxCombinedImageSamplerDescriptorCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateClampCombinerInputs = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance6Properties; + }; + + using PhysicalDeviceMaintenance6PropertiesKHR = PhysicalDeviceMaintenance6Properties; + + struct PhysicalDeviceMaintenance7FeaturesKHR + { + using NativeType = VkPhysicalDeviceMaintenance7FeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance7FeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 maintenance7_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maintenance7{ maintenance7_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7FeaturesKHR( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance7FeaturesKHR( VkPhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance7FeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + 
PhysicalDeviceMaintenance7FeaturesKHR & operator=( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance7FeaturesKHR & operator=( VkPhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance7FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance7FeaturesKHR & setMaintenance7( VULKAN_HPP_NAMESPACE::Bool32 maintenance7_ ) VULKAN_HPP_NOEXCEPT + { + maintenance7 = maintenance7_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMaintenance7FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance7FeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maintenance7 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance7FeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance7 == rhs.maintenance7 ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance7FeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 maintenance7 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance7FeaturesKHR; + }; + + struct PhysicalDeviceMaintenance7PropertiesKHR + { + using NativeType = VkPhysicalDeviceMaintenance7PropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance7PropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7PropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 robustFragmentShadingRateAttachmentAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilAttachmentAccess_ = {}, + uint32_t maxDescriptorSetTotalUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetTotalStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetTotalBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindTotalBuffersDynamic_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , robustFragmentShadingRateAttachmentAccess{ robustFragmentShadingRateAttachmentAccess_ } + , separateDepthStencilAttachmentAccess{ separateDepthStencilAttachmentAccess_ } + , maxDescriptorSetTotalUniformBuffersDynamic{ maxDescriptorSetTotalUniformBuffersDynamic_ } + , 
maxDescriptorSetTotalStorageBuffersDynamic{ maxDescriptorSetTotalStorageBuffersDynamic_ } + , maxDescriptorSetTotalBuffersDynamic{ maxDescriptorSetTotalBuffersDynamic_ } + , maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic{ maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic_ } + , maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic{ maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic_ } + , maxDescriptorSetUpdateAfterBindTotalBuffersDynamic{ maxDescriptorSetUpdateAfterBindTotalBuffersDynamic_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7PropertiesKHR( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance7PropertiesKHR( VkPhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance7PropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance7PropertiesKHR & operator=( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance7PropertiesKHR & operator=( VkPhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMaintenance7PropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance7PropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + robustFragmentShadingRateAttachmentAccess, + separateDepthStencilAttachmentAccess, + maxDescriptorSetTotalUniformBuffersDynamic, + maxDescriptorSetTotalStorageBuffersDynamic, + maxDescriptorSetTotalBuffersDynamic, + maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic, + maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic, + maxDescriptorSetUpdateAfterBindTotalBuffersDynamic ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance7PropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( robustFragmentShadingRateAttachmentAccess == rhs.robustFragmentShadingRateAttachmentAccess ) && + ( separateDepthStencilAttachmentAccess == rhs.separateDepthStencilAttachmentAccess ) && + ( maxDescriptorSetTotalUniformBuffersDynamic == rhs.maxDescriptorSetTotalUniformBuffersDynamic ) && + ( maxDescriptorSetTotalStorageBuffersDynamic == rhs.maxDescriptorSetTotalStorageBuffersDynamic ) && + ( maxDescriptorSetTotalBuffersDynamic == rhs.maxDescriptorSetTotalBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindTotalBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindTotalBuffersDynamic ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance7PropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustFragmentShadingRateAttachmentAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilAttachmentAccess = {}; + uint32_t maxDescriptorSetTotalUniformBuffersDynamic = {}; + uint32_t maxDescriptorSetTotalStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetTotalBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindTotalBuffersDynamic = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance7PropertiesKHR; + }; + + struct PhysicalDeviceMaintenance8FeaturesKHR + { + using NativeType = VkPhysicalDeviceMaintenance8FeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance8FeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance8FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 maintenance8_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maintenance8{ maintenance8_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance8FeaturesKHR( PhysicalDeviceMaintenance8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance8FeaturesKHR( VkPhysicalDeviceMaintenance8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance8FeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance8FeaturesKHR & operator=( PhysicalDeviceMaintenance8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance8FeaturesKHR & operator=( VkPhysicalDeviceMaintenance8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance8FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance8FeaturesKHR & setMaintenance8( VULKAN_HPP_NAMESPACE::Bool32 maintenance8_ ) VULKAN_HPP_NOEXCEPT + { + maintenance8 = maintenance8_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMaintenance8FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance8FeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maintenance8 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance8FeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance8FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance8 == rhs.maintenance8 ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance8FeaturesKHR const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance8FeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 maintenance8 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance8FeaturesKHR; + }; + + struct PhysicalDeviceMapMemoryPlacedFeaturesEXT + { + using NativeType = VkPhysicalDeviceMapMemoryPlacedFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMapMemoryPlacedFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 memoryMapPlaced_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 memoryMapRangePlaced_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 memoryUnmapReserve_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryMapPlaced{ memoryMapPlaced_ } + , memoryMapRangePlaced{ memoryMapRangePlaced_ } + , memoryUnmapReserve{ memoryUnmapReserve_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedFeaturesEXT( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMapMemoryPlacedFeaturesEXT( VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMapMemoryPlacedFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMapMemoryPlacedFeaturesEXT & operator=( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMapMemoryPlacedFeaturesEXT & operator=( VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & setMemoryMapPlaced( VULKAN_HPP_NAMESPACE::Bool32 memoryMapPlaced_ ) VULKAN_HPP_NOEXCEPT + { + memoryMapPlaced = memoryMapPlaced_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & + setMemoryMapRangePlaced( VULKAN_HPP_NAMESPACE::Bool32 memoryMapRangePlaced_ ) VULKAN_HPP_NOEXCEPT + { + memoryMapRangePlaced = memoryMapRangePlaced_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & + setMemoryUnmapReserve( VULKAN_HPP_NAMESPACE::Bool32 memoryUnmapReserve_ ) VULKAN_HPP_NOEXCEPT + { + memoryUnmapReserve = memoryUnmapReserve_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMapMemoryPlacedFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryMapPlaced, memoryMapRangePlaced, memoryUnmapReserve ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & ) const = 
default; +#else + bool operator==( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryMapPlaced == rhs.memoryMapPlaced ) && + ( memoryMapRangePlaced == rhs.memoryMapRangePlaced ) && ( memoryUnmapReserve == rhs.memoryUnmapReserve ); +# endif + } + + bool operator!=( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMapMemoryPlacedFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 memoryMapPlaced = {}; + VULKAN_HPP_NAMESPACE::Bool32 memoryMapRangePlaced = {}; + VULKAN_HPP_NAMESPACE::Bool32 memoryUnmapReserve = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMapMemoryPlacedFeaturesEXT; + }; + + struct PhysicalDeviceMapMemoryPlacedPropertiesEXT + { + using NativeType = VkPhysicalDeviceMapMemoryPlacedPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMapMemoryPlacedPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize minPlacedMemoryMapAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minPlacedMemoryMapAlignment{ minPlacedMemoryMapAlignment_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedPropertiesEXT( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMapMemoryPlacedPropertiesEXT( VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMapMemoryPlacedPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMapMemoryPlacedPropertiesEXT & operator=( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMapMemoryPlacedPropertiesEXT & operator=( VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMapMemoryPlacedPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minPlacedMemoryMapAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minPlacedMemoryMapAlignment == rhs.minPlacedMemoryMapAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + 
public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMapMemoryPlacedPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minPlacedMemoryMapAlignment = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMapMemoryPlacedPropertiesEXT; + }; + + struct PhysicalDeviceMemoryBudgetPropertiesEXT + { + using NativeType = VkPhysicalDeviceMemoryBudgetPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( std::array const & heapBudget_ = {}, + std::array const & heapUsage_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , heapBudget{ heapBudget_ } + , heapUsage{ heapUsage_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryBudgetPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, heapBudget, heapUsage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryBudgetPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( heapBudget == rhs.heapBudget ) && ( heapUsage == rhs.heapUsage ); +# endif + } + + bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D heapBudget = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D heapUsage = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMemoryBudgetPropertiesEXT; + }; + + struct PhysicalDeviceMemoryDecompressionFeaturesNV + { + using NativeType = VkPhysicalDeviceMemoryDecompressionFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryDecompressionFeaturesNV; + +#if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 memoryDecompression_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryDecompression{ memoryDecompression_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionFeaturesNV( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryDecompressionFeaturesNV( VkPhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryDecompressionFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMemoryDecompressionFeaturesNV & operator=( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMemoryDecompressionFeaturesNV & operator=( VkPhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryDecompressionFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryDecompressionFeaturesNV & + setMemoryDecompression( VULKAN_HPP_NAMESPACE::Bool32 memoryDecompression_ ) VULKAN_HPP_NOEXCEPT + { + memoryDecompression = memoryDecompression_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMemoryDecompressionFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryDecompressionFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryDecompression ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryDecompressionFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryDecompression == rhs.memoryDecompression ); +# endif + } + + bool operator!=( PhysicalDeviceMemoryDecompressionFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryDecompressionFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 memoryDecompression = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMemoryDecompressionFeaturesNV; + }; + + struct PhysicalDeviceMemoryDecompressionPropertiesNV + { + using NativeType = VkPhysicalDeviceMemoryDecompressionPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryDecompressionPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionPropertiesNV( 
VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethods_ = {}, + uint64_t maxDecompressionIndirectCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , decompressionMethods{ decompressionMethods_ } + , maxDecompressionIndirectCount{ maxDecompressionIndirectCount_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMemoryDecompressionPropertiesNV( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryDecompressionPropertiesNV( VkPhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryDecompressionPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMemoryDecompressionPropertiesNV & operator=( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMemoryDecompressionPropertiesNV & operator=( VkPhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMemoryDecompressionPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryDecompressionPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, decompressionMethods, maxDecompressionIndirectCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryDecompressionPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decompressionMethods == rhs.decompressionMethods ) && + ( maxDecompressionIndirectCount == rhs.maxDecompressionIndirectCount ); +# endif + } + + bool operator!=( PhysicalDeviceMemoryDecompressionPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryDecompressionPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethods = {}; + uint64_t maxDecompressionIndirectCount = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMemoryDecompressionPropertiesNV; + }; + + struct PhysicalDeviceMemoryPriorityFeaturesEXT + { + using NativeType = VkPhysicalDeviceMemoryPriorityFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryPriority{ memoryPriority_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryPriorityFeaturesEXT( 
VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryPriorityFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT & setMemoryPriority( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ ) VULKAN_HPP_NOEXCEPT + { + memoryPriority = memoryPriority_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryPriority ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryPriorityFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryPriority == rhs.memoryPriority ); +# endif + } + + bool operator!=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 memoryPriority = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMemoryPriorityFeaturesEXT; + }; + + struct PhysicalDeviceMemoryProperties + { + using NativeType = VkPhysicalDeviceMemoryProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceMemoryProperties( uint32_t memoryTypeCount_ = {}, + std::array const & memoryTypes_ = {}, + uint32_t memoryHeapCount_ = {}, + std::array const & memoryHeaps_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryTypeCount{ memoryTypeCount_ } + , memoryTypes{ memoryTypes_ } + , memoryHeapCount{ memoryHeapCount_ } + , memoryHeaps{ memoryHeaps_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMemoryProperties & operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMemoryProperties & operator=( VkPhysicalDeviceMemoryProperties const & rhs ) 
VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMemoryProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( memoryTypeCount, memoryTypes, memoryHeapCount, memoryHeaps ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = memoryTypeCount <=> rhs.memoryTypeCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < memoryTypeCount; ++i ) + { + if ( auto cmp = memoryTypes[i] <=> rhs.memoryTypes[i]; cmp != 0 ) + return cmp; + } + if ( auto cmp = memoryHeapCount <=> rhs.memoryHeapCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < memoryHeapCount; ++i ) + { + if ( auto cmp = memoryHeaps[i] <=> rhs.memoryHeaps[i]; cmp != 0 ) + return cmp; + } + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( memoryTypeCount == rhs.memoryTypeCount ) && + ( memcmp( memoryTypes, rhs.memoryTypes, memoryTypeCount * sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) ) == 0 ) && + ( memoryHeapCount == rhs.memoryHeapCount ) && + ( memcmp( memoryHeaps, rhs.memoryHeaps, memoryHeapCount * sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) ) == 0 ); + } + + bool operator!=( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t memoryTypeCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D memoryTypes = {}; + uint32_t memoryHeapCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D memoryHeaps = {}; + }; + + struct PhysicalDeviceMemoryProperties2 + { + using NativeType = VkPhysicalDeviceMemoryProperties2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryProperties2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryProperties{ memoryProperties_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMemoryProperties2( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMemoryProperties2 & operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMemoryProperties2 & operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMemoryProperties2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, memoryProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMemoryProperties2 const & ) const = default; +#else + bool operator==( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryProperties == rhs.memoryProperties ); +# endif + } + + bool operator!=( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMemoryProperties2; + }; + + using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2; + + struct PhysicalDeviceMeshShaderFeaturesEXT + { + using NativeType = VkPhysicalDeviceMeshShaderFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewMeshShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateMeshShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 meshShaderQueries_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , taskShader{ taskShader_ } + , meshShader{ meshShader_ } + , multiviewMeshShader{ multiviewMeshShader_ } + , primitiveFragmentShadingRateMeshShader{ primitiveFragmentShadingRateMeshShader_ } + , meshShaderQueries{ meshShaderQueries_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesEXT( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMeshShaderFeaturesEXT( VkPhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMeshShaderFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMeshShaderFeaturesEXT & operator=( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMeshShaderFeaturesEXT & operator=( VkPhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT + { + taskShader = taskShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT + { + meshShader = meshShader_; + 
return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & + setMultiviewMeshShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewMeshShader_ ) VULKAN_HPP_NOEXCEPT + { + multiviewMeshShader = multiviewMeshShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & + setPrimitiveFragmentShadingRateMeshShader( VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateMeshShader_ ) VULKAN_HPP_NOEXCEPT + { + primitiveFragmentShadingRateMeshShader = primitiveFragmentShadingRateMeshShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setMeshShaderQueries( VULKAN_HPP_NAMESPACE::Bool32 meshShaderQueries_ ) VULKAN_HPP_NOEXCEPT + { + meshShaderQueries = meshShaderQueries_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMeshShaderFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMeshShaderFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, taskShader, meshShader, multiviewMeshShader, primitiveFragmentShadingRateMeshShader, meshShaderQueries ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMeshShaderFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( taskShader == rhs.taskShader ) && ( meshShader == rhs.meshShader ) && + ( multiviewMeshShader == rhs.multiviewMeshShader ) && ( primitiveFragmentShadingRateMeshShader == rhs.primitiveFragmentShadingRateMeshShader ) && + ( meshShaderQueries == rhs.meshShaderQueries ); +# endif + } + + bool operator!=( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 taskShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 meshShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewMeshShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateMeshShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 meshShaderQueries = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMeshShaderFeaturesEXT; + }; + + struct PhysicalDeviceMeshShaderFeaturesNV + { + using NativeType = VkPhysicalDeviceMeshShaderFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , taskShader{ taskShader_ } + , meshShader{ meshShader_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMeshShaderFeaturesNV( 
VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMeshShaderFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMeshShaderFeaturesNV & operator=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMeshShaderFeaturesNV & operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT + { + taskShader = taskShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT + { + meshShader = meshShader_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMeshShaderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMeshShaderFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, taskShader, meshShader ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMeshShaderFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( taskShader == rhs.taskShader ) && ( meshShader == rhs.meshShader ); +# endif + } + + bool operator!=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 taskShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 meshShader = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMeshShaderFeaturesNV; + }; + + struct PhysicalDeviceMeshShaderPropertiesEXT + { + using NativeType = VkPhysicalDeviceMeshShaderPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesEXT( uint32_t maxTaskWorkGroupTotalCount_ = {}, + std::array const & maxTaskWorkGroupCount_ = {}, + uint32_t maxTaskWorkGroupInvocations_ = {}, + std::array const & maxTaskWorkGroupSize_ = {}, + uint32_t maxTaskPayloadSize_ = {}, + uint32_t maxTaskSharedMemorySize_ = {}, + uint32_t maxTaskPayloadAndSharedMemorySize_ = {}, + uint32_t maxMeshWorkGroupTotalCount_ = {}, + std::array const & maxMeshWorkGroupCount_ = {}, + uint32_t maxMeshWorkGroupInvocations_ = {}, + std::array const & maxMeshWorkGroupSize_ = {}, + uint32_t 
maxMeshSharedMemorySize_ = {}, + uint32_t maxMeshPayloadAndSharedMemorySize_ = {}, + uint32_t maxMeshOutputMemorySize_ = {}, + uint32_t maxMeshPayloadAndOutputMemorySize_ = {}, + uint32_t maxMeshOutputComponents_ = {}, + uint32_t maxMeshOutputVertices_ = {}, + uint32_t maxMeshOutputPrimitives_ = {}, + uint32_t maxMeshOutputLayers_ = {}, + uint32_t maxMeshMultiviewViewCount_ = {}, + uint32_t meshOutputPerVertexGranularity_ = {}, + uint32_t meshOutputPerPrimitiveGranularity_ = {}, + uint32_t maxPreferredTaskWorkGroupInvocations_ = {}, + uint32_t maxPreferredMeshWorkGroupInvocations_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationVertexOutput_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationPrimitiveOutput_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 prefersCompactVertexOutput_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 prefersCompactPrimitiveOutput_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxTaskWorkGroupTotalCount{ maxTaskWorkGroupTotalCount_ } + , maxTaskWorkGroupCount{ maxTaskWorkGroupCount_ } + , maxTaskWorkGroupInvocations{ maxTaskWorkGroupInvocations_ } + , maxTaskWorkGroupSize{ maxTaskWorkGroupSize_ } + , maxTaskPayloadSize{ maxTaskPayloadSize_ } + , maxTaskSharedMemorySize{ maxTaskSharedMemorySize_ } + , maxTaskPayloadAndSharedMemorySize{ maxTaskPayloadAndSharedMemorySize_ } + , maxMeshWorkGroupTotalCount{ maxMeshWorkGroupTotalCount_ } + , maxMeshWorkGroupCount{ maxMeshWorkGroupCount_ } + , maxMeshWorkGroupInvocations{ maxMeshWorkGroupInvocations_ } + , maxMeshWorkGroupSize{ maxMeshWorkGroupSize_ } + , maxMeshSharedMemorySize{ maxMeshSharedMemorySize_ } + , maxMeshPayloadAndSharedMemorySize{ maxMeshPayloadAndSharedMemorySize_ } + , maxMeshOutputMemorySize{ maxMeshOutputMemorySize_ } + , maxMeshPayloadAndOutputMemorySize{ maxMeshPayloadAndOutputMemorySize_ } + , maxMeshOutputComponents{ maxMeshOutputComponents_ } + , maxMeshOutputVertices{ maxMeshOutputVertices_ } + , maxMeshOutputPrimitives{ maxMeshOutputPrimitives_ } + , maxMeshOutputLayers{ maxMeshOutputLayers_ } + , maxMeshMultiviewViewCount{ maxMeshMultiviewViewCount_ } + , meshOutputPerVertexGranularity{ meshOutputPerVertexGranularity_ } + , meshOutputPerPrimitiveGranularity{ meshOutputPerPrimitiveGranularity_ } + , maxPreferredTaskWorkGroupInvocations{ maxPreferredTaskWorkGroupInvocations_ } + , maxPreferredMeshWorkGroupInvocations{ maxPreferredMeshWorkGroupInvocations_ } + , prefersLocalInvocationVertexOutput{ prefersLocalInvocationVertexOutput_ } + , prefersLocalInvocationPrimitiveOutput{ prefersLocalInvocationPrimitiveOutput_ } + , prefersCompactVertexOutput{ prefersCompactVertexOutput_ } + , prefersCompactPrimitiveOutput{ prefersCompactPrimitiveOutput_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesEXT( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMeshShaderPropertiesEXT( VkPhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMeshShaderPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMeshShaderPropertiesEXT & operator=( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMeshShaderPropertiesEXT & operator=( VkPhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMeshShaderPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMeshShaderPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxTaskWorkGroupTotalCount, + maxTaskWorkGroupCount, + maxTaskWorkGroupInvocations, + maxTaskWorkGroupSize, + maxTaskPayloadSize, + maxTaskSharedMemorySize, + maxTaskPayloadAndSharedMemorySize, + maxMeshWorkGroupTotalCount, + maxMeshWorkGroupCount, + maxMeshWorkGroupInvocations, + maxMeshWorkGroupSize, + maxMeshSharedMemorySize, + maxMeshPayloadAndSharedMemorySize, + maxMeshOutputMemorySize, + maxMeshPayloadAndOutputMemorySize, + maxMeshOutputComponents, + maxMeshOutputVertices, + maxMeshOutputPrimitives, + maxMeshOutputLayers, + maxMeshMultiviewViewCount, + meshOutputPerVertexGranularity, + meshOutputPerPrimitiveGranularity, + maxPreferredTaskWorkGroupInvocations, + maxPreferredMeshWorkGroupInvocations, + prefersLocalInvocationVertexOutput, + prefersLocalInvocationPrimitiveOutput, + prefersCompactVertexOutput, + prefersCompactPrimitiveOutput ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMeshShaderPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTaskWorkGroupTotalCount == rhs.maxTaskWorkGroupTotalCount ) && + ( maxTaskWorkGroupCount == rhs.maxTaskWorkGroupCount ) && ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) && + ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize ) && ( maxTaskPayloadSize == rhs.maxTaskPayloadSize ) && + ( maxTaskSharedMemorySize == rhs.maxTaskSharedMemorySize ) && ( maxTaskPayloadAndSharedMemorySize == rhs.maxTaskPayloadAndSharedMemorySize ) && + ( maxMeshWorkGroupTotalCount == rhs.maxMeshWorkGroupTotalCount ) && ( maxMeshWorkGroupCount == rhs.maxMeshWorkGroupCount ) && + ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) && ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize ) && + ( maxMeshSharedMemorySize == rhs.maxMeshSharedMemorySize ) && ( maxMeshPayloadAndSharedMemorySize == rhs.maxMeshPayloadAndSharedMemorySize ) && + ( maxMeshOutputMemorySize == rhs.maxMeshOutputMemorySize ) && ( maxMeshPayloadAndOutputMemorySize == rhs.maxMeshPayloadAndOutputMemorySize ) && + ( maxMeshOutputComponents == rhs.maxMeshOutputComponents ) && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices ) && + ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) && ( maxMeshOutputLayers == rhs.maxMeshOutputLayers ) && + ( 
maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount ) && ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity ) && + ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity ) && + ( maxPreferredTaskWorkGroupInvocations == rhs.maxPreferredTaskWorkGroupInvocations ) && + ( maxPreferredMeshWorkGroupInvocations == rhs.maxPreferredMeshWorkGroupInvocations ) && + ( prefersLocalInvocationVertexOutput == rhs.prefersLocalInvocationVertexOutput ) && + ( prefersLocalInvocationPrimitiveOutput == rhs.prefersLocalInvocationPrimitiveOutput ) && + ( prefersCompactVertexOutput == rhs.prefersCompactVertexOutput ) && ( prefersCompactPrimitiveOutput == rhs.prefersCompactPrimitiveOutput ); +# endif + } + + bool operator!=( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesEXT; + void * pNext = {}; + uint32_t maxTaskWorkGroupTotalCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxTaskWorkGroupCount = {}; + uint32_t maxTaskWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxTaskWorkGroupSize = {}; + uint32_t maxTaskPayloadSize = {}; + uint32_t maxTaskSharedMemorySize = {}; + uint32_t maxTaskPayloadAndSharedMemorySize = {}; + uint32_t maxMeshWorkGroupTotalCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxMeshWorkGroupCount = {}; + uint32_t maxMeshWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxMeshWorkGroupSize = {}; + uint32_t maxMeshSharedMemorySize = {}; + uint32_t maxMeshPayloadAndSharedMemorySize = {}; + uint32_t maxMeshOutputMemorySize = {}; + uint32_t maxMeshPayloadAndOutputMemorySize = {}; + uint32_t maxMeshOutputComponents = {}; + uint32_t maxMeshOutputVertices = {}; + uint32_t maxMeshOutputPrimitives = {}; + uint32_t maxMeshOutputLayers = {}; + uint32_t maxMeshMultiviewViewCount = {}; + uint32_t meshOutputPerVertexGranularity = {}; + uint32_t meshOutputPerPrimitiveGranularity = {}; + uint32_t maxPreferredTaskWorkGroupInvocations = {}; + uint32_t maxPreferredMeshWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationVertexOutput = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationPrimitiveOutput = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersCompactVertexOutput = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersCompactPrimitiveOutput = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMeshShaderPropertiesEXT; + }; + + struct PhysicalDeviceMeshShaderPropertiesNV + { + using NativeType = VkPhysicalDeviceMeshShaderPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( uint32_t maxDrawMeshTasksCount_ = {}, + uint32_t maxTaskWorkGroupInvocations_ = {}, + std::array const & maxTaskWorkGroupSize_ = {}, + uint32_t maxTaskTotalMemorySize_ = {}, + uint32_t maxTaskOutputCount_ = {}, + uint32_t maxMeshWorkGroupInvocations_ = {}, + std::array const & maxMeshWorkGroupSize_ = {}, + uint32_t maxMeshTotalMemorySize_ = {}, + uint32_t maxMeshOutputVertices_ = {}, + uint32_t maxMeshOutputPrimitives_ = {}, + uint32_t maxMeshMultiviewViewCount_ = {}, + uint32_t meshOutputPerVertexGranularity_ = {}, + uint32_t 
meshOutputPerPrimitiveGranularity_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxDrawMeshTasksCount{ maxDrawMeshTasksCount_ } + , maxTaskWorkGroupInvocations{ maxTaskWorkGroupInvocations_ } + , maxTaskWorkGroupSize{ maxTaskWorkGroupSize_ } + , maxTaskTotalMemorySize{ maxTaskTotalMemorySize_ } + , maxTaskOutputCount{ maxTaskOutputCount_ } + , maxMeshWorkGroupInvocations{ maxMeshWorkGroupInvocations_ } + , maxMeshWorkGroupSize{ maxMeshWorkGroupSize_ } + , maxMeshTotalMemorySize{ maxMeshTotalMemorySize_ } + , maxMeshOutputVertices{ maxMeshOutputVertices_ } + , maxMeshOutputPrimitives{ maxMeshOutputPrimitives_ } + , maxMeshMultiviewViewCount{ maxMeshMultiviewViewCount_ } + , meshOutputPerVertexGranularity{ meshOutputPerVertexGranularity_ } + , meshOutputPerPrimitiveGranularity{ meshOutputPerPrimitiveGranularity_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMeshShaderPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMeshShaderPropertiesNV & operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMeshShaderPropertiesNV & operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMeshShaderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMeshShaderPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxDrawMeshTasksCount, + maxTaskWorkGroupInvocations, + maxTaskWorkGroupSize, + maxTaskTotalMemorySize, + maxTaskOutputCount, + maxMeshWorkGroupInvocations, + maxMeshWorkGroupSize, + maxMeshTotalMemorySize, + maxMeshOutputVertices, + maxMeshOutputPrimitives, + maxMeshMultiviewViewCount, + meshOutputPerVertexGranularity, + meshOutputPerPrimitiveGranularity ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMeshShaderPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount ) && + ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) && ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize ) && + ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize ) && ( maxTaskOutputCount == rhs.maxTaskOutputCount ) && + ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) && ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize ) && + ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize ) && ( maxMeshOutputVertices == 
rhs.maxMeshOutputVertices ) && + ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) && ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount ) && + ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity ) && + ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity ); +# endif + } + + bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV; + void * pNext = {}; + uint32_t maxDrawMeshTasksCount = {}; + uint32_t maxTaskWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxTaskWorkGroupSize = {}; + uint32_t maxTaskTotalMemorySize = {}; + uint32_t maxTaskOutputCount = {}; + uint32_t maxMeshWorkGroupInvocations = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxMeshWorkGroupSize = {}; + uint32_t maxMeshTotalMemorySize = {}; + uint32_t maxMeshOutputVertices = {}; + uint32_t maxMeshOutputPrimitives = {}; + uint32_t maxMeshMultiviewViewCount = {}; + uint32_t meshOutputPerVertexGranularity = {}; + uint32_t meshOutputPerPrimitiveGranularity = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMeshShaderPropertiesNV; + }; + + struct PhysicalDeviceMultiDrawFeaturesEXT + { + using NativeType = VkPhysicalDeviceMultiDrawFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiDrawFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 multiDraw_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , multiDraw{ multiDraw_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiDrawFeaturesEXT( VkPhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiDrawFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMultiDrawFeaturesEXT & operator=( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMultiDrawFeaturesEXT & operator=( VkPhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT & setMultiDraw( VULKAN_HPP_NAMESPACE::Bool32 multiDraw_ ) VULKAN_HPP_NOEXCEPT + { + multiDraw = multiDraw_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMultiDrawFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiDrawFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, multiDraw ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto 
operator<=>( PhysicalDeviceMultiDrawFeaturesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiDraw == rhs.multiDraw );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiDrawFeaturesEXT;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiDraw = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiDrawFeaturesEXT>
+  {
+    using Type = PhysicalDeviceMultiDrawFeaturesEXT;
+  };
+
+  struct PhysicalDeviceMultiDrawPropertiesEXT
+  {
+    using NativeType = VkPhysicalDeviceMultiDrawPropertiesEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiDrawPropertiesEXT;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT( uint32_t maxMultiDrawCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , maxMultiDrawCount{ maxMultiDrawCount_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceMultiDrawPropertiesEXT( VkPhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceMultiDrawPropertiesEXT( *reinterpret_cast<PhysicalDeviceMultiDrawPropertiesEXT const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceMultiDrawPropertiesEXT & operator=( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceMultiDrawPropertiesEXT & operator=( VkPhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT const *>( &rhs );
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMultiDrawPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMultiDrawPropertiesEXT *>( this );
+    }
+
+    operator VkPhysicalDeviceMultiDrawPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMultiDrawPropertiesEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, uint32_t const &>
+# endif
+    reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, maxMultiDrawCount );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PhysicalDeviceMultiDrawPropertiesEXT const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxMultiDrawCount == rhs.maxMultiDrawCount );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiDrawPropertiesEXT;
+    void * pNext = {};
+    uint32_t maxMultiDrawCount = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiDrawPropertiesEXT>
+  {
+    using Type = PhysicalDeviceMultiDrawPropertiesEXT;
+  };
+
+  struct PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT
+  {
+    using NativeType =
VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampled_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , multisampledRenderToSingleSampled{ multisampledRenderToSingleSampled_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( + *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & + operator=( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & + operator=( VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & + setMultisampledRenderToSingleSampled( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampled_ ) VULKAN_HPP_NOEXCEPT + { + multisampledRenderToSingleSampled = multisampledRenderToSingleSampled_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, multisampledRenderToSingleSampled ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multisampledRenderToSingleSampled == rhs.multisampledRenderToSingleSampled ); +# endif + } + + bool operator!=( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; + void * pNext = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampled = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; + }; + + struct PhysicalDeviceMultiviewFeatures + { + using NativeType = VkPhysicalDeviceMultiviewFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , multiview{ multiview_ } + , multiviewGeometryShader{ multiviewGeometryShader_ } + , multiviewTessellationShader{ multiviewTessellationShader_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiviewFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMultiviewFeatures & operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMultiviewFeatures & operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT + { + multiview = multiview_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & + setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT + { + multiviewGeometryShader = multiviewGeometryShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & + setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT + { + multiviewTessellationShader = multiviewTessellationShader_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMultiviewFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, multiview, multiviewGeometryShader, multiviewTessellationShader ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiviewFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiview == rhs.multiview ) && 
( multiviewGeometryShader == rhs.multiviewGeometryShader ) && + ( multiviewTessellationShader == rhs.multiviewTessellationShader ); +# endif + } + + bool operator!=( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiview = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMultiviewFeatures; + }; + + using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures; + + struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX + { + using NativeType = VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , perViewPositionAllComponents{ perViewPositionAllComponents_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & + operator=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, perViewPositionAllComponents ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perViewPositionAllComponents == rhs.perViewPositionAllComponents ); +# endif + } + + bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + }; + + struct PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM + { + using NativeType = VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewRenderAreas_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , multiviewPerViewRenderAreas{ multiviewPerViewRenderAreas_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM & + operator=( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM & operator=( VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM & + setMultiviewPerViewRenderAreas( VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewRenderAreas_ ) VULKAN_HPP_NOEXCEPT + { + multiviewPerViewRenderAreas = multiviewPerViewRenderAreas_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, multiviewPerViewRenderAreas ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiviewPerViewRenderAreas == rhs.multiviewPerViewRenderAreas ); +# endif + } + + bool operator!=( 
PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewRenderAreas = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; + }; + + struct PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM + { + using NativeType = VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewViewports_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , multiviewPerViewViewports{ multiviewPerViewViewports_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & + operator=( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & operator=( VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & + setMultiviewPerViewViewports( VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewViewports_ ) VULKAN_HPP_NOEXCEPT + { + multiviewPerViewViewports = multiviewPerViewViewports_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, multiviewPerViewViewports ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiviewPerViewViewports == 
rhs.multiviewPerViewViewports ); +# endif + } + + bool operator!=( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewViewports = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; + }; + + struct PhysicalDeviceMultiviewProperties + { + using NativeType = VkPhysicalDeviceMultiviewProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_ = {}, + uint32_t maxMultiviewInstanceIndex_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxMultiviewViewCount{ maxMultiviewViewCount_ } + , maxMultiviewInstanceIndex{ maxMultiviewInstanceIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMultiviewProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMultiviewProperties & operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMultiviewProperties & operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMultiviewProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxMultiviewViewCount, maxMultiviewInstanceIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMultiviewProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) && + ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ); +# endif + } + + bool operator!=( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties; + void * pNext = {}; + uint32_t maxMultiviewViewCount = {}; + uint32_t maxMultiviewInstanceIndex = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMultiviewProperties; + }; + + using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties; + + struct PhysicalDeviceMutableDescriptorTypeFeaturesEXT + { + using NativeType = 
VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , mutableDescriptorType{ mutableDescriptorType_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceMutableDescriptorTypeFeaturesEXT( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMutableDescriptorTypeFeaturesEXT( VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMutableDescriptorTypeFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMutableDescriptorTypeFeaturesEXT & operator=( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceMutableDescriptorTypeFeaturesEXT & operator=( VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesEXT & + setMutableDescriptorType( VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ ) VULKAN_HPP_NOEXCEPT + { + mutableDescriptorType = mutableDescriptorType_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, mutableDescriptorType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mutableDescriptorType == rhs.mutableDescriptorType ); +# endif + } + + bool operator!=( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMutableDescriptorTypeFeaturesEXT; + }; + + using PhysicalDeviceMutableDescriptorTypeFeaturesVALVE = PhysicalDeviceMutableDescriptorTypeFeaturesEXT; + + struct PhysicalDeviceNestedCommandBufferFeaturesEXT + { + using NativeType = 
VkPhysicalDeviceNestedCommandBufferFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceNestedCommandBufferFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceNestedCommandBufferFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferRendering_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferSimultaneousUse_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , nestedCommandBuffer{ nestedCommandBuffer_ } + , nestedCommandBufferRendering{ nestedCommandBufferRendering_ } + , nestedCommandBufferSimultaneousUse{ nestedCommandBufferSimultaneousUse_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceNestedCommandBufferFeaturesEXT( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceNestedCommandBufferFeaturesEXT( VkPhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceNestedCommandBufferFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceNestedCommandBufferFeaturesEXT & operator=( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceNestedCommandBufferFeaturesEXT & operator=( VkPhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT & + setNestedCommandBuffer( VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBuffer_ ) VULKAN_HPP_NOEXCEPT + { + nestedCommandBuffer = nestedCommandBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT & + setNestedCommandBufferRendering( VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferRendering_ ) VULKAN_HPP_NOEXCEPT + { + nestedCommandBufferRendering = nestedCommandBufferRendering_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT & + setNestedCommandBufferSimultaneousUse( VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferSimultaneousUse_ ) VULKAN_HPP_NOEXCEPT + { + nestedCommandBufferSimultaneousUse = nestedCommandBufferSimultaneousUse_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceNestedCommandBufferFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceNestedCommandBufferFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, nestedCommandBuffer, nestedCommandBufferRendering, nestedCommandBufferSimultaneousUse ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceNestedCommandBufferFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if 
defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( nestedCommandBuffer == rhs.nestedCommandBuffer ) && + ( nestedCommandBufferRendering == rhs.nestedCommandBufferRendering ) && + ( nestedCommandBufferSimultaneousUse == rhs.nestedCommandBufferSimultaneousUse ); +# endif + } + + bool operator!=( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceNestedCommandBufferFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferRendering = {}; + VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferSimultaneousUse = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceNestedCommandBufferFeaturesEXT; + }; + + struct PhysicalDeviceNestedCommandBufferPropertiesEXT + { + using NativeType = VkPhysicalDeviceNestedCommandBufferPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceNestedCommandBufferPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceNestedCommandBufferPropertiesEXT( uint32_t maxCommandBufferNestingLevel_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxCommandBufferNestingLevel{ maxCommandBufferNestingLevel_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceNestedCommandBufferPropertiesEXT( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceNestedCommandBufferPropertiesEXT( VkPhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceNestedCommandBufferPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceNestedCommandBufferPropertiesEXT & operator=( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceNestedCommandBufferPropertiesEXT & operator=( VkPhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferPropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferPropertiesEXT & + setMaxCommandBufferNestingLevel( uint32_t maxCommandBufferNestingLevel_ ) VULKAN_HPP_NOEXCEPT + { + maxCommandBufferNestingLevel = maxCommandBufferNestingLevel_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceNestedCommandBufferPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceNestedCommandBufferPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxCommandBufferNestingLevel ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PhysicalDeviceNestedCommandBufferPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxCommandBufferNestingLevel == rhs.maxCommandBufferNestingLevel ); +# endif + } + + bool operator!=( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceNestedCommandBufferPropertiesEXT; + void * pNext = {}; + uint32_t maxCommandBufferNestingLevel = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceNestedCommandBufferPropertiesEXT; + }; + + struct PhysicalDeviceNonSeamlessCubeMapFeaturesEXT + { + using NativeType = VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 nonSeamlessCubeMap_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , nonSeamlessCubeMap{ nonSeamlessCubeMap_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & operator=( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & operator=( VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & + setNonSeamlessCubeMap( VULKAN_HPP_NAMESPACE::Bool32 nonSeamlessCubeMap_ ) VULKAN_HPP_NOEXCEPT + { + nonSeamlessCubeMap = nonSeamlessCubeMap_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, nonSeamlessCubeMap ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if 
defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( nonSeamlessCubeMap == rhs.nonSeamlessCubeMap ); +# endif + } + + bool operator!=( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 nonSeamlessCubeMap = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceNonSeamlessCubeMapFeaturesEXT; + }; + + struct PhysicalDeviceOpacityMicromapFeaturesEXT + { + using NativeType = VkPhysicalDeviceOpacityMicromapFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 micromap_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 micromapCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 micromapHostCommands_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , micromap{ micromap_ } + , micromapCaptureReplay{ micromapCaptureReplay_ } + , micromapHostCommands{ micromapHostCommands_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapFeaturesEXT( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceOpacityMicromapFeaturesEXT( VkPhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceOpacityMicromapFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceOpacityMicromapFeaturesEXT & operator=( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceOpacityMicromapFeaturesEXT & operator=( VkPhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setMicromap( VULKAN_HPP_NAMESPACE::Bool32 micromap_ ) VULKAN_HPP_NOEXCEPT + { + micromap = micromap_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & + setMicromapCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 micromapCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + micromapCaptureReplay = micromapCaptureReplay_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & + setMicromapHostCommands( VULKAN_HPP_NAMESPACE::Bool32 micromapHostCommands_ ) VULKAN_HPP_NOEXCEPT + { + micromapHostCommands = micromapHostCommands_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceOpacityMicromapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceOpacityMicromapFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + 
reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, micromap, micromapCaptureReplay, micromapHostCommands ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceOpacityMicromapFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( micromap == rhs.micromap ) && ( micromapCaptureReplay == rhs.micromapCaptureReplay ) && + ( micromapHostCommands == rhs.micromapHostCommands ); +# endif + } + + bool operator!=( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 micromap = {}; + VULKAN_HPP_NAMESPACE::Bool32 micromapCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 micromapHostCommands = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceOpacityMicromapFeaturesEXT; + }; + + struct PhysicalDeviceOpacityMicromapPropertiesEXT + { + using NativeType = VkPhysicalDeviceOpacityMicromapPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapPropertiesEXT( uint32_t maxOpacity2StateSubdivisionLevel_ = {}, + uint32_t maxOpacity4StateSubdivisionLevel_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxOpacity2StateSubdivisionLevel{ maxOpacity2StateSubdivisionLevel_ } + , maxOpacity4StateSubdivisionLevel{ maxOpacity4StateSubdivisionLevel_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapPropertiesEXT( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceOpacityMicromapPropertiesEXT( VkPhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceOpacityMicromapPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceOpacityMicromapPropertiesEXT & operator=( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceOpacityMicromapPropertiesEXT & operator=( VkPhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceOpacityMicromapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceOpacityMicromapPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxOpacity2StateSubdivisionLevel, maxOpacity4StateSubdivisionLevel ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceOpacityMicromapPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) 
const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxOpacity2StateSubdivisionLevel == rhs.maxOpacity2StateSubdivisionLevel ) && + ( maxOpacity4StateSubdivisionLevel == rhs.maxOpacity4StateSubdivisionLevel ); +# endif + } + + bool operator!=( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT; + void * pNext = {}; + uint32_t maxOpacity2StateSubdivisionLevel = {}; + uint32_t maxOpacity4StateSubdivisionLevel = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceOpacityMicromapPropertiesEXT; + }; + + struct PhysicalDeviceOpticalFlowFeaturesNV + { + using NativeType = VkPhysicalDeviceOpticalFlowFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpticalFlowFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 opticalFlow_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , opticalFlow{ opticalFlow_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowFeaturesNV( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceOpticalFlowFeaturesNV( VkPhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceOpticalFlowFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceOpticalFlowFeaturesNV & operator=( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceOpticalFlowFeaturesNV & operator=( VkPhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpticalFlowFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpticalFlowFeaturesNV & setOpticalFlow( VULKAN_HPP_NAMESPACE::Bool32 opticalFlow_ ) VULKAN_HPP_NOEXCEPT + { + opticalFlow = opticalFlow_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceOpticalFlowFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceOpticalFlowFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, opticalFlow ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceOpticalFlowFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opticalFlow == rhs.opticalFlow ); +# endif + } + + bool operator!=( 
PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpticalFlowFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 opticalFlow = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceOpticalFlowFeaturesNV; + }; + + struct PhysicalDeviceOpticalFlowPropertiesNV + { + using NativeType = VkPhysicalDeviceOpticalFlowPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpticalFlowPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowPropertiesNV( VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedOutputGridSizes_ = {}, + VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedHintGridSizes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 hintSupported_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 costSupported_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bidirectionalFlowSupported_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 globalFlowSupported_ = {}, + uint32_t minWidth_ = {}, + uint32_t minHeight_ = {}, + uint32_t maxWidth_ = {}, + uint32_t maxHeight_ = {}, + uint32_t maxNumRegionsOfInterest_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , supportedOutputGridSizes{ supportedOutputGridSizes_ } + , supportedHintGridSizes{ supportedHintGridSizes_ } + , hintSupported{ hintSupported_ } + , costSupported{ costSupported_ } + , bidirectionalFlowSupported{ bidirectionalFlowSupported_ } + , globalFlowSupported{ globalFlowSupported_ } + , minWidth{ minWidth_ } + , minHeight{ minHeight_ } + , maxWidth{ maxWidth_ } + , maxHeight{ maxHeight_ } + , maxNumRegionsOfInterest{ maxNumRegionsOfInterest_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowPropertiesNV( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceOpticalFlowPropertiesNV( VkPhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceOpticalFlowPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceOpticalFlowPropertiesNV & operator=( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceOpticalFlowPropertiesNV & operator=( VkPhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceOpticalFlowPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceOpticalFlowPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + supportedOutputGridSizes, + supportedHintGridSizes, + hintSupported, + costSupported, + bidirectionalFlowSupported, + globalFlowSupported, + minWidth, + minHeight, + maxWidth, + maxHeight, + maxNumRegionsOfInterest ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceOpticalFlowPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if 
defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedOutputGridSizes == rhs.supportedOutputGridSizes ) && + ( supportedHintGridSizes == rhs.supportedHintGridSizes ) && ( hintSupported == rhs.hintSupported ) && ( costSupported == rhs.costSupported ) && + ( bidirectionalFlowSupported == rhs.bidirectionalFlowSupported ) && ( globalFlowSupported == rhs.globalFlowSupported ) && + ( minWidth == rhs.minWidth ) && ( minHeight == rhs.minHeight ) && ( maxWidth == rhs.maxWidth ) && ( maxHeight == rhs.maxHeight ) && + ( maxNumRegionsOfInterest == rhs.maxNumRegionsOfInterest ); +# endif + } + + bool operator!=( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceOpticalFlowPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedOutputGridSizes = {}; + VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV supportedHintGridSizes = {}; + VULKAN_HPP_NAMESPACE::Bool32 hintSupported = {}; + VULKAN_HPP_NAMESPACE::Bool32 costSupported = {}; + VULKAN_HPP_NAMESPACE::Bool32 bidirectionalFlowSupported = {}; + VULKAN_HPP_NAMESPACE::Bool32 globalFlowSupported = {}; + uint32_t minWidth = {}; + uint32_t minHeight = {}; + uint32_t maxWidth = {}; + uint32_t maxHeight = {}; + uint32_t maxNumRegionsOfInterest = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceOpticalFlowPropertiesNV; + }; + + struct PhysicalDevicePCIBusInfoPropertiesEXT + { + using NativeType = VkPhysicalDevicePCIBusInfoPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( + uint32_t pciDomain_ = {}, uint32_t pciBus_ = {}, uint32_t pciDevice_ = {}, uint32_t pciFunction_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pciDomain{ pciDomain_ } + , pciBus{ pciBus_ } + , pciDevice{ pciDevice_ } + , pciFunction{ pciFunction_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePCIBusInfoPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePCIBusInfoPropertiesEXT & operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePCIBusInfoPropertiesEXT & operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDevicePCIBusInfoPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pciDomain, pciBus, pciDevice, pciFunction ); + } +#endif + +#if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePCIBusInfoPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pciDomain == rhs.pciDomain ) && ( pciBus == rhs.pciBus ) && ( pciDevice == rhs.pciDevice ) && + ( pciFunction == rhs.pciFunction ); +# endif + } + + bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT; + void * pNext = {}; + uint32_t pciDomain = {}; + uint32_t pciBus = {}; + uint32_t pciDevice = {}; + uint32_t pciFunction = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePCIBusInfoPropertiesEXT; + }; + + struct PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT + { + using NativeType = VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pageableDeviceLocalMemory{ pageableDeviceLocalMemory_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & + operator=( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & operator=( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & + setPageableDeviceLocalMemory( VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory_ ) VULKAN_HPP_NOEXCEPT + { + pageableDeviceLocalMemory = pageableDeviceLocalMemory_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return 
std::tie( sType, pNext, pageableDeviceLocalMemory ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pageableDeviceLocalMemory == rhs.pageableDeviceLocalMemory ); +# endif + } + + bool operator!=( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + }; + + struct PhysicalDevicePartitionedAccelerationStructureFeaturesNV + { + using NativeType = VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePartitionedAccelerationStructureFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePartitionedAccelerationStructureFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 partitionedAccelerationStructure_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , partitionedAccelerationStructure{ partitionedAccelerationStructure_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePartitionedAccelerationStructureFeaturesNV( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePartitionedAccelerationStructureFeaturesNV( VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePartitionedAccelerationStructureFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePartitionedAccelerationStructureFeaturesNV & + operator=( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePartitionedAccelerationStructureFeaturesNV & + operator=( VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePartitionedAccelerationStructureFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePartitionedAccelerationStructureFeaturesNV & + setPartitionedAccelerationStructure( VULKAN_HPP_NAMESPACE::Bool32 partitionedAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + partitionedAccelerationStructure = partitionedAccelerationStructure_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this 
); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, partitionedAccelerationStructure ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( partitionedAccelerationStructure == rhs.partitionedAccelerationStructure ); +# endif + } + + bool operator!=( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePartitionedAccelerationStructureFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 partitionedAccelerationStructure = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePartitionedAccelerationStructureFeaturesNV; + }; + + struct PhysicalDevicePartitionedAccelerationStructurePropertiesNV + { + using NativeType = VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePartitionedAccelerationStructurePropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePartitionedAccelerationStructurePropertiesNV( uint32_t maxPartitionCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxPartitionCount{ maxPartitionCount_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePartitionedAccelerationStructurePropertiesNV( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePartitionedAccelerationStructurePropertiesNV( VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePartitionedAccelerationStructurePropertiesNV( + *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePartitionedAccelerationStructurePropertiesNV & + operator=( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePartitionedAccelerationStructurePropertiesNV & + operator=( VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxPartitionCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & ) const = default; +#else + bool 
operator==( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPartitionCount == rhs.maxPartitionCount ); +# endif + } + + bool operator!=( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePartitionedAccelerationStructurePropertiesNV; + void * pNext = {}; + uint32_t maxPartitionCount = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePartitionedAccelerationStructurePropertiesNV; + }; + + struct PhysicalDevicePerStageDescriptorSetFeaturesNV + { + using NativeType = VkPhysicalDevicePerStageDescriptorSetFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerStageDescriptorSetFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePerStageDescriptorSetFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 perStageDescriptorSet_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dynamicPipelineLayout_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , perStageDescriptorSet{ perStageDescriptorSet_ } + , dynamicPipelineLayout{ dynamicPipelineLayout_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePerStageDescriptorSetFeaturesNV( PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePerStageDescriptorSetFeaturesNV( VkPhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePerStageDescriptorSetFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePerStageDescriptorSetFeaturesNV & operator=( PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePerStageDescriptorSetFeaturesNV & operator=( VkPhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerStageDescriptorSetFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerStageDescriptorSetFeaturesNV & + setPerStageDescriptorSet( VULKAN_HPP_NAMESPACE::Bool32 perStageDescriptorSet_ ) VULKAN_HPP_NOEXCEPT + { + perStageDescriptorSet = perStageDescriptorSet_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerStageDescriptorSetFeaturesNV & + setDynamicPipelineLayout( VULKAN_HPP_NAMESPACE::Bool32 dynamicPipelineLayout_ ) VULKAN_HPP_NOEXCEPT + { + dynamicPipelineLayout = dynamicPipelineLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePerStageDescriptorSetFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePerStageDescriptorSetFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const 
VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, perStageDescriptorSet, dynamicPipelineLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePerStageDescriptorSetFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perStageDescriptorSet == rhs.perStageDescriptorSet ) && + ( dynamicPipelineLayout == rhs.dynamicPipelineLayout ); +# endif + } + + bool operator!=( PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerStageDescriptorSetFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 perStageDescriptorSet = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicPipelineLayout = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePerStageDescriptorSetFeaturesNV; + }; + + struct PhysicalDevicePerformanceQueryFeaturesKHR + { + using NativeType = VkPhysicalDevicePerformanceQueryFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , performanceCounterQueryPools{ performanceCounterQueryPools_ } + , performanceCounterMultipleQueryPools{ performanceCounterMultipleQueryPools_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePerformanceQueryFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePerformanceQueryFeaturesKHR & operator=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePerformanceQueryFeaturesKHR & operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & + setPerformanceCounterQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ ) VULKAN_HPP_NOEXCEPT + { + performanceCounterQueryPools = performanceCounterQueryPools_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & + setPerformanceCounterMultipleQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ ) VULKAN_HPP_NOEXCEPT + { + performanceCounterMultipleQueryPools = performanceCounterMultipleQueryPools_; 
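+
+    // Usage sketch (illustrative only, not part of the generated interface): feature structs such
+    // as this one are normally read back through a pNext chain. Assuming a valid vk::PhysicalDevice
+    // named `physicalDevice` on an implementation exposing VK_KHR_performance_query, one might write:
+    //
+    //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
+    //                                            vk::PhysicalDevicePerformanceQueryFeaturesKHR>();
+    //   vk::Bool32 hasCounterPools =
+    //     chain.get<vk::PhysicalDevicePerformanceQueryFeaturesKHR>().performanceCounterQueryPools;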
+ return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePerformanceQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePerformanceQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, performanceCounterQueryPools, performanceCounterMultipleQueryPools ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePerformanceQueryFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( performanceCounterQueryPools == rhs.performanceCounterQueryPools ) && + ( performanceCounterMultipleQueryPools == rhs.performanceCounterMultipleQueryPools ); +# endif + } + + bool operator!=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools = {}; + VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePerformanceQueryFeaturesKHR; + }; + + struct PhysicalDevicePerformanceQueryPropertiesKHR + { + using NativeType = VkPhysicalDevicePerformanceQueryPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , allowCommandBufferQueryCopies{ allowCommandBufferQueryCopies_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePerformanceQueryPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePerformanceQueryPropertiesKHR & operator=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePerformanceQueryPropertiesKHR & operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDevicePerformanceQueryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePerformanceQueryPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + 
{ + return std::tie( sType, pNext, allowCommandBufferQueryCopies ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePerformanceQueryPropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies ); +# endif + } + + bool operator!=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePerformanceQueryPropertiesKHR; + }; + + struct PhysicalDevicePipelineBinaryFeaturesKHR + { + using NativeType = VkPhysicalDevicePipelineBinaryFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineBinaryFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineBinaryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaries_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineBinaries{ pipelineBinaries_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineBinaryFeaturesKHR( PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineBinaryFeaturesKHR( VkPhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineBinaryFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineBinaryFeaturesKHR & operator=( PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePipelineBinaryFeaturesKHR & operator=( VkPhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryFeaturesKHR & setPipelineBinaries( VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaries_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBinaries = pipelineBinaries_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePipelineBinaryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineBinaryFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineBinaries ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineBinaryFeaturesKHR const & ) const = default; +#else + bool operator==( 
PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBinaries == rhs.pipelineBinaries ); +# endif + } + + bool operator!=( PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineBinaryFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaries = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineBinaryFeaturesKHR; + }; + + struct PhysicalDevicePipelineBinaryPropertiesKHR + { + using NativeType = VkPhysicalDevicePipelineBinaryPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineBinaryPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineBinaryPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryInternalCache_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryInternalCacheControl_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryPrefersInternalCache_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryPrecompiledInternalCache_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryCompressedData_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineBinaryInternalCache{ pipelineBinaryInternalCache_ } + , pipelineBinaryInternalCacheControl{ pipelineBinaryInternalCacheControl_ } + , pipelineBinaryPrefersInternalCache{ pipelineBinaryPrefersInternalCache_ } + , pipelineBinaryPrecompiledInternalCache{ pipelineBinaryPrecompiledInternalCache_ } + , pipelineBinaryCompressedData{ pipelineBinaryCompressedData_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineBinaryPropertiesKHR( PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineBinaryPropertiesKHR( VkPhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineBinaryPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineBinaryPropertiesKHR & operator=( PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePipelineBinaryPropertiesKHR & operator=( VkPhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryPropertiesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryPropertiesKHR & + setPipelineBinaryInternalCache( VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryInternalCache_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBinaryInternalCache = pipelineBinaryInternalCache_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryPropertiesKHR & + setPipelineBinaryInternalCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryInternalCacheControl_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBinaryInternalCacheControl = pipelineBinaryInternalCacheControl_; + return *this; + } 
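+
+    // Usage sketch (illustrative only, not part of the generated interface): this properties struct
+    // is filled in by the implementation rather than by the application. Assuming a valid
+    // vk::PhysicalDevice named `physicalDevice` with VK_KHR_pipeline_binary available, it can be
+    // queried as:
+    //
+    //   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
+    //                                              vk::PhysicalDevicePipelineBinaryPropertiesKHR>();
+    //   vk::Bool32 prefersInternalCache =
+    //     chain.get<vk::PhysicalDevicePipelineBinaryPropertiesKHR>().pipelineBinaryPrefersInternalCache;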
+ + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryPropertiesKHR & + setPipelineBinaryPrefersInternalCache( VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryPrefersInternalCache_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBinaryPrefersInternalCache = pipelineBinaryPrefersInternalCache_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryPropertiesKHR & + setPipelineBinaryPrecompiledInternalCache( VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryPrecompiledInternalCache_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBinaryPrecompiledInternalCache = pipelineBinaryPrecompiledInternalCache_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryPropertiesKHR & + setPipelineBinaryCompressedData( VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryCompressedData_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBinaryCompressedData = pipelineBinaryCompressedData_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePipelineBinaryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineBinaryPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + pipelineBinaryInternalCache, + pipelineBinaryInternalCacheControl, + pipelineBinaryPrefersInternalCache, + pipelineBinaryPrecompiledInternalCache, + pipelineBinaryCompressedData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineBinaryPropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBinaryInternalCache == rhs.pipelineBinaryInternalCache ) && + ( pipelineBinaryInternalCacheControl == rhs.pipelineBinaryInternalCacheControl ) && + ( pipelineBinaryPrefersInternalCache == rhs.pipelineBinaryPrefersInternalCache ) && + ( pipelineBinaryPrecompiledInternalCache == rhs.pipelineBinaryPrecompiledInternalCache ) && + ( pipelineBinaryCompressedData == rhs.pipelineBinaryCompressedData ); +# endif + } + + bool operator!=( PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineBinaryPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryInternalCache = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryInternalCacheControl = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryPrefersInternalCache = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryPrecompiledInternalCache = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryCompressedData = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineBinaryPropertiesKHR; + }; + + struct PhysicalDevicePipelineCreationCacheControlFeatures + { + using NativeType = VkPhysicalDevicePipelineCreationCacheControlFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeatures( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineCreationCacheControl{ pipelineCreationCacheControl_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePipelineCreationCacheControlFeatures( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineCreationCacheControlFeatures( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineCreationCacheControlFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineCreationCacheControlFeatures & + operator=( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePipelineCreationCacheControlFeatures & operator=( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures & + setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT + { + pipelineCreationCacheControl = pipelineCreationCacheControl_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePipelineCreationCacheControlFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineCreationCacheControlFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineCreationCacheControl ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineCreationCacheControlFeatures const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl ); +# endif + } + + bool operator!=( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineCreationCacheControlFeatures; + }; + + using PhysicalDevicePipelineCreationCacheControlFeaturesEXT = PhysicalDevicePipelineCreationCacheControlFeatures; + + struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR + { + using NativeType = VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + + static const bool allowDuplicate = false; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineExecutableInfo{ pipelineExecutableInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & + operator=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & + setPipelineExecutableInfo( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT + { + pipelineExecutableInfo = pipelineExecutableInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineExecutableInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineExecutableInfo == rhs.pipelineExecutableInfo ); +# endif + } + + bool operator!=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + }; + + struct PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT + { + using NativeType = VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; + + 
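+
+    // Usage sketch (illustrative only, not part of the generated interface): enabling this feature
+    // happens at device creation time by chaining the struct into vk::DeviceCreateInfo. Assumes
+    // `physicalDevice` is a valid vk::PhysicalDevice and `queueCreateInfo` is a filled-in
+    // vk::DeviceQueueCreateInfo; the VK_EXT_pipeline_library_group_handles extension also has to be
+    // listed in ppEnabledExtensionNames, omitted here:
+    //
+    //   vk::PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT groupHandles{ VK_TRUE };
+    //   vk::DeviceCreateInfo deviceCreateInfo{ {}, 1, &queueCreateInfo };
+    //   deviceCreateInfo.setPNext( &groupHandles );
+    //   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );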
static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pipelineLibraryGroupHandles_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineLibraryGroupHandles{ pipelineLibraryGroupHandles_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT & + operator=( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT & operator=( VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT & + setPipelineLibraryGroupHandles( VULKAN_HPP_NAMESPACE::Bool32 pipelineLibraryGroupHandles_ ) VULKAN_HPP_NOEXCEPT + { + pipelineLibraryGroupHandles = pipelineLibraryGroupHandles_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineLibraryGroupHandles ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineLibraryGroupHandles == rhs.pipelineLibraryGroupHandles ); +# endif + } + + bool operator!=( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineLibraryGroupHandles = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; + }; + + struct PhysicalDevicePipelineOpacityMicromapFeaturesARM + { + 
using NativeType = VkPhysicalDevicePipelineOpacityMicromapFeaturesARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineOpacityMicromapFeaturesARM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineOpacityMicromapFeaturesARM( VULKAN_HPP_NAMESPACE::Bool32 pipelineOpacityMicromap_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineOpacityMicromap{ pipelineOpacityMicromap_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePipelineOpacityMicromapFeaturesARM( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineOpacityMicromapFeaturesARM( VkPhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineOpacityMicromapFeaturesARM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineOpacityMicromapFeaturesARM & operator=( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePipelineOpacityMicromapFeaturesARM & operator=( VkPhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineOpacityMicromapFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineOpacityMicromapFeaturesARM & + setPipelineOpacityMicromap( VULKAN_HPP_NAMESPACE::Bool32 pipelineOpacityMicromap_ ) VULKAN_HPP_NOEXCEPT + { + pipelineOpacityMicromap = pipelineOpacityMicromap_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePipelineOpacityMicromapFeaturesARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineOpacityMicromapFeaturesARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineOpacityMicromap ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineOpacityMicromap == rhs.pipelineOpacityMicromap ); +# endif + } + + bool operator!=( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineOpacityMicromapFeaturesARM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineOpacityMicromap = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineOpacityMicromapFeaturesARM; + }; + + struct PhysicalDevicePipelinePropertiesFeaturesEXT + { + using NativeType = 
VkPhysicalDevicePipelinePropertiesFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelinePropertiesFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pipelinePropertiesIdentifier_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelinePropertiesIdentifier{ pipelinePropertiesIdentifier_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelinePropertiesFeaturesEXT( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelinePropertiesFeaturesEXT( VkPhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelinePropertiesFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelinePropertiesFeaturesEXT & operator=( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePipelinePropertiesFeaturesEXT & operator=( VkPhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelinePropertiesFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelinePropertiesFeaturesEXT & + setPipelinePropertiesIdentifier( VULKAN_HPP_NAMESPACE::Bool32 pipelinePropertiesIdentifier_ ) VULKAN_HPP_NOEXCEPT + { + pipelinePropertiesIdentifier = pipelinePropertiesIdentifier_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePipelinePropertiesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelinePropertiesFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelinePropertiesIdentifier ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelinePropertiesFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelinePropertiesIdentifier == rhs.pipelinePropertiesIdentifier ); +# endif + } + + bool operator!=( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelinePropertiesIdentifier = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelinePropertiesFeaturesEXT; + }; + + struct PhysicalDevicePipelineProtectedAccessFeatures + { + using NativeType = VkPhysicalDevicePipelineProtectedAccessFeatures; + + static const bool allowDuplicate = false; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineProtectedAccessFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineProtectedAccessFeatures( VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineProtectedAccess{ pipelineProtectedAccess_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePipelineProtectedAccessFeatures( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineProtectedAccessFeatures( VkPhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineProtectedAccessFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineProtectedAccessFeatures & operator=( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePipelineProtectedAccessFeatures & operator=( VkPhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeatures & + setPipelineProtectedAccess( VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ ) VULKAN_HPP_NOEXCEPT + { + pipelineProtectedAccess = pipelineProtectedAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePipelineProtectedAccessFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineProtectedAccessFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineProtectedAccess ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineProtectedAccessFeatures const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineProtectedAccess == rhs.pipelineProtectedAccess ); +# endif + } + + bool operator!=( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineProtectedAccessFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineProtectedAccessFeatures; + }; + + using PhysicalDevicePipelineProtectedAccessFeaturesEXT = PhysicalDevicePipelineProtectedAccessFeatures; + + struct PhysicalDevicePipelineRobustnessFeatures + { + using NativeType = VkPhysicalDevicePipelineRobustnessFeatures; + + static const bool allowDuplicate = false; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineRobustnessFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessFeatures( VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineRobustness{ pipelineRobustness_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessFeatures( PhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineRobustnessFeatures( VkPhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineRobustnessFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineRobustnessFeatures & operator=( PhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePipelineRobustnessFeatures & operator=( VkPhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeatures & + setPipelineRobustness( VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ ) VULKAN_HPP_NOEXCEPT + { + pipelineRobustness = pipelineRobustness_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePipelineRobustnessFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineRobustnessFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineRobustness ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineRobustnessFeatures const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineRobustness == rhs.pipelineRobustness ); +# endif + } + + bool operator!=( PhysicalDevicePipelineRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineRobustnessFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineRobustnessFeatures; + }; + + using PhysicalDevicePipelineRobustnessFeaturesEXT = PhysicalDevicePipelineRobustnessFeatures; + + struct PhysicalDevicePipelineRobustnessProperties + { + using NativeType = VkPhysicalDevicePipelineRobustnessProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineRobustnessProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessProperties( + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers_ = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers_ = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessVertexInputs_ = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior defaultRobustnessImages_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , defaultRobustnessStorageBuffers{ defaultRobustnessStorageBuffers_ } + , defaultRobustnessUniformBuffers{ defaultRobustnessUniformBuffers_ } + , defaultRobustnessVertexInputs{ defaultRobustnessVertexInputs_ } + , defaultRobustnessImages{ defaultRobustnessImages_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessProperties( PhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineRobustnessProperties( VkPhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineRobustnessProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineRobustnessProperties & operator=( PhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePipelineRobustnessProperties & operator=( VkPhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDevicePipelineRobustnessProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineRobustnessProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, defaultRobustnessStorageBuffers, defaultRobustnessUniformBuffers, defaultRobustnessVertexInputs, defaultRobustnessImages ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineRobustnessProperties const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineRobustnessProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( defaultRobustnessStorageBuffers == rhs.defaultRobustnessStorageBuffers ) && + ( defaultRobustnessUniformBuffers == rhs.defaultRobustnessUniformBuffers ) && + ( defaultRobustnessVertexInputs == rhs.defaultRobustnessVertexInputs ) && ( defaultRobustnessImages == rhs.defaultRobustnessImages ); +# endif + } + + bool operator!=( PhysicalDevicePipelineRobustnessProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineRobustnessProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers = + 
VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessVertexInputs = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior defaultRobustnessImages = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDevicePipelineRobustnessProperties> + { + using Type = PhysicalDevicePipelineRobustnessProperties; + }; + + using PhysicalDevicePipelineRobustnessPropertiesEXT = PhysicalDevicePipelineRobustnessProperties; + + struct PhysicalDevicePointClippingProperties + { + using NativeType = VkPhysicalDevicePointClippingProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePointClippingProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( + VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pointClippingBehavior{ pointClippingBehavior_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePointClippingProperties( *reinterpret_cast<PhysicalDevicePointClippingProperties const *>( &rhs ) ) + { + } + + PhysicalDevicePointClippingProperties & operator=( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePointClippingProperties & operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const *>( &rhs ); + return *this; + } + + operator VkPhysicalDevicePointClippingProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDevicePointClippingProperties *>( this ); + } + + operator VkPhysicalDevicePointClippingProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDevicePointClippingProperties *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PointClippingBehavior const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pointClippingBehavior ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePointClippingProperties const & ) const = default; +#else + bool operator==( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pointClippingBehavior == rhs.pointClippingBehavior ); +# endif + } + + bool operator!=( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes; + }; + + template <> + 
struct CppType + { + using Type = PhysicalDevicePointClippingProperties; + }; + + using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDevicePortabilitySubsetFeaturesKHR + { + using NativeType = VkPhysicalDevicePortabilitySubsetFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 events_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , constantAlphaColorBlendFactors{ constantAlphaColorBlendFactors_ } + , events{ events_ } + , imageViewFormatReinterpretation{ imageViewFormatReinterpretation_ } + , imageViewFormatSwizzle{ imageViewFormatSwizzle_ } + , imageView2DOn3DImage{ imageView2DOn3DImage_ } + , multisampleArrayImage{ multisampleArrayImage_ } + , mutableComparisonSamplers{ mutableComparisonSamplers_ } + , pointPolygons{ pointPolygons_ } + , samplerMipLodBias{ samplerMipLodBias_ } + , separateStencilMaskRef{ separateStencilMaskRef_ } + , shaderSampleRateInterpolationFunctions{ shaderSampleRateInterpolationFunctions_ } + , tessellationIsolines{ tessellationIsolines_ } + , tessellationPointMode{ tessellationPointMode_ } + , triangleFans{ triangleFans_ } + , vertexAttributeAccessBeyondStride{ vertexAttributeAccessBeyondStride_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePortabilitySubsetFeaturesKHR( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePortabilitySubsetFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + 
setConstantAlphaColorBlendFactors( VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ ) VULKAN_HPP_NOEXCEPT + { + constantAlphaColorBlendFactors = constantAlphaColorBlendFactors_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setEvents( VULKAN_HPP_NAMESPACE::Bool32 events_ ) VULKAN_HPP_NOEXCEPT + { + events = events_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setImageViewFormatReinterpretation( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ ) VULKAN_HPP_NOEXCEPT + { + imageViewFormatReinterpretation = imageViewFormatReinterpretation_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setImageViewFormatSwizzle( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ ) VULKAN_HPP_NOEXCEPT + { + imageViewFormatSwizzle = imageViewFormatSwizzle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setImageView2DOn3DImage( VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ ) VULKAN_HPP_NOEXCEPT + { + imageView2DOn3DImage = imageView2DOn3DImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setMultisampleArrayImage( VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ ) VULKAN_HPP_NOEXCEPT + { + multisampleArrayImage = multisampleArrayImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setMutableComparisonSamplers( VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ ) VULKAN_HPP_NOEXCEPT + { + mutableComparisonSamplers = mutableComparisonSamplers_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setPointPolygons( VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ ) VULKAN_HPP_NOEXCEPT + { + pointPolygons = pointPolygons_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setSamplerMipLodBias( VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ ) VULKAN_HPP_NOEXCEPT + { + samplerMipLodBias = samplerMipLodBias_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setSeparateStencilMaskRef( VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ ) VULKAN_HPP_NOEXCEPT + { + separateStencilMaskRef = separateStencilMaskRef_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setShaderSampleRateInterpolationFunctions( VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ ) VULKAN_HPP_NOEXCEPT + { + shaderSampleRateInterpolationFunctions = shaderSampleRateInterpolationFunctions_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setTessellationIsolines( VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ ) VULKAN_HPP_NOEXCEPT + { + tessellationIsolines = tessellationIsolines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setTessellationPointMode( VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ ) VULKAN_HPP_NOEXCEPT + { + tessellationPointMode = tessellationPointMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setTriangleFans( VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ ) VULKAN_HPP_NOEXCEPT + { + triangleFans = triangleFans_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & + setVertexAttributeAccessBeyondStride( 
VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeAccessBeyondStride = vertexAttributeAccessBeyondStride_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePortabilitySubsetFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + constantAlphaColorBlendFactors, + events, + imageViewFormatReinterpretation, + imageViewFormatSwizzle, + imageView2DOn3DImage, + multisampleArrayImage, + mutableComparisonSamplers, + pointPolygons, + samplerMipLodBias, + separateStencilMaskRef, + shaderSampleRateInterpolationFunctions, + tessellationIsolines, + tessellationPointMode, + triangleFans, + vertexAttributeAccessBeyondStride ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePortabilitySubsetFeaturesKHR const & ) const = default; +# else + bool operator==( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( constantAlphaColorBlendFactors == rhs.constantAlphaColorBlendFactors ) && + ( events == rhs.events ) && ( imageViewFormatReinterpretation == rhs.imageViewFormatReinterpretation ) && + ( imageViewFormatSwizzle == rhs.imageViewFormatSwizzle ) && ( imageView2DOn3DImage == rhs.imageView2DOn3DImage ) && + ( multisampleArrayImage == rhs.multisampleArrayImage ) && ( mutableComparisonSamplers == rhs.mutableComparisonSamplers ) && + ( pointPolygons == rhs.pointPolygons ) && ( samplerMipLodBias == rhs.samplerMipLodBias ) && + ( separateStencilMaskRef == rhs.separateStencilMaskRef ) && + ( shaderSampleRateInterpolationFunctions == rhs.shaderSampleRateInterpolationFunctions ) && ( tessellationIsolines == rhs.tessellationIsolines ) && + ( tessellationPointMode == rhs.tessellationPointMode ) && ( triangleFans == rhs.triangleFans ) && + ( vertexAttributeAccessBeyondStride == rhs.vertexAttributeAccessBeyondStride ); +# endif + } + + bool operator!=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors = {}; + VULKAN_HPP_NAMESPACE::Bool32 events = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage = {}; + VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage = {}; + VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers = {}; + VULKAN_HPP_NAMESPACE::Bool32 pointPolygons = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias = {}; + VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions = {}; + VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines = {}; + VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode = {}; + VULKAN_HPP_NAMESPACE::Bool32 triangleFans = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePortabilitySubsetFeaturesKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDevicePortabilitySubsetPropertiesKHR + { + using NativeType = VkPhysicalDevicePortabilitySubsetPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( uint32_t minVertexInputBindingStrideAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minVertexInputBindingStrideAlignment{ minVertexInputBindingStrideAlignment_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePortabilitySubsetPropertiesKHR( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePortabilitySubsetPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetPropertiesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetPropertiesKHR & + setMinVertexInputBindingStrideAlignment( uint32_t minVertexInputBindingStrideAlignment_ ) VULKAN_HPP_NOEXCEPT + { + minVertexInputBindingStrideAlignment = minVertexInputBindingStrideAlignment_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePortabilitySubsetPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minVertexInputBindingStrideAlignment ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePortabilitySubsetPropertiesKHR const & ) const = default; +# else + bool operator==( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minVertexInputBindingStrideAlignment == rhs.minVertexInputBindingStrideAlignment ); +# endif + } + + bool operator!=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR; + void * pNext = {}; + uint32_t minVertexInputBindingStrideAlignment = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePortabilitySubsetPropertiesKHR; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct PhysicalDevicePresentBarrierFeaturesNV + { + using NativeType = VkPhysicalDevicePresentBarrierFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentBarrierFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePresentBarrierFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 presentBarrier_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentBarrier{ presentBarrier_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePresentBarrierFeaturesNV( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePresentBarrierFeaturesNV( VkPhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePresentBarrierFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePresentBarrierFeaturesNV & operator=( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePresentBarrierFeaturesNV & operator=( VkPhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentBarrierFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentBarrierFeaturesNV & setPresentBarrier( VULKAN_HPP_NAMESPACE::Bool32 presentBarrier_ ) VULKAN_HPP_NOEXCEPT + { + presentBarrier = presentBarrier_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePresentBarrierFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePresentBarrierFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentBarrier ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePresentBarrierFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentBarrier == rhs.presentBarrier ); +# endif + } + + bool operator!=( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentBarrierFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 presentBarrier = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePresentBarrierFeaturesNV; + }; + + struct PhysicalDevicePresentIdFeaturesKHR + { + using NativeType = 
VkPhysicalDevicePresentIdFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentIdFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 presentId_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentId{ presentId_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR( PhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePresentIdFeaturesKHR( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePresentIdFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePresentIdFeaturesKHR & operator=( PhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePresentIdFeaturesKHR & operator=( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR & setPresentId( VULKAN_HPP_NAMESPACE::Bool32 presentId_ ) VULKAN_HPP_NOEXCEPT + { + presentId = presentId_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePresentIdFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePresentIdFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentId ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePresentIdFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePresentIdFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentId == rhs.presentId ); +# endif + } + + bool operator!=( PhysicalDevicePresentIdFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentIdFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 presentId = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePresentIdFeaturesKHR; + }; + + struct PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT + { + using NativeType = VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentModeFifoLatestReadyFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 presentModeFifoLatestReady_ = {}, + void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentModeFifoLatestReady{ presentModeFifoLatestReady_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT( PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT( VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT & + operator=( PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT & operator=( VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT & + setPresentModeFifoLatestReady( VULKAN_HPP_NAMESPACE::Bool32 presentModeFifoLatestReady_ ) VULKAN_HPP_NOEXCEPT + { + presentModeFifoLatestReady = presentModeFifoLatestReady_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentModeFifoLatestReady ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentModeFifoLatestReady == rhs.presentModeFifoLatestReady ); +# endif + } + + bool operator!=( PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentModeFifoLatestReadyFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 presentModeFifoLatestReady = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT; + }; + + struct PhysicalDevicePresentWaitFeaturesKHR + { + using NativeType = VkPhysicalDevicePresentWaitFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentWaitFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 presentWait_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : 
pNext{ pNext_ } + , presentWait{ presentWait_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePresentWaitFeaturesKHR( VkPhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePresentWaitFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePresentWaitFeaturesKHR & operator=( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePresentWaitFeaturesKHR & operator=( VkPhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR & setPresentWait( VULKAN_HPP_NAMESPACE::Bool32 presentWait_ ) VULKAN_HPP_NOEXCEPT + { + presentWait = presentWait_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePresentWaitFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePresentWaitFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentWait ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePresentWaitFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentWait == rhs.presentWait ); +# endif + } + + bool operator!=( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentWaitFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 presentWait = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePresentWaitFeaturesKHR; + }; + + struct PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT + { + using NativeType = VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , primitiveTopologyListRestart{ primitiveTopologyListRestart_ } + , primitiveTopologyPatchListRestart{ primitiveTopologyPatchListRestart_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( 
PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & + operator=( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & operator=( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & + setPrimitiveTopologyListRestart( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart_ ) VULKAN_HPP_NOEXCEPT + { + primitiveTopologyListRestart = primitiveTopologyListRestart_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & + setPrimitiveTopologyPatchListRestart( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart_ ) VULKAN_HPP_NOEXCEPT + { + primitiveTopologyPatchListRestart = primitiveTopologyPatchListRestart_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, primitiveTopologyListRestart, primitiveTopologyPatchListRestart ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( primitiveTopologyListRestart == rhs.primitiveTopologyListRestart ) && + ( primitiveTopologyPatchListRestart == rhs.primitiveTopologyPatchListRestart ); +# endif + } + + bool operator!=( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + }; + + struct PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT + { + using NativeType = VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; + + static const bool allowDuplicate = 
false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , primitivesGeneratedQuery{ primitivesGeneratedQuery_ } + , primitivesGeneratedQueryWithRasterizerDiscard{ primitivesGeneratedQueryWithRasterizerDiscard_ } + , primitivesGeneratedQueryWithNonZeroStreams{ primitivesGeneratedQueryWithNonZeroStreams_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & + operator=( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & operator=( VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & + setPrimitivesGeneratedQuery( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery_ ) VULKAN_HPP_NOEXCEPT + { + primitivesGeneratedQuery = primitivesGeneratedQuery_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & + setPrimitivesGeneratedQueryWithRasterizerDiscard( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard_ ) VULKAN_HPP_NOEXCEPT + { + primitivesGeneratedQueryWithRasterizerDiscard = primitivesGeneratedQueryWithRasterizerDiscard_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & + setPrimitivesGeneratedQueryWithNonZeroStreams( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams_ ) VULKAN_HPP_NOEXCEPT + { + primitivesGeneratedQueryWithNonZeroStreams = primitivesGeneratedQueryWithNonZeroStreams_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, primitivesGeneratedQuery, primitivesGeneratedQueryWithRasterizerDiscard, primitivesGeneratedQueryWithNonZeroStreams ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( primitivesGeneratedQuery == rhs.primitivesGeneratedQuery ) && + ( primitivesGeneratedQueryWithRasterizerDiscard == rhs.primitivesGeneratedQueryWithRasterizerDiscard ) && + ( primitivesGeneratedQueryWithNonZeroStreams == rhs.primitivesGeneratedQueryWithNonZeroStreams ); +# endif + } + + bool operator!=( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard = {}; + VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT> + { + using Type = PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; + }; + + struct PhysicalDevicePrivateDataFeatures + { + using NativeType = VkPhysicalDevicePrivateDataFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrivateDataFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures( VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , privateData{ privateData_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePrivateDataFeatures( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePrivateDataFeatures( *reinterpret_cast<PhysicalDevicePrivateDataFeatures const *>( &rhs ) ) + { + } + + PhysicalDevicePrivateDataFeatures & operator=( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePrivateDataFeatures & operator=( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures & setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT + { + privateData = privateData_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDevicePrivateDataFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDevicePrivateDataFeatures *>( this ); + } + + operator VkPhysicalDevicePrivateDataFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDevicePrivateDataFeatures *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, privateData ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePrivateDataFeatures const & ) const = default; +#else + bool operator==( PhysicalDevicePrivateDataFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( privateData == rhs.privateData ); +# endif + } + + bool operator!=( PhysicalDevicePrivateDataFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrivateDataFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 privateData = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDevicePrivateDataFeatures> + { + using Type = PhysicalDevicePrivateDataFeatures; + }; + + using PhysicalDevicePrivateDataFeaturesEXT = PhysicalDevicePrivateDataFeatures; + + struct PhysicalDeviceProtectedMemoryFeatures + { + using NativeType = VkPhysicalDeviceProtectedMemoryFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , protectedMemory{ protectedMemory_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProtectedMemoryFeatures( *reinterpret_cast<PhysicalDeviceProtectedMemoryFeatures const *>( &rhs ) ) + { + } + + PhysicalDeviceProtectedMemoryFeatures & operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceProtectedMemoryFeatures & operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const *>( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT + { + protectedMemory = protectedMemory_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceProtectedMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures *>( this ); + } + + operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, protectedMemory ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProtectedMemoryFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( 
sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedMemory == rhs.protectedMemory ); +# endif + } + + bool operator!=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryFeatures> + { + using Type = PhysicalDeviceProtectedMemoryFeatures; + }; + + struct PhysicalDeviceProtectedMemoryProperties + { + using NativeType = VkPhysicalDeviceProtectedMemoryProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , protectedNoFault{ protectedNoFault_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProtectedMemoryProperties( *reinterpret_cast<PhysicalDeviceProtectedMemoryProperties const *>( &rhs ) ) + { + } + + PhysicalDeviceProtectedMemoryProperties & operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceProtectedMemoryProperties & operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const *>( &rhs ); + return *this; + } + + operator VkPhysicalDeviceProtectedMemoryProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties *>( this ); + } + + operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties *>( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, protectedNoFault ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProtectedMemoryProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedNoFault == rhs.protectedNoFault ); +# endif + } + + bool operator!=( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {}; + }; + + template <> + struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryProperties> + { + using Type = PhysicalDeviceProtectedMemoryProperties; + }; + + struct PhysicalDeviceProvokingVertexFeaturesEXT + { + using NativeType = VkPhysicalDeviceProvokingVertexFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , provokingVertexLast{ provokingVertexLast_ } + , transformFeedbackPreservesProvokingVertex{ transformFeedbackPreservesProvokingVertex_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProvokingVertexFeaturesEXT( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProvokingVertexFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceProvokingVertexFeaturesEXT & operator=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceProvokingVertexFeaturesEXT & operator=( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & + setProvokingVertexLast( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ ) VULKAN_HPP_NOEXCEPT + { + provokingVertexLast = provokingVertexLast_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & + setTransformFeedbackPreservesProvokingVertex( VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ ) VULKAN_HPP_NOEXCEPT + { + transformFeedbackPreservesProvokingVertex = transformFeedbackPreservesProvokingVertex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceProvokingVertexFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProvokingVertexFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, provokingVertexLast, transformFeedbackPreservesProvokingVertex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProvokingVertexFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexLast == rhs.provokingVertexLast ) && + ( transformFeedbackPreservesProvokingVertex == rhs.transformFeedbackPreservesProvokingVertex ); +# endif + } + + bool operator!=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT; + void * pNext = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceProvokingVertexFeaturesEXT; + }; + + struct PhysicalDeviceProvokingVertexPropertiesEXT + { + using NativeType = VkPhysicalDeviceProvokingVertexPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , provokingVertexModePerPipeline{ provokingVertexModePerPipeline_ } + , transformFeedbackPreservesTriangleFanProvokingVertex{ transformFeedbackPreservesTriangleFanProvokingVertex_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProvokingVertexPropertiesEXT( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProvokingVertexPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceProvokingVertexPropertiesEXT & operator=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceProvokingVertexPropertiesEXT & operator=( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceProvokingVertexPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProvokingVertexPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, provokingVertexModePerPipeline, transformFeedbackPreservesTriangleFanProvokingVertex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProvokingVertexPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexModePerPipeline == rhs.provokingVertexModePerPipeline ) && + ( transformFeedbackPreservesTriangleFanProvokingVertex == rhs.transformFeedbackPreservesTriangleFanProvokingVertex ); +# endif + } + + bool operator!=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex = {}; + }; + + template <> + struct CppType + { + using Type = 
PhysicalDeviceProvokingVertexPropertiesEXT; + }; + + struct PhysicalDevicePushDescriptorProperties + { + using NativeType = VkPhysicalDevicePushDescriptorProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePushDescriptorProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorProperties( uint32_t maxPushDescriptors_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxPushDescriptors{ maxPushDescriptors_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorProperties( PhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePushDescriptorProperties( VkPhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePushDescriptorProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePushDescriptorProperties & operator=( PhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDevicePushDescriptorProperties & operator=( VkPhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDevicePushDescriptorProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePushDescriptorProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxPushDescriptors ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePushDescriptorProperties const & ) const = default; +#else + bool operator==( PhysicalDevicePushDescriptorProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPushDescriptors == rhs.maxPushDescriptors ); +# endif + } + + bool operator!=( PhysicalDevicePushDescriptorProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePushDescriptorProperties; + void * pNext = {}; + uint32_t maxPushDescriptors = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePushDescriptorProperties; + }; + + using PhysicalDevicePushDescriptorPropertiesKHR = PhysicalDevicePushDescriptorProperties; + + struct PhysicalDeviceRGBA10X6FormatsFeaturesEXT + { + using NativeType = VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , formatRgba10x6WithoutYCbCrSampler{ formatRgba10x6WithoutYCbCrSampler_ } + { + } + + VULKAN_HPP_CONSTEXPR 
PhysicalDeviceRGBA10X6FormatsFeaturesEXT( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRGBA10X6FormatsFeaturesEXT( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRGBA10X6FormatsFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRGBA10X6FormatsFeaturesEXT & operator=( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRGBA10X6FormatsFeaturesEXT & operator=( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT & + setFormatRgba10x6WithoutYCbCrSampler( VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler_ ) VULKAN_HPP_NOEXCEPT + { + formatRgba10x6WithoutYCbCrSampler = formatRgba10x6WithoutYCbCrSampler_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, formatRgba10x6WithoutYCbCrSampler ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatRgba10x6WithoutYCbCrSampler == rhs.formatRgba10x6WithoutYCbCrSampler ); +# endif + } + + bool operator!=( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRGBA10X6FormatsFeaturesEXT; + }; + + struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT + { + using NativeType = VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : 
pNext{ pNext_ } + , rasterizationOrderColorAttachmentAccess{ rasterizationOrderColorAttachmentAccess_ } + , rasterizationOrderDepthAttachmentAccess{ rasterizationOrderDepthAttachmentAccess_ } + , rasterizationOrderStencilAttachmentAccess{ rasterizationOrderStencilAttachmentAccess_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( + *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & + operator=( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & + operator=( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & + setRasterizationOrderColorAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationOrderColorAttachmentAccess = rasterizationOrderColorAttachmentAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & + setRasterizationOrderDepthAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationOrderDepthAttachmentAccess = rasterizationOrderDepthAttachmentAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & + setRasterizationOrderStencilAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationOrderStencilAttachmentAccess = rasterizationOrderStencilAttachmentAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, rasterizationOrderColorAttachmentAccess, rasterizationOrderDepthAttachmentAccess, rasterizationOrderStencilAttachmentAccess ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == 
rhs.sType ) && ( pNext == rhs.pNext ) && ( rasterizationOrderColorAttachmentAccess == rhs.rasterizationOrderColorAttachmentAccess ) && + ( rasterizationOrderDepthAttachmentAccess == rhs.rasterizationOrderDepthAttachmentAccess ) && + ( rasterizationOrderStencilAttachmentAccess == rhs.rasterizationOrderStencilAttachmentAccess ); +# endif + } + + bool operator!=( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + }; + + using PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + + struct PhysicalDeviceRawAccessChainsFeaturesNV + { + using NativeType = VkPhysicalDeviceRawAccessChainsFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRawAccessChainsFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRawAccessChainsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderRawAccessChains_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderRawAccessChains{ shaderRawAccessChains_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRawAccessChainsFeaturesNV( PhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRawAccessChainsFeaturesNV( VkPhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRawAccessChainsFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRawAccessChainsFeaturesNV & operator=( PhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRawAccessChainsFeaturesNV & operator=( VkPhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRawAccessChainsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRawAccessChainsFeaturesNV & + setShaderRawAccessChains( VULKAN_HPP_NAMESPACE::Bool32 shaderRawAccessChains_ ) VULKAN_HPP_NOEXCEPT + { + shaderRawAccessChains = shaderRawAccessChains_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRawAccessChainsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRawAccessChainsFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderRawAccessChains ); + } +#endif + +#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRawAccessChainsFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderRawAccessChains == rhs.shaderRawAccessChains ); +# endif + } + + bool operator!=( PhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRawAccessChainsFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRawAccessChains = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRawAccessChainsFeaturesNV; + }; + + struct PhysicalDeviceRayQueryFeaturesKHR + { + using NativeType = VkPhysicalDeviceRayQueryFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayQueryFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , rayQuery{ rayQuery_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayQueryFeaturesKHR( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayQueryFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayQueryFeaturesKHR & operator=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRayQueryFeaturesKHR & operator=( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR & setRayQuery( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ ) VULKAN_HPP_NOEXCEPT + { + rayQuery = rayQuery_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRayQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rayQuery ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayQueryFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayQuery == rhs.rayQuery ); +# endif + } + + bool operator!=( PhysicalDeviceRayQueryFeaturesKHR const & rhs 
) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayQueryFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayQuery = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayQueryFeaturesKHR; + }; + + struct PhysicalDeviceRayTracingInvocationReorderFeaturesNV + { + using NativeType = VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 rayTracingInvocationReorder_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , rayTracingInvocationReorder{ rayTracingInvocationReorder_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingInvocationReorderFeaturesNV( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingInvocationReorderFeaturesNV( VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingInvocationReorderFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingInvocationReorderFeaturesNV & + operator=( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingInvocationReorderFeaturesNV & operator=( VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesNV & + setRayTracingInvocationReorder( VULKAN_HPP_NAMESPACE::Bool32 rayTracingInvocationReorder_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingInvocationReorder = rayTracingInvocationReorder_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rayTracingInvocationReorder ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingInvocationReorder == rhs.rayTracingInvocationReorder ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const 
& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingInvocationReorder = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingInvocationReorderFeaturesNV; + }; + + struct PhysicalDeviceRayTracingInvocationReorderPropertiesNV + { + using NativeType = VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderPropertiesNV( + VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV rayTracingInvocationReorderReorderingHint_ = + VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV::eNone, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , rayTracingInvocationReorderReorderingHint{ rayTracingInvocationReorderReorderingHint_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingInvocationReorderPropertiesNV( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingInvocationReorderPropertiesNV( VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingInvocationReorderPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingInvocationReorderPropertiesNV & + operator=( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingInvocationReorderPropertiesNV & operator=( VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rayTracingInvocationReorderReorderingHint ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingInvocationReorderReorderingHint == rhs.rayTracingInvocationReorderReorderingHint ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV 
rayTracingInvocationReorderReorderingHint = + VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV::eNone; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingInvocationReorderPropertiesNV; + }; + + struct PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV + { + using NativeType = VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 spheres_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 linearSweptSpheres_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , spheres{ spheres_ } + , linearSweptSpheres{ linearSweptSpheres_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV( VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV & + operator=( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV & operator=( VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV & setSpheres( VULKAN_HPP_NAMESPACE::Bool32 spheres_ ) VULKAN_HPP_NOEXCEPT + { + spheres = spheres_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV & + setLinearSweptSpheres( VULKAN_HPP_NAMESPACE::Bool32 linearSweptSpheres_ ) VULKAN_HPP_NOEXCEPT + { + linearSweptSpheres = linearSweptSpheres_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, spheres, linearSweptSpheres ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spheres == rhs.spheres ) && ( linearSweptSpheres 
== rhs.linearSweptSpheres ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 spheres = {}; + VULKAN_HPP_NAMESPACE::Bool32 linearSweptSpheres = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV; + }; + + struct PhysicalDeviceRayTracingMaintenance1FeaturesKHR + { + using NativeType = VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMaintenance1FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMaintenance1_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect2_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , rayTracingMaintenance1{ rayTracingMaintenance1_ } + , rayTracingPipelineTraceRaysIndirect2{ rayTracingPipelineTraceRaysIndirect2_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingMaintenance1FeaturesKHR( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingMaintenance1FeaturesKHR( VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingMaintenance1FeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingMaintenance1FeaturesKHR & operator=( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingMaintenance1FeaturesKHR & operator=( VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR & + setRayTracingMaintenance1( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMaintenance1_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingMaintenance1 = rayTracingMaintenance1_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR & + setRayTracingPipelineTraceRaysIndirect2( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect2_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPipelineTraceRaysIndirect2 = rayTracingPipelineTraceRaysIndirect2_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rayTracingMaintenance1, 
rayTracingPipelineTraceRaysIndirect2 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingMaintenance1 == rhs.rayTracingMaintenance1 ) && + ( rayTracingPipelineTraceRaysIndirect2 == rhs.rayTracingPipelineTraceRaysIndirect2 ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingMaintenance1 = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect2 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingMaintenance1FeaturesKHR; + }; + + struct PhysicalDeviceRayTracingMotionBlurFeaturesNV + { + using NativeType = VkPhysicalDeviceRayTracingMotionBlurFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , rayTracingMotionBlur{ rayTracingMotionBlur_ } + , rayTracingMotionBlurPipelineTraceRaysIndirect{ rayTracingMotionBlurPipelineTraceRaysIndirect_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingMotionBlurFeaturesNV( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingMotionBlurFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingMotionBlurFeaturesNV & operator=( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingMotionBlurFeaturesNV & operator=( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & + setRayTracingMotionBlur( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingMotionBlur = rayTracingMotionBlur_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & + setRayTracingMotionBlurPipelineTraceRaysIndirect( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingMotionBlurPipelineTraceRaysIndirect = 
rayTracingMotionBlurPipelineTraceRaysIndirect_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rayTracingMotionBlur, rayTracingMotionBlurPipelineTraceRaysIndirect ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingMotionBlur == rhs.rayTracingMotionBlur ) && + ( rayTracingMotionBlurPipelineTraceRaysIndirect == rhs.rayTracingMotionBlurPipelineTraceRaysIndirect ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingMotionBlurFeaturesNV; + }; + + struct PhysicalDeviceRayTracingPipelineFeaturesKHR + { + using NativeType = VkPhysicalDeviceRayTracingPipelineFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , rayTracingPipeline{ rayTracingPipeline_ } + , rayTracingPipelineShaderGroupHandleCaptureReplay{ rayTracingPipelineShaderGroupHandleCaptureReplay_ } + , rayTracingPipelineShaderGroupHandleCaptureReplayMixed{ rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ } + , rayTracingPipelineTraceRaysIndirect{ rayTracingPipelineTraceRaysIndirect_ } + , rayTraversalPrimitiveCulling{ rayTraversalPrimitiveCulling_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPipelineFeaturesKHR( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingPipelineFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingPipelineFeaturesKHR & operator=( 
PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingPipelineFeaturesKHR & operator=( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & + setRayTracingPipeline( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPipeline = rayTracingPipeline_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & + setRayTracingPipelineShaderGroupHandleCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPipelineShaderGroupHandleCaptureReplay = rayTracingPipelineShaderGroupHandleCaptureReplay_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineShaderGroupHandleCaptureReplayMixed( + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPipelineShaderGroupHandleCaptureReplayMixed = rayTracingPipelineShaderGroupHandleCaptureReplayMixed_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & + setRayTracingPipelineTraceRaysIndirect( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPipelineTraceRaysIndirect = rayTracingPipelineTraceRaysIndirect_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & + setRayTraversalPrimitiveCulling( VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ ) VULKAN_HPP_NOEXCEPT + { + rayTraversalPrimitiveCulling = rayTraversalPrimitiveCulling_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + rayTracingPipeline, + rayTracingPipelineShaderGroupHandleCaptureReplay, + rayTracingPipelineShaderGroupHandleCaptureReplayMixed, + rayTracingPipelineTraceRaysIndirect, + rayTraversalPrimitiveCulling ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingPipelineFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingPipeline == rhs.rayTracingPipeline ) && + ( rayTracingPipelineShaderGroupHandleCaptureReplay == rhs.rayTracingPipelineShaderGroupHandleCaptureReplay ) && + ( rayTracingPipelineShaderGroupHandleCaptureReplayMixed == rhs.rayTracingPipelineShaderGroupHandleCaptureReplayMixed ) && + ( 
rayTracingPipelineTraceRaysIndirect == rhs.rayTracingPipelineTraceRaysIndirect ) && + ( rayTraversalPrimitiveCulling == rhs.rayTraversalPrimitiveCulling ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingPipelineFeaturesKHR; + }; + + struct PhysicalDeviceRayTracingPipelinePropertiesKHR + { + using NativeType = VkPhysicalDeviceRayTracingPipelinePropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelinePropertiesKHR( uint32_t shaderGroupHandleSize_ = {}, + uint32_t maxRayRecursionDepth_ = {}, + uint32_t maxShaderGroupStride_ = {}, + uint32_t shaderGroupBaseAlignment_ = {}, + uint32_t shaderGroupHandleCaptureReplaySize_ = {}, + uint32_t maxRayDispatchInvocationCount_ = {}, + uint32_t shaderGroupHandleAlignment_ = {}, + uint32_t maxRayHitAttributeSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderGroupHandleSize{ shaderGroupHandleSize_ } + , maxRayRecursionDepth{ maxRayRecursionDepth_ } + , maxShaderGroupStride{ maxShaderGroupStride_ } + , shaderGroupBaseAlignment{ shaderGroupBaseAlignment_ } + , shaderGroupHandleCaptureReplaySize{ shaderGroupHandleCaptureReplaySize_ } + , maxRayDispatchInvocationCount{ maxRayDispatchInvocationCount_ } + , shaderGroupHandleAlignment{ shaderGroupHandleAlignment_ } + , maxRayHitAttributeSize{ maxRayHitAttributeSize_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingPipelinePropertiesKHR( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPipelinePropertiesKHR( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingPipelinePropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingPipelinePropertiesKHR & operator=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingPipelinePropertiesKHR & operator=( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + 
shaderGroupHandleSize, + maxRayRecursionDepth, + maxShaderGroupStride, + shaderGroupBaseAlignment, + shaderGroupHandleCaptureReplaySize, + maxRayDispatchInvocationCount, + shaderGroupHandleAlignment, + maxRayHitAttributeSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingPipelinePropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) && + ( maxRayRecursionDepth == rhs.maxRayRecursionDepth ) && ( maxShaderGroupStride == rhs.maxShaderGroupStride ) && + ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) && ( shaderGroupHandleCaptureReplaySize == rhs.shaderGroupHandleCaptureReplaySize ) && + ( maxRayDispatchInvocationCount == rhs.maxRayDispatchInvocationCount ) && ( shaderGroupHandleAlignment == rhs.shaderGroupHandleAlignment ) && + ( maxRayHitAttributeSize == rhs.maxRayHitAttributeSize ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR; + void * pNext = {}; + uint32_t shaderGroupHandleSize = {}; + uint32_t maxRayRecursionDepth = {}; + uint32_t maxShaderGroupStride = {}; + uint32_t shaderGroupBaseAlignment = {}; + uint32_t shaderGroupHandleCaptureReplaySize = {}; + uint32_t maxRayDispatchInvocationCount = {}; + uint32_t shaderGroupHandleAlignment = {}; + uint32_t maxRayHitAttributeSize = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingPipelinePropertiesKHR; + }; + + struct PhysicalDeviceRayTracingPositionFetchFeaturesKHR + { + using NativeType = VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPositionFetchFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPositionFetchFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPositionFetch_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , rayTracingPositionFetch{ rayTracingPositionFetch_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRayTracingPositionFetchFeaturesKHR( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPositionFetchFeaturesKHR( VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingPositionFetchFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingPositionFetchFeaturesKHR & operator=( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingPositionFetchFeaturesKHR & operator=( VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
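
Editorial aside: a common consumer of the VkPhysicalDeviceRayTracingPipelinePropertiesKHR limits defined just above (shaderGroupHandleSize, shaderGroupHandleAlignment, shaderGroupBaseAlignment) is shader-binding-table sizing. The sketch below is illustrative only and not part of the generated header; computeSbtRegions, alignUp, missCount and hitCount are hypothetical names, and a Vulkan 1.1+ vk::PhysicalDevice is assumed.

    // Illustrative sketch, not part of the generated header: size the shader
    // binding table regions from the ray tracing pipeline limits.
    #include <vulkan/vulkan.hpp>

    static vk::DeviceSize alignUp( vk::DeviceSize value, vk::DeviceSize alignment )
    {
      return ( value + alignment - 1 ) & ~( alignment - 1 );  // alignments are powers of two
    }

    void computeSbtRegions( vk::PhysicalDevice physicalDevice, uint32_t missCount, uint32_t hitCount )
    {
      auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceRayTracingPipelinePropertiesKHR>();
      auto const & rt = chain.get<vk::PhysicalDeviceRayTracingPipelinePropertiesKHR>();

      // Each record starts at a handle-aligned offset; each region starts at a base-aligned offset.
      vk::DeviceSize handleSizeAligned = alignUp( rt.shaderGroupHandleSize, rt.shaderGroupHandleAlignment );

      vk::StridedDeviceAddressRegionKHR raygenRegion{}, missRegion{}, hitRegion{};
      raygenRegion.stride = alignUp( handleSizeAligned, rt.shaderGroupBaseAlignment );
      raygenRegion.size   = raygenRegion.stride;  // exactly one raygen record
      missRegion.stride   = handleSizeAligned;
      missRegion.size     = alignUp( missCount * handleSizeAligned, rt.shaderGroupBaseAlignment );
      hitRegion.stride    = handleSizeAligned;
      hitRegion.size      = alignUp( hitCount * handleSizeAligned, rt.shaderGroupBaseAlignment );
      // The deviceAddress members would be filled in once the SBT buffer is
      // allocated; the regions then feed vkCmdTraceRaysKHR.
    }
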
PhysicalDeviceRayTracingPositionFetchFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPositionFetchFeaturesKHR & + setRayTracingPositionFetch( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPositionFetch_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingPositionFetch = rayTracingPositionFetch_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rayTracingPositionFetch ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingPositionFetch == rhs.rayTracingPositionFetch ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingPositionFetchFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPositionFetchFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingPositionFetch = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingPositionFetchFeaturesKHR; + }; + + struct PhysicalDeviceRayTracingPropertiesNV + { + using NativeType = VkPhysicalDeviceRayTracingPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_ = {}, + uint32_t maxRecursionDepth_ = {}, + uint32_t maxShaderGroupStride_ = {}, + uint32_t shaderGroupBaseAlignment_ = {}, + uint64_t maxGeometryCount_ = {}, + uint64_t maxInstanceCount_ = {}, + uint64_t maxTriangleCount_ = {}, + uint32_t maxDescriptorSetAccelerationStructures_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderGroupHandleSize{ shaderGroupHandleSize_ } + , maxRecursionDepth{ maxRecursionDepth_ } + , maxShaderGroupStride{ maxShaderGroupStride_ } + , shaderGroupBaseAlignment{ shaderGroupBaseAlignment_ } + , maxGeometryCount{ maxGeometryCount_ } + , maxInstanceCount{ maxInstanceCount_ } + , maxTriangleCount{ maxTriangleCount_ } + , maxDescriptorSetAccelerationStructures{ maxDescriptorSetAccelerationStructures_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRayTracingPropertiesNV & operator=( 
PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingPropertiesNV & operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceRayTracingPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + shaderGroupHandleSize, + maxRecursionDepth, + maxShaderGroupStride, + shaderGroupBaseAlignment, + maxGeometryCount, + maxInstanceCount, + maxTriangleCount, + maxDescriptorSetAccelerationStructures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) && + ( maxRecursionDepth == rhs.maxRecursionDepth ) && ( maxShaderGroupStride == rhs.maxShaderGroupStride ) && + ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) && ( maxGeometryCount == rhs.maxGeometryCount ) && + ( maxInstanceCount == rhs.maxInstanceCount ) && ( maxTriangleCount == rhs.maxTriangleCount ) && + ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV; + void * pNext = {}; + uint32_t shaderGroupHandleSize = {}; + uint32_t maxRecursionDepth = {}; + uint32_t maxShaderGroupStride = {}; + uint32_t shaderGroupBaseAlignment = {}; + uint64_t maxGeometryCount = {}; + uint64_t maxInstanceCount = {}; + uint64_t maxTriangleCount = {}; + uint32_t maxDescriptorSetAccelerationStructures = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingPropertiesNV; + }; + + struct PhysicalDeviceRayTracingValidationFeaturesNV + { + using NativeType = VkPhysicalDeviceRayTracingValidationFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingValidationFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingValidationFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 rayTracingValidation_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , rayTracingValidation{ rayTracingValidation_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingValidationFeaturesNV( PhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRayTracingValidationFeaturesNV( VkPhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRayTracingValidationFeaturesNV( *reinterpret_cast( &rhs ) ) 
+ { + } + + PhysicalDeviceRayTracingValidationFeaturesNV & operator=( PhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRayTracingValidationFeaturesNV & operator=( VkPhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingValidationFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingValidationFeaturesNV & + setRayTracingValidation( VULKAN_HPP_NAMESPACE::Bool32 rayTracingValidation_ ) VULKAN_HPP_NOEXCEPT + { + rayTracingValidation = rayTracingValidation_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRayTracingValidationFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRayTracingValidationFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, rayTracingValidation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRayTracingValidationFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingValidation == rhs.rayTracingValidation ); +# endif + } + + bool operator!=( PhysicalDeviceRayTracingValidationFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingValidationFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 rayTracingValidation = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRayTracingValidationFeaturesNV; + }; + + struct PhysicalDeviceRelaxedLineRasterizationFeaturesIMG + { + using NativeType = VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRelaxedLineRasterizationFeaturesIMG; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRelaxedLineRasterizationFeaturesIMG( VULKAN_HPP_NAMESPACE::Bool32 relaxedLineRasterization_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , relaxedLineRasterization{ relaxedLineRasterization_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRelaxedLineRasterizationFeaturesIMG( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRelaxedLineRasterizationFeaturesIMG( VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRelaxedLineRasterizationFeaturesIMG( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRelaxedLineRasterizationFeaturesIMG & + operator=( 
PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRelaxedLineRasterizationFeaturesIMG & operator=( VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRelaxedLineRasterizationFeaturesIMG & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRelaxedLineRasterizationFeaturesIMG & + setRelaxedLineRasterization( VULKAN_HPP_NAMESPACE::Bool32 relaxedLineRasterization_ ) VULKAN_HPP_NOEXCEPT + { + relaxedLineRasterization = relaxedLineRasterization_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, relaxedLineRasterization ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & ) const = default; +#else + bool operator==( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( relaxedLineRasterization == rhs.relaxedLineRasterization ); +# endif + } + + bool operator!=( PhysicalDeviceRelaxedLineRasterizationFeaturesIMG const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRelaxedLineRasterizationFeaturesIMG; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 relaxedLineRasterization = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRelaxedLineRasterizationFeaturesIMG; + }; + + struct PhysicalDeviceRenderPassStripedFeaturesARM + { + using NativeType = VkPhysicalDeviceRenderPassStripedFeaturesARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRenderPassStripedFeaturesARM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRenderPassStripedFeaturesARM( VULKAN_HPP_NAMESPACE::Bool32 renderPassStriped_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , renderPassStriped{ renderPassStriped_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRenderPassStripedFeaturesARM( PhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRenderPassStripedFeaturesARM( VkPhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRenderPassStripedFeaturesARM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRenderPassStripedFeaturesARM & operator=( PhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif 
/*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRenderPassStripedFeaturesARM & operator=( VkPhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRenderPassStripedFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRenderPassStripedFeaturesARM & + setRenderPassStriped( VULKAN_HPP_NAMESPACE::Bool32 renderPassStriped_ ) VULKAN_HPP_NOEXCEPT + { + renderPassStriped = renderPassStriped_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRenderPassStripedFeaturesARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRenderPassStripedFeaturesARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, renderPassStriped ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRenderPassStripedFeaturesARM const & ) const = default; +#else + bool operator==( PhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPassStriped == rhs.renderPassStriped ); +# endif + } + + bool operator!=( PhysicalDeviceRenderPassStripedFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRenderPassStripedFeaturesARM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 renderPassStriped = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRenderPassStripedFeaturesARM; + }; + + struct PhysicalDeviceRenderPassStripedPropertiesARM + { + using NativeType = VkPhysicalDeviceRenderPassStripedPropertiesARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRenderPassStripedPropertiesARM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRenderPassStripedPropertiesARM( VULKAN_HPP_NAMESPACE::Extent2D renderPassStripeGranularity_ = {}, + uint32_t maxRenderPassStripes_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , renderPassStripeGranularity{ renderPassStripeGranularity_ } + , maxRenderPassStripes{ maxRenderPassStripes_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRenderPassStripedPropertiesARM( PhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRenderPassStripedPropertiesARM( VkPhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRenderPassStripedPropertiesARM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRenderPassStripedPropertiesARM & operator=( PhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRenderPassStripedPropertiesARM & operator=( 
VkPhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceRenderPassStripedPropertiesARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRenderPassStripedPropertiesARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, renderPassStripeGranularity, maxRenderPassStripes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRenderPassStripedPropertiesARM const & ) const = default; +#else + bool operator==( PhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPassStripeGranularity == rhs.renderPassStripeGranularity ) && + ( maxRenderPassStripes == rhs.maxRenderPassStripes ); +# endif + } + + bool operator!=( PhysicalDeviceRenderPassStripedPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRenderPassStripedPropertiesARM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D renderPassStripeGranularity = {}; + uint32_t maxRenderPassStripes = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRenderPassStripedPropertiesARM; + }; + + struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV + { + using NativeType = VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , representativeFragmentTest{ representativeFragmentTest_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceRepresentativeFragmentTestFeaturesNV( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRepresentativeFragmentTestFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRepresentativeFragmentTestFeaturesNV & + operator=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceRepresentativeFragmentTestFeaturesNV & + setRepresentativeFragmentTest( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT + { + representativeFragmentTest = representativeFragmentTest_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, representativeFragmentTest ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( representativeFragmentTest == rhs.representativeFragmentTest ); +# endif + } + + bool operator!=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRepresentativeFragmentTestFeaturesNV; + }; + + struct PhysicalDeviceRobustness2FeaturesEXT + { + using NativeType = VkPhysicalDeviceRobustness2FeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , robustBufferAccess2{ robustBufferAccess2_ } + , robustImageAccess2{ robustImageAccess2_ } + , nullDescriptor{ nullDescriptor_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRobustness2FeaturesEXT( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRobustness2FeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceRobustness2FeaturesEXT & operator=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRobustness2FeaturesEXT & operator=( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + 
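
Editorial aside: a minimal sketch of how a feature struct such as PhysicalDeviceRobustness2FeaturesEXT is typically consumed, i.e. queried through a pNext chain and then passed back on VkDeviceCreateInfo::pNext. It is illustrative only and not part of the generated header; createDeviceWithRobustness2 is a hypothetical helper, and a Vulkan 1.1+ vk::PhysicalDevice plus a prepared vk::DeviceQueueCreateInfo are assumed.

    // Illustrative sketch, not part of the generated header.
    #include <vulkan/vulkan.hpp>

    vk::Device createDeviceWithRobustness2( vk::PhysicalDevice physicalDevice, vk::DeviceQueueCreateInfo const & queueInfo )
    {
      // Query which robustness2 features the implementation supports.
      auto support = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceRobustness2FeaturesEXT>();
      auto const & available = support.get<vk::PhysicalDeviceRobustness2FeaturesEXT>();

      // Request only what is reported, using the setters generated for this struct.
      vk::PhysicalDeviceRobustness2FeaturesEXT enabled{};
      enabled.setRobustBufferAccess2( available.robustBufferAccess2 )
        .setRobustImageAccess2( available.robustImageAccess2 )
        .setNullDescriptor( available.nullDescriptor );

      const char * extensions[] = { VK_EXT_ROBUSTNESS_2_EXTENSION_NAME };

      vk::DeviceCreateInfo createInfo{};
      createInfo.pNext                   = &enabled;  // feature structs are chained here
      createInfo.queueCreateInfoCount    = 1;
      createInfo.pQueueCreateInfos       = &queueInfo;
      createInfo.enabledExtensionCount   = 1;
      createInfo.ppEnabledExtensionNames = extensions;

      return physicalDevice.createDevice( createInfo );
    }
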
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & + setRobustBufferAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ ) VULKAN_HPP_NOEXCEPT + { + robustBufferAccess2 = robustBufferAccess2_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setRobustImageAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ ) VULKAN_HPP_NOEXCEPT + { + robustImageAccess2 = robustImageAccess2_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setNullDescriptor( VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ ) VULKAN_HPP_NOEXCEPT + { + nullDescriptor = nullDescriptor_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceRobustness2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRobustness2FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, robustBufferAccess2, robustImageAccess2, nullDescriptor ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRobustness2FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustBufferAccess2 == rhs.robustBufferAccess2 ) && + ( robustImageAccess2 == rhs.robustImageAccess2 ) && ( nullDescriptor == rhs.nullDescriptor ); +# endif + } + + bool operator!=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRobustness2FeaturesEXT; + }; + + struct PhysicalDeviceRobustness2PropertiesEXT + { + using NativeType = VkPhysicalDeviceRobustness2PropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , robustStorageBufferAccessSizeAlignment{ robustStorageBufferAccessSizeAlignment_ } + , robustUniformBufferAccessSizeAlignment{ robustUniformBufferAccessSizeAlignment_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceRobustness2PropertiesEXT( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceRobustness2PropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + 
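
Editorial aside: the two alignment limits reported by PhysicalDeviceRobustness2PropertiesEXT are usually folded into buffer-range calculations. The sketch below is illustrative and not part of the generated header; alignRobustRange is a hypothetical helper, and a Vulkan 1.1+ vk::PhysicalDevice is assumed.

    // Illustrative sketch, not part of the generated header: round a buffer
    // range up so it satisfies the robustness2 access-size alignment limits.
    #include <vulkan/vulkan.hpp>

    vk::DeviceSize alignRobustRange( vk::PhysicalDevice physicalDevice, vk::DeviceSize requestedSize, bool uniformBuffer )
    {
      auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceRobustness2PropertiesEXT>();
      auto const & robustness2 = chain.get<vk::PhysicalDeviceRobustness2PropertiesEXT>();

      // Ranges accessed with robustBufferAccess2 are bounds-checked at this granularity.
      vk::DeviceSize alignment = uniformBuffer ? robustness2.robustUniformBufferAccessSizeAlignment
                                               : robustness2.robustStorageBufferAccessSizeAlignment;
      return ( requestedSize + alignment - 1 ) & ~( alignment - 1 );  // both limits are powers of two
    }
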
PhysicalDeviceRobustness2PropertiesEXT & operator=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceRobustness2PropertiesEXT & operator=( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceRobustness2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceRobustness2PropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, robustStorageBufferAccessSizeAlignment, robustUniformBufferAccessSizeAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceRobustness2PropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustStorageBufferAccessSizeAlignment == rhs.robustStorageBufferAccessSizeAlignment ) && + ( robustUniformBufferAccessSizeAlignment == rhs.robustUniformBufferAccessSizeAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceRobustness2PropertiesEXT; + }; + + struct PhysicalDeviceSampleLocationsPropertiesEXT + { + using NativeType = VkPhysicalDeviceSampleLocationsPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, + std::array const & sampleLocationCoordinateRange_ = {}, + uint32_t sampleLocationSubPixelBits_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , sampleLocationSampleCounts{ sampleLocationSampleCounts_ } + , maxSampleLocationGridSize{ maxSampleLocationGridSize_ } + , sampleLocationCoordinateRange{ sampleLocationCoordinateRange_ } + , sampleLocationSubPixelBits{ sampleLocationSubPixelBits_ } + , variableSampleLocations{ variableSampleLocations_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSampleLocationsPropertiesEXT( *reinterpret_cast( &rhs 
) ) + { + } + + PhysicalDeviceSampleLocationsPropertiesEXT & operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSampleLocationsPropertiesEXT & operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceSampleLocationsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + sampleLocationSampleCounts, + maxSampleLocationGridSize, + sampleLocationCoordinateRange, + sampleLocationSubPixelBits, + variableSampleLocations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSampleLocationsPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts ) && + ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ) && ( sampleLocationCoordinateRange == rhs.sampleLocationCoordinateRange ) && + ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits ) && ( variableSampleLocations == rhs.variableSampleLocations ); +# endif + } + + bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D sampleLocationCoordinateRange = {}; + uint32_t sampleLocationSubPixelBits = {}; + VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceSampleLocationsPropertiesEXT; + }; + + struct PhysicalDeviceSamplerFilterMinmaxProperties + { + using NativeType = VkPhysicalDeviceSamplerFilterMinmaxProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , filterMinmaxSingleComponentFormats{ filterMinmaxSingleComponentFormats_ } + , filterMinmaxImageComponentMapping{ filterMinmaxImageComponentMapping_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = 
default; + + PhysicalDeviceSamplerFilterMinmaxProperties( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSamplerFilterMinmaxProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSamplerFilterMinmaxProperties & operator=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSamplerFilterMinmaxProperties & operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceSamplerFilterMinmaxProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSamplerFilterMinmaxProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, filterMinmaxSingleComponentFormats, filterMinmaxImageComponentMapping ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSamplerFilterMinmaxProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) && + ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ); +# endif + } + + bool operator!=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceSamplerFilterMinmaxProperties; + }; + + using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties; + + struct PhysicalDeviceSamplerYcbcrConversionFeatures + { + using NativeType = VkPhysicalDeviceSamplerYcbcrConversionFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , samplerYcbcrConversion{ samplerYcbcrConversion_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSamplerYcbcrConversionFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures & + setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT + { + samplerYcbcrConversion = samplerYcbcrConversion_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, samplerYcbcrConversion ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSamplerYcbcrConversionFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ); +# endif + } + + bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceSamplerYcbcrConversionFeatures; + }; + + using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures; + + struct PhysicalDeviceScalarBlockLayoutFeatures + { + using NativeType = VkPhysicalDeviceScalarBlockLayoutFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , scalarBlockLayout{ scalarBlockLayout_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceScalarBlockLayoutFeatures( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceScalarBlockLayoutFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceScalarBlockLayoutFeatures & operator=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceScalarBlockLayoutFeatures & 
operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures & + setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT + { + scalarBlockLayout = scalarBlockLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceScalarBlockLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceScalarBlockLayoutFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, scalarBlockLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceScalarBlockLayoutFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( scalarBlockLayout == rhs.scalarBlockLayout ); +# endif + } + + bool operator!=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceScalarBlockLayoutFeatures; + }; + + using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures; + + struct PhysicalDeviceSchedulingControlsFeaturesARM + { + using NativeType = VkPhysicalDeviceSchedulingControlsFeaturesARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSchedulingControlsFeaturesARM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSchedulingControlsFeaturesARM( VULKAN_HPP_NAMESPACE::Bool32 schedulingControls_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , schedulingControls{ schedulingControls_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSchedulingControlsFeaturesARM( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSchedulingControlsFeaturesARM( VkPhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSchedulingControlsFeaturesARM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSchedulingControlsFeaturesARM & operator=( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSchedulingControlsFeaturesARM & operator=( VkPhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( 
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSchedulingControlsFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSchedulingControlsFeaturesARM & + setSchedulingControls( VULKAN_HPP_NAMESPACE::Bool32 schedulingControls_ ) VULKAN_HPP_NOEXCEPT + { + schedulingControls = schedulingControls_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceSchedulingControlsFeaturesARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSchedulingControlsFeaturesARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, schedulingControls ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSchedulingControlsFeaturesARM const & ) const = default; +#else + bool operator==( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( schedulingControls == rhs.schedulingControls ); +# endif + } + + bool operator!=( PhysicalDeviceSchedulingControlsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSchedulingControlsFeaturesARM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 schedulingControls = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceSchedulingControlsFeaturesARM; + }; + + struct PhysicalDeviceSchedulingControlsPropertiesARM + { + using NativeType = VkPhysicalDeviceSchedulingControlsPropertiesARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSchedulingControlsPropertiesARM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSchedulingControlsPropertiesARM( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagsARM schedulingControlsFlags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , schedulingControlsFlags{ schedulingControlsFlags_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSchedulingControlsPropertiesARM( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSchedulingControlsPropertiesARM( VkPhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSchedulingControlsPropertiesARM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSchedulingControlsPropertiesARM & operator=( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSchedulingControlsPropertiesARM & operator=( VkPhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceSchedulingControlsPropertiesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSchedulingControlsPropertiesARM & + setSchedulingControlsFlags( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagsARM schedulingControlsFlags_ ) VULKAN_HPP_NOEXCEPT + { + schedulingControlsFlags = schedulingControlsFlags_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceSchedulingControlsPropertiesARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSchedulingControlsPropertiesARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, schedulingControlsFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSchedulingControlsPropertiesARM const & ) const = default; +#else + bool operator==( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( schedulingControlsFlags == rhs.schedulingControlsFlags ); +# endif + } + + bool operator!=( PhysicalDeviceSchedulingControlsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSchedulingControlsPropertiesARM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagsARM schedulingControlsFlags = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceSchedulingControlsPropertiesARM; + }; + + struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures + { + using NativeType = VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , separateDepthStencilLayouts{ separateDepthStencilLayouts_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSeparateDepthStencilLayoutsFeatures( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSeparateDepthStencilLayoutsFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSeparateDepthStencilLayoutsFeatures & + operator=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures & + setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT + { + separateDepthStencilLayouts = separateDepthStencilLayouts_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, separateDepthStencilLayouts ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts ); +# endif + } + + bool operator!=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceSeparateDepthStencilLayoutsFeatures; + }; + + using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures; + + struct PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV + { + using NativeType = VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16VectorAtomics_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderFloat16VectorAtomics{ shaderFloat16VectorAtomics_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV( VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV & + operator=( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV & operator=( VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = 
*reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV & + setShaderFloat16VectorAtomics( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16VectorAtomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderFloat16VectorAtomics = shaderFloat16VectorAtomics_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderFloat16VectorAtomics ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderFloat16VectorAtomics == rhs.shaderFloat16VectorAtomics ); +# endif + } + + bool operator!=( PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16VectorAtomics = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV; + }; + + struct PhysicalDeviceShaderAtomicFloat2FeaturesEXT + { + using NativeType = VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderBufferFloat16Atomics{ shaderBufferFloat16Atomics_ } + , shaderBufferFloat16AtomicAdd{ 
shaderBufferFloat16AtomicAdd_ } + , shaderBufferFloat16AtomicMinMax{ shaderBufferFloat16AtomicMinMax_ } + , shaderBufferFloat32AtomicMinMax{ shaderBufferFloat32AtomicMinMax_ } + , shaderBufferFloat64AtomicMinMax{ shaderBufferFloat64AtomicMinMax_ } + , shaderSharedFloat16Atomics{ shaderSharedFloat16Atomics_ } + , shaderSharedFloat16AtomicAdd{ shaderSharedFloat16AtomicAdd_ } + , shaderSharedFloat16AtomicMinMax{ shaderSharedFloat16AtomicMinMax_ } + , shaderSharedFloat32AtomicMinMax{ shaderSharedFloat32AtomicMinMax_ } + , shaderSharedFloat64AtomicMinMax{ shaderSharedFloat64AtomicMinMax_ } + , shaderImageFloat32AtomicMinMax{ shaderImageFloat32AtomicMinMax_ } + , sparseImageFloat32AtomicMinMax{ sparseImageFloat32AtomicMinMax_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderAtomicFloat2FeaturesEXT( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderAtomicFloat2FeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderAtomicFloat2FeaturesEXT & operator=( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderAtomicFloat2FeaturesEXT & operator=( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderBufferFloat16Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat16Atomics = shaderBufferFloat16Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderBufferFloat16AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat16AtomicAdd = shaderBufferFloat16AtomicAdd_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderBufferFloat16AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat16AtomicMinMax = shaderBufferFloat16AtomicMinMax_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderBufferFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat32AtomicMinMax = shaderBufferFloat32AtomicMinMax_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderBufferFloat64AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat64AtomicMinMax = shaderBufferFloat64AtomicMinMax_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderSharedFloat16Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat16Atomics = shaderSharedFloat16Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderSharedFloat16AtomicAdd( 
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat16AtomicAdd = shaderSharedFloat16AtomicAdd_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderSharedFloat16AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat16AtomicMinMax = shaderSharedFloat16AtomicMinMax_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderSharedFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat32AtomicMinMax = shaderSharedFloat32AtomicMinMax_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderSharedFloat64AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat64AtomicMinMax = shaderSharedFloat64AtomicMinMax_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setShaderImageFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageFloat32AtomicMinMax = shaderImageFloat32AtomicMinMax_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & + setSparseImageFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + { + sparseImageFloat32AtomicMinMax = sparseImageFloat32AtomicMinMax_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + shaderBufferFloat16Atomics, + shaderBufferFloat16AtomicAdd, + shaderBufferFloat16AtomicMinMax, + shaderBufferFloat32AtomicMinMax, + shaderBufferFloat64AtomicMinMax, + shaderSharedFloat16Atomics, + shaderSharedFloat16AtomicAdd, + shaderSharedFloat16AtomicMinMax, + shaderSharedFloat32AtomicMinMax, + shaderSharedFloat64AtomicMinMax, + shaderImageFloat32AtomicMinMax, + sparseImageFloat32AtomicMinMax ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBufferFloat16Atomics == rhs.shaderBufferFloat16Atomics ) && + ( shaderBufferFloat16AtomicAdd == rhs.shaderBufferFloat16AtomicAdd ) && + ( shaderBufferFloat16AtomicMinMax == rhs.shaderBufferFloat16AtomicMinMax ) && + ( shaderBufferFloat32AtomicMinMax == rhs.shaderBufferFloat32AtomicMinMax ) && + ( shaderBufferFloat64AtomicMinMax == rhs.shaderBufferFloat64AtomicMinMax ) && ( shaderSharedFloat16Atomics == rhs.shaderSharedFloat16Atomics ) && + ( shaderSharedFloat16AtomicAdd == rhs.shaderSharedFloat16AtomicAdd ) && + ( shaderSharedFloat16AtomicMinMax == rhs.shaderSharedFloat16AtomicMinMax ) && + ( 
shaderSharedFloat32AtomicMinMax == rhs.shaderSharedFloat32AtomicMinMax ) && + ( shaderSharedFloat64AtomicMinMax == rhs.shaderSharedFloat64AtomicMinMax ) && + ( shaderImageFloat32AtomicMinMax == rhs.shaderImageFloat32AtomicMinMax ) && + ( sparseImageFloat32AtomicMinMax == rhs.sparseImageFloat32AtomicMinMax ); +# endif + } + + bool operator!=( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderAtomicFloat2FeaturesEXT; + }; + + struct PhysicalDeviceShaderAtomicFloatFeaturesEXT + { + using NativeType = VkPhysicalDeviceShaderAtomicFloatFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderBufferFloat32Atomics{ shaderBufferFloat32Atomics_ } + , shaderBufferFloat32AtomicAdd{ shaderBufferFloat32AtomicAdd_ } + , shaderBufferFloat64Atomics{ shaderBufferFloat64Atomics_ } + , shaderBufferFloat64AtomicAdd{ shaderBufferFloat64AtomicAdd_ } + , shaderSharedFloat32Atomics{ shaderSharedFloat32Atomics_ } + , shaderSharedFloat32AtomicAdd{ shaderSharedFloat32AtomicAdd_ } + , shaderSharedFloat64Atomics{ shaderSharedFloat64Atomics_ } + , shaderSharedFloat64AtomicAdd{ shaderSharedFloat64AtomicAdd_ } + , shaderImageFloat32Atomics{ shaderImageFloat32Atomics_ } + , shaderImageFloat32AtomicAdd{ shaderImageFloat32AtomicAdd_ } + , sparseImageFloat32Atomics{ sparseImageFloat32Atomics_ } + , 
sparseImageFloat32AtomicAdd{ sparseImageFloat32AtomicAdd_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderAtomicFloatFeaturesEXT( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderAtomicFloatFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderBufferFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat32Atomics = shaderBufferFloat32Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderBufferFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat32AtomicAdd = shaderBufferFloat32AtomicAdd_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderBufferFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat64Atomics = shaderBufferFloat64Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderBufferFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferFloat64AtomicAdd = shaderBufferFloat64AtomicAdd_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderSharedFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat32Atomics = shaderSharedFloat32Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderSharedFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat32AtomicAdd = shaderSharedFloat32AtomicAdd_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderSharedFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat64Atomics = shaderSharedFloat64Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderSharedFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedFloat64AtomicAdd = shaderSharedFloat64AtomicAdd_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageFloat32Atomics = shaderImageFloat32Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageFloat32AtomicAdd = shaderImageFloat32AtomicAdd_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setSparseImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + { + sparseImageFloat32Atomics = sparseImageFloat32Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setSparseImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + { + sparseImageFloat32AtomicAdd = sparseImageFloat32AtomicAdd_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + shaderBufferFloat32Atomics, + shaderBufferFloat32AtomicAdd, + shaderBufferFloat64Atomics, + shaderBufferFloat64AtomicAdd, + shaderSharedFloat32Atomics, + shaderSharedFloat32AtomicAdd, + shaderSharedFloat64Atomics, + shaderSharedFloat64AtomicAdd, + shaderImageFloat32Atomics, + shaderImageFloat32AtomicAdd, + sparseImageFloat32Atomics, + sparseImageFloat32AtomicAdd ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBufferFloat32Atomics == rhs.shaderBufferFloat32Atomics ) && + ( shaderBufferFloat32AtomicAdd == rhs.shaderBufferFloat32AtomicAdd ) && ( shaderBufferFloat64Atomics == rhs.shaderBufferFloat64Atomics ) && + ( shaderBufferFloat64AtomicAdd == rhs.shaderBufferFloat64AtomicAdd ) && ( shaderSharedFloat32Atomics == rhs.shaderSharedFloat32Atomics ) && + ( shaderSharedFloat32AtomicAdd == rhs.shaderSharedFloat32AtomicAdd ) && ( shaderSharedFloat64Atomics == rhs.shaderSharedFloat64Atomics ) && + ( shaderSharedFloat64AtomicAdd == rhs.shaderSharedFloat64AtomicAdd ) && ( shaderImageFloat32Atomics == rhs.shaderImageFloat32Atomics ) && + ( shaderImageFloat32AtomicAdd == rhs.shaderImageFloat32AtomicAdd ) && ( sparseImageFloat32Atomics == rhs.sparseImageFloat32Atomics ) && + ( sparseImageFloat32AtomicAdd == rhs.sparseImageFloat32AtomicAdd ); +# endif + } + + bool operator!=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderAtomicFloatFeaturesEXT; + }; + + struct PhysicalDeviceShaderAtomicInt64Features + { + using NativeType = VkPhysicalDeviceShaderAtomicInt64Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicInt64Features; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderBufferInt64Atomics{ shaderBufferInt64Atomics_ } + , shaderSharedInt64Atomics{ shaderSharedInt64Atomics_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderAtomicInt64Features( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderAtomicInt64Features( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderAtomicInt64Features & operator=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderAtomicInt64Features & operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & + setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferInt64Atomics = shaderBufferInt64Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & + setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedInt64Atomics = shaderSharedInt64Atomics_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderAtomicInt64Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderAtomicInt64Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderBufferInt64Atomics, shaderSharedInt64Atomics ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderAtomicInt64Features const & ) const = default; +#else + bool operator==( 
PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) && + ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics ); +# endif + } + + bool operator!=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderAtomicInt64Features; + }; + + using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features; + + struct PhysicalDeviceShaderClockFeaturesKHR + { + using NativeType = VkPhysicalDeviceShaderClockFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderSubgroupClock{ shaderSubgroupClock_ } + , shaderDeviceClock{ shaderDeviceClock_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderClockFeaturesKHR( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderClockFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderClockFeaturesKHR & operator=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderClockFeaturesKHR & operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & + setShaderSubgroupClock( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ ) VULKAN_HPP_NOEXCEPT + { + shaderSubgroupClock = shaderSubgroupClock_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setShaderDeviceClock( VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ ) VULKAN_HPP_NOEXCEPT + { + shaderDeviceClock = shaderDeviceClock_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderClockFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderClockFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return 
std::tie( sType, pNext, shaderSubgroupClock, shaderDeviceClock ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderClockFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupClock == rhs.shaderSubgroupClock ) && + ( shaderDeviceClock == rhs.shaderDeviceClock ); +# endif + } + + bool operator!=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderClockFeaturesKHR; + }; + + struct PhysicalDeviceShaderCoreBuiltinsFeaturesARM + { + using NativeType = VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsFeaturesARM( VULKAN_HPP_NAMESPACE::Bool32 shaderCoreBuiltins_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderCoreBuiltins{ shaderCoreBuiltins_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsFeaturesARM( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderCoreBuiltinsFeaturesARM( VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderCoreBuiltinsFeaturesARM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderCoreBuiltinsFeaturesARM & operator=( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderCoreBuiltinsFeaturesARM & operator=( VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderCoreBuiltinsFeaturesARM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderCoreBuiltinsFeaturesARM & + setShaderCoreBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderCoreBuiltins_ ) VULKAN_HPP_NOEXCEPT + { + shaderCoreBuiltins = shaderCoreBuiltins_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderCoreBuiltins ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto 
operator<=>( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreBuiltins == rhs.shaderCoreBuiltins ); +# endif + } + + bool operator!=( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderCoreBuiltins = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderCoreBuiltinsFeaturesARM; + }; + + struct PhysicalDeviceShaderCoreBuiltinsPropertiesARM + { + using NativeType = VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsPropertiesARM( uint64_t shaderCoreMask_ = {}, + uint32_t shaderCoreCount_ = {}, + uint32_t shaderWarpsPerCore_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderCoreMask{ shaderCoreMask_ } + , shaderCoreCount{ shaderCoreCount_ } + , shaderWarpsPerCore{ shaderWarpsPerCore_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderCoreBuiltinsPropertiesARM( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderCoreBuiltinsPropertiesARM( VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderCoreBuiltinsPropertiesARM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderCoreBuiltinsPropertiesARM & operator=( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderCoreBuiltinsPropertiesARM & operator=( VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderCoreMask, shaderCoreCount, shaderWarpsPerCore ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreMask == rhs.shaderCoreMask ) && ( shaderCoreCount == rhs.shaderCoreCount ) && + ( shaderWarpsPerCore == rhs.shaderWarpsPerCore ); +# endif + } + + bool operator!=( 
PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM; + void * pNext = {}; + uint64_t shaderCoreMask = {}; + uint32_t shaderCoreCount = {}; + uint32_t shaderWarpsPerCore = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderCoreBuiltinsPropertiesARM; + }; + + struct PhysicalDeviceShaderCoreProperties2AMD + { + using NativeType = VkPhysicalDeviceShaderCoreProperties2AMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {}, + uint32_t activeComputeUnitCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderCoreFeatures{ shaderCoreFeatures_ } + , activeComputeUnitCount{ activeComputeUnitCount_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderCoreProperties2AMD( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderCoreProperties2AMD & operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderCoreProperties2AMD & operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderCoreProperties2AMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCoreProperties2AMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderCoreFeatures, activeComputeUnitCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderCoreProperties2AMD const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreFeatures == rhs.shaderCoreFeatures ) && + ( activeComputeUnitCount == rhs.activeComputeUnitCount ); +# endif + } + + bool operator!=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures = {}; + uint32_t activeComputeUnitCount = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderCoreProperties2AMD; + }; + + struct PhysicalDeviceShaderCorePropertiesAMD + { + using NativeType = 
VkPhysicalDeviceShaderCorePropertiesAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( uint32_t shaderEngineCount_ = {}, + uint32_t shaderArraysPerEngineCount_ = {}, + uint32_t computeUnitsPerShaderArray_ = {}, + uint32_t simdPerComputeUnit_ = {}, + uint32_t wavefrontsPerSimd_ = {}, + uint32_t wavefrontSize_ = {}, + uint32_t sgprsPerSimd_ = {}, + uint32_t minSgprAllocation_ = {}, + uint32_t maxSgprAllocation_ = {}, + uint32_t sgprAllocationGranularity_ = {}, + uint32_t vgprsPerSimd_ = {}, + uint32_t minVgprAllocation_ = {}, + uint32_t maxVgprAllocation_ = {}, + uint32_t vgprAllocationGranularity_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderEngineCount{ shaderEngineCount_ } + , shaderArraysPerEngineCount{ shaderArraysPerEngineCount_ } + , computeUnitsPerShaderArray{ computeUnitsPerShaderArray_ } + , simdPerComputeUnit{ simdPerComputeUnit_ } + , wavefrontsPerSimd{ wavefrontsPerSimd_ } + , wavefrontSize{ wavefrontSize_ } + , sgprsPerSimd{ sgprsPerSimd_ } + , minSgprAllocation{ minSgprAllocation_ } + , maxSgprAllocation{ maxSgprAllocation_ } + , sgprAllocationGranularity{ sgprAllocationGranularity_ } + , vgprsPerSimd{ vgprsPerSimd_ } + , minVgprAllocation{ minVgprAllocation_ } + , maxVgprAllocation{ maxVgprAllocation_ } + , vgprAllocationGranularity{ vgprAllocationGranularity_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderCorePropertiesAMD( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderCorePropertiesAMD( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderCorePropertiesAMD & operator=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderCorePropertiesAMD & operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderCorePropertiesAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCorePropertiesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + shaderEngineCount, + shaderArraysPerEngineCount, + computeUnitsPerShaderArray, + simdPerComputeUnit, + wavefrontsPerSimd, + wavefrontSize, + sgprsPerSimd, + minSgprAllocation, + maxSgprAllocation, + sgprAllocationGranularity, + vgprsPerSimd, + minVgprAllocation, + maxVgprAllocation, + vgprAllocationGranularity ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderCorePropertiesAMD const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEngineCount == rhs.shaderEngineCount ) && 
+ ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount ) && ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray ) && + ( simdPerComputeUnit == rhs.simdPerComputeUnit ) && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd ) && ( wavefrontSize == rhs.wavefrontSize ) && + ( sgprsPerSimd == rhs.sgprsPerSimd ) && ( minSgprAllocation == rhs.minSgprAllocation ) && ( maxSgprAllocation == rhs.maxSgprAllocation ) && + ( sgprAllocationGranularity == rhs.sgprAllocationGranularity ) && ( vgprsPerSimd == rhs.vgprsPerSimd ) && + ( minVgprAllocation == rhs.minVgprAllocation ) && ( maxVgprAllocation == rhs.maxVgprAllocation ) && + ( vgprAllocationGranularity == rhs.vgprAllocationGranularity ); +# endif + } + + bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD; + void * pNext = {}; + uint32_t shaderEngineCount = {}; + uint32_t shaderArraysPerEngineCount = {}; + uint32_t computeUnitsPerShaderArray = {}; + uint32_t simdPerComputeUnit = {}; + uint32_t wavefrontsPerSimd = {}; + uint32_t wavefrontSize = {}; + uint32_t sgprsPerSimd = {}; + uint32_t minSgprAllocation = {}; + uint32_t maxSgprAllocation = {}; + uint32_t sgprAllocationGranularity = {}; + uint32_t vgprsPerSimd = {}; + uint32_t minVgprAllocation = {}; + uint32_t maxVgprAllocation = {}; + uint32_t vgprAllocationGranularity = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderCorePropertiesAMD; + }; + + struct PhysicalDeviceShaderCorePropertiesARM + { + using NativeType = VkPhysicalDeviceShaderCorePropertiesARM; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCorePropertiesARM; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesARM( uint32_t pixelRate_ = {}, + uint32_t texelRate_ = {}, + uint32_t fmaRate_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pixelRate{ pixelRate_ } + , texelRate{ texelRate_ } + , fmaRate{ fmaRate_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesARM( PhysicalDeviceShaderCorePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderCorePropertiesARM( VkPhysicalDeviceShaderCorePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderCorePropertiesARM( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderCorePropertiesARM & operator=( PhysicalDeviceShaderCorePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderCorePropertiesARM & operator=( VkPhysicalDeviceShaderCorePropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderCorePropertiesARM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCorePropertiesARM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pixelRate, texelRate, fmaRate ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PhysicalDeviceShaderCorePropertiesARM const & ) const = default;
+#else
+    bool operator==( PhysicalDeviceShaderCorePropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pixelRate == rhs.pixelRate ) && ( texelRate == rhs.texelRate ) && ( fmaRate == rhs.fmaRate );
+# endif
+    }
+
+    bool operator!=( PhysicalDeviceShaderCorePropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType     = StructureType::ePhysicalDeviceShaderCorePropertiesARM;
+    void *                              pNext     = {};
+    uint32_t                            pixelRate = {};
+    uint32_t                            texelRate = {};
+    uint32_t                            fmaRate   = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCorePropertiesARM>
+  {
+    using Type = PhysicalDeviceShaderCorePropertiesARM;
+  };
+
+  struct PhysicalDeviceShaderDemoteToHelperInvocationFeatures
+  {
+    using NativeType = VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {},
+                                                                               void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , shaderDemoteToHelperInvocation{ shaderDemoteToHelperInvocation_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceShaderDemoteToHelperInvocationFeatures( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    PhysicalDeviceShaderDemoteToHelperInvocationFeatures( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceShaderDemoteToHelperInvocationFeatures( *reinterpret_cast<PhysicalDeviceShaderDemoteToHelperInvocationFeatures const *>( &rhs ) )
+    {
+    }
+
+    PhysicalDeviceShaderDemoteToHelperInvocationFeatures &
+      operator=( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    PhysicalDeviceShaderDemoteToHelperInvocationFeatures & operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures &
+      setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures *>( this );
+    }
+
+    operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, shaderDemoteToHelperInvocation );
+    }
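+    // Note: when VULKAN_HPP_USE_REFLECT is defined, reflect() above exposes the members
+    // ( sType, pNext, shaderDemoteToHelperInvocation ) as a std::tie of references, and the
+    // operator== fallback further below compares structs through this tuple rather than
+    // member by member.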
+#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation ); +# endif + } + + bool operator!=( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderDemoteToHelperInvocationFeatures; + }; + + using PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = PhysicalDeviceShaderDemoteToHelperInvocationFeatures; + + struct PhysicalDeviceShaderDrawParametersFeatures + { + using NativeType = VkPhysicalDeviceShaderDrawParametersFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderDrawParameters{ shaderDrawParameters_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderDrawParametersFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderDrawParametersFeatures & operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderDrawParametersFeatures & operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures & + setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT + { + shaderDrawParameters = shaderDrawParameters_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderDrawParametersFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderDrawParametersFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderDrawParameters ); + } +#endif + +#if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderDrawParametersFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderDrawParameters == rhs.shaderDrawParameters ); +# endif + } + + bool operator!=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderDrawParametersFeatures; + }; + + using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures; + + struct PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD + { + using NativeType = VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 shaderEarlyAndLateFragmentTests_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderEarlyAndLateFragmentTests{ shaderEarlyAndLateFragmentTests_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & + operator=( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & + operator=( VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & + setShaderEarlyAndLateFragmentTests( VULKAN_HPP_NAMESPACE::Bool32 shaderEarlyAndLateFragmentTests_ ) VULKAN_HPP_NOEXCEPT + { + shaderEarlyAndLateFragmentTests = shaderEarlyAndLateFragmentTests_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( 
VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderEarlyAndLateFragmentTests ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEarlyAndLateFragmentTests == rhs.shaderEarlyAndLateFragmentTests ); +# endif + } + + bool operator!=( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderEarlyAndLateFragmentTests = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDeviceShaderEnqueueFeaturesAMDX + { + using NativeType = VkPhysicalDeviceShaderEnqueueFeaturesAMDX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueueFeaturesAMDX( VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderMeshEnqueue_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderEnqueue{ shaderEnqueue_ } + , shaderMeshEnqueue{ shaderMeshEnqueue_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueueFeaturesAMDX( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderEnqueueFeaturesAMDX( VkPhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderEnqueueFeaturesAMDX( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderEnqueueFeaturesAMDX & operator=( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderEnqueueFeaturesAMDX & operator=( VkPhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueueFeaturesAMDX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueueFeaturesAMDX & setShaderEnqueue( VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue_ ) VULKAN_HPP_NOEXCEPT + { + shaderEnqueue = shaderEnqueue_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueueFeaturesAMDX & + setShaderMeshEnqueue( VULKAN_HPP_NAMESPACE::Bool32 shaderMeshEnqueue_ ) VULKAN_HPP_NOEXCEPT + { + shaderMeshEnqueue = shaderMeshEnqueue_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderEnqueueFeaturesAMDX const &() const VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderEnqueueFeaturesAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderEnqueue, shaderMeshEnqueue ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderEnqueueFeaturesAMDX const & ) const = default; +# else + bool operator==( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEnqueue == rhs.shaderEnqueue ) && ( shaderMeshEnqueue == rhs.shaderMeshEnqueue ); +# endif + } + + bool operator!=( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderMeshEnqueue = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderEnqueueFeaturesAMDX; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDeviceShaderEnqueuePropertiesAMDX + { + using NativeType = VkPhysicalDeviceShaderEnqueuePropertiesAMDX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX( uint32_t maxExecutionGraphDepth_ = {}, + uint32_t maxExecutionGraphShaderOutputNodes_ = {}, + uint32_t maxExecutionGraphShaderPayloadSize_ = {}, + uint32_t maxExecutionGraphShaderPayloadCount_ = {}, + uint32_t executionGraphDispatchAddressAlignment_ = {}, + std::array const & maxExecutionGraphWorkgroupCount_ = {}, + uint32_t maxExecutionGraphWorkgroups_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxExecutionGraphDepth{ maxExecutionGraphDepth_ } + , maxExecutionGraphShaderOutputNodes{ maxExecutionGraphShaderOutputNodes_ } + , maxExecutionGraphShaderPayloadSize{ maxExecutionGraphShaderPayloadSize_ } + , maxExecutionGraphShaderPayloadCount{ maxExecutionGraphShaderPayloadCount_ } + , executionGraphDispatchAddressAlignment{ executionGraphDispatchAddressAlignment_ } + , maxExecutionGraphWorkgroupCount{ maxExecutionGraphWorkgroupCount_ } + , maxExecutionGraphWorkgroups{ maxExecutionGraphWorkgroups_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderEnqueuePropertiesAMDX( VkPhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderEnqueuePropertiesAMDX( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderEnqueuePropertiesAMDX & operator=( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderEnqueuePropertiesAMDX & operator=( VkPhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) 
VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & setMaxExecutionGraphDepth( uint32_t maxExecutionGraphDepth_ ) VULKAN_HPP_NOEXCEPT + { + maxExecutionGraphDepth = maxExecutionGraphDepth_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & + setMaxExecutionGraphShaderOutputNodes( uint32_t maxExecutionGraphShaderOutputNodes_ ) VULKAN_HPP_NOEXCEPT + { + maxExecutionGraphShaderOutputNodes = maxExecutionGraphShaderOutputNodes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & + setMaxExecutionGraphShaderPayloadSize( uint32_t maxExecutionGraphShaderPayloadSize_ ) VULKAN_HPP_NOEXCEPT + { + maxExecutionGraphShaderPayloadSize = maxExecutionGraphShaderPayloadSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & + setMaxExecutionGraphShaderPayloadCount( uint32_t maxExecutionGraphShaderPayloadCount_ ) VULKAN_HPP_NOEXCEPT + { + maxExecutionGraphShaderPayloadCount = maxExecutionGraphShaderPayloadCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & + setExecutionGraphDispatchAddressAlignment( uint32_t executionGraphDispatchAddressAlignment_ ) VULKAN_HPP_NOEXCEPT + { + executionGraphDispatchAddressAlignment = executionGraphDispatchAddressAlignment_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & + setMaxExecutionGraphWorkgroupCount( std::array maxExecutionGraphWorkgroupCount_ ) VULKAN_HPP_NOEXCEPT + { + maxExecutionGraphWorkgroupCount = maxExecutionGraphWorkgroupCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & + setMaxExecutionGraphWorkgroups( uint32_t maxExecutionGraphWorkgroups_ ) VULKAN_HPP_NOEXCEPT + { + maxExecutionGraphWorkgroups = maxExecutionGraphWorkgroups_; + return *this; + } +# endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderEnqueuePropertiesAMDX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderEnqueuePropertiesAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + uint32_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxExecutionGraphDepth, + maxExecutionGraphShaderOutputNodes, + maxExecutionGraphShaderPayloadSize, + maxExecutionGraphShaderPayloadCount, + executionGraphDispatchAddressAlignment, + maxExecutionGraphWorkgroupCount, + maxExecutionGraphWorkgroups ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderEnqueuePropertiesAMDX const & ) const = default; +# else + bool operator==( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxExecutionGraphDepth == rhs.maxExecutionGraphDepth ) && + ( maxExecutionGraphShaderOutputNodes == rhs.maxExecutionGraphShaderOutputNodes ) && + ( 
maxExecutionGraphShaderPayloadSize == rhs.maxExecutionGraphShaderPayloadSize ) && + ( maxExecutionGraphShaderPayloadCount == rhs.maxExecutionGraphShaderPayloadCount ) && + ( executionGraphDispatchAddressAlignment == rhs.executionGraphDispatchAddressAlignment ) && + ( maxExecutionGraphWorkgroupCount == rhs.maxExecutionGraphWorkgroupCount ) && ( maxExecutionGraphWorkgroups == rhs.maxExecutionGraphWorkgroups ); +# endif + } + + bool operator!=( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX; + void * pNext = {}; + uint32_t maxExecutionGraphDepth = {}; + uint32_t maxExecutionGraphShaderOutputNodes = {}; + uint32_t maxExecutionGraphShaderPayloadSize = {}; + uint32_t maxExecutionGraphShaderPayloadCount = {}; + uint32_t executionGraphDispatchAddressAlignment = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxExecutionGraphWorkgroupCount = {}; + uint32_t maxExecutionGraphWorkgroups = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderEnqueuePropertiesAMDX; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + struct PhysicalDeviceShaderExpectAssumeFeatures + { + using NativeType = VkPhysicalDeviceShaderExpectAssumeFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderExpectAssumeFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderExpectAssumeFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderExpectAssume{ shaderExpectAssume_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderExpectAssumeFeatures( PhysicalDeviceShaderExpectAssumeFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderExpectAssumeFeatures( VkPhysicalDeviceShaderExpectAssumeFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderExpectAssumeFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderExpectAssumeFeatures & operator=( PhysicalDeviceShaderExpectAssumeFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderExpectAssumeFeatures & operator=( VkPhysicalDeviceShaderExpectAssumeFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderExpectAssumeFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderExpectAssumeFeatures & + setShaderExpectAssume( VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume_ ) VULKAN_HPP_NOEXCEPT + { + shaderExpectAssume = shaderExpectAssume_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderExpectAssumeFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderExpectAssumeFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, 
pNext, shaderExpectAssume ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderExpectAssumeFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderExpectAssumeFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderExpectAssume == rhs.shaderExpectAssume ); +# endif + } + + bool operator!=( PhysicalDeviceShaderExpectAssumeFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderExpectAssumeFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderExpectAssumeFeatures; + }; + + using PhysicalDeviceShaderExpectAssumeFeaturesKHR = PhysicalDeviceShaderExpectAssumeFeatures; + + struct PhysicalDeviceShaderFloat16Int8Features + { + using NativeType = VkPhysicalDeviceShaderFloat16Int8Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderFloat16Int8Features; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderFloat16{ shaderFloat16_ } + , shaderInt8{ shaderInt8_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderFloat16Int8Features( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderFloat16Int8Features( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderFloat16Int8Features & operator=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderFloat16Int8Features & operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT + { + shaderFloat16 = shaderFloat16_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT + { + shaderInt8 = shaderInt8_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderFloat16Int8Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderFloat16Int8Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const 
VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderFloat16, shaderInt8 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderFloat16Int8Features const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderFloat16 == rhs.shaderFloat16 ) && ( shaderInt8 == rhs.shaderInt8 ); +# endif + } + + bool operator!=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderFloat16Int8Features; + }; + + using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; + using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; + + struct PhysicalDeviceShaderFloatControls2Features + { + using NativeType = VkPhysicalDeviceShaderFloatControls2Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderFloatControls2Features; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloatControls2Features( VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderFloatControls2{ shaderFloatControls2_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloatControls2Features( PhysicalDeviceShaderFloatControls2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderFloatControls2Features( VkPhysicalDeviceShaderFloatControls2Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderFloatControls2Features( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderFloatControls2Features & operator=( PhysicalDeviceShaderFloatControls2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderFloatControls2Features & operator=( VkPhysicalDeviceShaderFloatControls2Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloatControls2Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloatControls2Features & + setShaderFloatControls2( VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2_ ) VULKAN_HPP_NOEXCEPT + { + shaderFloatControls2 = shaderFloatControls2_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderFloatControls2Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderFloatControls2Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# 
endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderFloatControls2 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderFloatControls2Features const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderFloatControls2Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderFloatControls2 == rhs.shaderFloatControls2 ); +# endif + } + + bool operator!=( PhysicalDeviceShaderFloatControls2Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloatControls2Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderFloatControls2Features; + }; + + using PhysicalDeviceShaderFloatControls2FeaturesKHR = PhysicalDeviceShaderFloatControls2Features; + + struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT + { + using NativeType = VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderImageInt64Atomics{ shaderImageInt64Atomics_ } + , sparseImageInt64Atomics{ sparseImageInt64Atomics_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & + setShaderImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageInt64Atomics = shaderImageInt64Atomics_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & + setSparseImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + sparseImageInt64Atomics = sparseImageInt64Atomics_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + 
operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderImageInt64Atomics, sparseImageInt64Atomics ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderImageInt64Atomics == rhs.shaderImageInt64Atomics ) && + ( sparseImageInt64Atomics == rhs.sparseImageInt64Atomics ); +# endif + } + + bool operator!=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + }; + + struct PhysicalDeviceShaderImageFootprintFeaturesNV + { + using NativeType = VkPhysicalDeviceShaderImageFootprintFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageFootprint{ imageFootprint_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderImageFootprintFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV & setImageFootprint( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ ) VULKAN_HPP_NOEXCEPT + { + imageFootprint = imageFootprint_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator 
VkPhysicalDeviceShaderImageFootprintFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageFootprint ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderImageFootprintFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFootprint == rhs.imageFootprint ); +# endif + } + + bool operator!=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageFootprint = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderImageFootprintFeaturesNV; + }; + + struct PhysicalDeviceShaderIntegerDotProductFeatures + { + using NativeType = VkPhysicalDeviceShaderIntegerDotProductFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderIntegerDotProduct{ shaderIntegerDotProduct_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderIntegerDotProductFeatures( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderIntegerDotProductFeatures( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderIntegerDotProductFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderIntegerDotProductFeatures & operator=( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderIntegerDotProductFeatures & operator=( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures & + setShaderIntegerDotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT + { + shaderIntegerDotProduct = shaderIntegerDotProduct_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderIntegerDotProductFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkPhysicalDeviceShaderIntegerDotProductFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderIntegerDotProduct ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderIntegerDotProductFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderIntegerDotProduct == rhs.shaderIntegerDotProduct ); +# endif + } + + bool operator!=( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderIntegerDotProductFeatures; + }; + + using PhysicalDeviceShaderIntegerDotProductFeaturesKHR = PhysicalDeviceShaderIntegerDotProductFeatures; + + struct PhysicalDeviceShaderIntegerDotProductProperties + { + using NativeType = VkPhysicalDeviceShaderIntegerDotProductProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductProperties( + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ = {}, + 
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , integerDotProduct8BitUnsignedAccelerated{ integerDotProduct8BitUnsignedAccelerated_ } + , integerDotProduct8BitSignedAccelerated{ integerDotProduct8BitSignedAccelerated_ } + , integerDotProduct8BitMixedSignednessAccelerated{ integerDotProduct8BitMixedSignednessAccelerated_ } + , integerDotProduct4x8BitPackedUnsignedAccelerated{ integerDotProduct4x8BitPackedUnsignedAccelerated_ } + , integerDotProduct4x8BitPackedSignedAccelerated{ integerDotProduct4x8BitPackedSignedAccelerated_ } + , integerDotProduct4x8BitPackedMixedSignednessAccelerated{ integerDotProduct4x8BitPackedMixedSignednessAccelerated_ } + , integerDotProduct16BitUnsignedAccelerated{ integerDotProduct16BitUnsignedAccelerated_ } + , integerDotProduct16BitSignedAccelerated{ integerDotProduct16BitSignedAccelerated_ } + , integerDotProduct16BitMixedSignednessAccelerated{ integerDotProduct16BitMixedSignednessAccelerated_ } + , integerDotProduct32BitUnsignedAccelerated{ integerDotProduct32BitUnsignedAccelerated_ } + , integerDotProduct32BitSignedAccelerated{ integerDotProduct32BitSignedAccelerated_ } + , integerDotProduct32BitMixedSignednessAccelerated{ integerDotProduct32BitMixedSignednessAccelerated_ } + , integerDotProduct64BitUnsignedAccelerated{ integerDotProduct64BitUnsignedAccelerated_ } + , integerDotProduct64BitSignedAccelerated{ integerDotProduct64BitSignedAccelerated_ } + , integerDotProduct64BitMixedSignednessAccelerated{ integerDotProduct64BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating8BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating8BitSignedAccelerated{ integerDotProductAccumulatingSaturating8BitSignedAccelerated_ } + , integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ } + , integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated{ 
integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating16BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating16BitSignedAccelerated{ integerDotProductAccumulatingSaturating16BitSignedAccelerated_ } + , integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating32BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating32BitSignedAccelerated{ integerDotProductAccumulatingSaturating32BitSignedAccelerated_ } + , integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating64BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating64BitSignedAccelerated{ integerDotProductAccumulatingSaturating64BitSignedAccelerated_ } + , integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderIntegerDotProductProperties( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderIntegerDotProductProperties( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderIntegerDotProductProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderIntegerDotProductProperties & operator=( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderIntegerDotProductProperties & operator=( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderIntegerDotProductProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderIntegerDotProductProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + integerDotProduct8BitUnsignedAccelerated, + integerDotProduct8BitSignedAccelerated, + integerDotProduct8BitMixedSignednessAccelerated, + integerDotProduct4x8BitPackedUnsignedAccelerated, + integerDotProduct4x8BitPackedSignedAccelerated, + integerDotProduct4x8BitPackedMixedSignednessAccelerated, + integerDotProduct16BitUnsignedAccelerated, + integerDotProduct16BitSignedAccelerated, + integerDotProduct16BitMixedSignednessAccelerated, + integerDotProduct32BitUnsignedAccelerated, + integerDotProduct32BitSignedAccelerated, + integerDotProduct32BitMixedSignednessAccelerated, + integerDotProduct64BitUnsignedAccelerated, + integerDotProduct64BitSignedAccelerated, + integerDotProduct64BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating8BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating8BitSignedAccelerated, + integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated, + 
integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated, + integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated, + integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating16BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating16BitSignedAccelerated, + integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating32BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating32BitSignedAccelerated, + integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating64BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating64BitSignedAccelerated, + integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderIntegerDotProductProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated ) && + ( integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated ) && + ( integerDotProduct8BitMixedSignednessAccelerated == rhs.integerDotProduct8BitMixedSignednessAccelerated ) && + ( integerDotProduct4x8BitPackedUnsignedAccelerated == rhs.integerDotProduct4x8BitPackedUnsignedAccelerated ) && + ( integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated ) && + ( integerDotProduct4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated ) && + ( integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated ) && + ( integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated ) && + ( integerDotProduct16BitMixedSignednessAccelerated == rhs.integerDotProduct16BitMixedSignednessAccelerated ) && + ( integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated ) && + ( integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated ) && + ( integerDotProduct32BitMixedSignednessAccelerated == rhs.integerDotProduct32BitMixedSignednessAccelerated ) && + ( integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated ) && + ( integerDotProduct64BitSignedAccelerated == rhs.integerDotProduct64BitSignedAccelerated ) && + ( integerDotProduct64BitMixedSignednessAccelerated == rhs.integerDotProduct64BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating8BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated == + rhs.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated == + 
rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating16BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating32BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating64BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ); +# endif + } + + bool operator!=( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderIntegerDotProductProperties; + }; + + using PhysicalDeviceShaderIntegerDotProductPropertiesKHR = PhysicalDeviceShaderIntegerDotProductProperties; + + struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL + { + using NativeType = VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderIntegerFunctions2{ shaderIntegerFunctions2_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & + operator=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & + setShaderIntegerFunctions2( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT + { + shaderIntegerFunctions2 = shaderIntegerFunctions2_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderIntegerFunctions2 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderIntegerFunctions2 == rhs.shaderIntegerFunctions2 ); +# endif + } + + bool operator!=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + }; + + struct PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR + { + using NativeType = VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderMaximalReconvergenceFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderMaximalReconvergence_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderMaximalReconvergence{ shaderMaximalReconvergence_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR( PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR( VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR & + operator=( PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR & operator=( VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR & + setShaderMaximalReconvergence( VULKAN_HPP_NAMESPACE::Bool32 shaderMaximalReconvergence_ ) VULKAN_HPP_NOEXCEPT + { + shaderMaximalReconvergence = shaderMaximalReconvergence_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const &() const 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderMaximalReconvergence ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderMaximalReconvergence == rhs.shaderMaximalReconvergence ); +# endif + } + + bool operator!=( PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderMaximalReconvergenceFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderMaximalReconvergence = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR; + }; + + struct PhysicalDeviceShaderModuleIdentifierFeaturesEXT + { + using NativeType = VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderModuleIdentifierFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderModuleIdentifier_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderModuleIdentifier{ shaderModuleIdentifier_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderModuleIdentifierFeaturesEXT( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderModuleIdentifierFeaturesEXT( VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderModuleIdentifierFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderModuleIdentifierFeaturesEXT & operator=( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderModuleIdentifierFeaturesEXT & operator=( VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderModuleIdentifierFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderModuleIdentifierFeaturesEXT & + setShaderModuleIdentifier( VULKAN_HPP_NAMESPACE::Bool32 shaderModuleIdentifier_ ) VULKAN_HPP_NOEXCEPT + { + shaderModuleIdentifier = shaderModuleIdentifier_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderModuleIdentifier ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderModuleIdentifier == rhs.shaderModuleIdentifier ); +# endif + } + + bool operator!=( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderModuleIdentifier = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderModuleIdentifierFeaturesEXT; + }; + + struct PhysicalDeviceShaderModuleIdentifierPropertiesEXT + { + using NativeType = VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceShaderModuleIdentifierPropertiesEXT( std::array const & shaderModuleIdentifierAlgorithmUUID_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderModuleIdentifierAlgorithmUUID{ shaderModuleIdentifierAlgorithmUUID_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceShaderModuleIdentifierPropertiesEXT( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderModuleIdentifierPropertiesEXT( VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderModuleIdentifierPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderModuleIdentifierPropertiesEXT & + operator=( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderModuleIdentifierPropertiesEXT & operator=( VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderModuleIdentifierAlgorithmUUID ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & ) const = 
default; +#else + bool operator==( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderModuleIdentifierAlgorithmUUID == rhs.shaderModuleIdentifierAlgorithmUUID ); +# endif + } + + bool operator!=( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D shaderModuleIdentifierAlgorithmUUID = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderModuleIdentifierPropertiesEXT; + }; + + struct PhysicalDeviceShaderObjectFeaturesEXT + { + using NativeType = VkPhysicalDeviceShaderObjectFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderObjectFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderObjectFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderObject_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderObject{ shaderObject_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderObjectFeaturesEXT( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderObjectFeaturesEXT( VkPhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderObjectFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderObjectFeaturesEXT & operator=( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderObjectFeaturesEXT & operator=( VkPhysicalDeviceShaderObjectFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectFeaturesEXT & setShaderObject( VULKAN_HPP_NAMESPACE::Bool32 shaderObject_ ) VULKAN_HPP_NOEXCEPT + { + shaderObject = shaderObject_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderObjectFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderObjectFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderObject ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderObjectFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderObject == 
rhs.shaderObject ); +# endif + } + + bool operator!=( PhysicalDeviceShaderObjectFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderObjectFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderObject = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderObjectFeaturesEXT; + }; + + struct PhysicalDeviceShaderObjectPropertiesEXT + { + using NativeType = VkPhysicalDeviceShaderObjectPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderObjectPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectPropertiesEXT( std::array const & shaderBinaryUUID_ = {}, + uint32_t shaderBinaryVersion_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderBinaryUUID{ shaderBinaryUUID_ } + , shaderBinaryVersion{ shaderBinaryVersion_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectPropertiesEXT( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderObjectPropertiesEXT( VkPhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderObjectPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderObjectPropertiesEXT & operator=( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderObjectPropertiesEXT & operator=( VkPhysicalDeviceShaderObjectPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderObjectPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderObjectPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple const &, uint32_t const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderBinaryUUID, shaderBinaryVersion ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderObjectPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBinaryUUID == rhs.shaderBinaryUUID ) && + ( shaderBinaryVersion == rhs.shaderBinaryVersion ); +# endif + } + + bool operator!=( PhysicalDeviceShaderObjectPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderObjectPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D shaderBinaryUUID = {}; + uint32_t shaderBinaryVersion = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderObjectPropertiesEXT; + }; + + struct PhysicalDeviceShaderQuadControlFeaturesKHR + { + using NativeType = VkPhysicalDeviceShaderQuadControlFeaturesKHR; + + 
static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderQuadControlFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderQuadControlFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderQuadControl_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderQuadControl{ shaderQuadControl_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderQuadControlFeaturesKHR( PhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderQuadControlFeaturesKHR( VkPhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderQuadControlFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderQuadControlFeaturesKHR & operator=( PhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderQuadControlFeaturesKHR & operator=( VkPhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderQuadControlFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderQuadControlFeaturesKHR & + setShaderQuadControl( VULKAN_HPP_NAMESPACE::Bool32 shaderQuadControl_ ) VULKAN_HPP_NOEXCEPT + { + shaderQuadControl = shaderQuadControl_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderQuadControlFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderQuadControlFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderQuadControl ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderQuadControlFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderQuadControl == rhs.shaderQuadControl ); +# endif + } + + bool operator!=( PhysicalDeviceShaderQuadControlFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderQuadControlFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderQuadControl = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderQuadControlFeaturesKHR; + }; + + struct PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR + { + using NativeType = VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR; + +#if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderRelaxedExtendedInstruction_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderRelaxedExtendedInstruction{ shaderRelaxedExtendedInstruction_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR( VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR( + *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR & + operator=( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR & + operator=( VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR & + setShaderRelaxedExtendedInstruction( VULKAN_HPP_NAMESPACE::Bool32 shaderRelaxedExtendedInstruction_ ) VULKAN_HPP_NOEXCEPT + { + shaderRelaxedExtendedInstruction = shaderRelaxedExtendedInstruction_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderRelaxedExtendedInstruction ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderRelaxedExtendedInstruction == rhs.shaderRelaxedExtendedInstruction ); +# endif + } + + bool operator!=( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRelaxedExtendedInstruction = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR; + }; + + struct PhysicalDeviceShaderReplicatedCompositesFeaturesEXT + { + using NativeType = 
VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderReplicatedCompositesFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderReplicatedCompositesFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderReplicatedComposites_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderReplicatedComposites{ shaderReplicatedComposites_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderReplicatedCompositesFeaturesEXT( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderReplicatedCompositesFeaturesEXT( VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderReplicatedCompositesFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderReplicatedCompositesFeaturesEXT & + operator=( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderReplicatedCompositesFeaturesEXT & operator=( VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderReplicatedCompositesFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderReplicatedCompositesFeaturesEXT & + setShaderReplicatedComposites( VULKAN_HPP_NAMESPACE::Bool32 shaderReplicatedComposites_ ) VULKAN_HPP_NOEXCEPT + { + shaderReplicatedComposites = shaderReplicatedComposites_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderReplicatedComposites ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderReplicatedComposites == rhs.shaderReplicatedComposites ); +# endif + } + + bool operator!=( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderReplicatedCompositesFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderReplicatedComposites = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderReplicatedCompositesFeaturesEXT; + }; + + struct 
PhysicalDeviceShaderSMBuiltinsFeaturesNV + { + using NativeType = VkPhysicalDeviceShaderSMBuiltinsFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderSMBuiltins{ shaderSMBuiltins_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderSMBuiltinsFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV & setShaderSMBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT + { + shaderSMBuiltins = shaderSMBuiltins_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderSMBuiltins ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSMBuiltins == rhs.shaderSMBuiltins ); +# endif + } + + bool operator!=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderSMBuiltinsFeaturesNV; + }; + + struct PhysicalDeviceShaderSMBuiltinsPropertiesNV + { + using NativeType = VkPhysicalDeviceShaderSMBuiltinsPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV; + +#if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderSMBuiltinsPropertiesNV( uint32_t shaderSMCount_ = {}, uint32_t shaderWarpsPerSM_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderSMCount{ shaderSMCount_ } + , shaderWarpsPerSM{ shaderWarpsPerSM_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderSMBuiltinsPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderSMCount, shaderWarpsPerSM ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSMCount == rhs.shaderSMCount ) && ( shaderWarpsPerSM == rhs.shaderWarpsPerSM ); +# endif + } + + bool operator!=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV; + void * pNext = {}; + uint32_t shaderSMCount = {}; + uint32_t shaderWarpsPerSM = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderSMBuiltinsPropertiesNV; + }; + + struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures + { + using NativeType = VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderSubgroupExtendedTypes{ shaderSubgroupExtendedTypes_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderSubgroupExtendedTypesFeatures( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSubgroupExtendedTypesFeatures( 
VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderSubgroupExtendedTypesFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderSubgroupExtendedTypesFeatures & + operator=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures & + setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT + { + shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderSubgroupExtendedTypes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ); +# endif + } + + bool operator!=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderSubgroupExtendedTypesFeatures; + }; + + using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures; + + struct PhysicalDeviceShaderSubgroupRotateFeatures + { + using NativeType = VkPhysicalDeviceShaderSubgroupRotateFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupRotateFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupRotateFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderSubgroupRotate{ shaderSubgroupRotate_ } + , shaderSubgroupRotateClustered{ shaderSubgroupRotateClustered_ } + { + } + + 
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupRotateFeatures( PhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderSubgroupRotateFeatures( VkPhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderSubgroupRotateFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderSubgroupRotateFeatures & operator=( PhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderSubgroupRotateFeatures & operator=( VkPhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupRotateFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupRotateFeatures & + setShaderSubgroupRotate( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate_ ) VULKAN_HPP_NOEXCEPT + { + shaderSubgroupRotate = shaderSubgroupRotate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupRotateFeatures & + setShaderSubgroupRotateClustered( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered_ ) VULKAN_HPP_NOEXCEPT + { + shaderSubgroupRotateClustered = shaderSubgroupRotateClustered_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceShaderSubgroupRotateFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSubgroupRotateFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderSubgroupRotate, shaderSubgroupRotateClustered ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderSubgroupRotateFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupRotate == rhs.shaderSubgroupRotate ) && + ( shaderSubgroupRotateClustered == rhs.shaderSubgroupRotateClustered ); +# endif + } + + bool operator!=( PhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupRotateFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderSubgroupRotateFeatures; + }; + + using PhysicalDeviceShaderSubgroupRotateFeaturesKHR = PhysicalDeviceShaderSubgroupRotateFeatures; + + struct PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR { - using NativeType = VkPhysicalDeviceLineRasterizationFeaturesEXT; + using NativeType = VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR 
StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , rectangularLines( rectangularLines_ ) - , bresenhamLines( bresenhamLines_ ) - , smoothLines( smoothLines_ ) - , stippledRectangularLines( stippledRectangularLines_ ) - , stippledBresenhamLines( stippledBresenhamLines_ ) - , stippledSmoothLines( stippledSmoothLines_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderSubgroupUniformControlFlow{ shaderSubgroupUniformControlFlow_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceLineRasterizationFeaturesEXT( *reinterpret_cast( &rhs ) ) + PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( + *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceLineRasterizationFeaturesEXT & operator=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & + operator=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceLineRasterizationFeaturesEXT & operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & + operator=( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & - setRectangularLines( 
VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & + setShaderSubgroupUniformControlFlow( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ ) VULKAN_HPP_NOEXCEPT { - rectangularLines = rectangularLines_; + shaderSubgroupUniformControlFlow = shaderSubgroupUniformControlFlow_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { - bresenhamLines = bresenhamLines_; + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderSubgroupUniformControlFlow ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupUniformControlFlow == rhs.shaderSubgroupUniformControlFlow ); +# endif + } + + bool operator!=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; + }; + + struct PhysicalDeviceShaderTerminateInvocationFeatures + { + using NativeType = VkPhysicalDeviceShaderTerminateInvocationFeatures; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderTerminateInvocation{ shaderTerminateInvocation_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderTerminateInvocationFeatures( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderTerminateInvocationFeatures( VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderTerminateInvocationFeatures( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderTerminateInvocationFeatures & operator=( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderTerminateInvocationFeatures & operator=( 
VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - smoothLines = smoothLines_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & - setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures & + setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT { - stippledRectangularLines = stippledRectangularLines_; + shaderTerminateInvocation = shaderTerminateInvocation_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & - setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShaderTerminateInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT { - stippledBresenhamLines = stippledBresenhamLines_; + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderTerminateInvocationFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderTerminateInvocation ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderTerminateInvocationFeatures const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderTerminateInvocation == rhs.shaderTerminateInvocation ); +# endif + } + + bool operator!=( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderTerminateInvocationFeatures; + }; + + using PhysicalDeviceShaderTerminateInvocationFeaturesKHR = PhysicalDeviceShaderTerminateInvocationFeatures; + + struct PhysicalDeviceShaderTileImageFeaturesEXT + { + using NativeType = VkPhysicalDeviceShaderTileImageFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImageFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageColorReadAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageDepthReadAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageStencilReadAccess_ = 
{}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderTileImageColorReadAccess{ shaderTileImageColorReadAccess_ } + , shaderTileImageDepthReadAccess{ shaderTileImageDepthReadAccess_ } + , shaderTileImageStencilReadAccess{ shaderTileImageStencilReadAccess_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImageFeaturesEXT( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderTileImageFeaturesEXT( VkPhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderTileImageFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderTileImageFeaturesEXT & operator=( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceShaderTileImageFeaturesEXT & operator=( VkPhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & - setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - stippledSmoothLines = stippledSmoothLines_; + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT & + setShaderTileImageColorReadAccess( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageColorReadAccess_ ) VULKAN_HPP_NOEXCEPT + { + shaderTileImageColorReadAccess = shaderTileImageColorReadAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT & + setShaderTileImageDepthReadAccess( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageDepthReadAccess_ ) VULKAN_HPP_NOEXCEPT + { + shaderTileImageDepthReadAccess = shaderTileImageDepthReadAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTileImageFeaturesEXT & + setShaderTileImageStencilReadAccess( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageStencilReadAccess_ ) VULKAN_HPP_NOEXCEPT + { + shaderTileImageStencilReadAccess = shaderTileImageStencilReadAccess_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceLineRasterizationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShaderTileImageFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceLineRasterizationFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShaderTileImageFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -55773,545 +95949,739 @@ namespace VULKAN_HPP_NAMESPACE void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, rectangularLines, bresenhamLines, smoothLines, stippledRectangularLines, stippledBresenhamLines, stippledSmoothLines ); + return std::tie( sType, pNext, shaderTileImageColorReadAccess, shaderTileImageDepthReadAccess, 
shaderTileImageStencilReadAccess ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceLineRasterizationFeaturesEXT const & ) const = default; + auto operator<=>( PhysicalDeviceShaderTileImageFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rectangularLines == rhs.rectangularLines ) && ( bresenhamLines == rhs.bresenhamLines ) && - ( smoothLines == rhs.smoothLines ) && ( stippledRectangularLines == rhs.stippledRectangularLines ) && - ( stippledBresenhamLines == rhs.stippledBresenhamLines ) && ( stippledSmoothLines == rhs.stippledSmoothLines ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderTileImageColorReadAccess == rhs.shaderTileImageColorReadAccess ) && + ( shaderTileImageDepthReadAccess == rhs.shaderTileImageDepthReadAccess ) && + ( shaderTileImageStencilReadAccess == rhs.shaderTileImageStencilReadAccess ); # endif } - bool operator!=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceShaderTileImageFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {}; - VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {}; - VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {}; - VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {}; - VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {}; - VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageColorReadAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageDepthReadAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageStencilReadAccess = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceLineRasterizationFeaturesEXT; + using Type = PhysicalDeviceShaderTileImageFeaturesEXT; }; - struct PhysicalDeviceLineRasterizationPropertiesEXT + struct PhysicalDeviceShaderTileImagePropertiesEXT { - using NativeType = VkPhysicalDeviceLineRasterizationPropertiesEXT; + using NativeType = VkPhysicalDeviceShaderTileImagePropertiesEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT( uint32_t lineSubPixelPrecisionBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImagePropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageCoherentReadAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 
shaderTileImageReadSampleFromPixelRateInvocation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadFromHelperInvocation_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderTileImageCoherentReadAccelerated{ shaderTileImageCoherentReadAccelerated_ } + , shaderTileImageReadSampleFromPixelRateInvocation{ shaderTileImageReadSampleFromPixelRateInvocation_ } + , shaderTileImageReadFromHelperInvocation{ shaderTileImageReadFromHelperInvocation_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTileImagePropertiesEXT( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceLineRasterizationPropertiesEXT( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceLineRasterizationPropertiesEXT( *reinterpret_cast( &rhs ) ) + PhysicalDeviceShaderTileImagePropertiesEXT( VkPhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderTileImagePropertiesEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceLineRasterizationPropertiesEXT & operator=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceShaderTileImagePropertiesEXT & operator=( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceLineRasterizationPropertiesEXT & operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderTileImagePropertiesEXT & operator=( VkPhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceLineRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShaderTileImagePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceLineRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShaderTileImagePropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, lineSubPixelPrecisionBits ); + return std::tie( + sType, pNext, shaderTileImageCoherentReadAccelerated, shaderTileImageReadSampleFromPixelRateInvocation, shaderTileImageReadFromHelperInvocation ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceLineRasterizationPropertiesEXT const & ) const = default; + auto operator<=>( PhysicalDeviceShaderTileImagePropertiesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits ); + return ( sType == rhs.sType ) && ( 
pNext == rhs.pNext ) && ( shaderTileImageCoherentReadAccelerated == rhs.shaderTileImageCoherentReadAccelerated ) && + ( shaderTileImageReadSampleFromPixelRateInvocation == rhs.shaderTileImageReadSampleFromPixelRateInvocation ) && + ( shaderTileImageReadFromHelperInvocation == rhs.shaderTileImageReadFromHelperInvocation ); # endif } - bool operator!=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceShaderTileImagePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT; - void * pNext = {}; - uint32_t lineSubPixelPrecisionBits = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageCoherentReadAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadSampleFromPixelRateInvocation = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadFromHelperInvocation = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceLineRasterizationPropertiesEXT; + using Type = PhysicalDeviceShaderTileImagePropertiesEXT; }; - struct PhysicalDeviceLinearColorAttachmentFeaturesNV + struct PhysicalDeviceShadingRateImageFeaturesNV { - using NativeType = VkPhysicalDeviceLinearColorAttachmentFeaturesNV; + using NativeType = VkPhysicalDeviceShadingRateImageFeaturesNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceLinearColorAttachmentFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , linearColorAttachment( linearColorAttachment_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shadingRateImage{ shadingRateImage_ } + , shadingRateCoarseSampleOrder{ shadingRateCoarseSampleOrder_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceLinearColorAttachmentFeaturesNV( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceLinearColorAttachmentFeaturesNV( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceLinearColorAttachmentFeaturesNV( *reinterpret_cast( &rhs ) ) + PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShadingRateImageFeaturesNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceLinearColorAttachmentFeaturesNV & operator=( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + 
PhysicalDeviceShadingRateImageFeaturesNV & operator=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceLinearColorAttachmentFeaturesNV & operator=( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShadingRateImageFeaturesNV & operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV & - setLinearColorAttachment( VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateImage( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT { - linearColorAttachment = linearColorAttachment_; + shadingRateImage = shadingRateImage_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & + setShadingRateCoarseSampleOrder( VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShadingRateImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShadingRateImageFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, linearColorAttachment ); + return std::tie( sType, pNext, shadingRateImage, shadingRateCoarseSampleOrder ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceLinearColorAttachmentFeaturesNV const & ) const = default; + auto operator<=>( PhysicalDeviceShadingRateImageFeaturesNV const & ) const = default; #else - bool operator==( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( linearColorAttachment == rhs.linearColorAttachment ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateImage == rhs.shadingRateImage ) && + ( shadingRateCoarseSampleOrder == rhs.shadingRateCoarseSampleOrder ); # endif } - bool operator!=( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceShadingRateImageFeaturesNV const & 
rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage = {}; + VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceLinearColorAttachmentFeaturesNV; + using Type = PhysicalDeviceShadingRateImageFeaturesNV; }; - struct PhysicalDeviceMaintenance3Properties + struct PhysicalDeviceShadingRateImagePropertiesNV { - using NativeType = VkPhysicalDeviceMaintenance3Properties; + using NativeType = VkPhysicalDeviceShadingRateImagePropertiesNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance3Properties; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( uint32_t maxPerSetDescriptors_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxPerSetDescriptors( maxPerSetDescriptors_ ) - , maxMemoryAllocationSize( maxMemoryAllocationSize_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {}, + uint32_t shadingRatePaletteSize_ = {}, + uint32_t shadingRateMaxCoarseSamples_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shadingRateTexelSize{ shadingRateTexelSize_ } + , shadingRatePaletteSize{ shadingRatePaletteSize_ } + , shadingRateMaxCoarseSamples{ shadingRateMaxCoarseSamples_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceMaintenance3Properties( *reinterpret_cast( &rhs ) ) + PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShadingRateImagePropertiesNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceMaintenance3Properties & operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceShadingRateImagePropertiesNV & operator=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceMaintenance3Properties & operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShadingRateImagePropertiesNV & operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - 
operator VkPhysicalDeviceMaintenance3Properties const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShadingRateImagePropertiesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShadingRateImagePropertiesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, maxPerSetDescriptors, maxMemoryAllocationSize ); + return std::tie( sType, pNext, shadingRateTexelSize, shadingRatePaletteSize, shadingRateMaxCoarseSamples ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMaintenance3Properties const & ) const = default; + auto operator<=>( PhysicalDeviceShadingRateImagePropertiesNV const & ) const = default; #else - bool operator==( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) && - ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateTexelSize == rhs.shadingRateTexelSize ) && + ( shadingRatePaletteSize == rhs.shadingRatePaletteSize ) && ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples ); # endif } - bool operator!=( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties; - void * pNext = {}; - uint32_t maxPerSetDescriptors = {}; - VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize = {}; + uint32_t shadingRatePaletteSize = {}; + uint32_t shadingRateMaxCoarseSamples = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceMaintenance3Properties; + using Type = PhysicalDeviceShadingRateImagePropertiesNV; }; - using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties; - struct PhysicalDeviceMaintenance4Features + struct PhysicalDeviceSparseImageFormatInfo2 { - using NativeType = VkPhysicalDeviceMaintenance4Features; + using NativeType = VkPhysicalDeviceSparseImageFormatInfo2; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance4Features; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSparseImageFormatInfo2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maintenance4( maintenance4_ ) +#if 
!defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSparseImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , format{ format_ } + , type{ type_ } + , samples{ samples_ } + , usage{ usage_ } + , tiling{ tiling_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features( PhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceMaintenance4Features( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceMaintenance4Features( *reinterpret_cast( &rhs ) ) + PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSparseImageFormatInfo2( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceMaintenance4Features & operator=( PhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceSparseImageFormatInfo2 & operator=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceMaintenance4Features & operator=( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSparseImageFormatInfo2 & operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features & setMaintenance4( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT { - maintenance4 = maintenance4_; + format = format_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceMaintenance4Features const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + type = type_; + return *this; } - operator VkPhysicalDeviceMaintenance4Features &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + { + samples = samples_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) 
VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT + { + tiling = tiling_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceSparseImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, format, type, samples, usage, tiling ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSparseImageFormatInfo2 const & ) const = default; +#else + bool operator==( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) && ( samples == rhs.samples ) && + ( usage == rhs.usage ) && ( tiling == rhs.tiling ); +# endif + } + + bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceSparseImageFormatInfo2; + }; + + using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2; + + struct PhysicalDeviceSubgroupProperties + { + using NativeType = VkPhysicalDeviceSubgroupProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupProperties; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( uint32_t subgroupSize_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {}, + VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , subgroupSize{ subgroupSize_ } + , supportedStages{ supportedStages_ } + , supportedOperations{ supportedOperations_ } + , quadOperationsInAllStages{ quadOperationsInAllStages_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSubgroupProperties( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSubgroupProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSubgroupProperties & operator=( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; 
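  // Editor's aside (illustrative only, not part of the generated patch): a minimal sketch of how the
  // PhysicalDeviceSparseImageFormatInfo2 wrapper defined above is typically filled and used through the
  // Vulkan-Hpp bindings. The `physicalDevice` handle and the chosen format/usage values are assumptions
  // for the example, not anything mandated by this header.
  //
  //   vk::PhysicalDeviceSparseImageFormatInfo2 formatInfo{};
  //   formatInfo.setFormat( vk::Format::eR8G8B8A8Unorm )
  //             .setType( vk::ImageType::e2D )
  //             .setSamples( vk::SampleCountFlagBits::e1 )
  //             .setUsage( vk::ImageUsageFlagBits::eSampled )
  //             .setTiling( vk::ImageTiling::eOptimal );
  //   // Enhanced-mode query; returns one SparseImageFormatProperties2 per supported image aspect.
  //   std::vector<vk::SparseImageFormatProperties2> sparseProps =
  //     physicalDevice.getSparseImageFormatProperties2( formatInfo );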
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceSubgroupProperties & operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceSubgroupProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubgroupProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, maintenance4 ); + return std::tie( sType, pNext, subgroupSize, supportedStages, supportedOperations, quadOperationsInAllStages ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMaintenance4Features const & ) const = default; + auto operator<=>( PhysicalDeviceSubgroupProperties const & ) const = default; #else - bool operator==( PhysicalDeviceMaintenance4Features const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance4 == rhs.maintenance4 ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSize == rhs.subgroupSize ) && ( supportedStages == rhs.supportedStages ) && + ( supportedOperations == rhs.supportedOperations ) && ( quadOperationsInAllStages == rhs.quadOperationsInAllStages ); # endif } - bool operator!=( PhysicalDeviceMaintenance4Features const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance4Features; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 maintenance4 = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties; + void * pNext = {}; + uint32_t subgroupSize = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages = {}; + VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations = {}; + VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceMaintenance4Features; + using Type = PhysicalDeviceSubgroupProperties; }; - using PhysicalDeviceMaintenance4FeaturesKHR = PhysicalDeviceMaintenance4Features; - struct PhysicalDeviceMaintenance4Properties + struct PhysicalDeviceSubgroupSizeControlFeatures { - using NativeType = VkPhysicalDeviceMaintenance4Properties; + using NativeType = VkPhysicalDeviceSubgroupSizeControlFeatures; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance4Properties; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties( VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxBufferSize( maxBufferSize_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS 
) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , subgroupSizeControl{ subgroupSizeControl_ } + , computeFullSubgroups{ computeFullSubgroups_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties( PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceMaintenance4Properties( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceMaintenance4Properties( *reinterpret_cast( &rhs ) ) + PhysicalDeviceSubgroupSizeControlFeatures( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSubgroupSizeControlFeatures( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceMaintenance4Properties & operator=( PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceSubgroupSizeControlFeatures & operator=( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceMaintenance4Properties & operator=( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSubgroupSizeControlFeatures & operator=( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceMaintenance4Properties const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceMaintenance4Properties &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & + setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + subgroupSizeControl = subgroupSizeControl_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & + setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT + { + computeFullSubgroups = computeFullSubgroups_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceSubgroupSizeControlFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubgroupSizeControlFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, maxBufferSize ); + return std::tie( sType, pNext, subgroupSizeControl, computeFullSubgroups ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMaintenance4Properties const & ) const = default; + auto operator<=>( PhysicalDeviceSubgroupSizeControlFeatures const & ) const = default; #else - bool operator==( 
PhysicalDeviceMaintenance4Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxBufferSize == rhs.maxBufferSize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSizeControl == rhs.subgroupSizeControl ) && + ( computeFullSubgroups == rhs.computeFullSubgroups ); # endif } - bool operator!=( PhysicalDeviceMaintenance4Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance4Properties; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {}; + VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceMaintenance4Properties; + using Type = PhysicalDeviceSubgroupSizeControlFeatures; }; - using PhysicalDeviceMaintenance4PropertiesKHR = PhysicalDeviceMaintenance4Properties; - struct PhysicalDeviceMemoryBudgetPropertiesEXT + using PhysicalDeviceSubgroupSizeControlFeaturesEXT = PhysicalDeviceSubgroupSizeControlFeatures; + + struct PhysicalDeviceSubgroupSizeControlProperties { - using NativeType = VkPhysicalDeviceMemoryBudgetPropertiesEXT; + using NativeType = VkPhysicalDeviceSubgroupSizeControlProperties; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlProperties; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( std::array const & heapBudget_ = {}, - std::array const & heapUsage_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , heapBudget( heapBudget_ ) - , heapUsage( heapUsage_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties( uint32_t minSubgroupSize_ = {}, + uint32_t maxSubgroupSize_ = {}, + uint32_t maxComputeWorkgroupSubgroups_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minSubgroupSize{ minSubgroupSize_ } + , maxSubgroupSize{ maxSubgroupSize_ } + , maxComputeWorkgroupSubgroups{ maxComputeWorkgroupSubgroups_ } + , requiredSubgroupSizeStages{ requiredSubgroupSizeStages_ } { } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceMemoryBudgetPropertiesEXT( *reinterpret_cast( &rhs ) ) + 
PhysicalDeviceSubgroupSizeControlProperties( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSubgroupSizeControlProperties( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceSubgroupSizeControlProperties & operator=( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSubgroupSizeControlProperties & operator=( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSubgroupSizeControlProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSubgroupSizeControlProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -56320,99 +96690,108 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ShaderStageFlags const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, heapBudget, heapUsage ); + return std::tie( sType, pNext, minSubgroupSize, maxSubgroupSize, maxComputeWorkgroupSubgroups, requiredSubgroupSizeStages ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMemoryBudgetPropertiesEXT const & ) const = default; + auto operator<=>( PhysicalDeviceSubgroupSizeControlProperties const & ) const = default; #else - bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( heapBudget == rhs.heapBudget ) && ( heapUsage == rhs.heapUsage ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSubgroupSize == rhs.minSubgroupSize ) && ( maxSubgroupSize == rhs.maxSubgroupSize ) && + ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups ) && ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages ); # endif } - bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D heapBudget = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D heapUsage = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlProperties; + void * pNext = {}; + uint32_t minSubgroupSize = {}; 
+    uint32_t                               maxSubgroupSize              = {};
+    uint32_t                               maxComputeWorkgroupSubgroups = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages   = {};
   };
 
   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlProperties>
   {
-    using Type = PhysicalDeviceMemoryBudgetPropertiesEXT;
+    using Type = PhysicalDeviceSubgroupSizeControlProperties;
   };
 
-  struct PhysicalDeviceMemoryPriorityFeaturesEXT
+  using PhysicalDeviceSubgroupSizeControlPropertiesEXT = PhysicalDeviceSubgroupSizeControlProperties;
+
+  struct PhysicalDeviceSubpassMergeFeedbackFeaturesEXT
   {
-    using NativeType = VkPhysicalDeviceMemoryPriorityFeaturesEXT;
+    using NativeType = VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
 
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
 
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {},
-                                                                  void *                       pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
-      : pNext( pNext_ )
-      , memoryPriority( memoryPriority_ )
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 subpassMergeFeedback_ = {},
+                                                                        void *                       pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , subpassMergeFeedback{ subpassMergeFeedback_ }
     {
     }
 
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR
+      PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-      : PhysicalDeviceMemoryPriorityFeaturesEXT( *reinterpret_cast<PhysicalDeviceMemoryPriorityFeaturesEXT const *>( &rhs ) )
+    PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( *reinterpret_cast<PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const *>( &rhs ) )
     {
     }
 
-#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
-    PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & operator=( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
 
-    PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & operator=( VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const *>( &rhs );
       return *this;
     }
 
-#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
-    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }
 
-    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT & setMemoryPriority( VULKAN_HPP_NAMESPACE::Bool32
memoryPriority_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & + setSubpassMergeFeedback( VULKAN_HPP_NAMESPACE::Bool32 subpassMergeFeedback_ ) VULKAN_HPP_NOEXCEPT { - memoryPriority = memoryPriority_; + subpassMergeFeedback = subpassMergeFeedback_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -56423,1085 +96802,874 @@ namespace VULKAN_HPP_NAMESPACE # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, memoryPriority ); + return std::tie( sType, pNext, subpassMergeFeedback ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMemoryPriorityFeaturesEXT const & ) const = default; + auto operator<=>( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryPriority == rhs.memoryPriority ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassMergeFeedback == rhs.subpassMergeFeedback ); # endif } - bool operator!=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 memoryPriority = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 subpassMergeFeedback = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceMemoryPriorityFeaturesEXT; + using Type = PhysicalDeviceSubpassMergeFeedbackFeaturesEXT; }; - struct PhysicalDeviceMemoryProperties + struct PhysicalDeviceSubpassShadingFeaturesHUAWEI { - using NativeType = VkPhysicalDeviceMemoryProperties; + using NativeType = VkPhysicalDeviceSubpassShadingFeaturesHUAWEI; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 - PhysicalDeviceMemoryProperties( uint32_t memoryTypeCount_ = {}, - std::array const & memoryTypes_ = {}, - uint32_t memoryHeapCount_ = {}, - std::array const & memoryHeaps_ = {} ) VULKAN_HPP_NOEXCEPT - : memoryTypeCount( memoryTypeCount_ ) - , memoryTypes( memoryTypes_ ) - , memoryHeapCount( memoryHeapCount_ ) - , memoryHeaps( memoryHeaps_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && 
!defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 subpassShading_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , subpassShading{ subpassShading_ } { } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceMemoryProperties( *reinterpret_cast( &rhs ) ) + PhysicalDeviceSubpassShadingFeaturesHUAWEI( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSubpassShadingFeaturesHUAWEI( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceMemoryProperties & operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceSubpassShadingFeaturesHUAWEI & operator=( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceMemoryProperties & operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSubpassShadingFeaturesHUAWEI & operator=( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceMemoryProperties const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassShadingFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassShadingFeaturesHUAWEI & setSubpassShading( VULKAN_HPP_NAMESPACE::Bool32 subpassShading_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + subpassShading = subpassShading_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple const &, - uint32_t const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( memoryTypeCount, memoryTypes, memoryHeapCount, memoryHeaps ); + return std::tie( sType, pNext, subpassShading ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMemoryProperties const & ) const = default; + auto operator<=>( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & ) const = default; #else - bool operator==( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( memoryTypeCount == rhs.memoryTypeCount ) && ( memoryTypes == 
rhs.memoryTypes ) && ( memoryHeapCount == rhs.memoryHeapCount ) && - ( memoryHeaps == rhs.memoryHeaps ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassShading == rhs.subpassShading ); # endif } - bool operator!=( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t memoryTypeCount = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D memoryTypes = {}; - uint32_t memoryHeapCount = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D memoryHeaps = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 subpassShading = {}; }; - struct PhysicalDeviceMemoryProperties2 + template <> + struct CppType { - using NativeType = VkPhysicalDeviceMemoryProperties2; + using Type = PhysicalDeviceSubpassShadingFeaturesHUAWEI; + }; + + struct PhysicalDeviceSubpassShadingPropertiesHUAWEI + { + using NativeType = VkPhysicalDeviceSubpassShadingPropertiesHUAWEI; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryProperties2; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memoryProperties( memoryProperties_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI( uint32_t maxSubpassShadingWorkgroupSizeAspectRatio_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxSubpassShadingWorkgroupSizeAspectRatio{ maxSubpassShadingWorkgroupSizeAspectRatio_ } { } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceMemoryProperties2( *reinterpret_cast( &rhs ) ) + PhysicalDeviceSubpassShadingPropertiesHUAWEI( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSubpassShadingPropertiesHUAWEI( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceMemoryProperties2 & operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceSubpassShadingPropertiesHUAWEI & operator=( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceMemoryProperties2 & operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSubpassShadingPropertiesHUAWEI & operator=( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceMemoryProperties2 const &() const 
VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, memoryProperties ); + return std::tie( sType, pNext, maxSubpassShadingWorkgroupSizeAspectRatio ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMemoryProperties2 const & ) const = default; + auto operator<=>( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & ) const = default; #else - bool operator==( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryProperties == rhs.memoryProperties ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSubpassShadingWorkgroupSizeAspectRatio == rhs.maxSubpassShadingWorkgroupSizeAspectRatio ); # endif } - bool operator!=( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI; + void * pNext = {}; + uint32_t maxSubpassShadingWorkgroupSizeAspectRatio = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceMemoryProperties2; + using Type = PhysicalDeviceSubpassShadingPropertiesHUAWEI; }; - using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2; - struct PhysicalDeviceMeshShaderFeaturesEXT + struct PhysicalDeviceSurfaceInfo2KHR { - using NativeType = VkPhysicalDeviceMeshShaderFeaturesEXT; + using NativeType = VkPhysicalDeviceSurfaceInfo2KHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multiviewMeshShader_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateMeshShader_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 meshShaderQueries_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , taskShader( taskShader_ ) - , meshShader( meshShader_ ) - , multiviewMeshShader( multiviewMeshShader_ ) - , primitiveFragmentShadingRateMeshShader( primitiveFragmentShadingRateMeshShader_ ) - , meshShaderQueries( meshShaderQueries_ ) +#if 
!defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , surface{ surface_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesEXT( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceMeshShaderFeaturesEXT( VkPhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceMeshShaderFeaturesEXT( *reinterpret_cast( &rhs ) ) + PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSurfaceInfo2KHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceMeshShaderFeaturesEXT & operator=( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceSurfaceInfo2KHR & operator=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceMeshShaderFeaturesEXT & operator=( VkPhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSurfaceInfo2KHR & operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT - { - taskShader = taskShader_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT - { - meshShader = meshShader_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & - setMultiviewMeshShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewMeshShader_ ) VULKAN_HPP_NOEXCEPT - { - multiviewMeshShader = multiviewMeshShader_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & - setPrimitiveFragmentShadingRateMeshShader( VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateMeshShader_ ) VULKAN_HPP_NOEXCEPT - { - primitiveFragmentShadingRateMeshShader = primitiveFragmentShadingRateMeshShader_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setMeshShaderQueries( VULKAN_HPP_NAMESPACE::Bool32 meshShaderQueries_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT { - meshShaderQueries = meshShaderQueries_; + surface = surface_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceMeshShaderFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSurfaceInfo2KHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return 
*reinterpret_cast( this ); } - operator VkPhysicalDeviceMeshShaderFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, taskShader, meshShader, multiviewMeshShader, primitiveFragmentShadingRateMeshShader, meshShaderQueries ); + return std::tie( sType, pNext, surface ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMeshShaderFeaturesEXT const & ) const = default; + auto operator<=>( PhysicalDeviceSurfaceInfo2KHR const & ) const = default; #else - bool operator==( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( taskShader == rhs.taskShader ) && ( meshShader == rhs.meshShader ) && - ( multiviewMeshShader == rhs.multiviewMeshShader ) && ( primitiveFragmentShadingRateMeshShader == rhs.primitiveFragmentShadingRateMeshShader ) && - ( meshShaderQueries == rhs.meshShaderQueries ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surface == rhs.surface ); # endif } - bool operator!=( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 taskShader = {}; - VULKAN_HPP_NAMESPACE::Bool32 meshShader = {}; - VULKAN_HPP_NAMESPACE::Bool32 multiviewMeshShader = {}; - VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateMeshShader = {}; - VULKAN_HPP_NAMESPACE::Bool32 meshShaderQueries = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceMeshShaderFeaturesEXT; + using Type = PhysicalDeviceSurfaceInfo2KHR; }; - struct PhysicalDeviceMeshShaderFeaturesNV + struct PhysicalDeviceSwapchainMaintenance1FeaturesEXT { - using NativeType = VkPhysicalDeviceMeshShaderFeaturesNV; + using NativeType = VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSwapchainMaintenance1FeaturesEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , taskShader( taskShader_ ) - , meshShader( meshShader_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSwapchainMaintenance1FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 swapchainMaintenance1_ = {}, + 
void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , swapchainMaintenance1{ swapchainMaintenance1_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + PhysicalDeviceSwapchainMaintenance1FeaturesEXT( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceMeshShaderFeaturesNV( *reinterpret_cast( &rhs ) ) + PhysicalDeviceSwapchainMaintenance1FeaturesEXT( VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSwapchainMaintenance1FeaturesEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceMeshShaderFeaturesNV & operator=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceSwapchainMaintenance1FeaturesEXT & operator=( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceMeshShaderFeaturesNV & operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceSwapchainMaintenance1FeaturesEXT & operator=( VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSwapchainMaintenance1FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT - { - taskShader = taskShader_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSwapchainMaintenance1FeaturesEXT & + setSwapchainMaintenance1( VULKAN_HPP_NAMESPACE::Bool32 swapchainMaintenance1_ ) VULKAN_HPP_NOEXCEPT { - meshShader = meshShader_; + swapchainMaintenance1 = swapchainMaintenance1_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceMeshShaderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceMeshShaderFeaturesNV &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, taskShader, meshShader ); + return std::tie( sType, pNext, swapchainMaintenance1 ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMeshShaderFeaturesNV const & ) const = default; + auto operator<=>( 
PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( taskShader == rhs.taskShader ) && ( meshShader == rhs.meshShader ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainMaintenance1 == rhs.swapchainMaintenance1 ); # endif } - bool operator!=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceSwapchainMaintenance1FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 taskShader = {}; - VULKAN_HPP_NAMESPACE::Bool32 meshShader = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSwapchainMaintenance1FeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 swapchainMaintenance1 = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceMeshShaderFeaturesNV; + using Type = PhysicalDeviceSwapchainMaintenance1FeaturesEXT; }; - struct PhysicalDeviceMeshShaderPropertiesEXT + struct PhysicalDeviceSynchronization2Features { - using NativeType = VkPhysicalDeviceMeshShaderPropertiesEXT; + using NativeType = VkPhysicalDeviceSynchronization2Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSynchronization2Features; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderPropertiesEXT; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , synchronization2{ synchronization2_ } + { + } -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesEXT( uint32_t maxTaskWorkGroupTotalCount_ = {}, - std::array const & maxTaskWorkGroupCount_ = {}, - uint32_t maxTaskWorkGroupInvocations_ = {}, - std::array const & maxTaskWorkGroupSize_ = {}, - uint32_t maxTaskPayloadSize_ = {}, - uint32_t maxTaskSharedMemorySize_ = {}, - uint32_t maxTaskPayloadAndSharedMemorySize_ = {}, - uint32_t maxMeshWorkGroupTotalCount_ = {}, - std::array const & maxMeshWorkGroupCount_ = {}, - uint32_t maxMeshWorkGroupInvocations_ = {}, - std::array const & maxMeshWorkGroupSize_ = {}, - uint32_t maxMeshSharedMemorySize_ = {}, - uint32_t maxMeshPayloadAndSharedMemorySize_ = {}, - uint32_t maxMeshOutputMemorySize_ = {}, - uint32_t maxMeshPayloadAndOutputMemorySize_ = {}, - uint32_t maxMeshOutputComponents_ = {}, - uint32_t maxMeshOutputVertices_ = {}, - uint32_t maxMeshOutputPrimitives_ = {}, - uint32_t maxMeshOutputLayers_ = {}, - uint32_t maxMeshMultiviewViewCount_ = {}, - uint32_t meshOutputPerVertexGranularity_ = {}, - uint32_t meshOutputPerPrimitiveGranularity_ = {}, - uint32_t maxPreferredTaskWorkGroupInvocations_ = {}, - uint32_t 
maxPreferredMeshWorkGroupInvocations_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationVertexOutput_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationPrimitiveOutput_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 prefersCompactVertexOutput_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 prefersCompactPrimitiveOutput_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxTaskWorkGroupTotalCount( maxTaskWorkGroupTotalCount_ ) - , maxTaskWorkGroupCount( maxTaskWorkGroupCount_ ) - , maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ ) - , maxTaskWorkGroupSize( maxTaskWorkGroupSize_ ) - , maxTaskPayloadSize( maxTaskPayloadSize_ ) - , maxTaskSharedMemorySize( maxTaskSharedMemorySize_ ) - , maxTaskPayloadAndSharedMemorySize( maxTaskPayloadAndSharedMemorySize_ ) - , maxMeshWorkGroupTotalCount( maxMeshWorkGroupTotalCount_ ) - , maxMeshWorkGroupCount( maxMeshWorkGroupCount_ ) - , maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ ) - , maxMeshWorkGroupSize( maxMeshWorkGroupSize_ ) - , maxMeshSharedMemorySize( maxMeshSharedMemorySize_ ) - , maxMeshPayloadAndSharedMemorySize( maxMeshPayloadAndSharedMemorySize_ ) - , maxMeshOutputMemorySize( maxMeshOutputMemorySize_ ) - , maxMeshPayloadAndOutputMemorySize( maxMeshPayloadAndOutputMemorySize_ ) - , maxMeshOutputComponents( maxMeshOutputComponents_ ) - , maxMeshOutputVertices( maxMeshOutputVertices_ ) - , maxMeshOutputPrimitives( maxMeshOutputPrimitives_ ) - , maxMeshOutputLayers( maxMeshOutputLayers_ ) - , maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ ) - , meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ ) - , meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ ) - , maxPreferredTaskWorkGroupInvocations( maxPreferredTaskWorkGroupInvocations_ ) - , maxPreferredMeshWorkGroupInvocations( maxPreferredMeshWorkGroupInvocations_ ) - , prefersLocalInvocationVertexOutput( prefersLocalInvocationVertexOutput_ ) - , prefersLocalInvocationPrimitiveOutput( prefersLocalInvocationPrimitiveOutput_ ) - , prefersCompactVertexOutput( prefersCompactVertexOutput_ ) - , prefersCompactPrimitiveOutput( prefersCompactPrimitiveOutput_ ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features( PhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSynchronization2Features( VkPhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSynchronization2Features( *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesEXT( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceSynchronization2Features & operator=( PhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceMeshShaderPropertiesEXT( VkPhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceMeshShaderPropertiesEXT( *reinterpret_cast( &rhs ) ) + PhysicalDeviceSynchronization2Features & operator=( VkPhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceMeshShaderPropertiesEXT & operator=( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2Features & setPNext( void * 
pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } - PhysicalDeviceMeshShaderPropertiesEXT & operator=( VkPhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2Features & setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + synchronization2 = synchronization2_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceMeshShaderPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSynchronization2Features const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceMeshShaderPropertiesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceSynchronization2Features &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple const &, - uint32_t const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, - uint32_t const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &> + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - maxTaskWorkGroupTotalCount, - maxTaskWorkGroupCount, - maxTaskWorkGroupInvocations, - maxTaskWorkGroupSize, - maxTaskPayloadSize, - maxTaskSharedMemorySize, - maxTaskPayloadAndSharedMemorySize, - maxMeshWorkGroupTotalCount, - maxMeshWorkGroupCount, - maxMeshWorkGroupInvocations, - maxMeshWorkGroupSize, - maxMeshSharedMemorySize, - maxMeshPayloadAndSharedMemorySize, - maxMeshOutputMemorySize, - maxMeshPayloadAndOutputMemorySize, - maxMeshOutputComponents, - maxMeshOutputVertices, - maxMeshOutputPrimitives, - maxMeshOutputLayers, - maxMeshMultiviewViewCount, - meshOutputPerVertexGranularity, - meshOutputPerPrimitiveGranularity, - maxPreferredTaskWorkGroupInvocations, - maxPreferredMeshWorkGroupInvocations, - prefersLocalInvocationVertexOutput, - prefersLocalInvocationPrimitiveOutput, - prefersCompactVertexOutput, - prefersCompactPrimitiveOutput ); + return std::tie( sType, pNext, synchronization2 ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMeshShaderPropertiesEXT const & ) const = default; + auto operator<=>( PhysicalDeviceSynchronization2Features const & ) const = default; #else - bool operator==( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceSynchronization2Features const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTaskWorkGroupTotalCount == rhs.maxTaskWorkGroupTotalCount ) && - ( maxTaskWorkGroupCount == rhs.maxTaskWorkGroupCount ) && ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) && - ( 
maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize ) && ( maxTaskPayloadSize == rhs.maxTaskPayloadSize ) && - ( maxTaskSharedMemorySize == rhs.maxTaskSharedMemorySize ) && ( maxTaskPayloadAndSharedMemorySize == rhs.maxTaskPayloadAndSharedMemorySize ) && - ( maxMeshWorkGroupTotalCount == rhs.maxMeshWorkGroupTotalCount ) && ( maxMeshWorkGroupCount == rhs.maxMeshWorkGroupCount ) && - ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) && ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize ) && - ( maxMeshSharedMemorySize == rhs.maxMeshSharedMemorySize ) && ( maxMeshPayloadAndSharedMemorySize == rhs.maxMeshPayloadAndSharedMemorySize ) && - ( maxMeshOutputMemorySize == rhs.maxMeshOutputMemorySize ) && ( maxMeshPayloadAndOutputMemorySize == rhs.maxMeshPayloadAndOutputMemorySize ) && - ( maxMeshOutputComponents == rhs.maxMeshOutputComponents ) && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices ) && - ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) && ( maxMeshOutputLayers == rhs.maxMeshOutputLayers ) && - ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount ) && ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity ) && - ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity ) && - ( maxPreferredTaskWorkGroupInvocations == rhs.maxPreferredTaskWorkGroupInvocations ) && - ( maxPreferredMeshWorkGroupInvocations == rhs.maxPreferredMeshWorkGroupInvocations ) && - ( prefersLocalInvocationVertexOutput == rhs.prefersLocalInvocationVertexOutput ) && - ( prefersLocalInvocationPrimitiveOutput == rhs.prefersLocalInvocationPrimitiveOutput ) && - ( prefersCompactVertexOutput == rhs.prefersCompactVertexOutput ) && ( prefersCompactPrimitiveOutput == rhs.prefersCompactPrimitiveOutput ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( synchronization2 == rhs.synchronization2 ); # endif } - bool operator!=( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceSynchronization2Features const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesEXT; - void * pNext = {}; - uint32_t maxTaskWorkGroupTotalCount = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxTaskWorkGroupCount = {}; - uint32_t maxTaskWorkGroupInvocations = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxTaskWorkGroupSize = {}; - uint32_t maxTaskPayloadSize = {}; - uint32_t maxTaskSharedMemorySize = {}; - uint32_t maxTaskPayloadAndSharedMemorySize = {}; - uint32_t maxMeshWorkGroupTotalCount = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxMeshWorkGroupCount = {}; - uint32_t maxMeshWorkGroupInvocations = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxMeshWorkGroupSize = {}; - uint32_t maxMeshSharedMemorySize = {}; - uint32_t maxMeshPayloadAndSharedMemorySize = {}; - uint32_t maxMeshOutputMemorySize = {}; - uint32_t maxMeshPayloadAndOutputMemorySize = {}; - uint32_t maxMeshOutputComponents = {}; - uint32_t maxMeshOutputVertices = {}; - uint32_t maxMeshOutputPrimitives = {}; - uint32_t maxMeshOutputLayers = {}; - uint32_t maxMeshMultiviewViewCount = {}; - uint32_t meshOutputPerVertexGranularity = {}; - uint32_t meshOutputPerPrimitiveGranularity = {}; - uint32_t maxPreferredTaskWorkGroupInvocations = {}; - uint32_t maxPreferredMeshWorkGroupInvocations = {}; - VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationVertexOutput = {}; - VULKAN_HPP_NAMESPACE::Bool32 prefersLocalInvocationPrimitiveOutput = 
{}; - VULKAN_HPP_NAMESPACE::Bool32 prefersCompactVertexOutput = {}; - VULKAN_HPP_NAMESPACE::Bool32 prefersCompactPrimitiveOutput = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSynchronization2Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 synchronization2 = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceMeshShaderPropertiesEXT; + using Type = PhysicalDeviceSynchronization2Features; }; - struct PhysicalDeviceMeshShaderPropertiesNV + using PhysicalDeviceSynchronization2FeaturesKHR = PhysicalDeviceSynchronization2Features; + + struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT { - using NativeType = VkPhysicalDeviceMeshShaderPropertiesNV; + using NativeType = VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( uint32_t maxDrawMeshTasksCount_ = {}, - uint32_t maxTaskWorkGroupInvocations_ = {}, - std::array const & maxTaskWorkGroupSize_ = {}, - uint32_t maxTaskTotalMemorySize_ = {}, - uint32_t maxTaskOutputCount_ = {}, - uint32_t maxMeshWorkGroupInvocations_ = {}, - std::array const & maxMeshWorkGroupSize_ = {}, - uint32_t maxMeshTotalMemorySize_ = {}, - uint32_t maxMeshOutputVertices_ = {}, - uint32_t maxMeshOutputPrimitives_ = {}, - uint32_t maxMeshMultiviewViewCount_ = {}, - uint32_t meshOutputPerVertexGranularity_ = {}, - uint32_t meshOutputPerPrimitiveGranularity_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxDrawMeshTasksCount( maxDrawMeshTasksCount_ ) - , maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ ) - , maxTaskWorkGroupSize( maxTaskWorkGroupSize_ ) - , maxTaskTotalMemorySize( maxTaskTotalMemorySize_ ) - , maxTaskOutputCount( maxTaskOutputCount_ ) - , maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ ) - , maxMeshWorkGroupSize( maxMeshWorkGroupSize_ ) - , maxMeshTotalMemorySize( maxMeshTotalMemorySize_ ) - , maxMeshOutputVertices( maxMeshOutputVertices_ ) - , maxMeshOutputPrimitives( maxMeshOutputPrimitives_ ) - , maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ ) - , meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ ) - , meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , texelBufferAlignment{ texelBufferAlignment_ } { } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + PhysicalDeviceTexelBufferAlignmentFeaturesEXT( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceMeshShaderPropertiesNV( *reinterpret_cast( &rhs ) ) + PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) 
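// -------------------------------------------------------------------------------------------------
// Editor's aside (illustrative only, not part of the generated header diff): feature structs such as
// PhysicalDeviceSynchronization2Features above are normally chained behind PhysicalDeviceFeatures2
// for the support query, and behind DeviceCreateInfo to enable the feature. A minimal sketch using
// vulkan.hpp's StructureChain, which wires the pNext pointers automatically; "physicalDevice" is a
// placeholder handle, and Vulkan 1.3 (or VK_KHR_synchronization2) is assumed.
#include <vulkan/vulkan.hpp>

bool supportsSynchronization2( vk::PhysicalDevice physicalDevice )
{
  // The driver fills both structs of the chain in a single getFeatures2 call.
  auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceSynchronization2Features>();
  return chain.get<vk::PhysicalDeviceSynchronization2Features>().synchronization2 == VK_TRUE;
}
// -------------------------------------------------------------------------------------------------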
VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTexelBufferAlignmentFeaturesEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceMeshShaderPropertiesNV & operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceMeshShaderPropertiesNV & operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceMeshShaderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceMeshShaderPropertiesNV &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT & + setTexelBufferAlignment( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + texelBufferAlignment = texelBufferAlignment_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &> + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - maxDrawMeshTasksCount, - maxTaskWorkGroupInvocations, - maxTaskWorkGroupSize, - maxTaskTotalMemorySize, - maxTaskOutputCount, - maxMeshWorkGroupInvocations, - maxMeshWorkGroupSize, - maxMeshTotalMemorySize, - maxMeshOutputVertices, - maxMeshOutputPrimitives, - maxMeshMultiviewViewCount, - meshOutputPerVertexGranularity, - meshOutputPerPrimitiveGranularity ); + return std::tie( sType, pNext, texelBufferAlignment ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMeshShaderPropertiesNV const & ) const = default; + auto operator<=>( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount ) && - ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) && ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize ) && - ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize ) && ( 
maxTaskOutputCount == rhs.maxTaskOutputCount ) && - ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) && ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize ) && - ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize ) && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices ) && - ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) && ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount ) && - ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity ) && - ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( texelBufferAlignment == rhs.texelBufferAlignment ); # endif } - bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV; - void * pNext = {}; - uint32_t maxDrawMeshTasksCount = {}; - uint32_t maxTaskWorkGroupInvocations = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxTaskWorkGroupSize = {}; - uint32_t maxTaskTotalMemorySize = {}; - uint32_t maxTaskOutputCount = {}; - uint32_t maxMeshWorkGroupInvocations = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxMeshWorkGroupSize = {}; - uint32_t maxMeshTotalMemorySize = {}; - uint32_t maxMeshOutputVertices = {}; - uint32_t maxMeshOutputPrimitives = {}; - uint32_t maxMeshMultiviewViewCount = {}; - uint32_t meshOutputPerVertexGranularity = {}; - uint32_t meshOutputPerPrimitiveGranularity = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceMeshShaderPropertiesNV; + using Type = PhysicalDeviceTexelBufferAlignmentFeaturesEXT; }; - struct PhysicalDeviceMultiDrawFeaturesEXT + struct PhysicalDeviceTexelBufferAlignmentProperties { - using NativeType = VkPhysicalDeviceMultiDrawFeaturesEXT; + using NativeType = VkPhysicalDeviceTexelBufferAlignmentProperties; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiDrawFeaturesEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 multiDraw_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , multiDraw( multiDraw_ ) - { - } - - VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties; - PhysicalDeviceMultiDrawFeaturesEXT( VkPhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceMultiDrawFeaturesEXT( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties( VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, + 
VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , storageTexelBufferOffsetAlignmentBytes{ storageTexelBufferOffsetAlignmentBytes_ } + , storageTexelBufferOffsetSingleTexelAlignment{ storageTexelBufferOffsetSingleTexelAlignment_ } + , uniformTexelBufferOffsetAlignmentBytes{ uniformTexelBufferOffsetAlignmentBytes_ } + , uniformTexelBufferOffsetSingleTexelAlignment{ uniformTexelBufferOffsetSingleTexelAlignment_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceMultiDrawFeaturesEXT & operator=( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceMultiDrawFeaturesEXT & operator=( VkPhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTexelBufferAlignmentProperties( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTexelBufferAlignmentProperties( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + PhysicalDeviceTexelBufferAlignmentProperties & operator=( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT & setMultiDraw( VULKAN_HPP_NAMESPACE::Bool32 multiDraw_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTexelBufferAlignmentProperties & operator=( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - multiDraw = multiDraw_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceMultiDrawFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceTexelBufferAlignmentProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceMultiDrawFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceTexelBufferAlignmentProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, multiDraw ); + return std::tie( sType, + pNext, + storageTexelBufferOffsetAlignmentBytes, + storageTexelBufferOffsetSingleTexelAlignment, + uniformTexelBufferOffsetAlignmentBytes, + uniformTexelBufferOffsetSingleTexelAlignment ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMultiDrawFeaturesEXT const & ) const = default; + auto operator<=>( PhysicalDeviceTexelBufferAlignmentProperties const & ) const = default; #else - bool operator==( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiDraw == rhs.multiDraw ); + 
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes ) && + ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment ) && + ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes ) && + ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment ); # endif } - bool operator!=( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiDrawFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 multiDraw = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceMultiDrawFeaturesEXT; + using Type = PhysicalDeviceTexelBufferAlignmentProperties; }; - struct PhysicalDeviceMultiDrawPropertiesEXT + using PhysicalDeviceTexelBufferAlignmentPropertiesEXT = PhysicalDeviceTexelBufferAlignmentProperties; + + struct PhysicalDeviceTextureCompressionASTCHDRFeatures { - using NativeType = VkPhysicalDeviceMultiDrawPropertiesEXT; + using NativeType = VkPhysicalDeviceTextureCompressionASTCHDRFeatures; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiDrawPropertiesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT( uint32_t maxMultiDrawCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxMultiDrawCount( maxMultiDrawCount_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeatures( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , textureCompressionASTC_HDR{ textureCompressionASTC_HDR_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + PhysicalDeviceTextureCompressionASTCHDRFeatures( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceMultiDrawPropertiesEXT( VkPhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceMultiDrawPropertiesEXT( *reinterpret_cast( &rhs ) ) + PhysicalDeviceTextureCompressionASTCHDRFeatures( VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTextureCompressionASTCHDRFeatures( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceMultiDrawPropertiesEXT & 
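// -------------------------------------------------------------------------------------------------
// Editor's aside (illustrative only, not part of the generated header diff): the texel-buffer
// alignment limits above are output-only and are read by chaining the struct behind
// PhysicalDeviceProperties2; the reported alignment is then used to place buffer-view offsets.
// A minimal sketch assuming Vulkan 1.3 (or the EXT alias) and a placeholder "physicalDevice".
#include <vulkan/vulkan.hpp>

vk::DeviceSize alignStorageTexelOffset( vk::PhysicalDevice physicalDevice, vk::DeviceSize rawOffset )
{
  auto props = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceTexelBufferAlignmentProperties>();
  vk::DeviceSize alignment = props.get<vk::PhysicalDeviceTexelBufferAlignmentProperties>().storageTexelBufferOffsetAlignmentBytes;
  // Round the offset up to the next multiple of the reported alignment (alignments are powers of two).
  return ( rawOffset + alignment - 1 ) & ~( alignment - 1 );
}
// -------------------------------------------------------------------------------------------------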
operator=( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceTextureCompressionASTCHDRFeatures & operator=( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceMultiDrawPropertiesEXT & operator=( VkPhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTextureCompressionASTCHDRFeatures & operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceMultiDrawPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceMultiDrawPropertiesEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures & + setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + textureCompressionASTC_HDR = textureCompressionASTC_HDR_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, maxMultiDrawCount ); + return std::tie( sType, pNext, textureCompressionASTC_HDR ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMultiDrawPropertiesEXT const & ) const = default; + auto operator<=>( PhysicalDeviceTextureCompressionASTCHDRFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxMultiDrawCount == rhs.maxMultiDrawCount ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR ); # endif } - bool operator!=( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiDrawPropertiesEXT; - void * pNext = {}; - uint32_t maxMultiDrawCount = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceMultiDrawPropertiesEXT; + using Type = PhysicalDeviceTextureCompressionASTCHDRFeatures; }; - struct 
PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT + using PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT = PhysicalDeviceTextureCompressionASTCHDRFeatures; + + struct PhysicalDeviceTilePropertiesFeaturesQCOM { - using NativeType = VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; + using NativeType = VkPhysicalDeviceTilePropertiesFeaturesQCOM; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampled_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , multisampledRenderToSingleSampled( multisampledRenderToSingleSampled_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTilePropertiesFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 tileProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , tileProperties{ tileProperties_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) - VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceTilePropertiesFeaturesQCOM( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( - *reinterpret_cast( &rhs ) ) + PhysicalDeviceTilePropertiesFeaturesQCOM( VkPhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTilePropertiesFeaturesQCOM( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & - operator=( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceTilePropertiesFeaturesQCOM & operator=( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & - operator=( VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTilePropertiesFeaturesQCOM & operator=( VkPhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTilePropertiesFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & - setMultisampledRenderToSingleSampled( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampled_ ) 
VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTilePropertiesFeaturesQCOM & setTileProperties( VULKAN_HPP_NAMESPACE::Bool32 tileProperties_ ) VULKAN_HPP_NOEXCEPT { - multisampledRenderToSingleSampled = multisampledRenderToSingleSampled_; + tileProperties = tileProperties_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceTilePropertiesFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceTilePropertiesFeaturesQCOM &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -57512,787 +97680,867 @@ namespace VULKAN_HPP_NAMESPACE # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, multisampledRenderToSingleSampled ); + return std::tie( sType, pNext, tileProperties ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & ) const = default; + auto operator<=>( PhysicalDeviceTilePropertiesFeaturesQCOM const & ) const = default; #else - bool operator==( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multisampledRenderToSingleSampled == rhs.multisampledRenderToSingleSampled ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tileProperties == rhs.tileProperties ); # endif } - bool operator!=( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampled = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 tileProperties = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; + using Type = PhysicalDeviceTilePropertiesFeaturesQCOM; }; - struct PhysicalDeviceMultiviewFeatures + struct PhysicalDeviceTimelineSemaphoreFeatures { - using NativeType = VkPhysicalDeviceMultiviewFeatures; + using NativeType = VkPhysicalDeviceTimelineSemaphoreFeatures; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewFeatures; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, - 
VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , multiview( multiview_ ) - , multiviewGeometryShader( multiviewGeometryShader_ ) - , multiviewTessellationShader( multiviewTessellationShader_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , timelineSemaphore{ timelineSemaphore_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceMultiviewFeatures( *reinterpret_cast( &rhs ) ) + PhysicalDeviceTimelineSemaphoreFeatures( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTimelineSemaphoreFeatures( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceMultiviewFeatures & operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceTimelineSemaphoreFeatures & operator=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceMultiviewFeatures & operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTimelineSemaphoreFeatures & operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT - { - multiview = multiview_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & - setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT - { - multiviewGeometryShader = multiviewGeometryShader_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & - setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures & + setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT { - multiviewTessellationShader = multiviewTessellationShader_; + timelineSemaphore = timelineSemaphore_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceMultiviewFeatures const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceTimelineSemaphoreFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator 
VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceTimelineSemaphoreFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, multiview, multiviewGeometryShader, multiviewTessellationShader ); + return std::tie( sType, pNext, timelineSemaphore ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMultiviewFeatures const & ) const = default; + auto operator<=>( PhysicalDeviceTimelineSemaphoreFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiview == rhs.multiview ) && ( multiviewGeometryShader == rhs.multiviewGeometryShader ) && - ( multiviewTessellationShader == rhs.multiviewTessellationShader ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timelineSemaphore == rhs.timelineSemaphore ); # endif } - bool operator!=( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 multiview = {}; - VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {}; - VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceMultiviewFeatures; + using Type = PhysicalDeviceTimelineSemaphoreFeatures; }; - using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures; - struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX + using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures; + + struct PhysicalDeviceTimelineSemaphoreProperties { - using NativeType = VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + using NativeType = VkPhysicalDeviceTimelineSemaphoreProperties; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , perViewPositionAllComponents( perViewPositionAllComponents_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( uint64_t maxTimelineSemaphoreValueDifference_ = {}, + void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxTimelineSemaphoreValueDifference{ maxTimelineSemaphoreValueDifference_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( *reinterpret_cast( &rhs ) ) + PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTimelineSemaphoreProperties( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & - operator=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceTimelineSemaphoreProperties & operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTimelineSemaphoreProperties & operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceTimelineSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceTimelineSemaphoreProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, perViewPositionAllComponents ); + return std::tie( sType, pNext, maxTimelineSemaphoreValueDifference ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & ) const = default; + auto operator<=>( PhysicalDeviceTimelineSemaphoreProperties const & ) const = default; #else - bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perViewPositionAllComponents == rhs.perViewPositionAllComponents ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ); # endif } - bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return 
!operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties; + void * pNext = {}; + uint64_t maxTimelineSemaphoreValueDifference = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + using Type = PhysicalDeviceTimelineSemaphoreProperties; }; - struct PhysicalDeviceMultiviewProperties + using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties; + + struct PhysicalDeviceToolProperties { - using NativeType = VkPhysicalDeviceMultiviewProperties; + using NativeType = VkPhysicalDeviceToolProperties; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewProperties; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceToolProperties; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_ = {}, - uint32_t maxMultiviewInstanceIndex_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxMultiviewViewCount( maxMultiviewViewCount_ ) - , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties( std::array const & name_ = {}, + std::array const & version_ = {}, + VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes_ = {}, + std::array const & description_ = {}, + std::array const & layer_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , name{ name_ } + , version{ version_ } + , purposes{ purposes_ } + , description{ description_ } + , layer{ layer_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceMultiviewProperties( *reinterpret_cast( &rhs ) ) + PhysicalDeviceToolProperties( VkPhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceToolProperties( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceMultiviewProperties & operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceToolProperties & operator=( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceMultiviewProperties & operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceToolProperties & operator=( VkPhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceMultiviewProperties const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceToolProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return 
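// -------------------------------------------------------------------------------------------------
// Editor's aside (illustrative only, not part of the generated header diff): the timeline-semaphore
// feature and property structs above are usually queried together, since the property limits how far
// apart pending signal and wait values on one semaphore may be. A minimal sketch assuming Vulkan 1.2+
// and a placeholder "physicalDevice"; it returns 0 when the feature is unsupported.
#include <vulkan/vulkan.hpp>

uint64_t timelineSemaphoreValueLimit( vk::PhysicalDevice physicalDevice )
{
  auto features = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceTimelineSemaphoreFeatures>();
  if ( !features.get<vk::PhysicalDeviceTimelineSemaphoreFeatures>().timelineSemaphore )
  {
    return 0;  // timeline semaphores not supported
  }
  auto props = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceTimelineSemaphoreProperties>();
  return props.get<vk::PhysicalDeviceTimelineSemaphoreProperties>().maxTimelineSemaphoreValueDifference;
}
// -------------------------------------------------------------------------------------------------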
*reinterpret_cast( this ); } - operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceToolProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ToolPurposeFlags const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, maxMultiviewViewCount, maxMultiviewInstanceIndex ); + return std::tie( sType, pNext, name, version, purposes, description, layer ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMultiviewProperties const & ) const = default; -#else - bool operator==( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + std::strong_ordering operator<=>( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) && - ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ); -# endif + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( version, rhs.version ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = purposes <=> rhs.purposes; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( layer, rhs.layer ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; } +#endif - bool operator!=( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( strcmp( name, rhs.name ) == 0 ) && ( strcmp( version, rhs.version ) == 0 ) && + ( purposes == rhs.purposes ) && ( strcmp( description, rhs.description ) == 0 ) && ( strcmp( layer, rhs.layer ) == 0 ); + } + + bool operator!=( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties; - void * pNext = {}; - uint32_t maxMultiviewViewCount = {}; - uint32_t maxMultiviewInstanceIndex = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D version = {}; + VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D layer = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceMultiviewProperties; + using Type = PhysicalDeviceToolProperties; }; - using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties; - struct PhysicalDeviceMutableDescriptorTypeFeaturesEXT + using PhysicalDeviceToolPropertiesEXT = PhysicalDeviceToolProperties; + + struct PhysicalDeviceTransformFeedbackFeaturesEXT { - using NativeType = VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT; + using NativeType = VkPhysicalDeviceTransformFeedbackFeaturesEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , mutableDescriptorType( mutableDescriptorType_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , transformFeedback{ transformFeedback_ } + , geometryStreams{ geometryStreams_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceMutableDescriptorTypeFeaturesEXT( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceMutableDescriptorTypeFeaturesEXT( VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceMutableDescriptorTypeFeaturesEXT( *reinterpret_cast( &rhs ) ) + PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : 
PhysicalDeviceTransformFeedbackFeaturesEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceMutableDescriptorTypeFeaturesEXT & operator=( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceMutableDescriptorTypeFeaturesEXT & operator=( VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesEXT & - setMutableDescriptorType( VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & + setTransformFeedback( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ ) VULKAN_HPP_NOEXCEPT { - mutableDescriptorType = mutableDescriptorType_; + transformFeedback = transformFeedback_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & setGeometryStreams( VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + geometryStreams = geometryStreams_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, mutableDescriptorType ); + return std::tie( sType, pNext, transformFeedback, geometryStreams ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & ) const = default; + auto operator<=>( PhysicalDeviceTransformFeedbackFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mutableDescriptorType == rhs.mutableDescriptorType ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transformFeedback == rhs.transformFeedback ) && ( 
geometryStreams == rhs.geometryStreams ); # endif } - bool operator!=( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedback = {}; + VULKAN_HPP_NAMESPACE::Bool32 geometryStreams = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceMutableDescriptorTypeFeaturesEXT; + using Type = PhysicalDeviceTransformFeedbackFeaturesEXT; }; - using PhysicalDeviceMutableDescriptorTypeFeaturesVALVE = PhysicalDeviceMutableDescriptorTypeFeaturesEXT; - struct PhysicalDeviceNonSeamlessCubeMapFeaturesEXT + struct PhysicalDeviceTransformFeedbackPropertiesEXT { - using NativeType = VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT; + using NativeType = VkPhysicalDeviceTransformFeedbackPropertiesEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 nonSeamlessCubeMap_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , nonSeamlessCubeMap( nonSeamlessCubeMap_ ) - { - } - - VULKAN_HPP_CONSTEXPR PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; - PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( uint32_t maxTransformFeedbackStreams_ = {}, + uint32_t maxTransformFeedbackBuffers_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {}, + uint32_t maxTransformFeedbackStreamDataSize_ = {}, + uint32_t maxTransformFeedbackBufferDataSize_ = {}, + uint32_t maxTransformFeedbackBufferDataStride_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxTransformFeedbackStreams{ maxTransformFeedbackStreams_ } + , maxTransformFeedbackBuffers{ maxTransformFeedbackBuffers_ } + , maxTransformFeedbackBufferSize{ maxTransformFeedbackBufferSize_ } + , maxTransformFeedbackStreamDataSize{ maxTransformFeedbackStreamDataSize_ } + , maxTransformFeedbackBufferDataSize{ maxTransformFeedbackBufferDataSize_ } + , maxTransformFeedbackBufferDataStride{ maxTransformFeedbackBufferDataStride_ } + , transformFeedbackQueries{ transformFeedbackQueries_ } + 
, transformFeedbackStreamsLinesTriangles{ transformFeedbackStreamsLinesTriangles_ } + , transformFeedbackRasterizationStreamSelect{ transformFeedbackRasterizationStreamSelect_ } + , transformFeedbackDraw{ transformFeedbackDraw_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & operator=( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & operator=( VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceTransformFeedbackPropertiesEXT( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & - setNonSeamlessCubeMap( VULKAN_HPP_NAMESPACE::Bool32 nonSeamlessCubeMap_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - nonSeamlessCubeMap = nonSeamlessCubeMap_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, nonSeamlessCubeMap ); + return std::tie( sType, + pNext, + maxTransformFeedbackStreams, + maxTransformFeedbackBuffers, + maxTransformFeedbackBufferSize, + maxTransformFeedbackStreamDataSize, + maxTransformFeedbackBufferDataSize, + maxTransformFeedbackBufferDataStride, + transformFeedbackQueries, + transformFeedbackStreamsLinesTriangles, + transformFeedbackRasterizationStreamSelect, + transformFeedbackDraw ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & ) const = default; + auto operator<=>( PhysicalDeviceTransformFeedbackPropertiesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( 
nonSeamlessCubeMap == rhs.nonSeamlessCubeMap ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams ) && + ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers ) && ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize ) && + ( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize ) && + ( maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize ) && + ( maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride ) && + ( transformFeedbackQueries == rhs.transformFeedbackQueries ) && + ( transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles ) && + ( transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect ) && + ( transformFeedbackDraw == rhs.transformFeedbackDraw ); # endif } - bool operator!=( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 nonSeamlessCubeMap = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; + void * pNext = {}; + uint32_t maxTransformFeedbackStreams = {}; + uint32_t maxTransformFeedbackBuffers = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize = {}; + uint32_t maxTransformFeedbackStreamDataSize = {}; + uint32_t maxTransformFeedbackBufferDataSize = {}; + uint32_t maxTransformFeedbackBufferDataStride = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceNonSeamlessCubeMapFeaturesEXT; + using Type = PhysicalDeviceTransformFeedbackPropertiesEXT; }; - struct PhysicalDevicePCIBusInfoPropertiesEXT + struct PhysicalDeviceUniformBufferStandardLayoutFeatures { - using NativeType = VkPhysicalDevicePCIBusInfoPropertiesEXT; + using NativeType = VkPhysicalDeviceUniformBufferStandardLayoutFeatures; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( - uint32_t pciDomain_ = {}, uint32_t pciBus_ = {}, uint32_t pciDevice_ = {}, uint32_t pciFunction_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pciDomain( pciDomain_ ) - , pciBus( pciBus_ ) - , pciDevice( pciDevice_ ) - , pciFunction( pciFunction_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , uniformBufferStandardLayout{ 
uniformBufferStandardLayout_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + PhysicalDeviceUniformBufferStandardLayoutFeatures( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevicePCIBusInfoPropertiesEXT( *reinterpret_cast( &rhs ) ) + PhysicalDeviceUniformBufferStandardLayoutFeatures( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceUniformBufferStandardLayoutFeatures( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDevicePCIBusInfoPropertiesEXT & operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceUniformBufferStandardLayoutFeatures & + operator=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDevicePCIBusInfoPropertiesEXT & operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDevicePCIBusInfoPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & + setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + uniformBufferStandardLayout = uniformBufferStandardLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pciDomain, pciBus, pciDevice, pciFunction ); + return std::tie( sType, pNext, uniformBufferStandardLayout ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevicePCIBusInfoPropertiesEXT const & ) const = default; + auto operator<=>( PhysicalDeviceUniformBufferStandardLayoutFeatures const & ) const = default; #else - bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pciDomain == rhs.pciDomain ) && ( pciBus == rhs.pciBus ) && ( pciDevice 
== rhs.pciDevice ) && - ( pciFunction == rhs.pciFunction ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ); # endif } - bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT; - void * pNext = {}; - uint32_t pciDomain = {}; - uint32_t pciBus = {}; - uint32_t pciDevice = {}; - uint32_t pciFunction = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDevicePCIBusInfoPropertiesEXT; + using Type = PhysicalDeviceUniformBufferStandardLayoutFeatures; }; - struct PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT + using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures; + + struct PhysicalDeviceVariablePointersFeatures { - using NativeType = VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + using NativeType = VkPhysicalDeviceVariablePointersFeatures; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pageableDeviceLocalMemory( pageableDeviceLocalMemory_ ) + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVariablePointersFeatures; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , variablePointersStorageBuffer{ variablePointersStorageBuffer_ } + , variablePointers{ variablePointers_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( *reinterpret_cast( &rhs ) ) + PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVariablePointersFeatures( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & - operator=( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceVariablePointersFeatures & operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & operator=( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVariablePointersFeatures & operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & - setPageableDeviceLocalMemory( VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & + setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT { - pageableDeviceLocalMemory = pageableDeviceLocalMemory_; + variablePointersStorageBuffer = variablePointersStorageBuffer_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + variablePointers = variablePointers_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVariablePointersFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVariablePointersFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pageableDeviceLocalMemory ); + return std::tie( sType, pNext, variablePointersStorageBuffer, variablePointers ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & ) const = default; + auto operator<=>( PhysicalDeviceVariablePointersFeatures const & ) const = default; #else - bool operator==( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pageableDeviceLocalMemory == rhs.pageableDeviceLocalMemory ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) && + ( variablePointers == rhs.variablePointers ); # endif } - bool operator!=( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return 
!operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + using Type = PhysicalDeviceVariablePointersFeatures; }; - struct PhysicalDevicePerformanceQueryFeaturesKHR + using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures; + + struct PhysicalDeviceVertexAttributeDivisorFeatures { - using NativeType = VkPhysicalDevicePerformanceQueryFeaturesKHR; + using NativeType = VkPhysicalDeviceVertexAttributeDivisorFeatures; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeatures; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , performanceCounterQueryPools( performanceCounterQueryPools_ ) - , performanceCounterMultipleQueryPools( performanceCounterMultipleQueryPools_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeatures( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , vertexAttributeInstanceRateDivisor{ vertexAttributeInstanceRateDivisor_ } + , vertexAttributeInstanceRateZeroDivisor{ vertexAttributeInstanceRateZeroDivisor_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeatures( PhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevicePerformanceQueryFeaturesKHR( *reinterpret_cast( &rhs ) ) + PhysicalDeviceVertexAttributeDivisorFeatures( VkPhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVertexAttributeDivisorFeatures( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDevicePerformanceQueryFeaturesKHR & operator=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceVertexAttributeDivisorFeatures & operator=( PhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; 
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDevicePerformanceQueryFeaturesKHR & operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVertexAttributeDivisorFeatures & operator=( VkPhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & - setPerformanceCounterQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeatures & + setVertexAttributeInstanceRateDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT { - performanceCounterQueryPools = performanceCounterQueryPools_; + vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & - setPerformanceCounterMultipleQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeatures & + setVertexAttributeInstanceRateZeroDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT { - performanceCounterMultipleQueryPools = performanceCounterMultipleQueryPools_; + vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDevicePerformanceQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVertexAttributeDivisorFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDevicePerformanceQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVertexAttributeDivisorFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -58303,282 +98551,275 @@ namespace VULKAN_HPP_NAMESPACE # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, performanceCounterQueryPools, performanceCounterMultipleQueryPools ); + return std::tie( sType, pNext, vertexAttributeInstanceRateDivisor, vertexAttributeInstanceRateZeroDivisor ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevicePerformanceQueryFeaturesKHR const & ) const = default; + auto operator<=>( PhysicalDeviceVertexAttributeDivisorFeatures const & ) const = default; #else - bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( performanceCounterQueryPools == rhs.performanceCounterQueryPools ) && - ( 
performanceCounterMultipleQueryPools == rhs.performanceCounterMultipleQueryPools ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor ) && + ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor ); # endif } - bool operator!=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools = {}; - VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDevicePerformanceQueryFeaturesKHR; + using Type = PhysicalDeviceVertexAttributeDivisorFeatures; }; - struct PhysicalDevicePerformanceQueryPropertiesKHR + using PhysicalDeviceVertexAttributeDivisorFeaturesEXT = PhysicalDeviceVertexAttributeDivisorFeatures; + using PhysicalDeviceVertexAttributeDivisorFeaturesKHR = PhysicalDeviceVertexAttributeDivisorFeatures; + + struct PhysicalDeviceVertexAttributeDivisorProperties { - using NativeType = VkPhysicalDevicePerformanceQueryPropertiesKHR; + using NativeType = VkPhysicalDeviceVertexAttributeDivisorProperties; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorProperties; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , allowCommandBufferQueryCopies( allowCommandBufferQueryCopies_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorProperties( uint32_t maxVertexAttribDivisor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 supportsNonZeroFirstInstance_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxVertexAttribDivisor{ maxVertexAttribDivisor_ } + , supportsNonZeroFirstInstance{ supportsNonZeroFirstInstance_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVertexAttributeDivisorProperties( PhysicalDeviceVertexAttributeDivisorProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevicePerformanceQueryPropertiesKHR( *reinterpret_cast( &rhs ) ) + PhysicalDeviceVertexAttributeDivisorProperties( VkPhysicalDeviceVertexAttributeDivisorProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : 
PhysicalDeviceVertexAttributeDivisorProperties( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDevicePerformanceQueryPropertiesKHR & operator=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceVertexAttributeDivisorProperties & operator=( PhysicalDeviceVertexAttributeDivisorProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDevicePerformanceQueryPropertiesKHR & operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVertexAttributeDivisorProperties & operator=( VkPhysicalDeviceVertexAttributeDivisorProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDevicePerformanceQueryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVertexAttributeDivisorProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDevicePerformanceQueryPropertiesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVertexAttributeDivisorProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, allowCommandBufferQueryCopies ); + return std::tie( sType, pNext, maxVertexAttribDivisor, supportsNonZeroFirstInstance ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevicePerformanceQueryPropertiesKHR const & ) const = default; + auto operator<=>( PhysicalDeviceVertexAttributeDivisorProperties const & ) const = default; #else - bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVertexAttributeDivisorProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor ) && + ( supportsNonZeroFirstInstance == rhs.supportsNonZeroFirstInstance ); # endif } - bool operator!=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVertexAttributeDivisorProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorProperties; + void * pNext = {}; + uint32_t maxVertexAttribDivisor = {}; + VULKAN_HPP_NAMESPACE::Bool32 supportsNonZeroFirstInstance = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDevicePerformanceQueryPropertiesKHR; + using Type = PhysicalDeviceVertexAttributeDivisorProperties; }; - struct PhysicalDevicePipelineCreationCacheControlFeatures + using PhysicalDeviceVertexAttributeDivisorPropertiesKHR = 
PhysicalDeviceVertexAttributeDivisorProperties; + + struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT { - using NativeType = VkPhysicalDevicePipelineCreationCacheControlFeatures; + using NativeType = VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeatures( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pipelineCreationCacheControl( pipelineCreationCacheControl_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxVertexAttribDivisor{ maxVertexAttribDivisor_ } { } VULKAN_HPP_CONSTEXPR - PhysicalDevicePipelineCreationCacheControlFeatures( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDevicePipelineCreationCacheControlFeatures( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevicePipelineCreationCacheControlFeatures( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PhysicalDevicePipelineCreationCacheControlFeatures & - operator=( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceVertexAttributeDivisorPropertiesEXT( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDevicePipelineCreationCacheControlFeatures & operator=( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVertexAttributeDivisorPropertiesEXT( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + PhysicalDeviceVertexAttributeDivisorPropertiesEXT & + operator=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures & - setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - pipelineCreationCacheControl = pipelineCreationCacheControl_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDevicePipelineCreationCacheControlFeatures const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { - 
return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDevicePipelineCreationCacheControlFeatures &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pipelineCreationCacheControl ); + return std::tie( sType, pNext, maxVertexAttribDivisor ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevicePipelineCreationCacheControlFeatures const & ) const = default; + auto operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & ) const = default; #else - bool operator==( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor ); # endif } - bool operator!=( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT; + void * pNext = {}; + uint32_t maxVertexAttribDivisor = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDevicePipelineCreationCacheControlFeatures; + using Type = PhysicalDeviceVertexAttributeDivisorPropertiesEXT; }; - using PhysicalDevicePipelineCreationCacheControlFeaturesEXT = PhysicalDevicePipelineCreationCacheControlFeatures; - struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR + struct PhysicalDeviceVertexAttributeRobustnessFeaturesEXT { - using NativeType = VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + using NativeType = VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeRobustnessFeaturesEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pipelineExecutableInfo( pipelineExecutableInfo_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeRobustnessFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeRobustness_ = {}, + void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , vertexAttributeRobustness{ vertexAttributeRobustness_ } { } VULKAN_HPP_CONSTEXPR - PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceVertexAttributeRobustnessFeaturesEXT( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( *reinterpret_cast( &rhs ) ) + PhysicalDeviceVertexAttributeRobustnessFeaturesEXT( VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVertexAttributeRobustnessFeaturesEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & - operator=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceVertexAttributeRobustnessFeaturesEXT & + operator=( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVertexAttributeRobustnessFeaturesEXT & operator=( VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeRobustnessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & - setPipelineExecutableInfo( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeRobustnessFeaturesEXT & + setVertexAttributeRobustness( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeRobustness_ ) VULKAN_HPP_NOEXCEPT { - pipelineExecutableInfo = pipelineExecutableInfo_; + vertexAttributeRobustness = vertexAttributeRobustness_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -58589,94 +98830,95 @@ namespace VULKAN_HPP_NAMESPACE # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pipelineExecutableInfo ); + return std::tie( sType, pNext, vertexAttributeRobustness ); } #endif #if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & ) const = default; + auto operator<=>( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineExecutableInfo == rhs.pipelineExecutableInfo ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexAttributeRobustness == rhs.vertexAttributeRobustness ); # endif } - bool operator!=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeRobustnessFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeRobustness = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + using Type = PhysicalDeviceVertexAttributeRobustnessFeaturesEXT; }; - struct PhysicalDevicePipelinePropertiesFeaturesEXT + struct PhysicalDeviceVertexInputDynamicStateFeaturesEXT { - using NativeType = VkPhysicalDevicePipelinePropertiesFeaturesEXT; + using NativeType = VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDevicePipelinePropertiesFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pipelinePropertiesIdentifier_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pipelinePropertiesIdentifier( pipelinePropertiesIdentifier_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , vertexInputDynamicState{ vertexInputDynamicState_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDevicePipelinePropertiesFeaturesEXT( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVertexInputDynamicStateFeaturesEXT( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDevicePipelinePropertiesFeaturesEXT( VkPhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevicePipelinePropertiesFeaturesEXT( *reinterpret_cast( &rhs ) ) + PhysicalDeviceVertexInputDynamicStateFeaturesEXT( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) 
VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVertexInputDynamicStateFeaturesEXT( *reinterpret_cast<PhysicalDeviceVertexInputDynamicStateFeaturesEXT const *>( &rhs ) )
     {
     }
-#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

-    PhysicalDevicePipelinePropertiesFeaturesEXT & operator=( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    PhysicalDeviceVertexInputDynamicStateFeaturesEXT & operator=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

-    PhysicalDevicePipelinePropertiesFeaturesEXT & operator=( VkPhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceVertexInputDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT const *>( &rhs );
       return *this;
     }

-#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
-    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelinePropertiesFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelinePropertiesFeaturesEXT &
-      setPipelinePropertiesIdentifier( VULKAN_HPP_NAMESPACE::Bool32 pipelinePropertiesIdentifier_ ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT &
+      setVertexInputDynamicState( VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ ) VULKAN_HPP_NOEXCEPT
     {
-      pipelinePropertiesIdentifier = pipelinePropertiesIdentifier_;
+      vertexInputDynamicState = vertexInputDynamicState_;
       return *this;
     }
-#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+#endif /*VULKAN_HPP_NO_SETTERS*/

-    operator VkPhysicalDevicePipelinePropertiesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
+    operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDevicePipelinePropertiesFeaturesEXT *>( this );
+      return *reinterpret_cast<const VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *>( this );
     }

-    operator VkPhysicalDevicePipelinePropertiesFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDevicePipelinePropertiesFeaturesEXT *>( this );
+      return *reinterpret_cast<VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *>( this );
     }

 #if defined( VULKAN_HPP_USE_REFLECT )
@@ -58687,94 +98929,93 @@ namespace VULKAN_HPP_NAMESPACE
 # endif
       reflect() const VULKAN_HPP_NOEXCEPT
     {
-      return std::tie( sType, pNext, pipelinePropertiesIdentifier );
+      return std::tie( sType, pNext, vertexInputDynamicState );
     }
 #endif

 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
-    auto operator<=>( PhysicalDevicePipelinePropertiesFeaturesEXT const & ) const = default;
+    auto operator<=>( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & ) const = default;
 #else
-    bool operator==( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
 # if defined( VULKAN_HPP_USE_REFLECT )
       return this->reflect() == rhs.reflect();
 # else
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelinePropertiesIdentifier == rhs.pipelinePropertiesIdentifier );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexInputDynamicState == rhs.vertexInputDynamicState );
# endif
     }

-    bool operator!=( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType                        = StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT;
-    void *                              pNext                        = {};
-    VULKAN_HPP_NAMESPACE::Bool32        pipelinePropertiesIdentifier = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType                   = StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT;
+    void *                              pNext                   = {};
+    VULKAN_HPP_NAMESPACE::Bool32        vertexInputDynamicState = {};
   };

   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT>
+  struct CppType<StructureType, StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT>
   {
-    using Type = PhysicalDevicePipelinePropertiesFeaturesEXT;
+    using Type = PhysicalDeviceVertexInputDynamicStateFeaturesEXT;
   };

-  struct PhysicalDevicePipelineRobustnessFeaturesEXT
+  struct PhysicalDeviceVideoEncodeAV1FeaturesKHR
   {
-    using NativeType = VkPhysicalDevicePipelineRobustnessFeaturesEXT;
+    using NativeType = VkPhysicalDeviceVideoEncodeAV1FeaturesKHR;

     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineRobustnessFeaturesEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoEncodeAv1FeaturesKHR;

-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ = {},
-                                                                      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
-      : pNext( pNext_ )
-      , pipelineRobustness( pipelineRobustness_ )
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeAV1FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 videoEncodeAV1_ = {},
+                                                                  void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , videoEncodeAV1{ videoEncodeAV1_ }
     {
     }

-    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessFeaturesEXT( PhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeAV1FeaturesKHR( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-    PhysicalDevicePipelineRobustnessFeaturesEXT( VkPhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-      : PhysicalDevicePipelineRobustnessFeaturesEXT( *reinterpret_cast<PhysicalDevicePipelineRobustnessFeaturesEXT const *>( &rhs ) )
+    PhysicalDeviceVideoEncodeAV1FeaturesKHR( VkPhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : PhysicalDeviceVideoEncodeAV1FeaturesKHR( *reinterpret_cast<PhysicalDeviceVideoEncodeAV1FeaturesKHR const *>( &rhs ) )
     {
     }
-#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

-    PhysicalDevicePipelineRobustnessFeaturesEXT & operator=( PhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    PhysicalDeviceVideoEncodeAV1FeaturesKHR & operator=( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

-    PhysicalDevicePipelineRobustnessFeaturesEXT & operator=( VkPhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    PhysicalDeviceVideoEncodeAV1FeaturesKHR & operator=( VkPhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeAV1FeaturesKHR const *>( &rhs );
       return *this;
     }

-#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
-    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeAV1FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeaturesEXT & - setPipelineRobustness( VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeAV1FeaturesKHR & setVideoEncodeAV1( VULKAN_HPP_NAMESPACE::Bool32 videoEncodeAV1_ ) VULKAN_HPP_NOEXCEPT { - pipelineRobustness = pipelineRobustness_; + videoEncodeAV1 = videoEncodeAV1_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDevicePipelineRobustnessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVideoEncodeAV1FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDevicePipelineRobustnessFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVideoEncodeAV1FeaturesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -58785,662 +99026,530 @@ namespace VULKAN_HPP_NAMESPACE # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pipelineRobustness ); + return std::tie( sType, pNext, videoEncodeAV1 ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevicePipelineRobustnessFeaturesEXT const & ) const = default; + auto operator<=>( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & ) const = default; #else - bool operator==( PhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineRobustness == rhs.pipelineRobustness ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoEncodeAV1 == rhs.videoEncodeAV1 ); # endif } - bool operator!=( PhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineRobustnessFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoEncodeAv1FeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 videoEncodeAV1 = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDevicePipelineRobustnessFeaturesEXT; + using Type = PhysicalDeviceVideoEncodeAV1FeaturesKHR; }; - struct PhysicalDevicePipelineRobustnessPropertiesEXT + struct VideoProfileInfoKHR { - using NativeType = VkPhysicalDevicePipelineRobustnessPropertiesEXT; + using NativeType = VkVideoProfileInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineRobustnessPropertiesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfileInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - PhysicalDevicePipelineRobustnessPropertiesEXT( 
VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessStorageBuffers_ = - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessUniformBuffers_ = - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessVertexInputs_ = - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, - VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT defaultRobustnessImages_ = - VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT::eDeviceDefault, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , defaultRobustnessStorageBuffers( defaultRobustnessStorageBuffers_ ) - , defaultRobustnessUniformBuffers( defaultRobustnessUniformBuffers_ ) - , defaultRobustnessVertexInputs( defaultRobustnessVertexInputs_ ) - , defaultRobustnessImages( defaultRobustnessImages_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoProfileInfoKHR( + VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ = VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eNone, + VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ = {}, + VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ = {}, + VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , videoCodecOperation{ videoCodecOperation_ } + , chromaSubsampling{ chromaSubsampling_ } + , lumaBitDepth{ lumaBitDepth_ } + , chromaBitDepth{ chromaBitDepth_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDevicePipelineRobustnessPropertiesEXT( PhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoProfileInfoKHR( VideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDevicePipelineRobustnessPropertiesEXT( VkPhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevicePipelineRobustnessPropertiesEXT( *reinterpret_cast( &rhs ) ) + VideoProfileInfoKHR( VkVideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : VideoProfileInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDevicePipelineRobustnessPropertiesEXT & operator=( PhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoProfileInfoKHR & operator=( VideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDevicePipelineRobustnessPropertiesEXT & operator=( VkPhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoProfileInfoKHR & operator=( VkVideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDevicePipelineRobustnessPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDevicePipelineRobustnessPropertiesEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, defaultRobustnessStorageBuffers, 
defaultRobustnessUniformBuffers, defaultRobustnessVertexInputs, defaultRobustnessImages ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevicePipelineRobustnessPropertiesEXT const & ) const = default; -#else - bool operator==( PhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( defaultRobustnessStorageBuffers == rhs.defaultRobustnessStorageBuffers ) && - ( defaultRobustnessUniformBuffers == rhs.defaultRobustnessUniformBuffers ) && - ( defaultRobustnessVertexInputs == rhs.defaultRobustnessVertexInputs ) && ( defaultRobustnessImages == rhs.defaultRobustnessImages ); -# endif + pNext = pNext_; + return *this; } - bool operator!=( PhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & + setVideoCodecOperation( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + videoCodecOperation = videoCodecOperation_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineRobustnessPropertiesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessStorageBuffers = - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault; - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessUniformBuffers = - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault; - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessVertexInputs = - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault; - VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT defaultRobustnessImages = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT::eDeviceDefault; - }; - - template <> - struct CppType - { - using Type = PhysicalDevicePipelineRobustnessPropertiesEXT; - }; - - struct PhysicalDevicePointClippingProperties - { - using NativeType = VkPhysicalDevicePointClippingProperties; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePointClippingProperties; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( - VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pointClippingBehavior( pointClippingBehavior_ ) + VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & + setChromaSubsampling( VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ ) VULKAN_HPP_NOEXCEPT { + chromaSubsampling = chromaSubsampling_; + return *this; } - VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevicePointClippingProperties( *reinterpret_cast( &rhs ) ) + 
VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setLumaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ ) VULKAN_HPP_NOEXCEPT { + lumaBitDepth = lumaBitDepth_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PhysicalDevicePointClippingProperties & operator=( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDevicePointClippingProperties & operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setChromaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + chromaBitDepth = chromaBitDepth_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDevicePointClippingProperties const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDevicePointClippingProperties &() VULKAN_HPP_NOEXCEPT + operator VkVideoProfileInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pointClippingBehavior ); + return std::tie( sType, pNext, videoCodecOperation, chromaSubsampling, lumaBitDepth, chromaBitDepth ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevicePointClippingProperties const & ) const = default; + auto operator<=>( VideoProfileInfoKHR const & ) const = default; #else - bool operator==( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pointClippingBehavior == rhs.pointClippingBehavior ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoCodecOperation == rhs.videoCodecOperation ) && + ( chromaSubsampling == rhs.chromaSubsampling ) && ( lumaBitDepth == rhs.lumaBitDepth ) && ( chromaBitDepth == rhs.chromaBitDepth ); # endif } - bool operator!=( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfileInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation = VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eNone; + VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling = {}; + VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth = {}; + VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDevicePointClippingProperties; + using Type = VideoProfileInfoKHR; }; - using 
PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct PhysicalDevicePortabilitySubsetFeaturesKHR + struct PhysicalDeviceVideoEncodeQualityLevelInfoKHR { - using NativeType = VkPhysicalDevicePortabilitySubsetFeaturesKHR; + using NativeType = VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoEncodeQualityLevelInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 events_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , constantAlphaColorBlendFactors( constantAlphaColorBlendFactors_ ) - , events( events_ ) - , imageViewFormatReinterpretation( imageViewFormatReinterpretation_ ) - , imageViewFormatSwizzle( imageViewFormatSwizzle_ ) - , imageView2DOn3DImage( imageView2DOn3DImage_ ) - , multisampleArrayImage( multisampleArrayImage_ ) - , mutableComparisonSamplers( mutableComparisonSamplers_ ) - , pointPolygons( pointPolygons_ ) - , samplerMipLodBias( samplerMipLodBias_ ) - , separateStencilMaskRef( separateStencilMaskRef_ ) - , shaderSampleRateInterpolationFunctions( shaderSampleRateInterpolationFunctions_ ) - , tessellationIsolines( tessellationIsolines_ ) - , tessellationPointMode( tessellationPointMode_ ) - , triangleFans( triangleFans_ ) - , vertexAttributeAccessBeyondStride( vertexAttributeAccessBeyondStride_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeQualityLevelInfoKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile_ = {}, + uint32_t qualityLevel_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pVideoProfile{ pVideoProfile_ } + , qualityLevel{ qualityLevel_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeQualityLevelInfoKHR( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDevicePortabilitySubsetFeaturesKHR( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevicePortabilitySubsetFeaturesKHR( *reinterpret_cast( &rhs ) ) + 
PhysicalDeviceVideoEncodeQualityLevelInfoKHR( VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVideoEncodeQualityLevelInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceVideoEncodeQualityLevelInfoKHR & operator=( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVideoEncodeQualityLevelInfoKHR & operator=( VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQualityLevelInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & - setConstantAlphaColorBlendFactors( VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ ) VULKAN_HPP_NOEXCEPT - { - constantAlphaColorBlendFactors = constantAlphaColorBlendFactors_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setEvents( VULKAN_HPP_NAMESPACE::Bool32 events_ ) VULKAN_HPP_NOEXCEPT - { - events = events_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & - setImageViewFormatReinterpretation( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ ) VULKAN_HPP_NOEXCEPT - { - imageViewFormatReinterpretation = imageViewFormatReinterpretation_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & - setImageViewFormatSwizzle( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQualityLevelInfoKHR & + setPVideoProfile( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile_ ) VULKAN_HPP_NOEXCEPT { - imageViewFormatSwizzle = imageViewFormatSwizzle_; + pVideoProfile = pVideoProfile_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & - setImageView2DOn3DImage( VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQualityLevelInfoKHR & setQualityLevel( uint32_t qualityLevel_ ) VULKAN_HPP_NOEXCEPT { - imageView2DOn3DImage = imageView2DOn3DImage_; + qualityLevel = qualityLevel_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & - setMultisampleArrayImage( VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ ) VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - multisampleArrayImage = multisampleArrayImage_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & - setMutableComparisonSamplers( VULKAN_HPP_NAMESPACE::Bool32 
mutableComparisonSamplers_ ) VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR &() VULKAN_HPP_NOEXCEPT { - mutableComparisonSamplers = mutableComparisonSamplers_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setPointPolygons( VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - pointPolygons = pointPolygons_; - return *this; + return std::tie( sType, pNext, pVideoProfile, qualityLevel ); } +#endif - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & - setSamplerMipLodBias( VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - samplerMipLodBias = samplerMipLodBias_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pVideoProfile == rhs.pVideoProfile ) && ( qualityLevel == rhs.qualityLevel ); +# endif } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & - setSeparateStencilMaskRef( VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVideoEncodeQualityLevelInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - separateStencilMaskRef = separateStencilMaskRef_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & - setShaderSampleRateInterpolationFunctions( VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ ) VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoEncodeQualityLevelInfoKHR; + const void * pNext = {}; + const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile = {}; + uint32_t qualityLevel = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVideoEncodeQualityLevelInfoKHR; + }; + + struct PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR + { + using NativeType = VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 videoEncodeQuantizationMap_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , videoEncodeQuantizationMap{ videoEncodeQuantizationMap_ } { - shaderSampleRateInterpolationFunctions = shaderSampleRateInterpolationFunctions_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & - setTessellationIsolines( VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR( PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR( 
VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR( *reinterpret_cast( &rhs ) ) { - tessellationIsolines = tessellationIsolines_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & - setTessellationPointMode( VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR & + operator=( PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR & operator=( VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - tessellationPointMode = tessellationPointMode_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setTriangleFans( VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - triangleFans = triangleFans_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & - setVertexAttributeAccessBeyondStride( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR & + setVideoEncodeQuantizationMap( VULKAN_HPP_NAMESPACE::Bool32 videoEncodeQuantizationMap_ ) VULKAN_HPP_NOEXCEPT { - vertexAttributeAccessBeyondStride = vertexAttributeAccessBeyondStride_; + videoEncodeQuantizationMap = videoEncodeQuantizationMap_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDevicePortabilitySubsetFeaturesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - constantAlphaColorBlendFactors, - events, - imageViewFormatReinterpretation, - imageViewFormatSwizzle, - imageView2DOn3DImage, - multisampleArrayImage, - mutableComparisonSamplers, - pointPolygons, - samplerMipLodBias, - separateStencilMaskRef, - shaderSampleRateInterpolationFunctions, - tessellationIsolines, - tessellationPointMode, - triangleFans, - vertexAttributeAccessBeyondStride ); + return std::tie( sType, pNext, videoEncodeQuantizationMap ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevicePortabilitySubsetFeaturesKHR const & ) const = default; -# else - bool operator==( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( 
PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( constantAlphaColorBlendFactors == rhs.constantAlphaColorBlendFactors ) && - ( events == rhs.events ) && ( imageViewFormatReinterpretation == rhs.imageViewFormatReinterpretation ) && - ( imageViewFormatSwizzle == rhs.imageViewFormatSwizzle ) && ( imageView2DOn3DImage == rhs.imageView2DOn3DImage ) && - ( multisampleArrayImage == rhs.multisampleArrayImage ) && ( mutableComparisonSamplers == rhs.mutableComparisonSamplers ) && - ( pointPolygons == rhs.pointPolygons ) && ( samplerMipLodBias == rhs.samplerMipLodBias ) && - ( separateStencilMaskRef == rhs.separateStencilMaskRef ) && - ( shaderSampleRateInterpolationFunctions == rhs.shaderSampleRateInterpolationFunctions ) && ( tessellationIsolines == rhs.tessellationIsolines ) && - ( tessellationPointMode == rhs.tessellationPointMode ) && ( triangleFans == rhs.triangleFans ) && - ( vertexAttributeAccessBeyondStride == rhs.vertexAttributeAccessBeyondStride ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoEncodeQuantizationMap == rhs.videoEncodeQuantizationMap ); +# endif } - bool operator!=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors = {}; - VULKAN_HPP_NAMESPACE::Bool32 events = {}; - VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation = {}; - VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle = {}; - VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage = {}; - VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage = {}; - VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers = {}; - VULKAN_HPP_NAMESPACE::Bool32 pointPolygons = {}; - VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias = {}; - VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions = {}; - VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines = {}; - VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode = {}; - VULKAN_HPP_NAMESPACE::Bool32 triangleFans = {}; - VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 videoEncodeQuantizationMap = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDevicePortabilitySubsetFeaturesKHR; + using Type = PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct PhysicalDevicePortabilitySubsetPropertiesKHR + struct PhysicalDeviceVideoFormatInfoKHR { - using NativeType = VkPhysicalDevicePortabilitySubsetPropertiesKHR; + using NativeType = VkPhysicalDeviceVideoFormatInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoFormatInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( uint32_t minVertexInputBindingStrideAlignment_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , minVertexInputBindingStrideAlignment( minVertexInputBindingStrideAlignment_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageUsage{ imageUsage_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDevicePortabilitySubsetPropertiesKHR( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevicePortabilitySubsetPropertiesKHR( *reinterpret_cast( &rhs ) ) + PhysicalDeviceVideoFormatInfoKHR( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVideoFormatInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceVideoFormatInfoKHR & operator=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVideoFormatInfoKHR & operator=( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetPropertiesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoFormatInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetPropertiesKHR & - setMinVertexInputBindingStrideAlignment( uint32_t minVertexInputBindingStrideAlignment_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoFormatInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT { - minVertexInputBindingStrideAlignment = minVertexInputBindingStrideAlignment_; + imageUsage = imageUsage_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVideoFormatInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDevicePortabilitySubsetPropertiesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVideoFormatInfoKHR &() VULKAN_HPP_NOEXCEPT { - return 
*reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, minVertexInputBindingStrideAlignment ); + return std::tie( sType, pNext, imageUsage ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevicePortabilitySubsetPropertiesKHR const & ) const = default; -# else - bool operator==( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVideoFormatInfoKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minVertexInputBindingStrideAlignment == rhs.minVertexInputBindingStrideAlignment ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageUsage == rhs.imageUsage ); +# endif } - bool operator!=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR; - void * pNext = {}; - uint32_t minVertexInputBindingStrideAlignment = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoFormatInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDevicePortabilitySubsetPropertiesKHR; + using Type = PhysicalDeviceVideoFormatInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - struct PhysicalDevicePresentIdFeaturesKHR + struct PhysicalDeviceVideoMaintenance1FeaturesKHR { - using NativeType = VkPhysicalDevicePresentIdFeaturesKHR; + using NativeType = VkPhysicalDeviceVideoMaintenance1FeaturesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentIdFeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoMaintenance1FeaturesKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 presentId_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , presentId( presentId_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoMaintenance1FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 videoMaintenance1_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , videoMaintenance1{ videoMaintenance1_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR( PhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoMaintenance1FeaturesKHR( PhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - 
PhysicalDevicePresentIdFeaturesKHR( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevicePresentIdFeaturesKHR( *reinterpret_cast( &rhs ) ) + PhysicalDeviceVideoMaintenance1FeaturesKHR( VkPhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVideoMaintenance1FeaturesKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDevicePresentIdFeaturesKHR & operator=( PhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceVideoMaintenance1FeaturesKHR & operator=( PhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDevicePresentIdFeaturesKHR & operator=( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVideoMaintenance1FeaturesKHR & operator=( VkPhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoMaintenance1FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR & setPresentId( VULKAN_HPP_NAMESPACE::Bool32 presentId_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoMaintenance1FeaturesKHR & + setVideoMaintenance1( VULKAN_HPP_NAMESPACE::Bool32 videoMaintenance1_ ) VULKAN_HPP_NOEXCEPT { - presentId = presentId_; + videoMaintenance1 = videoMaintenance1_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDevicePresentIdFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVideoMaintenance1FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDevicePresentIdFeaturesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVideoMaintenance1FeaturesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -59451,92 +99560,94 @@ namespace VULKAN_HPP_NAMESPACE # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, presentId ); + return std::tie( sType, pNext, videoMaintenance1 ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevicePresentIdFeaturesKHR const & ) const = default; + auto operator<=>( PhysicalDeviceVideoMaintenance1FeaturesKHR const & ) const = default; #else - bool operator==( PhysicalDevicePresentIdFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVideoMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentId == rhs.presentId ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoMaintenance1 == rhs.videoMaintenance1 ); # endif } - bool operator!=( PhysicalDevicePresentIdFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVideoMaintenance1FeaturesKHR 
const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentIdFeaturesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 presentId = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoMaintenance1FeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 videoMaintenance1 = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDevicePresentIdFeaturesKHR; + using Type = PhysicalDeviceVideoMaintenance1FeaturesKHR; }; - struct PhysicalDevicePresentWaitFeaturesKHR + struct PhysicalDeviceVideoMaintenance2FeaturesKHR { - using NativeType = VkPhysicalDevicePresentWaitFeaturesKHR; + using NativeType = VkPhysicalDeviceVideoMaintenance2FeaturesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentWaitFeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoMaintenance2FeaturesKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 presentWait_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , presentWait( presentWait_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoMaintenance2FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 videoMaintenance2_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , videoMaintenance2{ videoMaintenance2_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoMaintenance2FeaturesKHR( PhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDevicePresentWaitFeaturesKHR( VkPhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevicePresentWaitFeaturesKHR( *reinterpret_cast( &rhs ) ) + PhysicalDeviceVideoMaintenance2FeaturesKHR( VkPhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVideoMaintenance2FeaturesKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDevicePresentWaitFeaturesKHR & operator=( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceVideoMaintenance2FeaturesKHR & operator=( PhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDevicePresentWaitFeaturesKHR & operator=( VkPhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVideoMaintenance2FeaturesKHR & operator=( VkPhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoMaintenance2FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 
PhysicalDevicePresentWaitFeaturesKHR & setPresentWait( VULKAN_HPP_NAMESPACE::Bool32 presentWait_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoMaintenance2FeaturesKHR & + setVideoMaintenance2( VULKAN_HPP_NAMESPACE::Bool32 videoMaintenance2_ ) VULKAN_HPP_NOEXCEPT { - presentWait = presentWait_; + videoMaintenance2 = videoMaintenance2_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDevicePresentWaitFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVideoMaintenance2FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDevicePresentWaitFeaturesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVideoMaintenance2FeaturesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -59547,225 +99658,342 @@ namespace VULKAN_HPP_NAMESPACE # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, presentWait ); + return std::tie( sType, pNext, videoMaintenance2 ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevicePresentWaitFeaturesKHR const & ) const = default; + auto operator<=>( PhysicalDeviceVideoMaintenance2FeaturesKHR const & ) const = default; #else - bool operator==( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentWait == rhs.presentWait ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoMaintenance2 == rhs.videoMaintenance2 ); # endif } - bool operator!=( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVideoMaintenance2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentWaitFeaturesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 presentWait = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoMaintenance2FeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 videoMaintenance2 = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDevicePresentWaitFeaturesKHR; + using Type = PhysicalDeviceVideoMaintenance2FeaturesKHR; }; - struct PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT + struct PhysicalDeviceVulkan11Features { - using NativeType = VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + using NativeType = VkPhysicalDeviceVulkan11Features; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Features; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - 
, primitiveTopologyListRestart( primitiveTopologyListRestart_ ) - , primitiveTopologyPatchListRestart( primitiveTopologyPatchListRestart_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , storageBuffer16BitAccess{ storageBuffer16BitAccess_ } + , uniformAndStorageBuffer16BitAccess{ uniformAndStorageBuffer16BitAccess_ } + , storagePushConstant16{ storagePushConstant16_ } + , storageInputOutput16{ storageInputOutput16_ } + , multiview{ multiview_ } + , multiviewGeometryShader{ multiviewGeometryShader_ } + , multiviewTessellationShader{ multiviewTessellationShader_ } + , variablePointersStorageBuffer{ variablePointersStorageBuffer_ } + , variablePointers{ variablePointers_ } + , protectedMemory{ protectedMemory_ } + , samplerYcbcrConversion{ samplerYcbcrConversion_ } + , shaderDrawParameters{ shaderDrawParameters_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( *reinterpret_cast( &rhs ) ) + PhysicalDeviceVulkan11Features( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan11Features( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & - operator=( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceVulkan11Features & operator=( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & operator=( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Features & operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = 
pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & - setPrimitiveTopologyListRestart( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & + setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT { - primitiveTopologyListRestart = primitiveTopologyListRestart_; + storageBuffer16BitAccess = storageBuffer16BitAccess_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & - setPrimitiveTopologyPatchListRestart( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & + setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT { - primitiveTopologyPatchListRestart = primitiveTopologyPatchListRestart_; + uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + storagePushConstant16 = storagePushConstant16_; + return *this; } - operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + storageInputOutput16 = storageInputOutput16_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT + { + multiview = multiview_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & + setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT + { + multiviewGeometryShader = multiviewGeometryShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & + setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT + { + multiviewTessellationShader = multiviewTessellationShader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & + setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT + { + variablePointersStorageBuffer = variablePointersStorageBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT + { + variablePointers = variablePointers_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT + { + protectedMemory = protectedMemory_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & + setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT + { + samplerYcbcrConversion = samplerYcbcrConversion_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & 
setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT + { + shaderDrawParameters = shaderDrawParameters_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceVulkan11Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVulkan11Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, primitiveTopologyListRestart, primitiveTopologyPatchListRestart ); + return std::tie( sType, + pNext, + storageBuffer16BitAccess, + uniformAndStorageBuffer16BitAccess, + storagePushConstant16, + storageInputOutput16, + multiview, + multiviewGeometryShader, + multiviewTessellationShader, + variablePointersStorageBuffer, + variablePointers, + protectedMemory, + samplerYcbcrConversion, + shaderDrawParameters ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & ) const = default; + auto operator<=>( PhysicalDeviceVulkan11Features const & ) const = default; #else - bool operator==( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( primitiveTopologyListRestart == rhs.primitiveTopologyListRestart ) && - ( primitiveTopologyPatchListRestart == rhs.primitiveTopologyPatchListRestart ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) && + ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) && ( storagePushConstant16 == rhs.storagePushConstant16 ) && + ( storageInputOutput16 == rhs.storageInputOutput16 ) && ( multiview == rhs.multiview ) && + ( multiviewGeometryShader == rhs.multiviewGeometryShader ) && ( multiviewTessellationShader == rhs.multiviewTessellationShader ) && + ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) && ( variablePointers == rhs.variablePointers ) && + ( protectedMemory == rhs.protectedMemory ) && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ) && + ( shaderDrawParameters == rhs.shaderDrawParameters ); # endif } - bool operator!=( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart = {}; - VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiview = {}; + VULKAN_HPP_NAMESPACE::Bool32 
multiviewGeometryShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + using Type = PhysicalDeviceVulkan11Features; }; - struct PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT + struct PhysicalDeviceVulkan11Properties { - using NativeType = VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; + using NativeType = VkPhysicalDeviceVulkan11Properties; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , primitivesGeneratedQuery( primitivesGeneratedQuery_ ) - , primitivesGeneratedQueryWithRasterizerDiscard( primitivesGeneratedQueryWithRasterizerDiscard_ ) - , primitivesGeneratedQueryWithNonZeroStreams( primitivesGeneratedQueryWithNonZeroStreams_ ) - { - } - - VULKAN_HPP_CONSTEXPR - PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & - operator=( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & operator=( VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Properties; - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & - setPrimitivesGeneratedQuery( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( + std::array const & deviceUUID_ = {}, + std::array const & driverUUID_ = {}, + std::array const & deviceLUID_ = {}, + uint32_t deviceNodeMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, + uint32_t subgroupSize_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ = {}, + VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ = {}, + 
VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ = {}, + VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, + uint32_t maxMultiviewViewCount_ = {}, + uint32_t maxMultiviewInstanceIndex_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}, + uint32_t maxPerSetDescriptors_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceUUID{ deviceUUID_ } + , driverUUID{ driverUUID_ } + , deviceLUID{ deviceLUID_ } + , deviceNodeMask{ deviceNodeMask_ } + , deviceLUIDValid{ deviceLUIDValid_ } + , subgroupSize{ subgroupSize_ } + , subgroupSupportedStages{ subgroupSupportedStages_ } + , subgroupSupportedOperations{ subgroupSupportedOperations_ } + , subgroupQuadOperationsInAllStages{ subgroupQuadOperationsInAllStages_ } + , pointClippingBehavior{ pointClippingBehavior_ } + , maxMultiviewViewCount{ maxMultiviewViewCount_ } + , maxMultiviewInstanceIndex{ maxMultiviewInstanceIndex_ } + , protectedNoFault{ protectedNoFault_ } + , maxPerSetDescriptors{ maxPerSetDescriptors_ } + , maxMemoryAllocationSize{ maxMemoryAllocationSize_ } { - primitivesGeneratedQuery = primitivesGeneratedQuery_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & - setPrimitivesGeneratedQueryWithRasterizerDiscard( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan11Properties( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan11Properties( *reinterpret_cast( &rhs ) ) { - primitivesGeneratedQueryWithRasterizerDiscard = primitivesGeneratedQueryWithRasterizerDiscard_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & - setPrimitivesGeneratedQueryWithNonZeroStreams( VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan11Properties & operator=( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PhysicalDeviceVulkan11Properties & operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT { - primitivesGeneratedQueryWithNonZeroStreams = primitivesGeneratedQueryWithNonZeroStreams_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVulkan11Properties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVulkan11Properties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -59774,1061 +100002,1659 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &, VULKAN_HPP_NAMESPACE::Bool32 const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, + VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags const &, 
VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &> + VULKAN_HPP_NAMESPACE::PointClippingBehavior const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, primitivesGeneratedQuery, primitivesGeneratedQueryWithRasterizerDiscard, primitivesGeneratedQueryWithNonZeroStreams ); + return std::tie( sType, + pNext, + deviceUUID, + driverUUID, + deviceLUID, + deviceNodeMask, + deviceLUIDValid, + subgroupSize, + subgroupSupportedStages, + subgroupSupportedOperations, + subgroupQuadOperationsInAllStages, + pointClippingBehavior, + maxMultiviewViewCount, + maxMultiviewInstanceIndex, + protectedNoFault, + maxPerSetDescriptors, + maxMemoryAllocationSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & ) const = default; + auto operator<=>( PhysicalDeviceVulkan11Properties const & ) const = default; #else - bool operator==( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( primitivesGeneratedQuery == rhs.primitivesGeneratedQuery ) && - ( primitivesGeneratedQueryWithRasterizerDiscard == rhs.primitivesGeneratedQueryWithRasterizerDiscard ) && - ( primitivesGeneratedQueryWithNonZeroStreams == rhs.primitivesGeneratedQueryWithNonZeroStreams ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) && ( driverUUID == rhs.driverUUID ) && + ( deviceLUID == rhs.deviceLUID ) && ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid ) && + ( subgroupSize == rhs.subgroupSize ) && ( subgroupSupportedStages == rhs.subgroupSupportedStages ) && + ( subgroupSupportedOperations == rhs.subgroupSupportedOperations ) && + ( subgroupQuadOperationsInAllStages == rhs.subgroupQuadOperationsInAllStages ) && ( pointClippingBehavior == rhs.pointClippingBehavior ) && + ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ) && + ( protectedNoFault == rhs.protectedNoFault ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) && + ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize ); # endif } - bool operator!=( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQuery = {}; - VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard = {}; - VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Properties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverUUID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceLUID = {}; + uint32_t deviceNodeMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 
deviceLUIDValid = {}; + uint32_t subgroupSize = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages = {}; + VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations = {}; + VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages = {}; + VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes; + uint32_t maxMultiviewViewCount = {}; + uint32_t maxMultiviewInstanceIndex = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {}; + uint32_t maxPerSetDescriptors = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; + using Type = PhysicalDeviceVulkan11Properties; }; - struct PhysicalDevicePrivateDataFeatures + struct PhysicalDeviceVulkan12Features { - using NativeType = VkPhysicalDevicePrivateDataFeatures; + using NativeType = VkPhysicalDeviceVulkan12Features; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrivateDataFeatures; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Features; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures( VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , privateData( privateData_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, + 
VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , samplerMirrorClampToEdge{ samplerMirrorClampToEdge_ } + , drawIndirectCount{ drawIndirectCount_ } + , storageBuffer8BitAccess{ storageBuffer8BitAccess_ } + , uniformAndStorageBuffer8BitAccess{ uniformAndStorageBuffer8BitAccess_ } + , storagePushConstant8{ storagePushConstant8_ } + , shaderBufferInt64Atomics{ shaderBufferInt64Atomics_ } + , shaderSharedInt64Atomics{ shaderSharedInt64Atomics_ } + , shaderFloat16{ shaderFloat16_ } + , shaderInt8{ shaderInt8_ } + , descriptorIndexing{ descriptorIndexing_ } + , shaderInputAttachmentArrayDynamicIndexing{ shaderInputAttachmentArrayDynamicIndexing_ } + , shaderUniformTexelBufferArrayDynamicIndexing{ shaderUniformTexelBufferArrayDynamicIndexing_ } + , shaderStorageTexelBufferArrayDynamicIndexing{ shaderStorageTexelBufferArrayDynamicIndexing_ } + , shaderUniformBufferArrayNonUniformIndexing{ shaderUniformBufferArrayNonUniformIndexing_ } + , shaderSampledImageArrayNonUniformIndexing{ shaderSampledImageArrayNonUniformIndexing_ } + , shaderStorageBufferArrayNonUniformIndexing{ shaderStorageBufferArrayNonUniformIndexing_ } + , shaderStorageImageArrayNonUniformIndexing{ shaderStorageImageArrayNonUniformIndexing_ } + , shaderInputAttachmentArrayNonUniformIndexing{ shaderInputAttachmentArrayNonUniformIndexing_ } + , shaderUniformTexelBufferArrayNonUniformIndexing{ shaderUniformTexelBufferArrayNonUniformIndexing_ } + , shaderStorageTexelBufferArrayNonUniformIndexing{ shaderStorageTexelBufferArrayNonUniformIndexing_ } + , descriptorBindingUniformBufferUpdateAfterBind{ descriptorBindingUniformBufferUpdateAfterBind_ } + , descriptorBindingSampledImageUpdateAfterBind{ descriptorBindingSampledImageUpdateAfterBind_ } + , descriptorBindingStorageImageUpdateAfterBind{ descriptorBindingStorageImageUpdateAfterBind_ } + , descriptorBindingStorageBufferUpdateAfterBind{ descriptorBindingStorageBufferUpdateAfterBind_ } + , descriptorBindingUniformTexelBufferUpdateAfterBind{ descriptorBindingUniformTexelBufferUpdateAfterBind_ } + , descriptorBindingStorageTexelBufferUpdateAfterBind{ 
descriptorBindingStorageTexelBufferUpdateAfterBind_ } + , descriptorBindingUpdateUnusedWhilePending{ descriptorBindingUpdateUnusedWhilePending_ } + , descriptorBindingPartiallyBound{ descriptorBindingPartiallyBound_ } + , descriptorBindingVariableDescriptorCount{ descriptorBindingVariableDescriptorCount_ } + , runtimeDescriptorArray{ runtimeDescriptorArray_ } + , samplerFilterMinmax{ samplerFilterMinmax_ } + , scalarBlockLayout{ scalarBlockLayout_ } + , imagelessFramebuffer{ imagelessFramebuffer_ } + , uniformBufferStandardLayout{ uniformBufferStandardLayout_ } + , shaderSubgroupExtendedTypes{ shaderSubgroupExtendedTypes_ } + , separateDepthStencilLayouts{ separateDepthStencilLayouts_ } + , hostQueryReset{ hostQueryReset_ } + , timelineSemaphore{ timelineSemaphore_ } + , bufferDeviceAddress{ bufferDeviceAddress_ } + , bufferDeviceAddressCaptureReplay{ bufferDeviceAddressCaptureReplay_ } + , bufferDeviceAddressMultiDevice{ bufferDeviceAddressMultiDevice_ } + , vulkanMemoryModel{ vulkanMemoryModel_ } + , vulkanMemoryModelDeviceScope{ vulkanMemoryModelDeviceScope_ } + , vulkanMemoryModelAvailabilityVisibilityChains{ vulkanMemoryModelAvailabilityVisibilityChains_ } + , shaderOutputViewportIndex{ shaderOutputViewportIndex_ } + , shaderOutputLayer{ shaderOutputLayer_ } + , subgroupBroadcastDynamicId{ subgroupBroadcastDynamicId_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDevicePrivateDataFeatures( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevicePrivateDataFeatures( *reinterpret_cast( &rhs ) ) + PhysicalDeviceVulkan12Features( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan12Features( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDevicePrivateDataFeatures & operator=( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceVulkan12Features & operator=( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDevicePrivateDataFeatures & operator=( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Features & operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures & setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setSamplerMirrorClampToEdge( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ ) VULKAN_HPP_NOEXCEPT { - privateData = privateData_; + samplerMirrorClampToEdge = samplerMirrorClampToEdge_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDevicePrivateDataFeatures const &() const VULKAN_HPP_NOEXCEPT + 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDrawIndirectCount( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + drawIndirectCount = drawIndirectCount_; + return *this; } - operator VkPhysicalDevicePrivateDataFeatures &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + storageBuffer8BitAccess = storageBuffer8BitAccess_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, privateData ); + uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevicePrivateDataFeatures const & ) const = default; -#else - bool operator==( PhysicalDevicePrivateDataFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( privateData == rhs.privateData ); -# endif + storagePushConstant8 = storagePushConstant8_; + return *this; } - bool operator!=( PhysicalDevicePrivateDataFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + shaderBufferInt64Atomics = shaderBufferInt64Atomics_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrivateDataFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 privateData = {}; - }; - - template <> - struct CppType - { - using Type = PhysicalDevicePrivateDataFeatures; - }; - using PhysicalDevicePrivateDataFeaturesEXT = PhysicalDevicePrivateDataFeatures; - - struct PhysicalDeviceSparseProperties - { - using NativeType = VkPhysicalDeviceSparseProperties; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {} ) VULKAN_HPP_NOEXCEPT - : residencyStandard2DBlockShape( residencyStandard2DBlockShape_ ) - , residencyStandard2DMultisampleBlockShape( residencyStandard2DMultisampleBlockShape_ ) - , residencyStandard3DBlockShape( residencyStandard3DBlockShape_ ) - , residencyAlignedMipSize( residencyAlignedMipSize_ ) - , residencyNonResidentStrict( residencyNonResidentStrict_ ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT { + 
shaderSharedInt64Atomics = shaderSharedInt64Atomics_; + return *this; } - VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceSparseProperties( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT { + shaderFloat16 = shaderFloat16_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceSparseProperties & operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceSparseProperties & operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + shaderInt8 = shaderInt8_; return *this; } - operator VkPhysicalDeviceSparseProperties const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorIndexing( VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + descriptorIndexing = descriptorIndexing_; + return *this; } - operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( residencyStandard2DBlockShape, - residencyStandard2DMultisampleBlockShape, - residencyStandard3DBlockShape, - residencyAlignedMipSize, - residencyNonResidentStrict ); + shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceSparseProperties const & ) const = default; -#else - bool operator==( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape ) && - ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape ) && - ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape ) && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize ) && - ( residencyNonResidentStrict == rhs.residencyNonResidentStrict ); -# endif + shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_; + return *this; } - bool operator!=( 
PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {}; - VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {}; - VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {}; - VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {}; - VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {}; - }; - - struct PhysicalDeviceProperties - { - using NativeType = VkPhysicalDeviceProperties; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( uint32_t apiVersion_ = {}, - uint32_t driverVersion_ = {}, - uint32_t vendorID_ = {}, - uint32_t deviceID_ = {}, - VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther, - std::array const & deviceName_ = {}, - std::array const & pipelineCacheUUID_ = {}, - VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {}, - VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} ) VULKAN_HPP_NOEXCEPT - : apiVersion( apiVersion_ ) - , driverVersion( driverVersion_ ) - , vendorID( vendorID_ ) - , deviceID( deviceID_ ) - , deviceType( deviceType_ ) - , deviceName( deviceName_ ) - , pipelineCacheUUID( pipelineCacheUUID_ ) - , limits( limits_ ) - , sparseProperties( sparseProperties_ ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { + shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_; + return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceProperties( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { + shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PhysicalDeviceProperties & operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceProperties & operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; return *this; } - operator VkPhysicalDeviceProperties const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + 
shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; + return *this; } - operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, - VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const &, - VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const &> -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( apiVersion, driverVersion, vendorID, deviceID, deviceType, deviceName, pipelineCacheUUID, limits, sparseProperties ); + shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceProperties const & ) const = default; -#else - bool operator==( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( apiVersion == rhs.apiVersion ) && ( driverVersion == rhs.driverVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && - ( deviceType == rhs.deviceType ) && ( deviceName == rhs.deviceName ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) && - ( limits == rhs.limits ) && ( sparseProperties == rhs.sparseProperties ); -# endif + descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_; + return *this; } - bool operator!=( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; + return *this; } -#endif - - public: - uint32_t apiVersion = {}; - uint32_t driverVersion = {}; - uint32_t vendorID = {}; - uint32_t deviceID = {}; - VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceName = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D pipelineCacheUUID = {}; - VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {}; - VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {}; - }; - - struct PhysicalDeviceProperties2 - { - using NativeType = VkPhysicalDeviceProperties2; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProperties2; -#if !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , properties( properties_ ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { + descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_; + return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceProperties2( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { + descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceProperties2 & operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceProperties2 & operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingUniformTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_; return *this; } - operator VkPhysicalDeviceProperties2 const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingStorageTexelBufferUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_; + return *this; } - operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, properties ); + descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceProperties2 const & ) const = default; -#else - bool operator==( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 
descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties ); -# endif + descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; + return *this; } - bool operator!=( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + runtimeDescriptorArray = runtimeDescriptorArray_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {}; - }; - - template <> - struct CppType - { - using Type = PhysicalDeviceProperties2; - }; - using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2; - - struct PhysicalDeviceProtectedMemoryFeatures - { - using NativeType = VkPhysicalDeviceProtectedMemoryFeatures; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryFeatures; + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSamplerFilterMinmax( VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ ) VULKAN_HPP_NOEXCEPT + { + samplerFilterMinmax = samplerFilterMinmax_; + return *this; + } -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , protectedMemory( protectedMemory_ ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT { + scalarBlockLayout = scalarBlockLayout_; + return *this; } - VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceProtectedMemoryFeatures( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT { + imagelessFramebuffer = imagelessFramebuffer_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PhysicalDeviceProtectedMemoryFeatures & operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceProtectedMemoryFeatures & operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + uniformBufferStandardLayout = uniformBufferStandardLayout_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT { - 
pNext = pNext_; + shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT { - protectedMemory = protectedMemory_; + separateDepthStencilLayouts = separateDepthStencilLayouts_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceProtectedMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + hostQueryReset = hostQueryReset_; + return *this; } - operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + timelineSemaphore = timelineSemaphore_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, protectedMemory ); + bufferDeviceAddress = bufferDeviceAddress_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceProtectedMemoryFeatures const & ) const = default; -#else - bool operator==( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedMemory == rhs.protectedMemory ); -# endif + bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; + return *this; } - bool operator!=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {}; - }; - - template <> - struct CppType - { - using Type = PhysicalDeviceProtectedMemoryFeatures; - }; - - struct PhysicalDeviceProtectedMemoryProperties - { - using NativeType = VkPhysicalDeviceProtectedMemoryProperties; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryProperties; + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT + { + vulkanMemoryModel = vulkanMemoryModel_; + 
return *this; + } -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , protectedNoFault( protectedNoFault_ ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT { + vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_; + return *this; } - VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT + { + vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_; + return *this; + } - PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceProtectedMemoryProperties( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setShaderOutputViewportIndex( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ ) VULKAN_HPP_NOEXCEPT { + shaderOutputViewportIndex = shaderOutputViewportIndex_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceProtectedMemoryProperties & operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderOutputLayer( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ ) VULKAN_HPP_NOEXCEPT + { + shaderOutputLayer = shaderOutputLayer_; + return *this; + } - PhysicalDeviceProtectedMemoryProperties & operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & + setSubgroupBroadcastDynamicId( VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + subgroupBroadcastDynamicId = subgroupBroadcastDynamicId_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceProtectedMemoryProperties const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVulkan12Features const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVulkan12Features &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, protectedNoFault ); + return std::tie( sType, + pNext, + samplerMirrorClampToEdge, + drawIndirectCount, + storageBuffer8BitAccess, + uniformAndStorageBuffer8BitAccess, + storagePushConstant8, + shaderBufferInt64Atomics, + shaderSharedInt64Atomics, + shaderFloat16, + shaderInt8, + descriptorIndexing, + shaderInputAttachmentArrayDynamicIndexing, + shaderUniformTexelBufferArrayDynamicIndexing, + shaderStorageTexelBufferArrayDynamicIndexing, + shaderUniformBufferArrayNonUniformIndexing, + shaderSampledImageArrayNonUniformIndexing, + 
shaderStorageBufferArrayNonUniformIndexing, + shaderStorageImageArrayNonUniformIndexing, + shaderInputAttachmentArrayNonUniformIndexing, + shaderUniformTexelBufferArrayNonUniformIndexing, + shaderStorageTexelBufferArrayNonUniformIndexing, + descriptorBindingUniformBufferUpdateAfterBind, + descriptorBindingSampledImageUpdateAfterBind, + descriptorBindingStorageImageUpdateAfterBind, + descriptorBindingStorageBufferUpdateAfterBind, + descriptorBindingUniformTexelBufferUpdateAfterBind, + descriptorBindingStorageTexelBufferUpdateAfterBind, + descriptorBindingUpdateUnusedWhilePending, + descriptorBindingPartiallyBound, + descriptorBindingVariableDescriptorCount, + runtimeDescriptorArray, + samplerFilterMinmax, + scalarBlockLayout, + imagelessFramebuffer, + uniformBufferStandardLayout, + shaderSubgroupExtendedTypes, + separateDepthStencilLayouts, + hostQueryReset, + timelineSemaphore, + bufferDeviceAddress, + bufferDeviceAddressCaptureReplay, + bufferDeviceAddressMultiDevice, + vulkanMemoryModel, + vulkanMemoryModelDeviceScope, + vulkanMemoryModelAvailabilityVisibilityChains, + shaderOutputViewportIndex, + shaderOutputLayer, + subgroupBroadcastDynamicId ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceProtectedMemoryProperties const & ) const = default; + auto operator<=>( PhysicalDeviceVulkan12Features const & ) const = default; #else - bool operator==( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedNoFault == rhs.protectedNoFault ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( samplerMirrorClampToEdge == rhs.samplerMirrorClampToEdge ) && + ( drawIndirectCount == rhs.drawIndirectCount ) && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) && + ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) && ( storagePushConstant8 == rhs.storagePushConstant8 ) && + ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) && ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics ) && + ( shaderFloat16 == rhs.shaderFloat16 ) && ( shaderInt8 == rhs.shaderInt8 ) && ( descriptorIndexing == rhs.descriptorIndexing ) && + ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) && + ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) && + ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) && + ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) && + ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) && + ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) && + ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) && + ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) && + ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) && + ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) && + ( descriptorBindingUniformBufferUpdateAfterBind == 
rhs.descriptorBindingUniformBufferUpdateAfterBind ) && + ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) && + ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) && + ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) && + ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) && + ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) && + ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) && + ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) && + ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) && + ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ) && ( samplerFilterMinmax == rhs.samplerFilterMinmax ) && + ( scalarBlockLayout == rhs.scalarBlockLayout ) && ( imagelessFramebuffer == rhs.imagelessFramebuffer ) && + ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ) && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ) && + ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts ) && ( hostQueryReset == rhs.hostQueryReset ) && + ( timelineSemaphore == rhs.timelineSemaphore ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) && + ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) && + ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ) && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) && + ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) && + ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ) && + ( shaderOutputViewportIndex == rhs.shaderOutputViewportIndex ) && ( shaderOutputLayer == rhs.shaderOutputLayer ) && + ( subgroupBroadcastDynamicId == rhs.subgroupBroadcastDynamicId ); # endif } - bool operator!=( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge = {}; + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 
shaderSampledImageArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax = {}; + VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {}; + VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {}; + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {}; + VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {}; + VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; + VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer = {}; + VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceProtectedMemoryProperties; + using Type = PhysicalDeviceVulkan12Features; }; - struct PhysicalDeviceProvokingVertexFeaturesEXT + struct PhysicalDeviceVulkan12Properties { - using NativeType = VkPhysicalDeviceProvokingVertexFeaturesEXT; + using NativeType = VkPhysicalDeviceVulkan12Properties; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Properties; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , provokingVertexLast( provokingVertexLast_ ) - , transformFeedbackPreservesProvokingVertex( transformFeedbackPreservesProvokingVertex_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 
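// ---------------------------------------------------------------------------
// Editor's note: an illustrative sketch, not part of the generated header.
// It shows one way an application might query the PhysicalDeviceVulkan12Features
// wrapper defined above. The getFeatures2 / StructureChain calls are the
// standard vulkan.hpp API; the function name `supportsDescriptorIndexingAndBda`
// and the `physicalDevice` parameter are hypothetical glue for the example.
#if 0
#include <vulkan/vulkan.hpp>

static bool supportsDescriptorIndexingAndBda( vk::PhysicalDevice physicalDevice )
{
  // Query the core features together with the Vulkan 1.2 feature struct;
  // StructureChain wires up the pNext chain automatically.
  auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan12Features>();
  auto const & features12 = chain.get<vk::PhysicalDeviceVulkan12Features>();
  return features12.descriptorIndexing && features12.bufferDeviceAddress;
}
#endif
// ---------------------------------------------------------------------------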
PhysicalDeviceVulkan12Properties( + VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, + std::array const & driverName_ = {}, + std::array const & driverInfo_ = {}, + VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, + uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, + uint32_t maxPerStageUpdateAfterBindResources_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 
filterMinmaxSingleComponentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, + uint64_t maxTimelineSemaphoreValueDifference_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , driverID{ driverID_ } + , driverName{ driverName_ } + , driverInfo{ driverInfo_ } + , conformanceVersion{ conformanceVersion_ } + , denormBehaviorIndependence{ denormBehaviorIndependence_ } + , roundingModeIndependence{ roundingModeIndependence_ } + , shaderSignedZeroInfNanPreserveFloat16{ shaderSignedZeroInfNanPreserveFloat16_ } + , shaderSignedZeroInfNanPreserveFloat32{ shaderSignedZeroInfNanPreserveFloat32_ } + , shaderSignedZeroInfNanPreserveFloat64{ shaderSignedZeroInfNanPreserveFloat64_ } + , shaderDenormPreserveFloat16{ shaderDenormPreserveFloat16_ } + , shaderDenormPreserveFloat32{ shaderDenormPreserveFloat32_ } + , shaderDenormPreserveFloat64{ shaderDenormPreserveFloat64_ } + , shaderDenormFlushToZeroFloat16{ shaderDenormFlushToZeroFloat16_ } + , shaderDenormFlushToZeroFloat32{ shaderDenormFlushToZeroFloat32_ } + , shaderDenormFlushToZeroFloat64{ shaderDenormFlushToZeroFloat64_ } + , shaderRoundingModeRTEFloat16{ shaderRoundingModeRTEFloat16_ } + , shaderRoundingModeRTEFloat32{ shaderRoundingModeRTEFloat32_ } + , shaderRoundingModeRTEFloat64{ shaderRoundingModeRTEFloat64_ } + , shaderRoundingModeRTZFloat16{ shaderRoundingModeRTZFloat16_ } + , shaderRoundingModeRTZFloat32{ shaderRoundingModeRTZFloat32_ } + , shaderRoundingModeRTZFloat64{ shaderRoundingModeRTZFloat64_ } + , maxUpdateAfterBindDescriptorsInAllPools{ maxUpdateAfterBindDescriptorsInAllPools_ } + , shaderUniformBufferArrayNonUniformIndexingNative{ shaderUniformBufferArrayNonUniformIndexingNative_ } + , shaderSampledImageArrayNonUniformIndexingNative{ shaderSampledImageArrayNonUniformIndexingNative_ } + , shaderStorageBufferArrayNonUniformIndexingNative{ shaderStorageBufferArrayNonUniformIndexingNative_ } + , shaderStorageImageArrayNonUniformIndexingNative{ shaderStorageImageArrayNonUniformIndexingNative_ } + , shaderInputAttachmentArrayNonUniformIndexingNative{ shaderInputAttachmentArrayNonUniformIndexingNative_ } + , robustBufferAccessUpdateAfterBind{ robustBufferAccessUpdateAfterBind_ } + , quadDivergentImplicitLod{ quadDivergentImplicitLod_ } + , maxPerStageDescriptorUpdateAfterBindSamplers{ maxPerStageDescriptorUpdateAfterBindSamplers_ } + , maxPerStageDescriptorUpdateAfterBindUniformBuffers{ maxPerStageDescriptorUpdateAfterBindUniformBuffers_ } + , maxPerStageDescriptorUpdateAfterBindStorageBuffers{ maxPerStageDescriptorUpdateAfterBindStorageBuffers_ } + , maxPerStageDescriptorUpdateAfterBindSampledImages{ maxPerStageDescriptorUpdateAfterBindSampledImages_ } + , maxPerStageDescriptorUpdateAfterBindStorageImages{ maxPerStageDescriptorUpdateAfterBindStorageImages_ } + , maxPerStageDescriptorUpdateAfterBindInputAttachments{ maxPerStageDescriptorUpdateAfterBindInputAttachments_ } + , maxPerStageUpdateAfterBindResources{ maxPerStageUpdateAfterBindResources_ } + , maxDescriptorSetUpdateAfterBindSamplers{ maxDescriptorSetUpdateAfterBindSamplers_ } + , maxDescriptorSetUpdateAfterBindUniformBuffers{ maxDescriptorSetUpdateAfterBindUniformBuffers_ } + , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic{ maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ } + , maxDescriptorSetUpdateAfterBindStorageBuffers{ maxDescriptorSetUpdateAfterBindStorageBuffers_ } + , 
maxDescriptorSetUpdateAfterBindStorageBuffersDynamic{ maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ } + , maxDescriptorSetUpdateAfterBindSampledImages{ maxDescriptorSetUpdateAfterBindSampledImages_ } + , maxDescriptorSetUpdateAfterBindStorageImages{ maxDescriptorSetUpdateAfterBindStorageImages_ } + , maxDescriptorSetUpdateAfterBindInputAttachments{ maxDescriptorSetUpdateAfterBindInputAttachments_ } + , supportedDepthResolveModes{ supportedDepthResolveModes_ } + , supportedStencilResolveModes{ supportedStencilResolveModes_ } + , independentResolveNone{ independentResolveNone_ } + , independentResolve{ independentResolve_ } + , filterMinmaxSingleComponentFormats{ filterMinmaxSingleComponentFormats_ } + , filterMinmaxImageComponentMapping{ filterMinmaxImageComponentMapping_ } + , maxTimelineSemaphoreValueDifference{ maxTimelineSemaphoreValueDifference_ } + , framebufferIntegerColorSampleCounts{ framebufferIntegerColorSampleCounts_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceProvokingVertexFeaturesEXT( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceProvokingVertexFeaturesEXT( *reinterpret_cast( &rhs ) ) + PhysicalDeviceVulkan12Properties( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan12Properties( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceProvokingVertexFeaturesEXT & operator=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceProvokingVertexFeaturesEXT & operator=( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + PhysicalDeviceVulkan12Properties & operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan12Properties & operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & - setProvokingVertexLast( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ ) VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVulkan12Properties const &() const VULKAN_HPP_NOEXCEPT { - provokingVertexLast = provokingVertexLast_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & - setTransformFeedbackPreservesProvokingVertex( VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ ) VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVulkan12Properties &() VULKAN_HPP_NOEXCEPT { - transformFeedbackPreservesProvokingVertex = transformFeedbackPreservesProvokingVertex_; - return *this; + return *reinterpret_cast( this ); } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceProvokingVertexFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + 
VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::ConformanceVersion const &, + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &, + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ResolveModeFlags const &, + VULKAN_HPP_NAMESPACE::ResolveModeFlags const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + uint64_t const &, + VULKAN_HPP_NAMESPACE::SampleCountFlags const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return std::tie( sType, + pNext, + driverID, + driverName, + driverInfo, + conformanceVersion, + denormBehaviorIndependence, + roundingModeIndependence, + shaderSignedZeroInfNanPreserveFloat16, + shaderSignedZeroInfNanPreserveFloat32, + shaderSignedZeroInfNanPreserveFloat64, + shaderDenormPreserveFloat16, + shaderDenormPreserveFloat32, + shaderDenormPreserveFloat64, + shaderDenormFlushToZeroFloat16, + shaderDenormFlushToZeroFloat32, + shaderDenormFlushToZeroFloat64, + shaderRoundingModeRTEFloat16, + shaderRoundingModeRTEFloat32, + shaderRoundingModeRTEFloat64, + shaderRoundingModeRTZFloat16, + shaderRoundingModeRTZFloat32, + shaderRoundingModeRTZFloat64, + maxUpdateAfterBindDescriptorsInAllPools, + shaderUniformBufferArrayNonUniformIndexingNative, + shaderSampledImageArrayNonUniformIndexingNative, + shaderStorageBufferArrayNonUniformIndexingNative, + shaderStorageImageArrayNonUniformIndexingNative, + shaderInputAttachmentArrayNonUniformIndexingNative, + robustBufferAccessUpdateAfterBind, + quadDivergentImplicitLod, + maxPerStageDescriptorUpdateAfterBindSamplers, + maxPerStageDescriptorUpdateAfterBindUniformBuffers, + maxPerStageDescriptorUpdateAfterBindStorageBuffers, + maxPerStageDescriptorUpdateAfterBindSampledImages, + maxPerStageDescriptorUpdateAfterBindStorageImages, + maxPerStageDescriptorUpdateAfterBindInputAttachments, + maxPerStageUpdateAfterBindResources, + maxDescriptorSetUpdateAfterBindSamplers, + maxDescriptorSetUpdateAfterBindUniformBuffers, + maxDescriptorSetUpdateAfterBindUniformBuffersDynamic, + maxDescriptorSetUpdateAfterBindStorageBuffers, + maxDescriptorSetUpdateAfterBindStorageBuffersDynamic, + maxDescriptorSetUpdateAfterBindSampledImages, + 
maxDescriptorSetUpdateAfterBindStorageImages, + maxDescriptorSetUpdateAfterBindInputAttachments, + supportedDepthResolveModes, + supportedStencilResolveModes, + independentResolveNone, + independentResolve, + filterMinmaxSingleComponentFormats, + filterMinmaxImageComponentMapping, + maxTimelineSemaphoreValueDifference, + framebufferIntegerColorSampleCounts ); } +#endif - operator VkPhysicalDeviceProvokingVertexFeaturesEXT &() VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); - } + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = driverID <=> rhs.driverID; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( driverName, rhs.driverName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( driverInfo, rhs.driverInfo ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = conformanceVersion <=> rhs.conformanceVersion; cmp != 0 ) + return cmp; + if ( auto cmp = denormBehaviorIndependence <=> rhs.denormBehaviorIndependence; cmp != 0 ) + return cmp; + if ( auto cmp = roundingModeIndependence <=> rhs.roundingModeIndependence; cmp != 0 ) + return cmp; + if ( auto cmp = shaderSignedZeroInfNanPreserveFloat16 <=> rhs.shaderSignedZeroInfNanPreserveFloat16; cmp != 0 ) + return cmp; + if ( auto cmp = shaderSignedZeroInfNanPreserveFloat32 <=> rhs.shaderSignedZeroInfNanPreserveFloat32; cmp != 0 ) + return cmp; + if ( auto cmp = shaderSignedZeroInfNanPreserveFloat64 <=> rhs.shaderSignedZeroInfNanPreserveFloat64; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormPreserveFloat16 <=> rhs.shaderDenormPreserveFloat16; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormPreserveFloat32 <=> rhs.shaderDenormPreserveFloat32; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormPreserveFloat64 <=> rhs.shaderDenormPreserveFloat64; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormFlushToZeroFloat16 <=> rhs.shaderDenormFlushToZeroFloat16; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormFlushToZeroFloat32 <=> rhs.shaderDenormFlushToZeroFloat32; cmp != 0 ) + return cmp; + if ( auto cmp = shaderDenormFlushToZeroFloat64 <=> rhs.shaderDenormFlushToZeroFloat64; cmp != 0 ) + return cmp; + if ( auto cmp = shaderRoundingModeRTEFloat16 <=> rhs.shaderRoundingModeRTEFloat16; cmp != 0 ) + return cmp; + if ( auto cmp = shaderRoundingModeRTEFloat32 <=> rhs.shaderRoundingModeRTEFloat32; cmp != 0 ) + return cmp; + if ( auto cmp = shaderRoundingModeRTEFloat64 <=> rhs.shaderRoundingModeRTEFloat64; cmp != 0 ) + return cmp; + if ( auto cmp = shaderRoundingModeRTZFloat16 <=> rhs.shaderRoundingModeRTZFloat16; cmp != 0 ) + return cmp; + if ( auto cmp = shaderRoundingModeRTZFloat32 <=> rhs.shaderRoundingModeRTZFloat32; cmp != 0 ) + return cmp; + if ( auto cmp = shaderRoundingModeRTZFloat64 <=> rhs.shaderRoundingModeRTZFloat64; cmp != 0 ) + return cmp; + if ( auto cmp = maxUpdateAfterBindDescriptorsInAllPools <=> rhs.maxUpdateAfterBindDescriptorsInAllPools; cmp != 0 ) + return cmp; + if ( auto cmp = shaderUniformBufferArrayNonUniformIndexingNative <=> rhs.shaderUniformBufferArrayNonUniformIndexingNative; cmp != 0 ) + return cmp; + if ( auto cmp = shaderSampledImageArrayNonUniformIndexingNative <=> 
rhs.shaderSampledImageArrayNonUniformIndexingNative; cmp != 0 ) + return cmp; + if ( auto cmp = shaderStorageBufferArrayNonUniformIndexingNative <=> rhs.shaderStorageBufferArrayNonUniformIndexingNative; cmp != 0 ) + return cmp; + if ( auto cmp = shaderStorageImageArrayNonUniformIndexingNative <=> rhs.shaderStorageImageArrayNonUniformIndexingNative; cmp != 0 ) + return cmp; + if ( auto cmp = shaderInputAttachmentArrayNonUniformIndexingNative <=> rhs.shaderInputAttachmentArrayNonUniformIndexingNative; cmp != 0 ) + return cmp; + if ( auto cmp = robustBufferAccessUpdateAfterBind <=> rhs.robustBufferAccessUpdateAfterBind; cmp != 0 ) + return cmp; + if ( auto cmp = quadDivergentImplicitLod <=> rhs.quadDivergentImplicitLod; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindSamplers <=> rhs.maxPerStageDescriptorUpdateAfterBindSamplers; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindUniformBuffers <=> rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindStorageBuffers <=> rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindSampledImages <=> rhs.maxPerStageDescriptorUpdateAfterBindSampledImages; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindStorageImages <=> rhs.maxPerStageDescriptorUpdateAfterBindStorageImages; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageDescriptorUpdateAfterBindInputAttachments <=> rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments; cmp != 0 ) + return cmp; + if ( auto cmp = maxPerStageUpdateAfterBindResources <=> rhs.maxPerStageUpdateAfterBindResources; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindSamplers <=> rhs.maxDescriptorSetUpdateAfterBindSamplers; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindUniformBuffers <=> rhs.maxDescriptorSetUpdateAfterBindUniformBuffers; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindUniformBuffersDynamic <=> rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageBuffers <=> rhs.maxDescriptorSetUpdateAfterBindStorageBuffers; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageBuffersDynamic <=> rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindSampledImages <=> rhs.maxDescriptorSetUpdateAfterBindSampledImages; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindStorageImages <=> rhs.maxDescriptorSetUpdateAfterBindStorageImages; cmp != 0 ) + return cmp; + if ( auto cmp = maxDescriptorSetUpdateAfterBindInputAttachments <=> rhs.maxDescriptorSetUpdateAfterBindInputAttachments; cmp != 0 ) + return cmp; + if ( auto cmp = supportedDepthResolveModes <=> rhs.supportedDepthResolveModes; cmp != 0 ) + return cmp; + if ( auto cmp = supportedStencilResolveModes <=> rhs.supportedStencilResolveModes; cmp != 0 ) + return cmp; + if ( auto cmp = independentResolveNone <=> rhs.independentResolveNone; cmp != 0 ) + return cmp; + if ( auto cmp = independentResolve <=> rhs.independentResolve; cmp != 0 ) + return cmp; + if ( auto cmp = filterMinmaxSingleComponentFormats <=> rhs.filterMinmaxSingleComponentFormats; cmp != 0 ) + return cmp; + if ( auto cmp = filterMinmaxImageComponentMapping <=> 
rhs.filterMinmaxImageComponentMapping; cmp != 0 ) + return cmp; + if ( auto cmp = maxTimelineSemaphoreValueDifference <=> rhs.maxTimelineSemaphoreValueDifference; cmp != 0 ) + return cmp; + if ( auto cmp = framebufferIntegerColorSampleCounts <=> rhs.framebufferIntegerColorSampleCounts; cmp != 0 ) + return cmp; -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, provokingVertexLast, transformFeedbackPreservesProvokingVertex ); + return std::strong_ordering::equivalent; } #endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceProvokingVertexFeaturesEXT const & ) const = default; -#else - bool operator==( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexLast == rhs.provokingVertexLast ) && - ( transformFeedbackPreservesProvokingVertex == rhs.transformFeedbackPreservesProvokingVertex ); -# endif + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( strcmp( driverName, rhs.driverName ) == 0 ) && + ( strcmp( driverInfo, rhs.driverInfo ) == 0 ) && ( conformanceVersion == rhs.conformanceVersion ) && + ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && ( roundingModeIndependence == rhs.roundingModeIndependence ) && + ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) && + ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) && + ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) && + ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) && + ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) && + ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) && + ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) && + ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) && + ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) && + ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ) && + ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) && + ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative ) && + ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative ) && + ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative ) && + ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative ) && + ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) && + ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) 
&& ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) && + ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) && + ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) && + ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) && + ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) && + ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) && + ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) && + ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) && + ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) && + ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) && + ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) && + ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments ) && + ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) && + ( independentResolveNone == rhs.independentResolveNone ) && ( independentResolve == rhs.independentResolve ) && + ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) && + ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ) && + ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ) && + ( framebufferIntegerColorSampleCounts == rhs.framebufferIntegerColorSampleCounts ); } - bool operator!=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast = {}; - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverInfo = {}; + VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {}; + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; + VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; + 
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {}; + uint32_t maxUpdateAfterBindDescriptorsInAllPools = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {}; + uint32_t maxPerStageUpdateAfterBindResources = {}; + uint32_t maxDescriptorSetUpdateAfterBindSamplers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {}; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {}; + uint64_t maxTimelineSemaphoreValueDifference = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceProvokingVertexFeaturesEXT; + using Type = PhysicalDeviceVulkan12Properties; }; - struct PhysicalDeviceProvokingVertexPropertiesEXT + struct PhysicalDeviceVulkan13Features { - using NativeType = VkPhysicalDeviceProvokingVertexPropertiesEXT; + using NativeType = VkPhysicalDeviceVulkan13Features; static const bool 
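// ---------------------------------------------------------------------------
// Editor's note: an illustrative sketch, not part of the generated header.
// It shows how the PhysicalDeviceVulkan12Properties wrapper defined above can
// be read back. getProperties2 / StructureChain are standard vulkan.hpp API;
// the function name `driverDescription` is hypothetical glue for the example.
#if 0
#include <string>
#include <vulkan/vulkan.hpp>

static std::string driverDescription( vk::PhysicalDevice physicalDevice )
{
  // Chain the Vulkan 1.2 property struct behind the core properties struct.
  auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2, vk::PhysicalDeviceVulkan12Properties>();
  auto const & props12 = chain.get<vk::PhysicalDeviceVulkan12Properties>();
  // driverName / driverInfo are fixed-size char arrays wrapped in
  // ArrayWrapper1D, which converts implicitly to std::string.
  std::string name = props12.driverName;
  std::string info = props12.driverInfo;
  return name + " (" + info + ")";
}
#endif
// ---------------------------------------------------------------------------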
allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan13Features; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , provokingVertexModePerPipeline( provokingVertexModePerPipeline_ ) - , transformFeedbackPreservesTriangleFanProvokingVertex( transformFeedbackPreservesTriangleFanProvokingVertex_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , robustImageAccess{ robustImageAccess_ } + , inlineUniformBlock{ inlineUniformBlock_ } + , descriptorBindingInlineUniformBlockUpdateAfterBind{ descriptorBindingInlineUniformBlockUpdateAfterBind_ } + , pipelineCreationCacheControl{ pipelineCreationCacheControl_ } + , privateData{ privateData_ } + , shaderDemoteToHelperInvocation{ shaderDemoteToHelperInvocation_ } + , shaderTerminateInvocation{ shaderTerminateInvocation_ } + , subgroupSizeControl{ subgroupSizeControl_ } + , computeFullSubgroups{ computeFullSubgroups_ } + , synchronization2{ synchronization2_ } + , textureCompressionASTC_HDR{ textureCompressionASTC_HDR_ } + , shaderZeroInitializeWorkgroupMemory{ shaderZeroInitializeWorkgroupMemory_ } + , dynamicRendering{ dynamicRendering_ } + , shaderIntegerDotProduct{ shaderIntegerDotProduct_ } + , maintenance4{ maintenance4_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceProvokingVertexPropertiesEXT( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceProvokingVertexPropertiesEXT( *reinterpret_cast( &rhs ) ) + PhysicalDeviceVulkan13Features( VkPhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan13Features( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceProvokingVertexPropertiesEXT & operator=( 
PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceVulkan13Features & operator=( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceProvokingVertexPropertiesEXT & operator=( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan13Features & operator=( VkPhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceProvokingVertexPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceProvokingVertexPropertiesEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, provokingVertexModePerPipeline, transformFeedbackPreservesTriangleFanProvokingVertex ); + pNext = pNext_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceProvokingVertexPropertiesEXT const & ) const = default; -#else - bool operator==( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexModePerPipeline == rhs.provokingVertexModePerPipeline ) && - ( transformFeedbackPreservesTriangleFanProvokingVertex == rhs.transformFeedbackPreservesTriangleFanProvokingVertex ); -# endif + robustImageAccess = robustImageAccess_; + return *this; } - bool operator!=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + inlineUniformBlock = inlineUniformBlock_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline = {}; - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex = {}; - }; - - template <> - struct CppType - { - using Type = PhysicalDeviceProvokingVertexPropertiesEXT; - }; - - struct PhysicalDevicePushDescriptorPropertiesKHR - { - using NativeType = VkPhysicalDevicePushDescriptorPropertiesKHR; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxPushDescriptors( maxPushDescriptors_ ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features 
& setDescriptorBindingInlineUniformBlockUpdateAfterBind( + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT { + descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_; + return *this; } - VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevicePushDescriptorPropertiesKHR( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & + setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT { + pipelineCreationCacheControl = pipelineCreationCacheControl_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDevicePushDescriptorPropertiesKHR & operator=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDevicePushDescriptorPropertiesKHR & operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + privateData = privateData_; return *this; } - operator VkPhysicalDevicePushDescriptorPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & + setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_; + return *this; } - operator VkPhysicalDevicePushDescriptorPropertiesKHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & + setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + shaderTerminateInvocation = shaderTerminateInvocation_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, maxPushDescriptors ); + subgroupSizeControl = subgroupSizeControl_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevicePushDescriptorPropertiesKHR const & ) const = default; -#else - bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPushDescriptors == rhs.maxPushDescriptors ); -# endif + computeFullSubgroups = computeFullSubgroups_; + return *this; } - bool operator!=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) 
VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + synchronization2 = synchronization2_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR; - void * pNext = {}; - uint32_t maxPushDescriptors = {}; - }; - - template <> - struct CppType - { - using Type = PhysicalDevicePushDescriptorPropertiesKHR; - }; - - struct PhysicalDeviceRGBA10X6FormatsFeaturesEXT - { - using NativeType = VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , formatRgba10x6WithoutYCbCrSampler( formatRgba10x6WithoutYCbCrSampler_ ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & + setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT { + textureCompressionASTC_HDR = textureCompressionASTC_HDR_; + return *this; } - VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceRGBA10X6FormatsFeaturesEXT( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceRGBA10X6FormatsFeaturesEXT( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & + setShaderZeroInitializeWorkgroupMemory( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT { + shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PhysicalDeviceRGBA10X6FormatsFeaturesEXT & operator=( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceRGBA10X6FormatsFeaturesEXT & operator=( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setDynamicRendering( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + dynamicRendering = dynamicRendering_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & + setShaderIntegerDotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + shaderIntegerDotProduct = shaderIntegerDotProduct_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT & - setFormatRgba10x6WithoutYCbCrSampler( VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setMaintenance4( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT { - formatRgba10x6WithoutYCbCrSampler = formatRgba10x6WithoutYCbCrSampler_; + maintenance4 = maintenance4_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator 
VkPhysicalDeviceVulkan13Features const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVulkan13Features &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, formatRgba10x6WithoutYCbCrSampler ); + return std::tie( sType, + pNext, + robustImageAccess, + inlineUniformBlock, + descriptorBindingInlineUniformBlockUpdateAfterBind, + pipelineCreationCacheControl, + privateData, + shaderDemoteToHelperInvocation, + shaderTerminateInvocation, + subgroupSizeControl, + computeFullSubgroups, + synchronization2, + textureCompressionASTC_HDR, + shaderZeroInitializeWorkgroupMemory, + dynamicRendering, + shaderIntegerDotProduct, + maintenance4 ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & ) const = default; + auto operator<=>( PhysicalDeviceVulkan13Features const & ) const = default; #else - bool operator==( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVulkan13Features const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatRgba10x6WithoutYCbCrSampler == rhs.formatRgba10x6WithoutYCbCrSampler ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustImageAccess == rhs.robustImageAccess ) && + ( inlineUniformBlock == rhs.inlineUniformBlock ) && + ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind ) && + ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl ) && ( privateData == rhs.privateData ) && + ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation ) && ( shaderTerminateInvocation == rhs.shaderTerminateInvocation ) && + ( subgroupSizeControl == rhs.subgroupSizeControl ) && ( computeFullSubgroups == rhs.computeFullSubgroups ) && + ( synchronization2 == rhs.synchronization2 ) && ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR ) && + ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory ) && ( dynamicRendering == rhs.dynamicRendering ) && + ( shaderIntegerDotProduct == rhs.shaderIntegerDotProduct ) && ( maintenance4 == rhs.maintenance4 ); # endif } - bool operator!=( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVulkan13Features const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan13Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {}; + VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {}; + VULKAN_HPP_NAMESPACE::Bool32 
privateData = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {}; + VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {}; + VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {}; + VULKAN_HPP_NAMESPACE::Bool32 synchronization2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct = {}; + VULKAN_HPP_NAMESPACE::Bool32 maintenance4 = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceRGBA10X6FormatsFeaturesEXT; + using Type = PhysicalDeviceVulkan13Features; }; - struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT + struct PhysicalDeviceVulkan13Properties { - using NativeType = VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + using NativeType = VkPhysicalDeviceVulkan13Properties; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan13Properties; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR - PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , rasterizationOrderColorAttachmentAccess( rasterizationOrderColorAttachmentAccess_ ) - , rasterizationOrderDepthAttachmentAccess( rasterizationOrderDepthAttachmentAccess_ ) - , rasterizationOrderStencilAttachmentAccess( rasterizationOrderStencilAttachmentAccess_ ) - { - } - - VULKAN_HPP_CONSTEXPR PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) - VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( - *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & - operator=( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & - operator=( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan13Properties( uint32_t minSubgroupSize_ = {}, + uint32_t maxSubgroupSize_ = {}, + uint32_t maxComputeWorkgroupSubgroups_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {}, + uint32_t maxInlineUniformBlockSize_ = {}, + uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, + uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {}, + uint32_t 
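// ---------------------------------------------------------------------------
// Editor's note: an illustrative sketch, not part of the generated header.
// It shows how the PhysicalDeviceVulkan13Features wrapper defined above can be
// chained into device creation. DeviceCreateInfo / setPNext and the chained
// setters are standard vulkan.hpp API; the queue setup, the function name, and
// the assumption that these features were verified beforehand via getFeatures2
// are hypothetical simplifications.
#if 0
#include <vulkan/vulkan.hpp>

static vk::UniqueDevice createDeviceWithSync2AndDynamicRendering( vk::PhysicalDevice physicalDevice, uint32_t queueFamilyIndex )
{
  float priority = 1.0f;
  vk::DeviceQueueCreateInfo queueInfo( {}, queueFamilyIndex, 1, &priority );

  // Request the desired core-1.3 features; the struct is hooked into
  // DeviceCreateInfo::pNext exactly like the corresponding C struct.
  vk::PhysicalDeviceVulkan13Features features13;
  features13.setSynchronization2( VK_TRUE ).setDynamicRendering( VK_TRUE );

  vk::DeviceCreateInfo createInfo( {}, queueInfo );
  createInfo.setPNext( &features13 );

  return physicalDevice.createDeviceUnique( createInfo );
}
#endif
// ---------------------------------------------------------------------------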
maxInlineUniformTotalSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minSubgroupSize{ minSubgroupSize_ } + , maxSubgroupSize{ maxSubgroupSize_ } + , maxComputeWorkgroupSubgroups{ maxComputeWorkgroupSubgroups_ } + , requiredSubgroupSizeStages{ requiredSubgroupSizeStages_ } + , maxInlineUniformBlockSize{ 
maxInlineUniformBlockSize_ } + , maxPerStageDescriptorInlineUniformBlocks{ maxPerStageDescriptorInlineUniformBlocks_ } + , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks{ maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ } + , maxDescriptorSetInlineUniformBlocks{ maxDescriptorSetInlineUniformBlocks_ } + , maxDescriptorSetUpdateAfterBindInlineUniformBlocks{ maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ } + , maxInlineUniformTotalSize{ maxInlineUniformTotalSize_ } + , integerDotProduct8BitUnsignedAccelerated{ integerDotProduct8BitUnsignedAccelerated_ } + , integerDotProduct8BitSignedAccelerated{ integerDotProduct8BitSignedAccelerated_ } + , integerDotProduct8BitMixedSignednessAccelerated{ integerDotProduct8BitMixedSignednessAccelerated_ } + , integerDotProduct4x8BitPackedUnsignedAccelerated{ integerDotProduct4x8BitPackedUnsignedAccelerated_ } + , integerDotProduct4x8BitPackedSignedAccelerated{ integerDotProduct4x8BitPackedSignedAccelerated_ } + , integerDotProduct4x8BitPackedMixedSignednessAccelerated{ integerDotProduct4x8BitPackedMixedSignednessAccelerated_ } + , integerDotProduct16BitUnsignedAccelerated{ integerDotProduct16BitUnsignedAccelerated_ } + , integerDotProduct16BitSignedAccelerated{ integerDotProduct16BitSignedAccelerated_ } + , integerDotProduct16BitMixedSignednessAccelerated{ integerDotProduct16BitMixedSignednessAccelerated_ } + , integerDotProduct32BitUnsignedAccelerated{ integerDotProduct32BitUnsignedAccelerated_ } + , integerDotProduct32BitSignedAccelerated{ integerDotProduct32BitSignedAccelerated_ } + , integerDotProduct32BitMixedSignednessAccelerated{ integerDotProduct32BitMixedSignednessAccelerated_ } + , integerDotProduct64BitUnsignedAccelerated{ integerDotProduct64BitUnsignedAccelerated_ } + , integerDotProduct64BitSignedAccelerated{ integerDotProduct64BitSignedAccelerated_ } + , integerDotProduct64BitMixedSignednessAccelerated{ integerDotProduct64BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating8BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating8BitSignedAccelerated{ integerDotProductAccumulatingSaturating8BitSignedAccelerated_ } + , integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ } + , integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating16BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating16BitSignedAccelerated{ integerDotProductAccumulatingSaturating16BitSignedAccelerated_ } + , integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating32BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating32BitSignedAccelerated{ integerDotProductAccumulatingSaturating32BitSignedAccelerated_ } + , 
integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating64BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating64BitSignedAccelerated{ integerDotProductAccumulatingSaturating64BitSignedAccelerated_ } + , integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ } + , storageTexelBufferOffsetAlignmentBytes{ storageTexelBufferOffsetAlignmentBytes_ } + , storageTexelBufferOffsetSingleTexelAlignment{ storageTexelBufferOffsetSingleTexelAlignment_ } + , uniformTexelBufferOffsetAlignmentBytes{ uniformTexelBufferOffsetAlignmentBytes_ } + , uniformTexelBufferOffsetSingleTexelAlignment{ uniformTexelBufferOffsetSingleTexelAlignment_ } + , maxBufferSize{ maxBufferSize_ } { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Properties( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & - setRasterizationOrderColorAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan13Properties( VkPhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan13Properties( *reinterpret_cast( &rhs ) ) { - rasterizationOrderColorAttachmentAccess = rasterizationOrderColorAttachmentAccess_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & - setRasterizationOrderDepthAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT - { - rasterizationOrderDepthAttachmentAccess = rasterizationOrderDepthAttachmentAccess_; - return *this; - } + PhysicalDeviceVulkan13Properties & operator=( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & - setRasterizationOrderStencilAttachmentAccess( VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan13Properties & operator=( VkPhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT { - rasterizationOrderStencilAttachmentAccess = rasterizationOrderStencilAttachmentAccess_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVulkan13Properties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVulkan13Properties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -60837,457 +101663,867 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + 
VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( - sType, pNext, rasterizationOrderColorAttachmentAccess, rasterizationOrderDepthAttachmentAccess, rasterizationOrderStencilAttachmentAccess ); + return std::tie( sType, + pNext, + minSubgroupSize, + maxSubgroupSize, + maxComputeWorkgroupSubgroups, + requiredSubgroupSizeStages, + maxInlineUniformBlockSize, + maxPerStageDescriptorInlineUniformBlocks, + maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks, + maxDescriptorSetInlineUniformBlocks, + maxDescriptorSetUpdateAfterBindInlineUniformBlocks, + maxInlineUniformTotalSize, + integerDotProduct8BitUnsignedAccelerated, + integerDotProduct8BitSignedAccelerated, + integerDotProduct8BitMixedSignednessAccelerated, + integerDotProduct4x8BitPackedUnsignedAccelerated, + integerDotProduct4x8BitPackedSignedAccelerated, + integerDotProduct4x8BitPackedMixedSignednessAccelerated, + integerDotProduct16BitUnsignedAccelerated, + integerDotProduct16BitSignedAccelerated, + integerDotProduct16BitMixedSignednessAccelerated, + integerDotProduct32BitUnsignedAccelerated, + integerDotProduct32BitSignedAccelerated, + integerDotProduct32BitMixedSignednessAccelerated, + integerDotProduct64BitUnsignedAccelerated, + integerDotProduct64BitSignedAccelerated, + integerDotProduct64BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating8BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating8BitSignedAccelerated, + integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated, + integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated, + integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating16BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating16BitSignedAccelerated, + integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating32BitUnsignedAccelerated, + integerDotProductAccumulatingSaturating32BitSignedAccelerated, + integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated, + integerDotProductAccumulatingSaturating64BitUnsignedAccelerated, 
+ integerDotProductAccumulatingSaturating64BitSignedAccelerated, + integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated, + storageTexelBufferOffsetAlignmentBytes, + storageTexelBufferOffsetSingleTexelAlignment, + uniformTexelBufferOffsetAlignmentBytes, + uniformTexelBufferOffsetSingleTexelAlignment, + maxBufferSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & ) const = default; + auto operator<=>( PhysicalDeviceVulkan13Properties const & ) const = default; #else - bool operator==( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVulkan13Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rasterizationOrderColorAttachmentAccess == rhs.rasterizationOrderColorAttachmentAccess ) && - ( rasterizationOrderDepthAttachmentAccess == rhs.rasterizationOrderDepthAttachmentAccess ) && - ( rasterizationOrderStencilAttachmentAccess == rhs.rasterizationOrderStencilAttachmentAccess ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSubgroupSize == rhs.minSubgroupSize ) && ( maxSubgroupSize == rhs.maxSubgroupSize ) && + ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups ) && ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages ) && + ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) && + ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) && + ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) && + ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) && + ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ) && + ( maxInlineUniformTotalSize == rhs.maxInlineUniformTotalSize ) && + ( integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated ) && + ( integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated ) && + ( integerDotProduct8BitMixedSignednessAccelerated == rhs.integerDotProduct8BitMixedSignednessAccelerated ) && + ( integerDotProduct4x8BitPackedUnsignedAccelerated == rhs.integerDotProduct4x8BitPackedUnsignedAccelerated ) && + ( integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated ) && + ( integerDotProduct4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated ) && + ( integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated ) && + ( integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated ) && + ( integerDotProduct16BitMixedSignednessAccelerated == rhs.integerDotProduct16BitMixedSignednessAccelerated ) && + ( integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated ) && + ( integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated ) && + ( integerDotProduct32BitMixedSignednessAccelerated == rhs.integerDotProduct32BitMixedSignednessAccelerated ) && + ( integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated ) && + ( integerDotProduct64BitSignedAccelerated == 
rhs.integerDotProduct64BitSignedAccelerated ) && + ( integerDotProduct64BitMixedSignednessAccelerated == rhs.integerDotProduct64BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating8BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated == + rhs.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated == + rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating16BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating32BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ) && + ( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ) && + ( integerDotProductAccumulatingSaturating64BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated ) && + ( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated == + rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ) && + ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes ) && + ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment ) && + ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes ) && + ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment ) && ( maxBufferSize == rhs.maxBufferSize ); # endif } - bool operator!=( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVulkan13Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderColorAttachmentAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderDepthAttachmentAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceVulkan13Properties; + void * pNext = {}; + uint32_t minSubgroupSize = {}; + uint32_t maxSubgroupSize = {}; + uint32_t maxComputeWorkgroupSubgroups = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {}; + uint32_t maxInlineUniformBlockSize = {}; + uint32_t maxPerStageDescriptorInlineUniformBlocks = {}; + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {}; + uint32_t maxDescriptorSetInlineUniformBlocks = {}; + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {}; + uint32_t maxInlineUniformTotalSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {}; + VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {}; + VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {}; + 
VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + using Type = PhysicalDeviceVulkan13Properties; }; - using PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; - struct PhysicalDeviceRayQueryFeaturesKHR + struct PhysicalDeviceVulkan14Features { - using NativeType = VkPhysicalDeviceRayQueryFeaturesKHR; + using NativeType = VkPhysicalDeviceVulkan14Features; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayQueryFeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan14Features; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , rayQuery( rayQuery_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan14Features( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 maintenance6_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pushDescriptor_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , globalPriorityQuery{ globalPriorityQuery_ } + , shaderSubgroupRotate{ shaderSubgroupRotate_ } + , shaderSubgroupRotateClustered{ shaderSubgroupRotateClustered_ } + , shaderFloatControls2{ shaderFloatControls2_ } + , shaderExpectAssume{ shaderExpectAssume_ } + , rectangularLines{ rectangularLines_ } + , bresenhamLines{ bresenhamLines_ } + , smoothLines{ smoothLines_ } + , stippledRectangularLines{ stippledRectangularLines_ } + , stippledBresenhamLines{ stippledBresenhamLines_ } + , stippledSmoothLines{ stippledSmoothLines_ } + , vertexAttributeInstanceRateDivisor{ vertexAttributeInstanceRateDivisor_ } + , vertexAttributeInstanceRateZeroDivisor{ vertexAttributeInstanceRateZeroDivisor_ } + , indexTypeUint8{ indexTypeUint8_ } + , dynamicRenderingLocalRead{ dynamicRenderingLocalRead_ } + , maintenance5{ maintenance5_ } + , maintenance6{ 
maintenance6_ } + , pipelineProtectedAccess{ pipelineProtectedAccess_ } + , pipelineRobustness{ pipelineRobustness_ } + , hostImageCopy{ hostImageCopy_ } + , pushDescriptor{ pushDescriptor_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan14Features( PhysicalDeviceVulkan14Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceRayQueryFeaturesKHR( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceRayQueryFeaturesKHR( *reinterpret_cast( &rhs ) ) + PhysicalDeviceVulkan14Features( VkPhysicalDeviceVulkan14Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan14Features( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceRayQueryFeaturesKHR & operator=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceVulkan14Features & operator=( PhysicalDeviceVulkan14Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceRayQueryFeaturesKHR & operator=( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan14Features & operator=( VkPhysicalDeviceVulkan14Features const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR & setRayQuery( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setGlobalPriorityQuery( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ ) VULKAN_HPP_NOEXCEPT { - rayQuery = rayQuery_; + globalPriorityQuery = globalPriorityQuery_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceRayQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setShaderSubgroupRotate( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + shaderSubgroupRotate = shaderSubgroupRotate_; + return *this; } - operator VkPhysicalDeviceRayQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & + setShaderSubgroupRotateClustered( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + shaderSubgroupRotateClustered = shaderSubgroupRotateClustered_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setShaderFloatControls2( VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, rayQuery ); + shaderFloatControls2 = shaderFloatControls2_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceRayQueryFeaturesKHR const & ) const = default; -#else - bool 
operator==( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setShaderExpectAssume( VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayQuery == rhs.rayQuery ); -# endif + shaderExpectAssume = shaderExpectAssume_; + return *this; } - bool operator!=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + rectangularLines = rectangularLines_; + return *this; } -#endif - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayQueryFeaturesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 rayQuery = {}; - }; + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT + { + bresenhamLines = bresenhamLines_; + return *this; + } - template <> - struct CppType - { - using Type = PhysicalDeviceRayQueryFeaturesKHR; - }; + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT + { + smoothLines = smoothLines_; + return *this; + } - struct PhysicalDeviceRayTracingMaintenance1FeaturesKHR - { - using NativeType = VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR; + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & + setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT + { + stippledRectangularLines = stippledRectangularLines_; + return *this; + } - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR; + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & + setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT + { + stippledBresenhamLines = stippledBresenhamLines_; + return *this; + } -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMaintenance1FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMaintenance1_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect2_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , rayTracingMaintenance1( rayTracingMaintenance1_ ) - , rayTracingPipelineTraceRaysIndirect2( rayTracingPipelineTraceRaysIndirect2_ ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT { + stippledSmoothLines = stippledSmoothLines_; + return *this; } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceRayTracingMaintenance1FeaturesKHR( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & + setVertexAttributeInstanceRateDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_; + return *this; + } - PhysicalDeviceRayTracingMaintenance1FeaturesKHR( VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) 
VULKAN_HPP_NOEXCEPT - : PhysicalDeviceRayTracingMaintenance1FeaturesKHR( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & + setVertexAttributeInstanceRateZeroDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT { + vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceRayTracingMaintenance1FeaturesKHR & operator=( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT + { + indexTypeUint8 = indexTypeUint8_; + return *this; + } - PhysicalDeviceRayTracingMaintenance1FeaturesKHR & operator=( VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & + setDynamicRenderingLocalRead( VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + dynamicRenderingLocalRead = dynamicRenderingLocalRead_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setMaintenance5( VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + maintenance5 = maintenance5_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR & - setRayTracingMaintenance1( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMaintenance1_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setMaintenance6( VULKAN_HPP_NAMESPACE::Bool32 maintenance6_ ) VULKAN_HPP_NOEXCEPT { - rayTracingMaintenance1 = rayTracingMaintenance1_; + maintenance6 = maintenance6_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR & - setRayTracingPipelineTraceRaysIndirect2( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect2_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & + setPipelineProtectedAccess( VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ ) VULKAN_HPP_NOEXCEPT { - rayTracingPipelineTraceRaysIndirect2 = rayTracingPipelineTraceRaysIndirect2_; + pipelineProtectedAccess = pipelineProtectedAccess_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setPipelineRobustness( VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pipelineRobustness = pipelineRobustness_; + return *this; } - operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setHostImageCopy( VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + hostImageCopy = hostImageCopy_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setPushDescriptor( VULKAN_HPP_NAMESPACE::Bool32 pushDescriptor_ ) VULKAN_HPP_NOEXCEPT + { + pushDescriptor = pushDescriptor_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator 
VkPhysicalDeviceVulkan14Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVulkan14Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, rayTracingMaintenance1, rayTracingPipelineTraceRaysIndirect2 ); + return std::tie( sType, + pNext, + globalPriorityQuery, + shaderSubgroupRotate, + shaderSubgroupRotateClustered, + shaderFloatControls2, + shaderExpectAssume, + rectangularLines, + bresenhamLines, + smoothLines, + stippledRectangularLines, + stippledBresenhamLines, + stippledSmoothLines, + vertexAttributeInstanceRateDivisor, + vertexAttributeInstanceRateZeroDivisor, + indexTypeUint8, + dynamicRenderingLocalRead, + maintenance5, + maintenance6, + pipelineProtectedAccess, + pipelineRobustness, + hostImageCopy, + pushDescriptor ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & ) const = default; + auto operator<=>( PhysicalDeviceVulkan14Features const & ) const = default; #else - bool operator==( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVulkan14Features const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingMaintenance1 == rhs.rayTracingMaintenance1 ) && - ( rayTracingPipelineTraceRaysIndirect2 == rhs.rayTracingPipelineTraceRaysIndirect2 ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriorityQuery == rhs.globalPriorityQuery ) && + ( shaderSubgroupRotate == rhs.shaderSubgroupRotate ) && ( shaderSubgroupRotateClustered == rhs.shaderSubgroupRotateClustered ) && + ( shaderFloatControls2 == rhs.shaderFloatControls2 ) && ( shaderExpectAssume == rhs.shaderExpectAssume ) && + ( rectangularLines == rhs.rectangularLines ) && ( bresenhamLines == rhs.bresenhamLines ) && ( smoothLines == rhs.smoothLines ) && + ( stippledRectangularLines == rhs.stippledRectangularLines ) && ( stippledBresenhamLines == rhs.stippledBresenhamLines ) && + ( stippledSmoothLines == rhs.stippledSmoothLines ) && ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor ) && + ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor ) && ( indexTypeUint8 == rhs.indexTypeUint8 ) && + ( dynamicRenderingLocalRead == rhs.dynamicRenderingLocalRead ) && ( maintenance5 == rhs.maintenance5 ) && ( maintenance6 == rhs.maintenance6 ) && + ( pipelineProtectedAccess == rhs.pipelineProtectedAccess ) && ( pipelineRobustness == rhs.pipelineRobustness ) && + ( hostImageCopy == rhs.hostImageCopy ) && ( pushDescriptor == rhs.pushDescriptor ); # endif } - bool operator!=( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVulkan14Features const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 rayTracingMaintenance1 = {}; - VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect2 = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceVulkan14Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume = {}; + VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {}; + VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead = {}; + VULKAN_HPP_NAMESPACE::Bool32 maintenance5 = {}; + VULKAN_HPP_NAMESPACE::Bool32 maintenance6 = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness = {}; + VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy = {}; + VULKAN_HPP_NAMESPACE::Bool32 pushDescriptor = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceRayTracingMaintenance1FeaturesKHR; + using Type = PhysicalDeviceVulkan14Features; }; - struct PhysicalDeviceRayTracingMotionBlurFeaturesNV + struct PhysicalDeviceVulkan14Properties { - using NativeType = VkPhysicalDeviceRayTracingMotionBlurFeaturesNV; + using NativeType = VkPhysicalDeviceVulkan14Properties; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , rayTracingMotionBlur( rayTracingMotionBlur_ ) - , rayTracingMotionBlurPipelineTraceRaysIndirect( rayTracingMotionBlurPipelineTraceRaysIndirect_ ) - { - } + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan14Properties; - VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceRayTracingMotionBlurFeaturesNV( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceRayTracingMotionBlurFeaturesNV( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Properties( + uint32_t lineSubPixelPrecisionBits_ = {}, + uint32_t maxVertexAttribDivisor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 supportsNonZeroFirstInstance_ = {}, + uint32_t maxPushDescriptors_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalReadDepthStencilAttachments_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalReadMultisampledAttachments_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 
depthStencilSwizzleOneSupport_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 polygonModePointSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nonStrictSinglePixelWideLinesUseParallelogram_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nonStrictWideLinesUseParallelogram_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 blockTexelViewCompatibleMultipleLayers_ = {}, + uint32_t maxCombinedImageSamplerDescriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateClampCombinerInputs_ = {}, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers_ = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers_ = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessVertexInputs_ = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior defaultRobustnessImages_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault, + uint32_t copySrcLayoutCount_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts_ = {}, + uint32_t copyDstLayoutCount_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts_ = {}, + std::array const & optimalTilingLayoutUUID_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , lineSubPixelPrecisionBits{ lineSubPixelPrecisionBits_ } + , maxVertexAttribDivisor{ maxVertexAttribDivisor_ } + , supportsNonZeroFirstInstance{ supportsNonZeroFirstInstance_ } + , maxPushDescriptors{ maxPushDescriptors_ } + , dynamicRenderingLocalReadDepthStencilAttachments{ dynamicRenderingLocalReadDepthStencilAttachments_ } + , dynamicRenderingLocalReadMultisampledAttachments{ dynamicRenderingLocalReadMultisampledAttachments_ } + , earlyFragmentMultisampleCoverageAfterSampleCounting{ earlyFragmentMultisampleCoverageAfterSampleCounting_ } + , earlyFragmentSampleMaskTestBeforeSampleCounting{ earlyFragmentSampleMaskTestBeforeSampleCounting_ } + , depthStencilSwizzleOneSupport{ depthStencilSwizzleOneSupport_ } + , polygonModePointSize{ polygonModePointSize_ } + , nonStrictSinglePixelWideLinesUseParallelogram{ nonStrictSinglePixelWideLinesUseParallelogram_ } + , nonStrictWideLinesUseParallelogram{ nonStrictWideLinesUseParallelogram_ } + , blockTexelViewCompatibleMultipleLayers{ blockTexelViewCompatibleMultipleLayers_ } + , maxCombinedImageSamplerDescriptorCount{ maxCombinedImageSamplerDescriptorCount_ } + , fragmentShadingRateClampCombinerInputs{ fragmentShadingRateClampCombinerInputs_ } + , defaultRobustnessStorageBuffers{ defaultRobustnessStorageBuffers_ } + , defaultRobustnessUniformBuffers{ defaultRobustnessUniformBuffers_ } + , defaultRobustnessVertexInputs{ defaultRobustnessVertexInputs_ } + , defaultRobustnessImages{ defaultRobustnessImages_ } + , copySrcLayoutCount{ copySrcLayoutCount_ } + , pCopySrcLayouts{ pCopySrcLayouts_ } + , copyDstLayoutCount{ copyDstLayoutCount_ } + , pCopyDstLayouts{ pCopyDstLayouts_ } + , optimalTilingLayoutUUID{ optimalTilingLayoutUUID_ } + , identicalMemoryTypeRequirements{ identicalMemoryTypeRequirements_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceRayTracingMotionBlurFeaturesNV & operator=( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceRayTracingMotionBlurFeaturesNV & operator=( 
VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Properties( PhysicalDeviceVulkan14Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan14Properties( VkPhysicalDeviceVulkan14Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan14Properties( *reinterpret_cast( &rhs ) ) { - pNext = pNext_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & - setRayTracingMotionBlur( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur_ ) VULKAN_HPP_NOEXCEPT - { - rayTracingMotionBlur = rayTracingMotionBlur_; - return *this; - } + PhysicalDeviceVulkan14Properties & operator=( PhysicalDeviceVulkan14Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMotionBlurFeaturesNV & - setRayTracingMotionBlurPipelineTraceRaysIndirect( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkan14Properties & operator=( VkPhysicalDeviceVulkan14Properties const & rhs ) VULKAN_HPP_NOEXCEPT { - rayTracingMotionBlurPipelineTraceRaysIndirect = rayTracingMotionBlurPipelineTraceRaysIndirect_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVulkan14Properties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceRayTracingMotionBlurFeaturesNV &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVulkan14Properties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, rayTracingMotionBlur, rayTracingMotionBlurPipelineTraceRaysIndirect ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & ) const = default; -#else - bool operator==( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingMotionBlur == rhs.rayTracingMotionBlur ) && - ( rayTracingMotionBlurPipelineTraceRaysIndirect == rhs.rayTracingMotionBlurPipelineTraceRaysIndirect ); + return std::tie( sType, + pNext, + lineSubPixelPrecisionBits, + maxVertexAttribDivisor, + supportsNonZeroFirstInstance, + maxPushDescriptors, + dynamicRenderingLocalReadDepthStencilAttachments, + dynamicRenderingLocalReadMultisampledAttachments, + earlyFragmentMultisampleCoverageAfterSampleCounting, + earlyFragmentSampleMaskTestBeforeSampleCounting, + depthStencilSwizzleOneSupport, + polygonModePointSize, + nonStrictSinglePixelWideLinesUseParallelogram, + nonStrictWideLinesUseParallelogram, + blockTexelViewCompatibleMultipleLayers, + maxCombinedImageSamplerDescriptorCount, 
+ fragmentShadingRateClampCombinerInputs, + defaultRobustnessStorageBuffers, + defaultRobustnessUniformBuffers, + defaultRobustnessVertexInputs, + defaultRobustnessImages, + copySrcLayoutCount, + pCopySrcLayouts, + copyDstLayoutCount, + pCopyDstLayouts, + optimalTilingLayoutUUID, + identicalMemoryTypeRequirements ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan14Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceVulkan14Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits ) && + ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor ) && ( supportsNonZeroFirstInstance == rhs.supportsNonZeroFirstInstance ) && + ( maxPushDescriptors == rhs.maxPushDescriptors ) && + ( dynamicRenderingLocalReadDepthStencilAttachments == rhs.dynamicRenderingLocalReadDepthStencilAttachments ) && + ( dynamicRenderingLocalReadMultisampledAttachments == rhs.dynamicRenderingLocalReadMultisampledAttachments ) && + ( earlyFragmentMultisampleCoverageAfterSampleCounting == rhs.earlyFragmentMultisampleCoverageAfterSampleCounting ) && + ( earlyFragmentSampleMaskTestBeforeSampleCounting == rhs.earlyFragmentSampleMaskTestBeforeSampleCounting ) && + ( depthStencilSwizzleOneSupport == rhs.depthStencilSwizzleOneSupport ) && ( polygonModePointSize == rhs.polygonModePointSize ) && + ( nonStrictSinglePixelWideLinesUseParallelogram == rhs.nonStrictSinglePixelWideLinesUseParallelogram ) && + ( nonStrictWideLinesUseParallelogram == rhs.nonStrictWideLinesUseParallelogram ) && + ( blockTexelViewCompatibleMultipleLayers == rhs.blockTexelViewCompatibleMultipleLayers ) && + ( maxCombinedImageSamplerDescriptorCount == rhs.maxCombinedImageSamplerDescriptorCount ) && + ( fragmentShadingRateClampCombinerInputs == rhs.fragmentShadingRateClampCombinerInputs ) && + ( defaultRobustnessStorageBuffers == rhs.defaultRobustnessStorageBuffers ) && + ( defaultRobustnessUniformBuffers == rhs.defaultRobustnessUniformBuffers ) && + ( defaultRobustnessVertexInputs == rhs.defaultRobustnessVertexInputs ) && ( defaultRobustnessImages == rhs.defaultRobustnessImages ) && + ( copySrcLayoutCount == rhs.copySrcLayoutCount ) && ( pCopySrcLayouts == rhs.pCopySrcLayouts ) && + ( copyDstLayoutCount == rhs.copyDstLayoutCount ) && ( pCopyDstLayouts == rhs.pCopyDstLayouts ) && + ( optimalTilingLayoutUUID == rhs.optimalTilingLayoutUUID ) && ( identicalMemoryTypeRequirements == rhs.identicalMemoryTypeRequirements ); # endif } - bool operator!=( PhysicalDeviceRayTracingMotionBlurFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVulkan14Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingMotionBlurFeaturesNV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur = {}; - VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan14Properties; + void * pNext = {}; + uint32_t lineSubPixelPrecisionBits = {}; + uint32_t maxVertexAttribDivisor = {}; + VULKAN_HPP_NAMESPACE::Bool32 supportsNonZeroFirstInstance = {}; + uint32_t maxPushDescriptors = {}; + VULKAN_HPP_NAMESPACE::Bool32 
dynamicRenderingLocalReadDepthStencilAttachments = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalReadMultisampledAttachments = {}; + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting = {}; + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthStencilSwizzleOneSupport = {}; + VULKAN_HPP_NAMESPACE::Bool32 polygonModePointSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 nonStrictSinglePixelWideLinesUseParallelogram = {}; + VULKAN_HPP_NAMESPACE::Bool32 nonStrictWideLinesUseParallelogram = {}; + VULKAN_HPP_NAMESPACE::Bool32 blockTexelViewCompatibleMultipleLayers = {}; + uint32_t maxCombinedImageSamplerDescriptorCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateClampCombinerInputs = {}; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessVertexInputs = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior defaultRobustnessImages = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault; + uint32_t copySrcLayoutCount = {}; + VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts = {}; + uint32_t copyDstLayoutCount = {}; + VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D optimalTilingLayoutUUID = {}; + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceRayTracingMotionBlurFeaturesNV; + using Type = PhysicalDeviceVulkan14Properties; }; - struct PhysicalDeviceRayTracingPipelineFeaturesKHR + struct PhysicalDeviceVulkanMemoryModelFeatures { - using NativeType = VkPhysicalDeviceRayTracingPipelineFeaturesKHR; + using NativeType = VkPhysicalDeviceVulkanMemoryModelFeatures; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , rayTracingPipeline( rayTracingPipeline_ ) - , rayTracingPipelineShaderGroupHandleCaptureReplay( rayTracingPipelineShaderGroupHandleCaptureReplay_ ) - , rayTracingPipelineShaderGroupHandleCaptureReplayMixed( rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ ) - , rayTracingPipelineTraceRaysIndirect( rayTracingPipelineTraceRaysIndirect_ ) - , rayTraversalPrimitiveCulling( rayTraversalPrimitiveCulling_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , vulkanMemoryModel{ vulkanMemoryModel_ } + , vulkanMemoryModelDeviceScope{ vulkanMemoryModelDeviceScope_ } + , vulkanMemoryModelAvailabilityVisibilityChains{ vulkanMemoryModelAvailabilityVisibilityChains_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceRayTracingPipelineFeaturesKHR( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceRayTracingPipelineFeaturesKHR( *reinterpret_cast( &rhs ) ) + PhysicalDeviceVulkanMemoryModelFeatures( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkanMemoryModelFeatures( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceRayTracingPipelineFeaturesKHR & operator=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceVulkanMemoryModelFeatures & operator=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceRayTracingPipelineFeaturesKHR & operator=( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVulkanMemoryModelFeatures & operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & - setRayTracingPipeline( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ ) VULKAN_HPP_NOEXCEPT - { - rayTracingPipeline = rayTracingPipeline_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & - setRayTracingPipelineShaderGroupHandleCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ ) VULKAN_HPP_NOEXCEPT - { - rayTracingPipelineShaderGroupHandleCaptureReplay = rayTracingPipelineShaderGroupHandleCaptureReplay_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineShaderGroupHandleCaptureReplayMixed( - VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & + setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT { - rayTracingPipelineShaderGroupHandleCaptureReplayMixed = rayTracingPipelineShaderGroupHandleCaptureReplayMixed_; + vulkanMemoryModel = vulkanMemoryModel_; return *this; } 
- VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & - setRayTracingPipelineTraceRaysIndirect( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & + setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT { - rayTracingPipelineTraceRaysIndirect = rayTracingPipelineTraceRaysIndirect_; + vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & - setRayTraversalPrimitiveCulling( VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & + setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT { - rayTraversalPrimitiveCulling = rayTraversalPrimitiveCulling_; + vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVulkanMemoryModelFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVulkanMemoryModelFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -61298,114 +102534,135 @@ namespace VULKAN_HPP_NAMESPACE void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, VULKAN_HPP_NAMESPACE::Bool32 const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - rayTracingPipeline, - rayTracingPipelineShaderGroupHandleCaptureReplay, - rayTracingPipelineShaderGroupHandleCaptureReplayMixed, - rayTracingPipelineTraceRaysIndirect, - rayTraversalPrimitiveCulling ); + return std::tie( sType, pNext, vulkanMemoryModel, vulkanMemoryModelDeviceScope, vulkanMemoryModelAvailabilityVisibilityChains ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceRayTracingPipelineFeaturesKHR const & ) const = default; + auto operator<=>( PhysicalDeviceVulkanMemoryModelFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingPipeline == rhs.rayTracingPipeline ) && - ( rayTracingPipelineShaderGroupHandleCaptureReplay == rhs.rayTracingPipelineShaderGroupHandleCaptureReplay ) && - ( rayTracingPipelineShaderGroupHandleCaptureReplayMixed == rhs.rayTracingPipelineShaderGroupHandleCaptureReplayMixed ) && - ( rayTracingPipelineTraceRaysIndirect == rhs.rayTracingPipelineTraceRaysIndirect ) && - ( rayTraversalPrimitiveCulling == rhs.rayTraversalPrimitiveCulling ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vulkanMemoryModel == 
rhs.vulkanMemoryModel ) && + ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) && + ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ); # endif } - bool operator!=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline = {}; - VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay = {}; - VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed = {}; - VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect = {}; - VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceRayTracingPipelineFeaturesKHR; + using Type = PhysicalDeviceVulkanMemoryModelFeatures; }; - struct PhysicalDeviceRayTracingPipelinePropertiesKHR + using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures; + + struct PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR { - using NativeType = VkPhysicalDeviceRayTracingPipelinePropertiesKHR; + using NativeType = VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelinePropertiesKHR( uint32_t shaderGroupHandleSize_ = {}, - uint32_t maxRayRecursionDepth_ = {}, - uint32_t maxShaderGroupStride_ = {}, - uint32_t shaderGroupBaseAlignment_ = {}, - uint32_t shaderGroupHandleCaptureReplaySize_ = {}, - uint32_t maxRayDispatchInvocationCount_ = {}, - uint32_t shaderGroupHandleAlignment_ = {}, - uint32_t maxRayHitAttributeSize_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderGroupHandleSize( shaderGroupHandleSize_ ) - , maxRayRecursionDepth( maxRayRecursionDepth_ ) - , maxShaderGroupStride( maxShaderGroupStride_ ) - , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ) - , shaderGroupHandleCaptureReplaySize( shaderGroupHandleCaptureReplaySize_ ) - , maxRayDispatchInvocationCount( maxRayDispatchInvocationCount_ ) - , shaderGroupHandleAlignment( shaderGroupHandleAlignment_ ) - , maxRayHitAttributeSize( maxRayHitAttributeSize_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ = {}, 
+ VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , workgroupMemoryExplicitLayout{ workgroupMemoryExplicitLayout_ } + , workgroupMemoryExplicitLayoutScalarBlockLayout{ workgroupMemoryExplicitLayoutScalarBlockLayout_ } + , workgroupMemoryExplicitLayout8BitAccess{ workgroupMemoryExplicitLayout8BitAccess_ } + , workgroupMemoryExplicitLayout16BitAccess{ workgroupMemoryExplicitLayout16BitAccess_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceRayTracingPipelinePropertiesKHR( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceRayTracingPipelinePropertiesKHR( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceRayTracingPipelinePropertiesKHR( *reinterpret_cast( &rhs ) ) + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceRayTracingPipelinePropertiesKHR & operator=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + operator=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceRayTracingPipelinePropertiesKHR & operator=( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + operator=( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + setWorkgroupMemoryExplicitLayout( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + workgroupMemoryExplicitLayout = workgroupMemoryExplicitLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + setWorkgroupMemoryExplicitLayoutScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT + { + workgroupMemoryExplicitLayoutScalarBlockLayout = workgroupMemoryExplicitLayoutScalarBlockLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + setWorkgroupMemoryExplicitLayout8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + workgroupMemoryExplicitLayout8BitAccess = 
workgroupMemoryExplicitLayout8BitAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & + setWorkgroupMemoryExplicitLayout16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ ) VULKAN_HPP_NOEXCEPT + { + workgroupMemoryExplicitLayout16BitAccess = workgroupMemoryExplicitLayout16BitAccess_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -61414,253 +102671,209 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, - shaderGroupHandleSize, - maxRayRecursionDepth, - maxShaderGroupStride, - shaderGroupBaseAlignment, - shaderGroupHandleCaptureReplaySize, - maxRayDispatchInvocationCount, - shaderGroupHandleAlignment, - maxRayHitAttributeSize ); + workgroupMemoryExplicitLayout, + workgroupMemoryExplicitLayoutScalarBlockLayout, + workgroupMemoryExplicitLayout8BitAccess, + workgroupMemoryExplicitLayout16BitAccess ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceRayTracingPipelinePropertiesKHR const & ) const = default; + auto operator<=>( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & ) const = default; #else - bool operator==( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) && - ( maxRayRecursionDepth == rhs.maxRayRecursionDepth ) && ( maxShaderGroupStride == rhs.maxShaderGroupStride ) && - ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) && ( shaderGroupHandleCaptureReplaySize == rhs.shaderGroupHandleCaptureReplaySize ) && - ( maxRayDispatchInvocationCount == rhs.maxRayDispatchInvocationCount ) && ( shaderGroupHandleAlignment == rhs.shaderGroupHandleAlignment ) && - ( maxRayHitAttributeSize == rhs.maxRayHitAttributeSize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( workgroupMemoryExplicitLayout == rhs.workgroupMemoryExplicitLayout ) && + ( workgroupMemoryExplicitLayoutScalarBlockLayout == rhs.workgroupMemoryExplicitLayoutScalarBlockLayout ) && + ( workgroupMemoryExplicitLayout8BitAccess == rhs.workgroupMemoryExplicitLayout8BitAccess ) && + ( workgroupMemoryExplicitLayout16BitAccess == rhs.workgroupMemoryExplicitLayout16BitAccess ); # endif } - bool operator!=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR; - void * pNext = {}; - uint32_t shaderGroupHandleSize = {}; - uint32_t maxRayRecursionDepth = {}; - 
uint32_t maxShaderGroupStride = {}; - uint32_t shaderGroupBaseAlignment = {}; - uint32_t shaderGroupHandleCaptureReplaySize = {}; - uint32_t maxRayDispatchInvocationCount = {}; - uint32_t shaderGroupHandleAlignment = {}; - uint32_t maxRayHitAttributeSize = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceRayTracingPipelinePropertiesKHR; + using Type = PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; }; - struct PhysicalDeviceRayTracingPropertiesNV + struct PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT { - using NativeType = VkPhysicalDeviceRayTracingPropertiesNV; + using NativeType = VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPropertiesNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_ = {}, - uint32_t maxRecursionDepth_ = {}, - uint32_t maxShaderGroupStride_ = {}, - uint32_t shaderGroupBaseAlignment_ = {}, - uint64_t maxGeometryCount_ = {}, - uint64_t maxInstanceCount_ = {}, - uint64_t maxTriangleCount_ = {}, - uint32_t maxDescriptorSetAccelerationStructures_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderGroupHandleSize( shaderGroupHandleSize_ ) - , maxRecursionDepth( maxRecursionDepth_ ) - , maxShaderGroupStride( maxShaderGroupStride_ ) - , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ) - , maxGeometryCount( maxGeometryCount_ ) - , maxInstanceCount( maxInstanceCount_ ) - , maxTriangleCount( maxTriangleCount_ ) - , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , ycbcr2plane444Formats{ ycbcr2plane444Formats_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceRayTracingPropertiesNV( *reinterpret_cast( &rhs ) ) + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceRayTracingPropertiesNV & operator=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & operator=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceRayTracingPropertiesNV & operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & operator=( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceRayTracingPropertiesNV const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceRayTracingPropertiesNV &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & + setYcbcr2plane444Formats( VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + ycbcr2plane444Formats = ycbcr2plane444Formats_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - shaderGroupHandleSize, - maxRecursionDepth, - maxShaderGroupStride, - shaderGroupBaseAlignment, - maxGeometryCount, - maxInstanceCount, - maxTriangleCount, - maxDescriptorSetAccelerationStructures ); + return std::tie( sType, pNext, ycbcr2plane444Formats ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceRayTracingPropertiesNV const & ) const = default; + auto operator<=>( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize ) && - ( maxRecursionDepth == rhs.maxRecursionDepth ) && ( maxShaderGroupStride == rhs.maxShaderGroupStride ) && - ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment ) && ( maxGeometryCount == rhs.maxGeometryCount ) && - ( maxInstanceCount == rhs.maxInstanceCount ) && ( maxTriangleCount == rhs.maxTriangleCount ) && - ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcr2plane444Formats == rhs.ycbcr2plane444Formats ); # endif } - bool operator!=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceRayTracingPropertiesNV; - void * pNext = {}; - uint32_t shaderGroupHandleSize = {}; - uint32_t maxRecursionDepth = {}; - uint32_t maxShaderGroupStride = {}; - uint32_t shaderGroupBaseAlignment = {}; - uint64_t maxGeometryCount = {}; - uint64_t maxInstanceCount = {}; - uint64_t maxTriangleCount = {}; - uint32_t maxDescriptorSetAccelerationStructures = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceRayTracingPropertiesNV; + using Type = PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; }; - struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV + struct PhysicalDeviceYcbcrDegammaFeaturesQCOM { - using NativeType = VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV; + using NativeType = VkPhysicalDeviceYcbcrDegammaFeaturesQCOM; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcrDegammaFeaturesQCOM; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , representativeFragmentTest( representativeFragmentTest_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrDegammaFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 ycbcrDegamma_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , ycbcrDegamma{ ycbcrDegamma_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceRepresentativeFragmentTestFeaturesNV( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrDegammaFeaturesQCOM( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceRepresentativeFragmentTestFeaturesNV( *reinterpret_cast( &rhs ) ) + PhysicalDeviceYcbcrDegammaFeaturesQCOM( VkPhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceYcbcrDegammaFeaturesQCOM( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceRepresentativeFragmentTestFeaturesNV & - operator=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceYcbcrDegammaFeaturesQCOM & operator=( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceYcbcrDegammaFeaturesQCOM & operator=( VkPhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV & 
setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrDegammaFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV & - setRepresentativeFragmentTest( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrDegammaFeaturesQCOM & setYcbcrDegamma( VULKAN_HPP_NAMESPACE::Bool32 ycbcrDegamma_ ) VULKAN_HPP_NOEXCEPT { - representativeFragmentTest = representativeFragmentTest_; + ycbcrDegamma = ycbcrDegamma_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceYcbcrDegammaFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceYcbcrDegammaFeaturesQCOM &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -61671,295 +102884,301 @@ namespace VULKAN_HPP_NAMESPACE # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, representativeFragmentTest ); + return std::tie( sType, pNext, ycbcrDegamma ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & ) const = default; + auto operator<=>( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & ) const = default; #else - bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( representativeFragmentTest == rhs.representativeFragmentTest ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcrDegamma == rhs.ycbcrDegamma ); # endif } - bool operator!=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceYcbcrDegammaFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcrDegammaFeaturesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 ycbcrDegamma = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceRepresentativeFragmentTestFeaturesNV; + using Type = PhysicalDeviceYcbcrDegammaFeaturesQCOM; }; - struct PhysicalDeviceRobustness2FeaturesEXT + struct PhysicalDeviceYcbcrImageArraysFeaturesEXT { - using NativeType = VkPhysicalDeviceRobustness2FeaturesEXT; + using NativeType = VkPhysicalDeviceYcbcrImageArraysFeaturesEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , robustBufferAccess2( robustBufferAccess2_ ) - , robustImageAccess2( robustImageAccess2_ ) - , nullDescriptor( nullDescriptor_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , ycbcrImageArrays{ ycbcrImageArrays_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceRobustness2FeaturesEXT( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceRobustness2FeaturesEXT( *reinterpret_cast( &rhs ) ) + PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceYcbcrImageArraysFeaturesEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceRobustness2FeaturesEXT & operator=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceRobustness2FeaturesEXT & operator=( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & - setRobustBufferAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ ) VULKAN_HPP_NOEXCEPT - { - robustBufferAccess2 = robustBufferAccess2_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setRobustImageAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ ) VULKAN_HPP_NOEXCEPT - { - robustImageAccess2 = robustImageAccess2_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & setNullDescriptor( VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT & + setYcbcrImageArrays( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ ) VULKAN_HPP_NOEXCEPT { - nullDescriptor = nullDescriptor_; + ycbcrImageArrays = 
ycbcrImageArrays_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceRobustness2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceRobustness2FeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, robustBufferAccess2, robustImageAccess2, nullDescriptor ); + return std::tie( sType, pNext, ycbcrImageArrays ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceRobustness2FeaturesEXT const & ) const = default; + auto operator<=>( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & ) const = default; #else - bool operator==( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustBufferAccess2 == rhs.robustBufferAccess2 ) && - ( robustImageAccess2 == rhs.robustImageAccess2 ) && ( nullDescriptor == rhs.nullDescriptor ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcrImageArrays == rhs.ycbcrImageArrays ); # endif } - bool operator!=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2 = {}; - VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2 = {}; - VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceRobustness2FeaturesEXT; + using Type = PhysicalDeviceYcbcrImageArraysFeaturesEXT; }; - struct PhysicalDeviceRobustness2PropertiesEXT + struct PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures { - using NativeType = VkPhysicalDeviceRobustness2PropertiesEXT; + using NativeType = VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , 
robustStorageBufferAccessSizeAlignment( robustStorageBufferAccessSizeAlignment_ ) - , robustUniformBufferAccessSizeAlignment( robustUniformBufferAccessSizeAlignment_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderZeroInitializeWorkgroupMemory{ shaderZeroInitializeWorkgroupMemory_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceRobustness2PropertiesEXT( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceRobustness2PropertiesEXT( *reinterpret_cast( &rhs ) ) + PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceRobustness2PropertiesEXT & operator=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & + operator=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceRobustness2PropertiesEXT & operator=( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & operator=( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceRobustness2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceRobustness2PropertiesEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & + setShaderZeroInitializeWorkgroupMemory( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT + { + shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, robustStorageBufferAccessSizeAlignment, robustUniformBufferAccessSizeAlignment ); + return std::tie( sType, pNext, shaderZeroInitializeWorkgroupMemory ); } #endif #if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceRobustness2PropertiesEXT const & ) const = default; + auto operator<=>( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustStorageBufferAccessSizeAlignment == rhs.robustStorageBufferAccessSizeAlignment ) && - ( robustUniformBufferAccessSizeAlignment == rhs.robustUniformBufferAccessSizeAlignment ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory ); # endif } - bool operator!=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment = {}; - VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceRobustness2PropertiesEXT; + using Type = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; }; - struct PhysicalDeviceSampleLocationsPropertiesEXT + using PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + + struct PipelineBinaryKeyKHR { - using NativeType = VkPhysicalDeviceSampleLocationsPropertiesEXT; + using NativeType = VkPipelineBinaryKeyKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineBinaryKeyKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, - std::array const & sampleLocationCoordinateRange_ = {}, - uint32_t sampleLocationSubPixelBits_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , sampleLocationSampleCounts( sampleLocationSampleCounts_ ) - , maxSampleLocationGridSize( maxSampleLocationGridSize_ ) - , sampleLocationCoordinateRange( sampleLocationCoordinateRange_ ) - , sampleLocationSubPixelBits( sampleLocationSubPixelBits_ ) - , variableSampleLocations( variableSampleLocations_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR( uint32_t keySize_ = {}, + std::array const & key_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , keySize{ keySize_ } + , key{ key_ } { } - 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR( PipelineBinaryKeyKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceSampleLocationsPropertiesEXT( *reinterpret_cast( &rhs ) ) + PipelineBinaryKeyKHR( VkPipelineBinaryKeyKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineBinaryKeyKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceSampleLocationsPropertiesEXT & operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineBinaryKeyKHR & operator=( PipelineBinaryKeyKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceSampleLocationsPropertiesEXT & operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineBinaryKeyKHR & operator=( VkPipelineBinaryKeyKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceSampleLocationsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR & setKeySize( uint32_t keySize_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + keySize = keySize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR & setKey( std::array key_ ) VULKAN_HPP_NOEXCEPT + { + key = key_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineBinaryKeyKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryKeyKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -61968,844 +103187,930 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple const &, uint32_t const &, - VULKAN_HPP_NAMESPACE::Bool32 const &> + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - sampleLocationSampleCounts, - maxSampleLocationGridSize, - sampleLocationCoordinateRange, - sampleLocationSubPixelBits, - variableSampleLocations ); + return std::tie( sType, pNext, keySize, key ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceSampleLocationsPropertiesEXT const & ) const = default; + auto operator<=>( PipelineBinaryKeyKHR const & ) const = default; #else - bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineBinaryKeyKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts ) && - ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ) && ( sampleLocationCoordinateRange == rhs.sampleLocationCoordinateRange ) && - ( 
sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits ) && ( variableSampleLocations == rhs.variableSampleLocations ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( keySize == rhs.keySize ) && ( key == rhs.key ); # endif } - bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineBinaryKeyKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D sampleLocationCoordinateRange = {}; - uint32_t sampleLocationSubPixelBits = {}; - VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineBinaryKeyKHR; + void * pNext = {}; + uint32_t keySize = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D key = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceSampleLocationsPropertiesEXT; + using Type = PipelineBinaryKeyKHR; }; - struct PhysicalDeviceSamplerFilterMinmaxProperties + struct PipelineBinaryDataKHR { - using NativeType = VkPhysicalDeviceSamplerFilterMinmaxProperties; + using NativeType = VkPipelineBinaryDataKHR; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineBinaryDataKHR( size_t dataSize_ = {}, void * pData_ = {} ) VULKAN_HPP_NOEXCEPT + : dataSize{ dataSize_ } + , pData{ pData_ } + { + } -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ) - , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ ) + VULKAN_HPP_CONSTEXPR PipelineBinaryDataKHR( PipelineBinaryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineBinaryDataKHR( VkPipelineBinaryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineBinaryDataKHR( *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PipelineBinaryDataKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) : dataSize( data_.size() * sizeof( T ) ), pData( data_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceSamplerFilterMinmaxProperties( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceSamplerFilterMinmaxProperties( *reinterpret_cast( &rhs ) ) + PipelineBinaryDataKHR & operator=( PipelineBinaryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineBinaryDataKHR & operator=( VkPipelineBinaryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif 
/*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceSamplerFilterMinmaxProperties & operator=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryDataKHR & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = dataSize_; + return *this; + } - PhysicalDeviceSamplerFilterMinmaxProperties & operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryDataKHR & setPData( void * pData_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + pData = pData_; return *this; } - operator VkPhysicalDeviceSamplerFilterMinmaxProperties const &() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PipelineBinaryDataKHR & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + dataSize = data_.size() * sizeof( T ); + pData = data_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceSamplerFilterMinmaxProperties &() VULKAN_HPP_NOEXCEPT + operator VkPipelineBinaryDataKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, filterMinmaxSingleComponentFormats, filterMinmaxImageComponentMapping ); + return std::tie( dataSize, pData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceSamplerFilterMinmaxProperties const & ) const = default; + auto operator<=>( PipelineBinaryDataKHR const & ) const = default; #else - bool operator==( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineBinaryDataKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) && - ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ); + return ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); # endif } - bool operator!=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineBinaryDataKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {}; - VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {}; + size_t dataSize = {}; + void * pData = {}; }; - template <> - struct CppType + struct PipelineBinaryKeysAndDataKHR { - using Type = PhysicalDeviceSamplerFilterMinmaxProperties; - }; - using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties; + using NativeType = VkPipelineBinaryKeysAndDataKHR; - struct PhysicalDeviceSamplerYcbcrConversionFeatures - { - using NativeType = 
VkPhysicalDeviceSamplerYcbcrConversionFeatures; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR( uint32_t binaryCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineBinaryKeys_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR * pPipelineBinaryData_ = {} ) VULKAN_HPP_NOEXCEPT + : binaryCount{ binaryCount_ } + , pPipelineBinaryKeys{ pPipelineBinaryKeys_ } + , pPipelineBinaryData{ pPipelineBinaryData_ } + { + } - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures; + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR( PipelineBinaryKeysAndDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , samplerYcbcrConversion( samplerYcbcrConversion_ ) + PipelineBinaryKeysAndDataKHR( VkPipelineBinaryKeysAndDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineBinaryKeysAndDataKHR( *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineBinaryKeysAndDataKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaryKeys_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaryData_ = {} ) + : binaryCount( static_cast( pipelineBinaryKeys_.size() ) ) + , pPipelineBinaryKeys( pipelineBinaryKeys_.data() ) + , pPipelineBinaryData( pipelineBinaryData_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( pipelineBinaryKeys_.size() == pipelineBinaryData_.size() ); +# else + if ( pipelineBinaryKeys_.size() != pipelineBinaryData_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::PipelineBinaryKeysAndDataKHR::PipelineBinaryKeysAndDataKHR: pipelineBinaryKeys_.size() != pipelineBinaryData_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceSamplerYcbcrConversionFeatures( *reinterpret_cast( &rhs ) ) + PipelineBinaryKeysAndDataKHR & operator=( PipelineBinaryKeysAndDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineBinaryKeysAndDataKHR & operator=( VkPipelineBinaryKeysAndDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR & setBinaryCount( uint32_t binaryCount_ ) VULKAN_HPP_NOEXCEPT + { + binaryCount = binaryCount_; + return *this; + } - PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR & + setPPipelineBinaryKeys( 
const VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineBinaryKeys_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + pPipelineBinaryKeys = pPipelineBinaryKeys_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineBinaryKeysAndDataKHR & setPipelineBinaryKeys( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaryKeys_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + binaryCount = static_cast( pipelineBinaryKeys_.size() ); + pPipelineBinaryKeys = pipelineBinaryKeys_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures & - setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR & + setPPipelineBinaryData( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR * pPipelineBinaryData_ ) VULKAN_HPP_NOEXCEPT { - samplerYcbcrConversion = samplerYcbcrConversion_; + pPipelineBinaryData = pPipelineBinaryData_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const &() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineBinaryKeysAndDataKHR & setPipelineBinaryData( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaryData_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + binaryCount = static_cast( pipelineBinaryData_.size() ); + pPipelineBinaryData = pipelineBinaryData_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &() VULKAN_HPP_NOEXCEPT + operator VkPipelineBinaryKeysAndDataKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryKeysAndDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, samplerYcbcrConversion ); + return std::tie( binaryCount, pPipelineBinaryKeys, pPipelineBinaryData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceSamplerYcbcrConversionFeatures const & ) const = default; + auto operator<=>( PipelineBinaryKeysAndDataKHR const & ) const = default; #else - bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineBinaryKeysAndDataKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ); + return ( binaryCount == rhs.binaryCount ) && ( pPipelineBinaryKeys == rhs.pPipelineBinaryKeys ) && ( pPipelineBinaryData == rhs.pPipelineBinaryData ); # endif } - bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineBinaryKeysAndDataKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {}; - }; - - template <> - struct CppType - { - using Type = PhysicalDeviceSamplerYcbcrConversionFeatures; + uint32_t binaryCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineBinaryKeys = {}; + const VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR * pPipelineBinaryData = {}; }; - using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures; - struct PhysicalDeviceScalarBlockLayoutFeatures + struct PipelineCreateInfoKHR { - using NativeType = VkPhysicalDeviceScalarBlockLayoutFeatures; + using NativeType = VkPipelineCreateInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreateInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , scalarBlockLayout( scalarBlockLayout_ ) - { - } +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCreateInfoKHR( void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } {} - VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineCreateInfoKHR( PipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceScalarBlockLayoutFeatures( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceScalarBlockLayoutFeatures( *reinterpret_cast( &rhs ) ) + PipelineCreateInfoKHR( VkPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceScalarBlockLayoutFeatures & operator=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineCreateInfoKHR & operator=( PipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceScalarBlockLayoutFeatures & operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCreateInfoKHR & operator=( VkPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCreateInfoKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures & - setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT - { - scalarBlockLayout = scalarBlockLayout_; - return *this; - } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkPhysicalDeviceScalarBlockLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineCreateInfoKHR const 
&() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceScalarBlockLayoutFeatures &() VULKAN_HPP_NOEXCEPT + operator VkPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, scalarBlockLayout ); + return std::tie( sType, pNext ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceScalarBlockLayoutFeatures const & ) const = default; + auto operator<=>( PipelineCreateInfoKHR const & ) const = default; #else - bool operator==( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( scalarBlockLayout == rhs.scalarBlockLayout ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); # endif } - bool operator!=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreateInfoKHR; + void * pNext = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceScalarBlockLayoutFeatures; + using Type = PipelineCreateInfoKHR; }; - using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures; - struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures + struct PipelineBinaryCreateInfoKHR { - using NativeType = VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures; + using NativeType = VkPipelineBinaryCreateInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineBinaryCreateInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , separateDepthStencilLayouts( separateDepthStencilLayouts_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR * pKeysAndDataInfo_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR * pPipelineCreateInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pKeysAndDataInfo{ pKeysAndDataInfo_ } + , pipeline{ pipeline_ } + , pPipelineCreateInfo{ pPipelineCreateInfo_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceSeparateDepthStencilLayoutsFeatures( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + 
VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR( PipelineBinaryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceSeparateDepthStencilLayoutsFeatures( *reinterpret_cast( &rhs ) ) + PipelineBinaryCreateInfoKHR( VkPipelineBinaryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineBinaryCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceSeparateDepthStencilLayoutsFeatures & - operator=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineBinaryCreateInfoKHR & operator=( PipelineBinaryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineBinaryCreateInfoKHR & operator=( VkPipelineBinaryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures & - setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR & + setPKeysAndDataInfo( const VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR * pKeysAndDataInfo_ ) VULKAN_HPP_NOEXCEPT { - separateDepthStencilLayouts = separateDepthStencilLayouts_; + pKeysAndDataInfo = pKeysAndDataInfo_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pipeline = pipeline_; + return *this; } - operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR & + setPPipelineCreateInfo( const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR * pPipelineCreateInfo_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pPipelineCreateInfo = pPipelineCreateInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineBinaryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, separateDepthStencilLayouts ); + return std::tie( sType, pNext, pKeysAndDataInfo, pipeline, pPipelineCreateInfo ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & ) const = default; + 
auto operator<=>( PipelineBinaryCreateInfoKHR const & ) const = default; #else - bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineBinaryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pKeysAndDataInfo == rhs.pKeysAndDataInfo ) && ( pipeline == rhs.pipeline ) && + ( pPipelineCreateInfo == rhs.pPipelineCreateInfo ); # endif } - bool operator!=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineBinaryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineBinaryCreateInfoKHR; + const void * pNext = {}; + const VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR * pKeysAndDataInfo = {}; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR * pPipelineCreateInfo = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceSeparateDepthStencilLayoutsFeatures; + using Type = PipelineBinaryCreateInfoKHR; }; - using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures; - struct PhysicalDeviceShaderAtomicFloat2FeaturesEXT + struct PipelineBinaryDataInfoKHR { - using NativeType = VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT; + using NativeType = VkPipelineBinaryDataInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineBinaryDataInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderBufferFloat16Atomics( shaderBufferFloat16Atomics_ ) - , shaderBufferFloat16AtomicAdd( shaderBufferFloat16AtomicAdd_ ) - , shaderBufferFloat16AtomicMinMax( shaderBufferFloat16AtomicMinMax_ ) - , shaderBufferFloat32AtomicMinMax( shaderBufferFloat32AtomicMinMax_ ) - , shaderBufferFloat64AtomicMinMax( 
shaderBufferFloat64AtomicMinMax_ ) - , shaderSharedFloat16Atomics( shaderSharedFloat16Atomics_ ) - , shaderSharedFloat16AtomicAdd( shaderSharedFloat16AtomicAdd_ ) - , shaderSharedFloat16AtomicMinMax( shaderSharedFloat16AtomicMinMax_ ) - , shaderSharedFloat32AtomicMinMax( shaderSharedFloat32AtomicMinMax_ ) - , shaderSharedFloat64AtomicMinMax( shaderSharedFloat64AtomicMinMax_ ) - , shaderImageFloat32AtomicMinMax( shaderImageFloat32AtomicMinMax_ ) - , sparseImageFloat32AtomicMinMax( sparseImageFloat32AtomicMinMax_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineBinaryDataInfoKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineBinary{ pipelineBinary_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat2FeaturesEXT( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineBinaryDataInfoKHR( PipelineBinaryDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShaderAtomicFloat2FeaturesEXT( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderAtomicFloat2FeaturesEXT( *reinterpret_cast( &rhs ) ) + PipelineBinaryDataInfoKHR( VkPipelineBinaryDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineBinaryDataInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceShaderAtomicFloat2FeaturesEXT & operator=( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineBinaryDataInfoKHR & operator=( PipelineBinaryDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceShaderAtomicFloat2FeaturesEXT & operator=( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineBinaryDataInfoKHR & operator=( VkPipelineBinaryDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryDataInfoKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & - setShaderBufferFloat16Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryDataInfoKHR & setPipelineBinary( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary_ ) VULKAN_HPP_NOEXCEPT { - shaderBufferFloat16Atomics = shaderBufferFloat16Atomics_; + pipelineBinary = pipelineBinary_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & - setShaderBufferFloat16AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + operator VkPipelineBinaryDataInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - shaderBufferFloat16AtomicAdd = shaderBufferFloat16AtomicAdd_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & - setShaderBufferFloat16AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 
shaderBufferFloat16AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + operator VkPipelineBinaryDataInfoKHR &() VULKAN_HPP_NOEXCEPT { - shaderBufferFloat16AtomicMinMax = shaderBufferFloat16AtomicMinMax_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & - setShaderBufferFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - shaderBufferFloat32AtomicMinMax = shaderBufferFloat32AtomicMinMax_; - return *this; + return std::tie( sType, pNext, pipelineBinary ); } +#endif - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & - setShaderBufferFloat64AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineBinaryDataInfoKHR const & ) const = default; +#else + bool operator==( PipelineBinaryDataInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - shaderBufferFloat64AtomicMinMax = shaderBufferFloat64AtomicMinMax_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBinary == rhs.pipelineBinary ); +# endif } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & - setShaderSharedFloat16Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineBinaryDataInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - shaderSharedFloat16Atomics = shaderSharedFloat16Atomics_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & - setShaderSharedFloat16AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineBinaryDataInfoKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary = {}; + }; + + template <> + struct CppType + { + using Type = PipelineBinaryDataInfoKHR; + }; + + struct PipelineBinaryHandlesInfoKHR + { + using NativeType = VkPipelineBinaryHandlesInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineBinaryHandlesInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineBinaryHandlesInfoKHR( uint32_t pipelineBinaryCount_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * pPipelineBinaries_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineBinaryCount{ pipelineBinaryCount_ } + , pPipelineBinaries{ pPipelineBinaries_ } { - shaderSharedFloat16AtomicAdd = shaderSharedFloat16AtomicAdd_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & - setShaderSharedFloat16AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineBinaryHandlesInfoKHR( PipelineBinaryHandlesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineBinaryHandlesInfoKHR( VkPipelineBinaryHandlesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineBinaryHandlesInfoKHR( *reinterpret_cast( &rhs ) ) { - 
shaderSharedFloat16AtomicMinMax = shaderSharedFloat16AtomicMinMax_; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineBinaryHandlesInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaries_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), pipelineBinaryCount( static_cast( pipelineBinaries_.size() ) ), pPipelineBinaries( pipelineBinaries_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineBinaryHandlesInfoKHR & operator=( PipelineBinaryHandlesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineBinaryHandlesInfoKHR & operator=( VkPipelineBinaryHandlesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & - setShaderSharedFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryHandlesInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - shaderSharedFloat32AtomicMinMax = shaderSharedFloat32AtomicMinMax_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & - setShaderSharedFloat64AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryHandlesInfoKHR & setPipelineBinaryCount( uint32_t pipelineBinaryCount_ ) VULKAN_HPP_NOEXCEPT { - shaderSharedFloat64AtomicMinMax = shaderSharedFloat64AtomicMinMax_; + pipelineBinaryCount = pipelineBinaryCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & - setShaderImageFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryHandlesInfoKHR & + setPPipelineBinaries( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * pPipelineBinaries_ ) VULKAN_HPP_NOEXCEPT { - shaderImageFloat32AtomicMinMax = shaderImageFloat32AtomicMinMax_; + pPipelineBinaries = pPipelineBinaries_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & - setSparseImageFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineBinaryHandlesInfoKHR & setPipelineBinaries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaries_ ) VULKAN_HPP_NOEXCEPT { - sparseImageFloat32AtomicMinMax = sparseImageFloat32AtomicMinMax_; + pipelineBinaryCount = static_cast( pipelineBinaries_.size() ); + pPipelineBinaries = pipelineBinaries_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineBinaryHandlesInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPipelineBinaryHandlesInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - 
return std::tie( sType, - pNext, - shaderBufferFloat16Atomics, - shaderBufferFloat16AtomicAdd, - shaderBufferFloat16AtomicMinMax, - shaderBufferFloat32AtomicMinMax, - shaderBufferFloat64AtomicMinMax, - shaderSharedFloat16Atomics, - shaderSharedFloat16AtomicAdd, - shaderSharedFloat16AtomicMinMax, - shaderSharedFloat32AtomicMinMax, - shaderSharedFloat64AtomicMinMax, - shaderImageFloat32AtomicMinMax, - sparseImageFloat32AtomicMinMax ); + return std::tie( sType, pNext, pipelineBinaryCount, pPipelineBinaries ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & ) const = default; + auto operator<=>( PipelineBinaryHandlesInfoKHR const & ) const = default; #else - bool operator==( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineBinaryHandlesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBufferFloat16Atomics == rhs.shaderBufferFloat16Atomics ) && - ( shaderBufferFloat16AtomicAdd == rhs.shaderBufferFloat16AtomicAdd ) && - ( shaderBufferFloat16AtomicMinMax == rhs.shaderBufferFloat16AtomicMinMax ) && - ( shaderBufferFloat32AtomicMinMax == rhs.shaderBufferFloat32AtomicMinMax ) && - ( shaderBufferFloat64AtomicMinMax == rhs.shaderBufferFloat64AtomicMinMax ) && ( shaderSharedFloat16Atomics == rhs.shaderSharedFloat16Atomics ) && - ( shaderSharedFloat16AtomicAdd == rhs.shaderSharedFloat16AtomicAdd ) && - ( shaderSharedFloat16AtomicMinMax == rhs.shaderSharedFloat16AtomicMinMax ) && - ( shaderSharedFloat32AtomicMinMax == rhs.shaderSharedFloat32AtomicMinMax ) && - ( shaderSharedFloat64AtomicMinMax == rhs.shaderSharedFloat64AtomicMinMax ) && - ( shaderImageFloat32AtomicMinMax == rhs.shaderImageFloat32AtomicMinMax ) && - ( sparseImageFloat32AtomicMinMax == rhs.sparseImageFloat32AtomicMinMax ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBinaryCount == rhs.pipelineBinaryCount ) && + ( pPipelineBinaries == rhs.pPipelineBinaries ); # endif } - bool operator!=( PhysicalDeviceShaderAtomicFloat2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineBinaryHandlesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16Atomics = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicAdd = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat16AtomicMinMax = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicMinMax = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicMinMax = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16Atomics = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicAdd = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat16AtomicMinMax = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicMinMax = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicMinMax = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineBinaryHandlesInfoKHR; + const void * pNext = {}; + uint32_t pipelineBinaryCount = {}; + VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * 
pPipelineBinaries = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceShaderAtomicFloat2FeaturesEXT; + using Type = PipelineBinaryHandlesInfoKHR; }; - struct PhysicalDeviceShaderAtomicFloatFeaturesEXT + struct PipelineBinaryInfoKHR { - using NativeType = VkPhysicalDeviceShaderAtomicFloatFeaturesEXT; + using NativeType = VkPipelineBinaryInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineBinaryInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderBufferFloat32Atomics( shaderBufferFloat32Atomics_ ) - , shaderBufferFloat32AtomicAdd( shaderBufferFloat32AtomicAdd_ ) - , shaderBufferFloat64Atomics( shaderBufferFloat64Atomics_ ) - , shaderBufferFloat64AtomicAdd( shaderBufferFloat64AtomicAdd_ ) - , shaderSharedFloat32Atomics( shaderSharedFloat32Atomics_ ) - , shaderSharedFloat32AtomicAdd( shaderSharedFloat32AtomicAdd_ ) - , shaderSharedFloat64Atomics( shaderSharedFloat64Atomics_ ) - , shaderSharedFloat64AtomicAdd( shaderSharedFloat64AtomicAdd_ ) - , shaderImageFloat32Atomics( shaderImageFloat32Atomics_ ) - , shaderImageFloat32AtomicAdd( shaderImageFloat32AtomicAdd_ ) - , sparseImageFloat32Atomics( sparseImageFloat32Atomics_ ) - , sparseImageFloat32AtomicAdd( sparseImageFloat32AtomicAdd_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineBinaryInfoKHR( uint32_t binaryCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * pPipelineBinaries_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , binaryCount{ binaryCount_ } + , pPipelineBinaries{ pPipelineBinaries_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineBinaryInfoKHR( PipelineBinaryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShaderAtomicFloatFeaturesEXT( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderAtomicFloatFeaturesEXT( *reinterpret_cast( &rhs ) ) + PipelineBinaryInfoKHR( VkPipelineBinaryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineBinaryInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & 
rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineBinaryInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaries_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), binaryCount( static_cast( pipelineBinaries_.size() ) ), pPipelineBinaries( pipelineBinaries_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineBinaryInfoKHR & operator=( PipelineBinaryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineBinaryInfoKHR & operator=( VkPipelineBinaryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & - setShaderBufferFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryInfoKHR & setBinaryCount( uint32_t binaryCount_ ) VULKAN_HPP_NOEXCEPT { - shaderBufferFloat32Atomics = shaderBufferFloat32Atomics_; + binaryCount = binaryCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & - setShaderBufferFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryInfoKHR & + setPPipelineBinaries( const VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * pPipelineBinaries_ ) VULKAN_HPP_NOEXCEPT { - shaderBufferFloat32AtomicAdd = shaderBufferFloat32AtomicAdd_; + pPipelineBinaries = pPipelineBinaries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineBinaryInfoKHR & setPipelineBinaries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaries_ ) VULKAN_HPP_NOEXCEPT + { + binaryCount = static_cast( pipelineBinaries_.size() ); + pPipelineBinaries = pipelineBinaries_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineBinaryInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, binaryCount, pPipelineBinaries ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineBinaryInfoKHR const & ) const = default; +#else + bool operator==( PipelineBinaryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( binaryCount == rhs.binaryCount ) && ( pPipelineBinaries == rhs.pPipelineBinaries ); +# endif + } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & - setShaderBufferFloat64Atomics( 
VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineBinaryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - shaderBufferFloat64Atomics = shaderBufferFloat64Atomics_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & - setShaderBufferFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineBinaryInfoKHR; + const void * pNext = {}; + uint32_t binaryCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * pPipelineBinaries = {}; + }; + + template <> + struct CppType + { + using Type = PipelineBinaryInfoKHR; + }; + + struct PipelineCacheCreateInfo + { + using NativeType = VkPipelineCacheCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCacheCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {}, + size_t initialDataSize_ = {}, + const void * pInitialData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , initialDataSize{ initialDataSize_ } + , pInitialData{ pInitialData_ } { - shaderBufferFloat64AtomicAdd = shaderBufferFloat64AtomicAdd_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & - setShaderSharedFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCacheCreateInfo( *reinterpret_cast( &rhs ) ) { - shaderSharedFloat32Atomics = shaderSharedFloat32Atomics_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & - setShaderSharedFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), flags( flags_ ), initialDataSize( initialData_.size() * sizeof( T ) ), pInitialData( initialData_.data() ) { - shaderSharedFloat32AtomicAdd = shaderSharedFloat32AtomicAdd_; - return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & - setShaderSharedFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT + PipelineCacheCreateInfo & operator=( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineCacheCreateInfo & operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - shaderSharedFloat64Atomics = shaderSharedFloat64Atomics_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & - setShaderSharedFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - shaderSharedFloat64AtomicAdd = shaderSharedFloat64AtomicAdd_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & - setShaderImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - shaderImageFloat32Atomics = shaderImageFloat32Atomics_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & - setShaderImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT { - shaderImageFloat32AtomicAdd = shaderImageFloat32AtomicAdd_; + initialDataSize = initialDataSize_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & - setSparseImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT { - sparseImageFloat32Atomics = sparseImageFloat32Atomics_; + pInitialData = pInitialData_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & - setSparseImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PipelineCacheCreateInfo & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_ ) VULKAN_HPP_NOEXCEPT { - sparseImageFloat32AtomicAdd = sparseImageFloat32AtomicAdd_; + initialDataSize = initialData_.size() * sizeof( T ); + pInitialData = initialData_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineCacheCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -62813,2591 +104118,2664 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + const void * const &, + VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags const &, + size_t const &, + const void * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - shaderBufferFloat32Atomics, - shaderBufferFloat32AtomicAdd, - shaderBufferFloat64Atomics, - shaderBufferFloat64AtomicAdd, - shaderSharedFloat32Atomics, - shaderSharedFloat32AtomicAdd, - shaderSharedFloat64Atomics, - shaderSharedFloat64AtomicAdd, - shaderImageFloat32Atomics, - shaderImageFloat32AtomicAdd, - sparseImageFloat32Atomics, - sparseImageFloat32AtomicAdd ); + return std::tie( sType, pNext, flags, initialDataSize, pInitialData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & ) const = default; + auto operator<=>( 
PipelineCacheCreateInfo const & ) const = default; #else - bool operator==( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBufferFloat32Atomics == rhs.shaderBufferFloat32Atomics ) && - ( shaderBufferFloat32AtomicAdd == rhs.shaderBufferFloat32AtomicAdd ) && ( shaderBufferFloat64Atomics == rhs.shaderBufferFloat64Atomics ) && - ( shaderBufferFloat64AtomicAdd == rhs.shaderBufferFloat64AtomicAdd ) && ( shaderSharedFloat32Atomics == rhs.shaderSharedFloat32Atomics ) && - ( shaderSharedFloat32AtomicAdd == rhs.shaderSharedFloat32AtomicAdd ) && ( shaderSharedFloat64Atomics == rhs.shaderSharedFloat64Atomics ) && - ( shaderSharedFloat64AtomicAdd == rhs.shaderSharedFloat64AtomicAdd ) && ( shaderImageFloat32Atomics == rhs.shaderImageFloat32Atomics ) && - ( shaderImageFloat32AtomicAdd == rhs.shaderImageFloat32AtomicAdd ) && ( sparseImageFloat32Atomics == rhs.sparseImageFloat32Atomics ) && - ( sparseImageFloat32AtomicAdd == rhs.sparseImageFloat32AtomicAdd ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( initialDataSize == rhs.initialDataSize ) && + ( pInitialData == rhs.pInitialData ); # endif } - bool operator!=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags = {}; + size_t initialDataSize = {}; + const void * pInitialData = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceShaderAtomicFloatFeaturesEXT; + using Type = PipelineCacheCreateInfo; }; - struct PhysicalDeviceShaderAtomicInt64Features + struct PipelineCacheHeaderVersionOne { - using NativeType = VkPhysicalDeviceShaderAtomicInt64Features; + using NativeType = VkPipelineCacheHeaderVersionOne; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicInt64Features; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + PipelineCacheHeaderVersionOne( uint32_t headerSize_ = {}, + VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion 
headerVersion_ = VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne, + uint32_t vendorID_ = {}, + uint32_t deviceID_ = {}, + std::array const & pipelineCacheUUID_ = {} ) VULKAN_HPP_NOEXCEPT + : headerSize{ headerSize_ } + , headerVersion{ headerVersion_ } + , vendorID{ vendorID_ } + , deviceID{ deviceID_ } + , pipelineCacheUUID{ pipelineCacheUUID_ } + { + } -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ) - , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ ) + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCacheHeaderVersionOne( VkPipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCacheHeaderVersionOne( *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineCacheHeaderVersionOne & operator=( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceShaderAtomicInt64Features( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderAtomicInt64Features( *reinterpret_cast( &rhs ) ) + PipelineCacheHeaderVersionOne & operator=( VkPipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceShaderAtomicInt64Features & operator=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setHeaderSize( uint32_t headerSize_ ) VULKAN_HPP_NOEXCEPT + { + headerSize = headerSize_; + return *this; + } - PhysicalDeviceShaderAtomicInt64Features & operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & + setHeaderVersion( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + headerVersion = headerVersion_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setVendorID( uint32_t vendorID_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + vendorID = vendorID_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & - setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setDeviceID( uint32_t deviceID_ ) VULKAN_HPP_NOEXCEPT { - shaderBufferInt64Atomics = shaderBufferInt64Atomics_; + deviceID = deviceID_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & - setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setPipelineCacheUUID( 
std::array pipelineCacheUUID_ ) VULKAN_HPP_NOEXCEPT { - shaderSharedInt64Atomics = shaderSharedInt64Atomics_; + pipelineCacheUUID = pipelineCacheUUID_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceShaderAtomicInt64Features const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineCacheHeaderVersionOne const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceShaderAtomicInt64Features &() VULKAN_HPP_NOEXCEPT + operator VkPipelineCacheHeaderVersionOne &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, shaderBufferInt64Atomics, shaderSharedInt64Atomics ); + return std::tie( headerSize, headerVersion, vendorID, deviceID, pipelineCacheUUID ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderAtomicInt64Features const & ) const = default; + auto operator<=>( PipelineCacheHeaderVersionOne const & ) const = default; #else - bool operator==( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineCacheHeaderVersionOne const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) && - ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics ); + return ( headerSize == rhs.headerSize ) && ( headerVersion == rhs.headerVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && + ( pipelineCacheUUID == rhs.pipelineCacheUUID ); # endif } - bool operator!=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCacheHeaderVersionOne const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64Features; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {}; - }; - - template <> - struct CppType - { - using Type = PhysicalDeviceShaderAtomicInt64Features; + uint32_t headerSize = {}; + VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion = VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne; + uint32_t vendorID = {}; + uint32_t deviceID = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D pipelineCacheUUID = {}; }; - using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features; - struct PhysicalDeviceShaderClockFeaturesKHR + struct PipelineColorBlendAdvancedStateCreateInfoEXT { - using NativeType = VkPhysicalDeviceShaderClockFeaturesKHR; + using NativeType = VkPipelineColorBlendAdvancedStateCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( 
VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderSubgroupClock( shaderSubgroupClock_ ) - , shaderDeviceClock( shaderDeviceClock_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineColorBlendAdvancedStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, + VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcPremultiplied{ srcPremultiplied_ } + , dstPremultiplied{ dstPremultiplied_ } + , blendOverlap{ blendOverlap_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShaderClockFeaturesKHR( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderClockFeaturesKHR( *reinterpret_cast( &rhs ) ) + PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineColorBlendAdvancedStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceShaderClockFeaturesKHR & operator=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceShaderClockFeaturesKHR & operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & - setShaderSubgroupClock( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & + setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT { - shaderSubgroupClock = shaderSubgroupClock_; + srcPremultiplied = srcPremultiplied_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setShaderDeviceClock( VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & + setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT { - shaderDeviceClock = shaderDeviceClock_; + dstPremultiplied = dstPremultiplied_; return *this; } -#endif 
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceShaderClockFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & + setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + blendOverlap = blendOverlap_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceShaderClockFeaturesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, shaderSubgroupClock, shaderDeviceClock ); + return std::tie( sType, pNext, srcPremultiplied, dstPremultiplied, blendOverlap ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderClockFeaturesKHR const & ) const = default; + auto operator<=>( PipelineColorBlendAdvancedStateCreateInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupClock == rhs.shaderSubgroupClock ) && - ( shaderDeviceClock == rhs.shaderDeviceClock ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcPremultiplied == rhs.srcPremultiplied ) && ( dstPremultiplied == rhs.dstPremultiplied ) && + ( blendOverlap == rhs.blendOverlap ); # endif } - bool operator!=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {}; + VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {}; + VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceShaderClockFeaturesKHR; + using Type = PipelineColorBlendAdvancedStateCreateInfoEXT; }; - struct PhysicalDeviceShaderCoreProperties2AMD + struct PipelineColorWriteCreateInfoEXT { - using NativeType = VkPhysicalDeviceShaderCoreProperties2AMD; + using NativeType = VkPipelineColorWriteCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorWriteCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - 
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {}, - uint32_t activeComputeUnitCount_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderCoreFeatures( shaderCoreFeatures_ ) - , activeComputeUnitCount( activeComputeUnitCount_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT( uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , attachmentCount{ attachmentCount_ } + , pColorWriteEnables{ pColorWriteEnables_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderCoreProperties2AMD( *reinterpret_cast( &rhs ) ) + PipelineColorWriteCreateInfoEXT( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineColorWriteCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceShaderCoreProperties2AMD & operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineColorWriteCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorWriteEnables_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), attachmentCount( static_cast( colorWriteEnables_.size() ) ), pColorWriteEnables( colorWriteEnables_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceShaderCoreProperties2AMD & operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineColorWriteCreateInfoEXT & operator=( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineColorWriteCreateInfoEXT & operator=( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceShaderCoreProperties2AMD const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceShaderCoreProperties2AMD &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + attachmentCount = attachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & + setPColorWriteEnables( const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ ) VULKAN_HPP_NOEXCEPT + { + pColorWriteEnables = pColorWriteEnables_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineColorWriteCreateInfoEXT & + setColorWriteEnables( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorWriteEnables_ ) 
VULKAN_HPP_NOEXCEPT + { + attachmentCount = static_cast( colorWriteEnables_.size() ); + pColorWriteEnables = colorWriteEnables_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineColorWriteCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineColorWriteCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, shaderCoreFeatures, activeComputeUnitCount ); + return std::tie( sType, pNext, attachmentCount, pColorWriteEnables ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderCoreProperties2AMD const & ) const = default; + auto operator<=>( PipelineColorWriteCreateInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreFeatures == rhs.shaderCoreFeatures ) && - ( activeComputeUnitCount == rhs.activeComputeUnitCount ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) && ( pColorWriteEnables == rhs.pColorWriteEnables ); # endif } - bool operator!=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures = {}; - uint32_t activeComputeUnitCount = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorWriteCreateInfoEXT; + const void * pNext = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceShaderCoreProperties2AMD; + using Type = PipelineColorWriteCreateInfoEXT; }; - struct PhysicalDeviceShaderCorePropertiesAMD + struct PipelineCompilerControlCreateInfoAMD { - using NativeType = VkPhysicalDeviceShaderCorePropertiesAMD; + using NativeType = VkPipelineCompilerControlCreateInfoAMD; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCompilerControlCreateInfoAMD; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( uint32_t shaderEngineCount_ = {}, - uint32_t shaderArraysPerEngineCount_ = {}, - uint32_t computeUnitsPerShaderArray_ = {}, - uint32_t simdPerComputeUnit_ = {}, - uint32_t wavefrontsPerSimd_ = {}, - uint32_t wavefrontSize_ = {}, - uint32_t sgprsPerSimd_ = {}, - uint32_t minSgprAllocation_ = {}, - uint32_t maxSgprAllocation_ = {}, - uint32_t sgprAllocationGranularity_ = {}, - uint32_t vgprsPerSimd_ = {}, - uint32_t minVgprAllocation_ = {}, - 
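// Illustrative sketch, not part of the generated header: the ArrayProxy setter added above
// fills attachmentCount and pColorWriteEnables in one call. Assumes VK_EXT_color_write_enable
// is enabled and that the blend state describes two attachments; the helper and variable
// names below are placeholders.
#include <array>
#include <vulkan/vulkan.hpp>

void chainColorWriteEnables( vk::PipelineColorBlendStateCreateInfo & blendState,
                             vk::PipelineColorWriteCreateInfoEXT &   colorWrite,
                             std::array<vk::Bool32, 2> const &       writeEnables )
{
  colorWrite.setColorWriteEnables( writeEnables );  // sets attachmentCount + pColorWriteEnables
  blendState.setPNext( &colorWrite );               // count must equal blendState.attachmentCount
}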
uint32_t maxVgprAllocation_ = {}, - uint32_t vgprAllocationGranularity_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderEngineCount( shaderEngineCount_ ) - , shaderArraysPerEngineCount( shaderArraysPerEngineCount_ ) - , computeUnitsPerShaderArray( computeUnitsPerShaderArray_ ) - , simdPerComputeUnit( simdPerComputeUnit_ ) - , wavefrontsPerSimd( wavefrontsPerSimd_ ) - , wavefrontSize( wavefrontSize_ ) - , sgprsPerSimd( sgprsPerSimd_ ) - , minSgprAllocation( minSgprAllocation_ ) - , maxSgprAllocation( maxSgprAllocation_ ) - , sgprAllocationGranularity( sgprAllocationGranularity_ ) - , vgprsPerSimd( vgprsPerSimd_ ) - , minVgprAllocation( minVgprAllocation_ ) - , maxVgprAllocation( maxVgprAllocation_ ) - , vgprAllocationGranularity( vgprAllocationGranularity_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , compilerControlFlags{ compilerControlFlags_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShaderCorePropertiesAMD( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderCorePropertiesAMD( *reinterpret_cast( &rhs ) ) + PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCompilerControlCreateInfoAMD( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceShaderCorePropertiesAMD & operator=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineCompilerControlCreateInfoAMD & operator=( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceShaderCorePropertiesAMD & operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCompilerControlCreateInfoAMD & operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceShaderCorePropertiesAMD const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceShaderCorePropertiesAMD &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD & + setCompilerControlFlags( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + compilerControlFlags = compilerControlFlags_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + operator VkPipelineCompilerControlCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, 
- pNext, - shaderEngineCount, - shaderArraysPerEngineCount, - computeUnitsPerShaderArray, - simdPerComputeUnit, - wavefrontsPerSimd, - wavefrontSize, - sgprsPerSimd, - minSgprAllocation, - maxSgprAllocation, - sgprAllocationGranularity, - vgprsPerSimd, - minVgprAllocation, - maxVgprAllocation, - vgprAllocationGranularity ); + return *reinterpret_cast( this ); + } + + operator VkPipelineCompilerControlCreateInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, compilerControlFlags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderCorePropertiesAMD const & ) const = default; + auto operator<=>( PipelineCompilerControlCreateInfoAMD const & ) const = default; #else - bool operator==( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEngineCount == rhs.shaderEngineCount ) && - ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount ) && ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray ) && - ( simdPerComputeUnit == rhs.simdPerComputeUnit ) && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd ) && ( wavefrontSize == rhs.wavefrontSize ) && - ( sgprsPerSimd == rhs.sgprsPerSimd ) && ( minSgprAllocation == rhs.minSgprAllocation ) && ( maxSgprAllocation == rhs.maxSgprAllocation ) && - ( sgprAllocationGranularity == rhs.sgprAllocationGranularity ) && ( vgprsPerSimd == rhs.vgprsPerSimd ) && - ( minVgprAllocation == rhs.minVgprAllocation ) && ( maxVgprAllocation == rhs.maxVgprAllocation ) && - ( vgprAllocationGranularity == rhs.vgprAllocationGranularity ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compilerControlFlags == rhs.compilerControlFlags ); # endif } - bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD; - void * pNext = {}; - uint32_t shaderEngineCount = {}; - uint32_t shaderArraysPerEngineCount = {}; - uint32_t computeUnitsPerShaderArray = {}; - uint32_t simdPerComputeUnit = {}; - uint32_t wavefrontsPerSimd = {}; - uint32_t wavefrontSize = {}; - uint32_t sgprsPerSimd = {}; - uint32_t minSgprAllocation = {}; - uint32_t maxSgprAllocation = {}; - uint32_t sgprAllocationGranularity = {}; - uint32_t vgprsPerSimd = {}; - uint32_t minVgprAllocation = {}; - uint32_t maxVgprAllocation = {}; - uint32_t vgprAllocationGranularity = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCompilerControlCreateInfoAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceShaderCorePropertiesAMD; + using Type = PipelineCompilerControlCreateInfoAMD; }; - struct PhysicalDeviceShaderDemoteToHelperInvocationFeatures + struct PipelineCoverageModulationStateCreateInfoNV { - using NativeType = 
VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures; + using NativeType = VkPipelineCoverageModulationStateCreateInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageModulationStateCreateInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone, + VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {}, + uint32_t coverageModulationTableCount_ = {}, + const float * pCoverageModulationTable_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , coverageModulationMode{ coverageModulationMode_ } + , coverageModulationTableEnable{ coverageModulationTableEnable_ } + , coverageModulationTableCount{ coverageModulationTableCount_ } + , pCoverageModulationTable{ pCoverageModulationTable_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceShaderDemoteToHelperInvocationFeatures( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShaderDemoteToHelperInvocationFeatures( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderDemoteToHelperInvocationFeatures( *reinterpret_cast( &rhs ) ) + PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCoverageModulationStateCreateInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceShaderDemoteToHelperInvocationFeatures & - operator=( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineCoverageModulationStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_, + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_, + VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & coverageModulationTable_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , coverageModulationMode( coverageModulationMode_ ) + , coverageModulationTableEnable( coverageModulationTableEnable_ ) + , coverageModulationTableCount( static_cast( coverageModulationTable_.size() ) ) + , pCoverageModulationTable( coverageModulationTable_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceShaderDemoteToHelperInvocationFeatures & operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const & 
rhs ) VULKAN_HPP_NOEXCEPT + PipelineCoverageModulationStateCreateInfoNV & operator=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineCoverageModulationStateCreateInfoNV & operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeatures & - setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT { - shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_; + flags = flags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & + setCoverageModulationMode( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + coverageModulationMode = coverageModulationMode_; + return *this; } - operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & + setCoverageModulationTableEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + coverageModulationTableEnable = coverageModulationTableEnable_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & + setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) VULKAN_HPP_NOEXCEPT + { + coverageModulationTableCount = coverageModulationTableCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & + setPCoverageModulationTable( const float * pCoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT + { + pCoverageModulationTable = pCoverageModulationTable_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineCoverageModulationStateCreateInfoNV & + setCoverageModulationTable( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & coverageModulationTable_ ) VULKAN_HPP_NOEXCEPT + { + coverageModulationTableCount = static_cast( coverageModulationTable_.size() ); + pCoverageModulationTable = coverageModulationTable_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineCoverageModulationStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCoverageModulationStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # 
endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, shaderDemoteToHelperInvocation ); + return std::tie( sType, pNext, flags, coverageModulationMode, coverageModulationTableEnable, coverageModulationTableCount, pCoverageModulationTable ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & ) const = default; + auto operator<=>( PipelineCoverageModulationStateCreateInfoNV const & ) const = default; #else - bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( coverageModulationMode == rhs.coverageModulationMode ) && + ( coverageModulationTableEnable == rhs.coverageModulationTableEnable ) && ( coverageModulationTableCount == rhs.coverageModulationTableCount ) && + ( pCoverageModulationTable == rhs.pCoverageModulationTable ); # endif } - bool operator!=( PhysicalDeviceShaderDemoteToHelperInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags = {}; + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone; + VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable = {}; + uint32_t coverageModulationTableCount = {}; + const float * pCoverageModulationTable = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceShaderDemoteToHelperInvocationFeatures; + using Type = PipelineCoverageModulationStateCreateInfoNV; }; - using PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = PhysicalDeviceShaderDemoteToHelperInvocationFeatures; - struct PhysicalDeviceShaderDrawParametersFeatures + struct PipelineCoverageReductionStateCreateInfoNV { - using NativeType = VkPhysicalDeviceShaderDrawParametersFeatures; + using NativeType = VkPipelineCoverageReductionStateCreateInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageReductionStateCreateInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderDrawParameters( shaderDrawParameters_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
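// Illustrative sketch, not part of the generated header: filling the NV coverage-modulation
// state through the chained setters and the ArrayProxy overload shown above. Assumes
// VK_NV_framebuffer_mixed_samples; the table contents and the helper name are placeholders.
#include <array>
#include <vulkan/vulkan.hpp>

void fillCoverageModulation( vk::PipelineCoverageModulationStateCreateInfoNV & state,
                             std::array<float, 4> const &                      table )
{
  state.setCoverageModulationMode( vk::CoverageModulationModeNV::eRgba )
    .setCoverageModulationTableEnable( VK_TRUE )
    .setCoverageModulationTable( table );  // sets coverageModulationTableCount + pCoverageModulationTable
}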
PipelineCoverageReductionStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , coverageReductionMode{ coverageReductionMode_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderDrawParametersFeatures( *reinterpret_cast( &rhs ) ) + PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCoverageReductionStateCreateInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceShaderDrawParametersFeatures & operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineCoverageReductionStateCreateInfoNV & operator=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceShaderDrawParametersFeatures & operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCoverageReductionStateCreateInfoNV & operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures & - setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT { - shaderDrawParameters = shaderDrawParameters_; + flags = flags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceShaderDrawParametersFeatures const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & + setCoverageReductionMode( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + coverageReductionMode = coverageReductionMode_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceShaderDrawParametersFeatures &() VULKAN_HPP_NOEXCEPT + operator VkPipelineCoverageReductionStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkPipelineCoverageReductionStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( 
VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, shaderDrawParameters ); + return std::tie( sType, pNext, flags, coverageReductionMode ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderDrawParametersFeatures const & ) const = default; + auto operator<=>( PipelineCoverageReductionStateCreateInfoNV const & ) const = default; #else - bool operator==( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderDrawParameters == rhs.shaderDrawParameters ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( coverageReductionMode == rhs.coverageReductionMode ); # endif } - bool operator!=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags = {}; + VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceShaderDrawParametersFeatures; + using Type = PipelineCoverageReductionStateCreateInfoNV; }; - using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures; - struct PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD + struct PipelineCoverageToColorStateCreateInfoNV { - using NativeType = VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; + using NativeType = VkPipelineCoverageToColorStateCreateInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageToColorStateCreateInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 shaderEarlyAndLateFragmentTests_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderEarlyAndLateFragmentTests( shaderEarlyAndLateFragmentTests_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {}, + uint32_t coverageToColorLocation_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , coverageToColorEnable{ coverageToColorEnable_ } + , coverageToColorLocation{ 
coverageToColorLocation_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) - VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( *reinterpret_cast( &rhs ) ) + PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCoverageToColorStateCreateInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & - operator=( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineCoverageToColorStateCreateInfoNV & operator=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & - operator=( VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCoverageToColorStateCreateInfoNV & operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD & - setShaderEarlyAndLateFragmentTests( VULKAN_HPP_NAMESPACE::Bool32 shaderEarlyAndLateFragmentTests_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT { - shaderEarlyAndLateFragmentTests = shaderEarlyAndLateFragmentTests_; + flags = flags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & + setCoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + coverageToColorEnable = coverageToColorEnable_; + return *this; } - operator VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorLocation( uint32_t coverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + coverageToColorLocation = coverageToColorLocation_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineCoverageToColorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCoverageToColorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, shaderEarlyAndLateFragmentTests ); + return std::tie( sType, pNext, flags, coverageToColorEnable, coverageToColorLocation ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & ) const = default; + auto operator<=>( PipelineCoverageToColorStateCreateInfoNV const & ) const = default; #else - bool operator==( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEarlyAndLateFragmentTests == rhs.shaderEarlyAndLateFragmentTests ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( coverageToColorEnable == rhs.coverageToColorEnable ) && + ( coverageToColorLocation == rhs.coverageToColorLocation ); # endif } - bool operator!=( PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderEarlyAndLateFragmentTests = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable = {}; + uint32_t coverageToColorLocation = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; + using Type = PipelineCoverageToColorStateCreateInfoNV; }; - struct PhysicalDeviceShaderFloat16Int8Features + struct PipelineCreateFlags2CreateInfo { - using NativeType = VkPhysicalDeviceShaderFloat16Int8Features; + using NativeType = VkPipelineCreateFlags2CreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderFloat16Int8Features; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreateFlags2CreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderFloat16( shaderFloat16_ ) - , shaderInt8( shaderInt8_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCreateFlags2CreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2 flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR 
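// Illustrative sketch, not part of the generated header: a minimal use of the
// coverage-to-color state added above, writing the coverage mask into color attachment 1.
// Assumes VK_NV_fragment_coverage_to_color; the helper name is made up.
#include <vulkan/vulkan.hpp>

void fillCoverageToColor( vk::PipelineCoverageToColorStateCreateInfoNV & state )
{
  state.setCoverageToColorEnable( VK_TRUE )
    .setCoverageToColorLocation( 1 );  // index of the color attachment that receives the mask
}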
PipelineCreateFlags2CreateInfo( PipelineCreateFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShaderFloat16Int8Features( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderFloat16Int8Features( *reinterpret_cast( &rhs ) ) + PipelineCreateFlags2CreateInfo( VkPipelineCreateFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCreateFlags2CreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceShaderFloat16Int8Features & operator=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineCreateFlags2CreateInfo & operator=( PipelineCreateFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceShaderFloat16Int8Features & operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCreateFlags2CreateInfo & operator=( VkPipelineCreateFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCreateFlags2CreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT - { - shaderFloat16 = shaderFloat16_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineCreateFlags2CreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2 flags_ ) VULKAN_HPP_NOEXCEPT { - shaderInt8 = shaderInt8_; + flags = flags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceShaderFloat16Int8Features const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineCreateFlags2CreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceShaderFloat16Int8Features &() VULKAN_HPP_NOEXCEPT + operator VkPipelineCreateFlags2CreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, shaderFloat16, shaderInt8 ); + return std::tie( sType, pNext, flags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderFloat16Int8Features const & ) const = default; + auto operator<=>( PipelineCreateFlags2CreateInfo const & ) const = default; #else - bool operator==( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineCreateFlags2CreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderFloat16 == rhs.shaderFloat16 ) && ( shaderInt8 == rhs.shaderInt8 ); + 
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); # endif } - bool operator!=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCreateFlags2CreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8Features; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreateFlags2CreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags2 flags = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceShaderFloat16Int8Features; + using Type = PipelineCreateFlags2CreateInfo; }; - using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; - using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; - struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT - { - using NativeType = VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderImageInt64Atomics( shaderImageInt64Atomics_ ) - , sparseImageInt64Atomics( sparseImageInt64Atomics_ ) - { - } + using PipelineCreateFlags2CreateInfoKHR = PipelineCreateFlags2CreateInfo; - VULKAN_HPP_CONSTEXPR - PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + struct PipelineCreationFeedback + { + using NativeType = VkPipelineCreationFeedback; - PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags_ = {}, + uint64_t duration_ = {} ) VULKAN_HPP_NOEXCEPT + : flags{ flags_ } + , duration{ duration_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineCreationFeedback( PipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineCreationFeedback( VkPipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCreationFeedback( *reinterpret_cast( &rhs ) ) { - pNext = pNext_; - return *this; } - 
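// Illustrative sketch, not part of the generated header: PipelineCreateFlags2CreateInfo
// carries 64-bit pipeline create flags (from VK_KHR_maintenance5, later promoted to core).
// When it is chained, the implementation uses these flags instead of the legacy 32-bit
// createInfo.flags. The specific flag bit and the helper name below are only examples.
#include <vulkan/vulkan.hpp>

void chainCreateFlags2( vk::ComputePipelineCreateInfo &      createInfo,
                        vk::PipelineCreateFlags2CreateInfo & flags2 )
{
  flags2.setFlags( vk::PipelineCreateFlagBits2::eDisableOptimization );
  createInfo.setPNext( &flags2 );  // flags2.flags takes precedence over createInfo.flags
}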
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & - setShaderImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT - { - shaderImageInt64Atomics = shaderImageInt64Atomics_; - return *this; - } + PipelineCreationFeedback & operator=( PipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & - setSparseImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + PipelineCreationFeedback & operator=( VkPipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT { - sparseImageInt64Atomics = sparseImageInt64Atomics_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineCreationFeedback const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPipelineCreationFeedback &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, shaderImageInt64Atomics, sparseImageInt64Atomics ); + return std::tie( flags, duration ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & ) const = default; + auto operator<=>( PipelineCreationFeedback const & ) const = default; #else - bool operator==( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineCreationFeedback const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderImageInt64Atomics == rhs.shaderImageInt64Atomics ) && - ( sparseImageInt64Atomics == rhs.sparseImageInt64Atomics ); + return ( flags == rhs.flags ) && ( duration == rhs.duration ); # endif } - bool operator!=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCreationFeedback const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics = {}; - VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics = {}; + VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags = {}; + uint64_t duration = {}; }; - template <> - struct CppType - { - using Type = PhysicalDeviceShaderImageAtomicInt64FeaturesEXT; - }; + using PipelineCreationFeedbackEXT = PipelineCreationFeedback; - struct PhysicalDeviceShaderImageFootprintFeaturesNV + struct PipelineCreationFeedbackCreateInfo { - using NativeType = VkPhysicalDeviceShaderImageFootprintFeaturesNV; + using NativeType = VkPipelineCreationFeedbackCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
StructureType structureType = StructureType::ePipelineCreationFeedbackCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageFootprint( imageFootprint_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_ = {}, + uint32_t pipelineStageCreationFeedbackCount_ = {}, + VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pPipelineCreationFeedback{ pPipelineCreationFeedback_ } + , pipelineStageCreationFeedbackCount{ pipelineStageCreationFeedbackCount_ } + , pPipelineStageCreationFeedbacks{ pPipelineStageCreationFeedbacks_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo( PipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderImageFootprintFeaturesNV( *reinterpret_cast( &rhs ) ) + PipelineCreationFeedbackCreateInfo( VkPipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCreationFeedbackCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineCreationFeedbackCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineStageCreationFeedbacks_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , pPipelineCreationFeedback( pPipelineCreationFeedback_ ) + , pipelineStageCreationFeedbackCount( static_cast( pipelineStageCreationFeedbacks_.size() ) ) + , pPipelineStageCreationFeedbacks( pipelineStageCreationFeedbacks_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCreationFeedbackCreateInfo & operator=( PipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineCreationFeedbackCreateInfo & operator=( VkPipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV & setImageFootprint( VULKAN_HPP_NAMESPACE::Bool32 
imageFootprint_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & + setPPipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT + { + pPipelineCreationFeedback = pPipelineCreationFeedback_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & + setPipelineStageCreationFeedbackCount( uint32_t pipelineStageCreationFeedbackCount_ ) VULKAN_HPP_NOEXCEPT + { + pipelineStageCreationFeedbackCount = pipelineStageCreationFeedbackCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & + setPPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT + { + pPipelineStageCreationFeedbacks = pPipelineStageCreationFeedbacks_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineCreationFeedbackCreateInfo & setPipelineStageCreationFeedbacks( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineStageCreationFeedbacks_ ) + VULKAN_HPP_NOEXCEPT { - imageFootprint = imageFootprint_; + pipelineStageCreationFeedbackCount = static_cast( pipelineStageCreationFeedbacks_.size() ); + pPipelineStageCreationFeedbacks = pipelineStageCreationFeedbacks_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineCreationFeedbackCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() VULKAN_HPP_NOEXCEPT + operator VkPipelineCreationFeedbackCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, imageFootprint ); + return std::tie( sType, pNext, pPipelineCreationFeedback, pipelineStageCreationFeedbackCount, pPipelineStageCreationFeedbacks ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderImageFootprintFeaturesNV const & ) const = default; + auto operator<=>( PipelineCreationFeedbackCreateInfo const & ) const = default; #else - bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineCreationFeedbackCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFootprint == rhs.imageFootprint ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pPipelineCreationFeedback == rhs.pPipelineCreationFeedback ) && + ( pipelineStageCreationFeedbackCount == rhs.pipelineStageCreationFeedbackCount ) && + ( pPipelineStageCreationFeedbacks == rhs.pPipelineStageCreationFeedbacks ); # endif } - bool operator!=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCreationFeedbackCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 imageFootprint = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback = {}; + uint32_t pipelineStageCreationFeedbackCount = {}; + VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceShaderImageFootprintFeaturesNV; + using Type = PipelineCreationFeedbackCreateInfo; }; - struct PhysicalDeviceShaderIntegerDotProductFeatures + using PipelineCreationFeedbackCreateInfoEXT = PipelineCreationFeedbackCreateInfo; + + struct PipelineDiscardRectangleStateCreateInfoEXT { - using NativeType = VkPhysicalDeviceShaderIntegerDotProductFeatures; + using NativeType = VkPipelineDiscardRectangleStateCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderIntegerDotProduct( shaderIntegerDotProduct_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive, + uint32_t discardRectangleCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , discardRectangleMode{ discardRectangleMode_ } + , discardRectangleCount{ discardRectangleCount_ } + , pDiscardRectangles{ pDiscardRectangles_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceShaderIntegerDotProductFeatures( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShaderIntegerDotProductFeatures( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderIntegerDotProductFeatures( *reinterpret_cast( &rhs ) ) + PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineDiscardRectangleStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceShaderIntegerDotProductFeatures & operator=( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineDiscardRectangleStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & 
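// Illustrative sketch, not part of the generated header: chaining creation feedback into a
// graphics pipeline and reading the result. Requires Vulkan 1.3 or
// VK_EXT_pipeline_creation_feedback. The surrounding pipeline setup (a populated create info
// with two shader stages, a valid device) is assumed; all names are placeholders.
#include <array>
#include <vulkan/vulkan.hpp>

void createWithFeedback( vk::Device device, vk::GraphicsPipelineCreateInfo pipelineInfo )
{
  vk::PipelineCreationFeedback                wholePipeline{};
  std::array<vk::PipelineCreationFeedback, 2> perStage{};  // one entry per shader stage (assumed 2)
  vk::PipelineCreationFeedbackCreateInfo      feedbackInfo( &wholePipeline, perStage );

  pipelineInfo.setPNext( &feedbackInfo );
  vk::Pipeline pipeline = device.createGraphicsPipeline( nullptr, pipelineInfo ).value;

  if ( wholePipeline.flags & vk::PipelineCreationFeedbackFlagBits::eValid )
  {
    // wholePipeline.duration holds the observed creation time in nanoseconds
  }
  device.destroyPipeline( pipeline );  // keep the sketch self-contained
}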
discardRectangles_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , discardRectangleMode( discardRectangleMode_ ) + , discardRectangleCount( static_cast( discardRectangles_.size() ) ) + , pDiscardRectangles( discardRectangles_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceShaderIntegerDotProductFeatures & operator=( VkPhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineDiscardRectangleStateCreateInfoEXT & operator=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineDiscardRectangleStateCreateInfoEXT & operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerDotProductFeatures & - setShaderIntegerDotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { - shaderIntegerDotProduct = shaderIntegerDotProduct_; + flags = flags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceShaderIntegerDotProductFeatures const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & + setDiscardRectangleMode( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + discardRectangleMode = discardRectangleMode_; + return *this; } - operator VkPhysicalDeviceShaderIntegerDotProductFeatures &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleCount( uint32_t discardRectangleCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + discardRectangleCount = discardRectangleCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & + setPDiscardRectangles( const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT + { + pDiscardRectangles = pDiscardRectangles_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineDiscardRectangleStateCreateInfoEXT & + setDiscardRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & discardRectangles_ ) VULKAN_HPP_NOEXCEPT + { + discardRectangleCount = static_cast( discardRectangles_.size() ); + pDiscardRectangles = discardRectangles_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineDiscardRectangleStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineDiscardRectangleStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + 
std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, shaderIntegerDotProduct ); + return std::tie( sType, pNext, flags, discardRectangleMode, discardRectangleCount, pDiscardRectangles ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderIntegerDotProductFeatures const & ) const = default; + auto operator<=>( PipelineDiscardRectangleStateCreateInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderIntegerDotProduct == rhs.shaderIntegerDotProduct ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( discardRectangleMode == rhs.discardRectangleMode ) && + ( discardRectangleCount == rhs.discardRectangleCount ) && ( pDiscardRectangles == rhs.pDiscardRectangles ); # endif } - bool operator!=( PhysicalDeviceShaderIntegerDotProductFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerDotProductFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive; + uint32_t discardRectangleCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceShaderIntegerDotProductFeatures; + using Type = PipelineDiscardRectangleStateCreateInfoEXT; }; - using PhysicalDeviceShaderIntegerDotProductFeaturesKHR = PhysicalDeviceShaderIntegerDotProductFeatures; - struct PhysicalDeviceShaderIntegerDotProductProperties + struct PipelineExecutableInfoKHR { - using NativeType = VkPhysicalDeviceShaderIntegerDotProductProperties; + using NativeType = VkPipelineExecutableInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductProperties( - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ = {}, - 
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , integerDotProduct8BitUnsignedAccelerated( integerDotProduct8BitUnsignedAccelerated_ ) - , integerDotProduct8BitSignedAccelerated( integerDotProduct8BitSignedAccelerated_ ) - , integerDotProduct8BitMixedSignednessAccelerated( integerDotProduct8BitMixedSignednessAccelerated_ ) - , integerDotProduct4x8BitPackedUnsignedAccelerated( integerDotProduct4x8BitPackedUnsignedAccelerated_ ) - , integerDotProduct4x8BitPackedSignedAccelerated( integerDotProduct4x8BitPackedSignedAccelerated_ ) - , integerDotProduct4x8BitPackedMixedSignednessAccelerated( integerDotProduct4x8BitPackedMixedSignednessAccelerated_ ) - , integerDotProduct16BitUnsignedAccelerated( integerDotProduct16BitUnsignedAccelerated_ ) - , integerDotProduct16BitSignedAccelerated( integerDotProduct16BitSignedAccelerated_ ) - , integerDotProduct16BitMixedSignednessAccelerated( integerDotProduct16BitMixedSignednessAccelerated_ ) - , integerDotProduct32BitUnsignedAccelerated( integerDotProduct32BitUnsignedAccelerated_ ) - , integerDotProduct32BitSignedAccelerated( integerDotProduct32BitSignedAccelerated_ ) - , integerDotProduct32BitMixedSignednessAccelerated( integerDotProduct32BitMixedSignednessAccelerated_ ) - , integerDotProduct64BitUnsignedAccelerated( 
integerDotProduct64BitUnsignedAccelerated_ ) - , integerDotProduct64BitSignedAccelerated( integerDotProduct64BitSignedAccelerated_ ) - , integerDotProduct64BitMixedSignednessAccelerated( integerDotProduct64BitMixedSignednessAccelerated_ ) - , integerDotProductAccumulatingSaturating8BitUnsignedAccelerated( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ ) - , integerDotProductAccumulatingSaturating8BitSignedAccelerated( integerDotProductAccumulatingSaturating8BitSignedAccelerated_ ) - , integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ ) - , integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ ) - , integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ ) - , integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated( - integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ ) - , integerDotProductAccumulatingSaturating16BitUnsignedAccelerated( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ ) - , integerDotProductAccumulatingSaturating16BitSignedAccelerated( integerDotProductAccumulatingSaturating16BitSignedAccelerated_ ) - , integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ ) - , integerDotProductAccumulatingSaturating32BitUnsignedAccelerated( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ ) - , integerDotProductAccumulatingSaturating32BitSignedAccelerated( integerDotProductAccumulatingSaturating32BitSignedAccelerated_ ) - , integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ ) - , integerDotProductAccumulatingSaturating64BitUnsignedAccelerated( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ ) - , integerDotProductAccumulatingSaturating64BitSignedAccelerated( integerDotProductAccumulatingSaturating64BitSignedAccelerated_ ) - , integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + uint32_t executableIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipeline{ pipeline_ } + , executableIndex{ executableIndex_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceShaderIntegerDotProductProperties( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShaderIntegerDotProductProperties( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderIntegerDotProductProperties( *reinterpret_cast( &rhs ) ) + PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineExecutableInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceShaderIntegerDotProductProperties & operator=( PhysicalDeviceShaderIntegerDotProductProperties 
const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineExecutableInfoKHR & operator=( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceShaderIntegerDotProductProperties & operator=( VkPhysicalDeviceShaderIntegerDotProductProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineExecutableInfoKHR & operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceShaderIntegerDotProductProperties const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceShaderIntegerDotProductProperties &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pipeline = pipeline_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setExecutableIndex( uint32_t executableIndex_ ) VULKAN_HPP_NOEXCEPT + { + executableIndex = executableIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineExecutableInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - integerDotProduct8BitUnsignedAccelerated, - integerDotProduct8BitSignedAccelerated, - integerDotProduct8BitMixedSignednessAccelerated, - integerDotProduct4x8BitPackedUnsignedAccelerated, - integerDotProduct4x8BitPackedSignedAccelerated, - integerDotProduct4x8BitPackedMixedSignednessAccelerated, - integerDotProduct16BitUnsignedAccelerated, - integerDotProduct16BitSignedAccelerated, - integerDotProduct16BitMixedSignednessAccelerated, - integerDotProduct32BitUnsignedAccelerated, - integerDotProduct32BitSignedAccelerated, - integerDotProduct32BitMixedSignednessAccelerated, - integerDotProduct64BitUnsignedAccelerated, - integerDotProduct64BitSignedAccelerated, - integerDotProduct64BitMixedSignednessAccelerated, - integerDotProductAccumulatingSaturating8BitUnsignedAccelerated, - integerDotProductAccumulatingSaturating8BitSignedAccelerated, - integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated, - integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated, - integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated, - integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated, - integerDotProductAccumulatingSaturating16BitUnsignedAccelerated, - integerDotProductAccumulatingSaturating16BitSignedAccelerated, - integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated, - integerDotProductAccumulatingSaturating32BitUnsignedAccelerated, - integerDotProductAccumulatingSaturating32BitSignedAccelerated, - integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated, - integerDotProductAccumulatingSaturating64BitUnsignedAccelerated, - integerDotProductAccumulatingSaturating64BitSignedAccelerated, - 
integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ); + return std::tie( sType, pNext, pipeline, executableIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderIntegerDotProductProperties const & ) const = default; + auto operator<=>( PipelineExecutableInfoKHR const & ) const = default; #else - bool operator==( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated ) && - ( integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated ) && - ( integerDotProduct8BitMixedSignednessAccelerated == rhs.integerDotProduct8BitMixedSignednessAccelerated ) && - ( integerDotProduct4x8BitPackedUnsignedAccelerated == rhs.integerDotProduct4x8BitPackedUnsignedAccelerated ) && - ( integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated ) && - ( integerDotProduct4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated ) && - ( integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated ) && - ( integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated ) && - ( integerDotProduct16BitMixedSignednessAccelerated == rhs.integerDotProduct16BitMixedSignednessAccelerated ) && - ( integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated ) && - ( integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated ) && - ( integerDotProduct32BitMixedSignednessAccelerated == rhs.integerDotProduct32BitMixedSignednessAccelerated ) && - ( integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated ) && - ( integerDotProduct64BitSignedAccelerated == rhs.integerDotProduct64BitSignedAccelerated ) && - ( integerDotProduct64BitMixedSignednessAccelerated == rhs.integerDotProduct64BitMixedSignednessAccelerated ) && - ( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ) && - ( integerDotProductAccumulatingSaturating8BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated ) && - ( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated == - rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ) && - ( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated == - rhs.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ) && - ( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated == - rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ) && - ( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated == - rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ) && - ( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ) && - ( integerDotProductAccumulatingSaturating16BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated ) && - ( 
integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated == - rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ) && - ( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ) && - ( integerDotProductAccumulatingSaturating32BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated ) && - ( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated == - rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ) && - ( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ) && - ( integerDotProductAccumulatingSaturating64BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated ) && - ( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated == - rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline ) && ( executableIndex == rhs.executableIndex ); # endif } - bool operator!=( PhysicalDeviceShaderIntegerDotProductProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerDotProductProperties; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 
integerDotProductAccumulatingSaturating16BitSignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + uint32_t executableIndex = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceShaderIntegerDotProductProperties; + using Type = PipelineExecutableInfoKHR; }; - using PhysicalDeviceShaderIntegerDotProductPropertiesKHR = PhysicalDeviceShaderIntegerDotProductProperties; - struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL + struct PipelineExecutableInternalRepresentationKHR { - using NativeType = VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + using NativeType = VkPipelineExecutableInternalRepresentationKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderIntegerFunctions2( shaderIntegerFunctions2_ ) - { - } - - VULKAN_HPP_CONSTEXPR - PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInternalRepresentationKHR; - PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR( std::array const & name_ = {}, + std::array const & description_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 isText_ = {}, + size_t dataSize_ = {}, + void * pData_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , name{ name_ } + , description{ description_ } + , isText{ isText_ } + , dataSize{ dataSize_ } + , pData{ pData_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & - operator=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 + PipelineExecutableInternalRepresentationKHR( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL 
const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineExecutableInternalRepresentationKHR( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + PipelineExecutableInternalRepresentationKHR & operator=( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & - setShaderIntegerFunctions2( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT + PipelineExecutableInternalRepresentationKHR & operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - shaderIntegerFunctions2 = shaderIntegerFunctions2_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineExecutableInternalRepresentationKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &() VULKAN_HPP_NOEXCEPT + operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + size_t const &, + void * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, shaderIntegerFunctions2 ); + return std::tie( sType, pNext, name, description, isText, dataSize, pData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & ) const = default; -#else - bool operator==( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + std::strong_ordering operator<=>( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderIntegerFunctions2 == rhs.shaderIntegerFunctions2 ); -# endif + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = isText <=> rhs.isText; cmp != 0 ) + return cmp; + if ( auto cmp = dataSize <=> rhs.dataSize; cmp != 0 ) + return cmp; + if ( auto cmp = pData <=> rhs.pData; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; } +#endif - bool operator!=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( strcmp( name, rhs.name ) == 0 ) && ( strcmp( description, rhs.description ) == 0 ) && + ( isText == rhs.isText ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); + } + + bool operator!=( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2 = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + VULKAN_HPP_NAMESPACE::Bool32 isText = {}; + size_t dataSize = {}; + void * pData = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + using Type = PipelineExecutableInternalRepresentationKHR; }; - struct PhysicalDeviceShaderModuleIdentifierFeaturesEXT - { - using NativeType = VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderModuleIdentifierFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderModuleIdentifier_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderModuleIdentifier( shaderModuleIdentifier_ ) - { - } + struct PipelineExecutablePropertiesKHR + { + using NativeType = VkPipelineExecutablePropertiesKHR; - VULKAN_HPP_CONSTEXPR - PhysicalDeviceShaderModuleIdentifierFeaturesEXT( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutablePropertiesKHR; - PhysicalDeviceShaderModuleIdentifierFeaturesEXT( VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderModuleIdentifierFeaturesEXT( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {}, + std::array const & name_ = {}, + std::array const & description_ = {}, + uint32_t subgroupSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stages{ stages_ } + , name{ name_ } + , description{ description_ } + , subgroupSize{ subgroupSize_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceShaderModuleIdentifierFeaturesEXT & operator=( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShaderModuleIdentifierFeaturesEXT & operator=( VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineExecutablePropertiesKHR( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderModuleIdentifierFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + PipelineExecutablePropertiesKHR & operator=( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderModuleIdentifierFeaturesEXT & - setShaderModuleIdentifier( VULKAN_HPP_NAMESPACE::Bool32 shaderModuleIdentifier_ ) VULKAN_HPP_NOEXCEPT + PipelineExecutablePropertiesKHR & operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - shaderModuleIdentifier = shaderModuleIdentifier_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineExecutablePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + uint32_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, shaderModuleIdentifier ); + return std::tie( sType, pNext, stages, name, description, subgroupSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & ) const = default; -#else - bool operator==( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + std::strong_ordering operator<=>( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderModuleIdentifier == rhs.shaderModuleIdentifier ); -# endif + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = stages <=> rhs.stages; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = subgroupSize <=> rhs.subgroupSize; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; } +#endif - bool operator!=( PhysicalDeviceShaderModuleIdentifierFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stages == rhs.stages ) && ( strcmp( name, rhs.name ) == 0 ) && + ( strcmp( description, rhs.description ) == 0 ) && ( subgroupSize == rhs.subgroupSize ); + } + + bool operator!=( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderModuleIdentifier = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + uint32_t subgroupSize = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceShaderModuleIdentifierFeaturesEXT; + using Type = PipelineExecutablePropertiesKHR; }; - struct PhysicalDeviceShaderModuleIdentifierPropertiesEXT + union PipelineExecutableStatisticValueKHR { - using NativeType = VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT; + using NativeType = VkPipelineExecutableStatisticValueKHR; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 - PhysicalDeviceShaderModuleIdentifierPropertiesEXT( std::array const & shaderModuleIdentifierAlgorithmUUID_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderModuleIdentifierAlgorithmUUID( shaderModuleIdentifierAlgorithmUUID_ ) - { - } + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::Bool32 b32_ = {} ) : b32( b32_ ) {} - VULKAN_HPP_CONSTEXPR_14 - PhysicalDeviceShaderModuleIdentifierPropertiesEXT( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( int64_t i64_ ) : i64( i64_ ) {} - PhysicalDeviceShaderModuleIdentifierPropertiesEXT( VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderModuleIdentifierPropertiesEXT( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( uint64_t u64_ ) : u64( u64_ ) {} - PhysicalDeviceShaderModuleIdentifierPropertiesEXT & - operator=( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( double f64_ ) : f64( f64_ ) {} +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceShaderModuleIdentifierPropertiesEXT & operator=( VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setB32( VULKAN_HPP_NAMESPACE::Bool32 b32_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + b32 = b32_; return *this; } - operator VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setI64( int64_t i64_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + i64 = i64_; + return *this; } - operator VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setU64( uint64_t u64_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + u64 = u64_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple const &> -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setF64( double f64_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, shaderModuleIdentifierAlgorithmUUID ); + f64 = f64_; + return *this; } -#endif +#endif /*VULKAN_HPP_NO_SETTERS*/ -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & ) const = default; -#else - bool operator==( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + operator VkPipelineExecutableStatisticValueKHR const &() const { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderModuleIdentifierAlgorithmUUID == rhs.shaderModuleIdentifierAlgorithmUUID ); -# endif + return *reinterpret_cast( this ); } - bool operator!=( PhysicalDeviceShaderModuleIdentifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + operator VkPipelineExecutableStatisticValueKHR &() { - return !operator==( rhs ); + return *reinterpret_cast( this ); } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D shaderModuleIdentifierAlgorithmUUID = {}; - }; - template <> - struct CppType - { - using Type = PhysicalDeviceShaderModuleIdentifierPropertiesEXT; +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::Bool32 b32; + int64_t i64; + uint64_t u64; + double f64; +#else + VkBool32 b32; + int64_t i64; + uint64_t u64; + double f64; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; - struct PhysicalDeviceShaderSMBuiltinsFeaturesNV + struct PipelineExecutableStatisticKHR { - using NativeType = VkPhysicalDeviceShaderSMBuiltinsFeaturesNV; + using NativeType = VkPipelineExecutableStatisticKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderSMBuiltins( shaderSMBuiltins_ ) - { - } - - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableStatisticKHR; - 
PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderSMBuiltinsFeaturesNV( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticKHR( + std::array const & name_ = {}, + std::array const & description_ = {}, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , name{ name_ } + , description{ description_ } + , format{ format_ } + , value{ value_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticKHR( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineExecutableStatisticKHR( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + PipelineExecutableStatisticKHR & operator=( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV & setShaderSMBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT + PipelineExecutableStatisticKHR & operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - shaderSMBuiltins = shaderSMBuiltins_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineExecutableStatisticKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV &() VULKAN_HPP_NOEXCEPT + operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR const &, + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, shaderSMBuiltins ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & ) const = default; -#else - bool operator==( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSMBuiltins == 
rhs.shaderSMBuiltins ); -# endif - } - - bool operator!=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); + return std::tie( sType, pNext, name, description, format, value ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32; + VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceShaderSMBuiltinsFeaturesNV; + using Type = PipelineExecutableStatisticKHR; }; - struct PhysicalDeviceShaderSMBuiltinsPropertiesNV + struct PipelineFragmentShadingRateEnumStateCreateInfoNV { - using NativeType = VkPhysicalDeviceShaderSMBuiltinsPropertiesNV; + using NativeType = VkPipelineFragmentShadingRateEnumStateCreateInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - PhysicalDeviceShaderSMBuiltinsPropertiesNV( uint32_t shaderSMCount_ = {}, uint32_t shaderWarpsPerSM_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderSMCount( shaderSMCount_ ) - , shaderWarpsPerSM( shaderWarpsPerSM_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ = VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize, + VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ = VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel, + std::array const & + combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } }, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shadingRateType{ shadingRateType_ } + , shadingRate{ shadingRate_ } + , combinerOps{ combinerOps_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 + PipelineFragmentShadingRateEnumStateCreateInfoNV( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderSMBuiltinsPropertiesNV( *reinterpret_cast( &rhs ) ) + PipelineFragmentShadingRateEnumStateCreateInfoNV( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineFragmentShadingRateEnumStateCreateInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( 
PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineFragmentShadingRateEnumStateCreateInfoNV & operator=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineFragmentShadingRateEnumStateCreateInfoNV & operator=( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & + setShadingRateType( VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + shadingRateType = shadingRateType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & + setShadingRate( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ ) VULKAN_HPP_NOEXCEPT + { + shadingRate = shadingRate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & + setCombinerOps( std::array combinerOps_ ) VULKAN_HPP_NOEXCEPT + { + combinerOps = combinerOps_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, shaderSMCount, shaderWarpsPerSM ); + return std::tie( sType, pNext, shadingRateType, shadingRate, combinerOps ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & ) const = default; + auto operator<=>( PipelineFragmentShadingRateEnumStateCreateInfoNV const & ) const = default; #else - bool operator==( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSMCount == rhs.shaderSMCount ) && ( shaderWarpsPerSM == rhs.shaderWarpsPerSM ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateType == rhs.shadingRateType ) && ( shadingRate == rhs.shadingRate ) && + ( combinerOps == rhs.combinerOps ); # endif } - bool operator!=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { 
return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV; - void * pNext = {}; - uint32_t shaderSMCount = {}; - uint32_t shaderWarpsPerSM = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType = VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize; + VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate = VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D combinerOps = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceShaderSMBuiltinsPropertiesNV; + using Type = PipelineFragmentShadingRateEnumStateCreateInfoNV; }; - struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures + struct PipelineFragmentShadingRateStateCreateInfoKHR { - using NativeType = VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures; + using NativeType = VkPipelineFragmentShadingRateStateCreateInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR( + VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {}, + std::array const & + combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } }, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fragmentSize{ fragmentSize_ } + , combinerOps{ combinerOps_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceShaderSubgroupExtendedTypesFeatures( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 + PipelineFragmentShadingRateStateCreateInfoKHR( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderSubgroupExtendedTypesFeatures( *reinterpret_cast( &rhs ) ) + PipelineFragmentShadingRateStateCreateInfoKHR( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineFragmentShadingRateStateCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceShaderSubgroupExtendedTypesFeatures & - operator=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineFragmentShadingRateStateCreateInfoKHR & operator=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( 
VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineFragmentShadingRateStateCreateInfoKHR & operator=( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures & - setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & + setFragmentSize( VULKAN_HPP_NAMESPACE::Extent2D const & fragmentSize_ ) VULKAN_HPP_NOEXCEPT { - shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_; + fragmentSize = fragmentSize_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & + setCombinerOps( std::array combinerOps_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + combinerOps = combinerOps_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures &() VULKAN_HPP_NOEXCEPT + operator VkPipelineFragmentShadingRateStateCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkPipelineFragmentShadingRateStateCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, shaderSubgroupExtendedTypes ); + return std::tie( sType, pNext, fragmentSize, combinerOps ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & ) const = default; + auto operator<=>( PipelineFragmentShadingRateStateCreateInfoKHR const & ) const = default; #else - bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentSize == rhs.fragmentSize ) && ( combinerOps == rhs.combinerOps ); # endif } - bool operator!=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 
shaderSubgroupExtendedTypes = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D combinerOps = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceShaderSubgroupExtendedTypesFeatures; + using Type = PipelineFragmentShadingRateStateCreateInfoKHR; }; - using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures; - struct PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR + struct PipelineIndirectDeviceAddressInfoNV { - using NativeType = VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; + using NativeType = VkPipelineIndirectDeviceAddressInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineIndirectDeviceAddressInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderSubgroupUniformControlFlow( shaderSubgroupUniformControlFlow_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineIndirectDeviceAddressInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineBindPoint{ pipelineBindPoint_ } + , pipeline{ pipeline_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) - VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineIndirectDeviceAddressInfoNV( PipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( - *reinterpret_cast( &rhs ) ) + PipelineIndirectDeviceAddressInfoNV( VkPipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineIndirectDeviceAddressInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & - operator=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineIndirectDeviceAddressInfoNV & operator=( PipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & - operator=( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineIndirectDeviceAddressInfoNV & operator=( VkPipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineIndirectDeviceAddressInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & - setShaderSubgroupUniformControlFlow( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineIndirectDeviceAddressInfoNV & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT { - shaderSubgroupUniformControlFlow = shaderSubgroupUniformControlFlow_; + pipelineBindPoint = pipelineBindPoint_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineIndirectDeviceAddressInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pipeline = pipeline_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPipelineIndirectDeviceAddressInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkPipelineIndirectDeviceAddressInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, shaderSubgroupUniformControlFlow ); + return std::tie( sType, pNext, pipelineBindPoint, pipeline ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & ) const = default; + auto operator<=>( PipelineIndirectDeviceAddressInfoNV const & ) const = default; #else - bool operator==( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineIndirectDeviceAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderSubgroupUniformControlFlow == rhs.shaderSubgroupUniformControlFlow ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipeline == rhs.pipeline ); # endif } - bool operator!=( PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineIndirectDeviceAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineIndirectDeviceAddressInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; }; template 
<>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>
+  struct CppType<StructureType, StructureType::ePipelineIndirectDeviceAddressInfoNV>
   {
-    using Type = PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
+    using Type = PipelineIndirectDeviceAddressInfoNV;
   };
 
-  struct PhysicalDeviceShaderTerminateInvocationFeatures
+  struct PipelineInfoKHR
   {
-    using NativeType = VkPhysicalDeviceShaderTerminateInvocationFeatures;
+    using NativeType = VkPipelineInfoKHR;
 
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInfoKHR;
 
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {},
-                                                                          void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
-      : pNext( pNext_ )
-      , shaderTerminateInvocation( shaderTerminateInvocation_ )
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , pipeline{ pipeline_ }
     {
     }
 
-    VULKAN_HPP_CONSTEXPR
-      PhysicalDeviceShaderTerminateInvocationFeatures( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR PipelineInfoKHR( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 
-    PhysicalDeviceShaderTerminateInvocationFeatures( VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
-      : PhysicalDeviceShaderTerminateInvocationFeatures( *reinterpret_cast<PhysicalDeviceShaderTerminateInvocationFeatures const *>( &rhs ) )
-    {
-    }
-#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+    PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PipelineInfoKHR( *reinterpret_cast<PipelineInfoKHR const *>( &rhs ) ) {}
 
-    PhysicalDeviceShaderTerminateInvocationFeatures & operator=( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    PipelineInfoKHR & operator=( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
 
-    PhysicalDeviceShaderTerminateInvocationFeatures & operator=( VkPhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    PipelineInfoKHR & operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInfoKHR const *>( &rhs );
       return *this;
     }
 
-#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
-    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
      pNext = pNext_;
      return *this;
    }
 
-    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeatures &
-      setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
     {
-      shaderTerminateInvocation = shaderTerminateInvocation_;
+      pipeline = pipeline_;
       return *this;
     }
-#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+#endif /*VULKAN_HPP_NO_SETTERS*/
 
-    operator VkPhysicalDeviceShaderTerminateInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT
+    operator VkPipelineInfoKHR const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkPhysicalDeviceShaderTerminateInvocationFeatures *>( this );
+      return *reinterpret_cast<const VkPipelineInfoKHR *>( this );
     }
 
-    operator VkPhysicalDeviceShaderTerminateInvocationFeatures &() VULKAN_HPP_NOEXCEPT
+    operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkPhysicalDeviceShaderTerminateInvocationFeatures *>( this );
+      return *reinterpret_cast<VkPipelineInfoKHR *>( this );
     }
 
 #if defined( VULKAN_HPP_USE_REFLECT )
 #  if 14 <= VULKAN_HPP_CPP_VERSION
     auto
 #  else
-    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::Pipeline const &>
 #  endif
       reflect() const VULKAN_HPP_NOEXCEPT
     {
-      return std::tie( sType, pNext, shaderTerminateInvocation );
+      return std::tie( sType, pNext, pipeline );
     }
 #endif
 
 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
-    auto operator<=>( PhysicalDeviceShaderTerminateInvocationFeatures const & ) const = default;
+    auto operator<=>( PipelineInfoKHR const & ) const = default;
 #else
-    bool operator==( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
 #  if defined( VULKAN_HPP_USE_REFLECT )
       return this->reflect() == rhs.reflect();
 #  else
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderTerminateInvocation == rhs.shaderTerminateInvocation );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline );
 #  endif
     }
 
-    bool operator!=( PhysicalDeviceShaderTerminateInvocationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif
 
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures;
-    void * pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInfoKHR;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
   };
 
   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceShaderTerminateInvocationFeatures>
+  struct CppType<StructureType, StructureType::ePipelineInfoKHR>
   {
-    using Type = PhysicalDeviceShaderTerminateInvocationFeatures;
+    using Type = PipelineInfoKHR;
   };
 
-  using PhysicalDeviceShaderTerminateInvocationFeaturesKHR = PhysicalDeviceShaderTerminateInvocationFeatures;
-  struct PhysicalDeviceShadingRateImageFeaturesNV
+  using PipelineInfoEXT = PipelineInfoKHR;
+
+  struct PipelineLayoutCreateInfo
   {
-    using NativeType = VkPhysicalDeviceShadingRateImageFeaturesNV;
+    using NativeType = VkPipelineLayoutCreateInfo;
 
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLayoutCreateInfo;
 
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {},
-                                                                   VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {},
-                                                                   void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
-      : pNext( pNext_ )
-      , shadingRateImage( shadingRateImage_ )
-      , shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ )
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {},
+                                                   uint32_t setLayoutCount_ = {},
+                                                   const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {},
+                                                   uint32_t pushConstantRangeCount_ = {},
+                                                   const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {},
+                                                   const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , flags{ flags_ }
+ , setLayoutCount{ setLayoutCount_ } + , pSetLayouts{ pSetLayouts_ } + , pushConstantRangeCount{ pushConstantRangeCount_ } + , pPushConstantRanges{ pPushConstantRanges_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShadingRateImageFeaturesNV( *reinterpret_cast( &rhs ) ) + PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineLayoutCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceShadingRateImageFeaturesNV & operator=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , setLayoutCount( static_cast( setLayouts_.size() ) ) + , pSetLayouts( setLayouts_.data() ) + , pushConstantRangeCount( static_cast( pushConstantRanges_.size() ) ) + , pPushConstantRanges( pushConstantRanges_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceShadingRateImageFeaturesNV & operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineLayoutCreateInfo & operator=( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineLayoutCreateInfo & operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateImage( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - shadingRateImage = shadingRateImage_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & - setShadingRateCoarseSampleOrder( VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT { - shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_; + setLayoutCount = setLayoutCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceShadingRateImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) 
VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pSetLayouts = pSetLayouts_; + return *this; } - operator VkPhysicalDeviceShadingRateImageFeaturesNV &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLayoutCreateInfo & + setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + setLayoutCount = static_cast( setLayouts_.size() ); + pSetLayouts = setLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT + { + pushConstantRangeCount = pushConstantRangeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & + setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT + { + pPushConstantRanges = pPushConstantRanges_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLayoutCreateInfo & setPushConstantRanges( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT + { + pushConstantRangeCount = static_cast( pushConstantRanges_.size() ); + pPushConstantRanges = pushConstantRanges_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, shadingRateImage, shadingRateCoarseSampleOrder ); + return std::tie( sType, pNext, flags, setLayoutCount, pSetLayouts, pushConstantRangeCount, pPushConstantRanges ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShadingRateImageFeaturesNV const & ) const = default; + auto operator<=>( PipelineLayoutCreateInfo const & ) const = default; #else - bool operator==( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateImage == rhs.shadingRateImage ) && - ( shadingRateCoarseSampleOrder == rhs.shadingRateCoarseSampleOrder ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( setLayoutCount == rhs.setLayoutCount ) && + ( pSetLayouts == rhs.pSetLayouts ) && ( pushConstantRangeCount == rhs.pushConstantRangeCount ) && + ( pPushConstantRanges == rhs.pPushConstantRanges ); # endif } - bool operator!=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage = {}; - VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePipelineLayoutCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags = {}; + uint32_t setLayoutCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; + uint32_t pushConstantRangeCount = {}; + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceShadingRateImageFeaturesNV; + using Type = PipelineLayoutCreateInfo; }; - struct PhysicalDeviceShadingRateImagePropertiesNV + struct PipelinePropertiesIdentifierEXT { - using NativeType = VkPhysicalDeviceShadingRateImagePropertiesNV; + using NativeType = VkPipelinePropertiesIdentifierEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelinePropertiesIdentifierEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {}, - uint32_t shadingRatePaletteSize_ = {}, - uint32_t shadingRateMaxCoarseSamples_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shadingRateTexelSize( shadingRateTexelSize_ ) - , shadingRatePaletteSize( shadingRatePaletteSize_ ) - , shadingRateMaxCoarseSamples( shadingRateMaxCoarseSamples_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelinePropertiesIdentifierEXT( std::array const & pipelineIdentifier_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineIdentifier{ pipelineIdentifier_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 PipelinePropertiesIdentifierEXT( PipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShadingRateImagePropertiesNV( *reinterpret_cast( &rhs ) ) + PipelinePropertiesIdentifierEXT( VkPipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelinePropertiesIdentifierEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceShadingRateImagePropertiesNV & operator=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelinePropertiesIdentifierEXT & operator=( PipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceShadingRateImagePropertiesNV & operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PipelinePropertiesIdentifierEXT & operator=( VkPipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceShadingRateImagePropertiesNV const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelinePropertiesIdentifierEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceShadingRateImagePropertiesNV &() VULKAN_HPP_NOEXCEPT + operator 
VkPipelinePropertiesIdentifierEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, shadingRateTexelSize, shadingRatePaletteSize, shadingRateMaxCoarseSamples ); + return std::tie( sType, pNext, pipelineIdentifier ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShadingRateImagePropertiesNV const & ) const = default; + auto operator<=>( PipelinePropertiesIdentifierEXT const & ) const = default; #else - bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelinePropertiesIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateTexelSize == rhs.shadingRateTexelSize ) && - ( shadingRatePaletteSize == rhs.shadingRatePaletteSize ) && ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineIdentifier == rhs.pipelineIdentifier ); # endif } - bool operator!=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelinePropertiesIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize = {}; - uint32_t shadingRatePaletteSize = {}; - uint32_t shadingRateMaxCoarseSamples = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelinePropertiesIdentifierEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D pipelineIdentifier = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceShadingRateImagePropertiesNV; + using Type = PipelinePropertiesIdentifierEXT; }; - struct PhysicalDeviceSparseImageFormatInfo2 + struct PipelineRasterizationConservativeStateCreateInfoEXT { - using NativeType = VkPhysicalDeviceSparseImageFormatInfo2; + using NativeType = VkPipelineRasterizationConservativeStateCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSparseImageFormatInfo2; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - PhysicalDeviceSparseImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, - VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , format( format_ ) - , type( type_ ) - , samples( samples_ ) - , usage( usage_ ) - , tiling( tiling_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled, + float extraPrimitiveOverestimationSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , conservativeRasterizationMode{ conservativeRasterizationMode_ } + , extraPrimitiveOverestimationSize{ extraPrimitiveOverestimationSize_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + PipelineRasterizationConservativeStateCreateInfoEXT( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceSparseImageFormatInfo2( *reinterpret_cast( &rhs ) ) + PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationConservativeStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceSparseImageFormatInfo2 & operator=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineRasterizationConservativeStateCreateInfoEXT & + operator=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceSparseImageFormatInfo2 & operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationConservativeStateCreateInfoEXT & operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT - { - format = format_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { - samples = samples_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & + setConservativeRasterizationMode( 
VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT { - usage = usage_; + conservativeRasterizationMode = conservativeRasterizationMode_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & + setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT { - tiling = tiling_; + extraPrimitiveOverestimationSize = extraPrimitiveOverestimationSize_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceSparseImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineRasterizationConservativeStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT + operator VkPipelineRasterizationConservativeStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -65406,99 +106784,114 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT const &, + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT const &, + float const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, format, type, samples, usage, tiling ); + return std::tie( sType, pNext, flags, conservativeRasterizationMode, extraPrimitiveOverestimationSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceSparseImageFormatInfo2 const & ) const = default; + auto operator<=>( PipelineRasterizationConservativeStateCreateInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) && ( samples == rhs.samples ) && - ( usage == rhs.usage ) && ( tiling == rhs.tiling ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( conservativeRasterizationMode == rhs.conservativeRasterizationMode ) && + ( extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize ); # endif } - bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; - VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; - VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; - VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled; + float extraPrimitiveOverestimationSize = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceSparseImageFormatInfo2; + using Type = PipelineRasterizationConservativeStateCreateInfoEXT; }; - using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2; - struct PhysicalDeviceSubgroupProperties + struct PipelineRasterizationDepthClipStateCreateInfoEXT { - using NativeType = VkPhysicalDeviceSubgroupProperties; + using NativeType = VkPipelineRasterizationDepthClipStateCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupProperties; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( uint32_t subgroupSize_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {}, - VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , subgroupSize( subgroupSize_ ) - , supportedStages( supportedStages_ ) - , supportedOperations( supportedOperations_ ) - , quadOperationsInAllStages( quadOperationsInAllStages_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , depthClipEnable{ depthClipEnable_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + PipelineRasterizationDepthClipStateCreateInfoEXT( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceSubgroupProperties( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceSubgroupProperties( *reinterpret_cast( &rhs ) ) + PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationDepthClipStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceSubgroupProperties & operator=( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceSubgroupProperties & operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = 
*reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceSubgroupProperties const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceSubgroupProperties &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & + setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT + { + depthClipEnable = depthClipEnable_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationDepthClipStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -65506,207 +106899,127 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, subgroupSize, supportedStages, supportedOperations, quadOperationsInAllStages ); + return std::tie( sType, pNext, flags, depthClipEnable ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceSubgroupProperties const & ) const = default; + auto operator<=>( PipelineRasterizationDepthClipStateCreateInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSize == rhs.subgroupSize ) && ( supportedStages == rhs.supportedStages ) && - ( supportedOperations == rhs.supportedOperations ) && ( quadOperationsInAllStages == rhs.quadOperationsInAllStages ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( depthClipEnable == rhs.depthClipEnable ); # endif } - bool operator!=( PhysicalDeviceSubgroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties; - void * pNext = {}; - uint32_t subgroupSize = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages = {}; - VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations = {}; - VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {}; }; template <> - struct CppType + struct CppType { - using Type = 
PhysicalDeviceSubgroupProperties; + using Type = PipelineRasterizationDepthClipStateCreateInfoEXT; }; - struct PhysicalDeviceSubgroupSizeControlFeatures + struct PipelineRasterizationLineStateCreateInfo { - using NativeType = VkPhysicalDeviceSubgroupSizeControlFeatures; + using NativeType = VkPipelineRasterizationLineStateCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationLineStateCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , subgroupSizeControl( subgroupSizeControl_ ) - , computeFullSubgroups( computeFullSubgroups_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfo( + VULKAN_HPP_NAMESPACE::LineRasterizationMode lineRasterizationMode_ = VULKAN_HPP_NAMESPACE::LineRasterizationMode::eDefault, + VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {}, + uint32_t lineStippleFactor_ = {}, + uint16_t lineStipplePattern_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , lineRasterizationMode{ lineRasterizationMode_ } + , stippledLineEnable{ stippledLineEnable_ } + , lineStippleFactor{ lineStippleFactor_ } + , lineStipplePattern{ lineStipplePattern_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfo( PipelineRasterizationLineStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceSubgroupSizeControlFeatures( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceSubgroupSizeControlFeatures( *reinterpret_cast( &rhs ) ) + PipelineRasterizationLineStateCreateInfo( VkPipelineRasterizationLineStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationLineStateCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceSubgroupSizeControlFeatures & operator=( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineRasterizationLineStateCreateInfo & operator=( PipelineRasterizationLineStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceSubgroupSizeControlFeatures & operator=( VkPhysicalDeviceSubgroupSizeControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationLineStateCreateInfo & operator=( VkPipelineRasterizationLineStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & - setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo & + setLineRasterizationMode( VULKAN_HPP_NAMESPACE::LineRasterizationMode lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT { - subgroupSizeControl = subgroupSizeControl_; + lineRasterizationMode = lineRasterizationMode_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeatures & - setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo & + setStippledLineEnable( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT { - computeFullSubgroups = computeFullSubgroups_; + stippledLineEnable = stippledLineEnable_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkPhysicalDeviceSubgroupSizeControlFeatures const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceSubgroupSizeControlFeatures &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, subgroupSizeControl, computeFullSubgroups ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceSubgroupSizeControlFeatures const & ) const = default; -#else - bool operator==( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subgroupSizeControl == rhs.subgroupSizeControl ) && - ( computeFullSubgroups == rhs.computeFullSubgroups ); -# endif - } - - bool operator!=( PhysicalDeviceSubgroupSizeControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {}; - VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {}; - }; - - template <> - struct CppType - { - using Type = PhysicalDeviceSubgroupSizeControlFeatures; - }; - using PhysicalDeviceSubgroupSizeControlFeaturesEXT = PhysicalDeviceSubgroupSizeControlFeatures; - - struct PhysicalDeviceSubgroupSizeControlProperties - { - using NativeType = VkPhysicalDeviceSubgroupSizeControlProperties; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlProperties; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlProperties( uint32_t minSubgroupSize_ = {}, - uint32_t maxSubgroupSize_ = {}, - uint32_t maxComputeWorkgroupSubgroups_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , minSubgroupSize( minSubgroupSize_ ) - , maxSubgroupSize( maxSubgroupSize_ ) - , maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ ) - , requiredSubgroupSizeStages( requiredSubgroupSizeStages_ ) - { - } - - VULKAN_HPP_CONSTEXPR 
PhysicalDeviceSubgroupSizeControlProperties( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceSubgroupSizeControlProperties( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceSubgroupSizeControlProperties( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo & setLineStippleFactor( uint32_t lineStippleFactor_ ) VULKAN_HPP_NOEXCEPT { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PhysicalDeviceSubgroupSizeControlProperties & operator=( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + lineStippleFactor = lineStippleFactor_; + return *this; + } - PhysicalDeviceSubgroupSizeControlProperties & operator=( VkPhysicalDeviceSubgroupSizeControlProperties const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo & setLineStipplePattern( uint16_t lineStipplePattern_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + lineStipplePattern = lineStipplePattern_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceSubgroupSizeControlProperties const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineRasterizationLineStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceSubgroupSizeControlProperties &() VULKAN_HPP_NOEXCEPT + operator VkPipelineRasterizationLineStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -65714,670 +107027,839 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + uint16_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, minSubgroupSize, maxSubgroupSize, maxComputeWorkgroupSubgroups, requiredSubgroupSizeStages ); + return std::tie( sType, pNext, lineRasterizationMode, stippledLineEnable, lineStippleFactor, lineStipplePattern ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceSubgroupSizeControlProperties const & ) const = default; + auto operator<=>( PipelineRasterizationLineStateCreateInfo const & ) const = default; #else - bool operator==( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRasterizationLineStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSubgroupSize == rhs.minSubgroupSize ) && ( maxSubgroupSize == rhs.maxSubgroupSize ) && - ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups ) && ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( lineRasterizationMode == rhs.lineRasterizationMode ) && + ( stippledLineEnable == rhs.stippledLineEnable ) && ( lineStippleFactor == rhs.lineStippleFactor ) && + ( lineStipplePattern == rhs.lineStipplePattern ); # endif } - bool operator!=( PhysicalDeviceSubgroupSizeControlProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRasterizationLineStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlProperties; - 
void * pNext = {}; - uint32_t minSubgroupSize = {}; - uint32_t maxSubgroupSize = {}; - uint32_t maxComputeWorkgroupSubgroups = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::LineRasterizationMode lineRasterizationMode = VULKAN_HPP_NAMESPACE::LineRasterizationMode::eDefault; + VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable = {}; + uint32_t lineStippleFactor = {}; + uint16_t lineStipplePattern = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceSubgroupSizeControlProperties; + using Type = PipelineRasterizationLineStateCreateInfo; }; - using PhysicalDeviceSubgroupSizeControlPropertiesEXT = PhysicalDeviceSubgroupSizeControlProperties; - struct PhysicalDeviceSubpassMergeFeedbackFeaturesEXT + using PipelineRasterizationLineStateCreateInfoEXT = PipelineRasterizationLineStateCreateInfo; + using PipelineRasterizationLineStateCreateInfoKHR = PipelineRasterizationLineStateCreateInfo; + + struct PipelineRasterizationProvokingVertexStateCreateInfoEXT { - using NativeType = VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT; + using NativeType = VkPipelineRasterizationProvokingVertexStateCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 subpassMergeFeedback_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , subpassMergeFeedback( subpassMergeFeedback_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT( + VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ = VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , provokingVertexMode{ provokingVertexMode_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) + VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( *reinterpret_cast( &rhs ) ) + PipelineRasterizationProvokingVertexStateCreateInfoEXT( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationProvokingVertexStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & operator=( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineRasterizationProvokingVertexStateCreateInfoEXT & + operator=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif 
/*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & operator=( VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationProvokingVertexStateCreateInfoEXT & + operator=( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassMergeFeedbackFeaturesEXT & - setSubpassMergeFeedback( VULKAN_HPP_NAMESPACE::Bool32 subpassMergeFeedback_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT & + setProvokingVertexMode( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ ) VULKAN_HPP_NOEXCEPT { - subpassMergeFeedback = subpassMergeFeedback_; + provokingVertexMode = provokingVertexMode_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, subpassMergeFeedback ); + return std::tie( sType, pNext, provokingVertexMode ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & ) const = default; + auto operator<=>( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassMergeFeedback == rhs.subpassMergeFeedback ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexMode == rhs.provokingVertexMode ); # endif } - bool operator!=( PhysicalDeviceSubpassMergeFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 subpassMergeFeedback = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode = VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceSubpassMergeFeedbackFeaturesEXT; + using Type = PipelineRasterizationProvokingVertexStateCreateInfoEXT; }; - struct PhysicalDeviceSubpassShadingFeaturesHUAWEI + struct PipelineRasterizationStateRasterizationOrderAMD { - using NativeType = VkPhysicalDeviceSubpassShadingFeaturesHUAWEI; + using NativeType = VkPipelineRasterizationStateRasterizationOrderAMD; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 subpassShading_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , subpassShading( subpassShading_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( + VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , rasterizationOrder{ rasterizationOrder_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + PipelineRasterizationStateRasterizationOrderAMD( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceSubpassShadingFeaturesHUAWEI( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceSubpassShadingFeaturesHUAWEI( *reinterpret_cast( &rhs ) ) + PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationStateRasterizationOrderAMD( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceSubpassShadingFeaturesHUAWEI & operator=( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineRasterizationStateRasterizationOrderAMD & operator=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceSubpassShadingFeaturesHUAWEI & operator=( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateRasterizationOrderAMD & operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassShadingFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; 
return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubpassShadingFeaturesHUAWEI & setSubpassShading( VULKAN_HPP_NAMESPACE::Bool32 subpassShading_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD & + setRasterizationOrder( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ ) VULKAN_HPP_NOEXCEPT { - subpassShading = subpassShading_; + rasterizationOrder = rasterizationOrder_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineRasterizationStateRasterizationOrderAMD const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceSubpassShadingFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT + operator VkPipelineRasterizationStateRasterizationOrderAMD &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, subpassShading ); + return std::tie( sType, pNext, rasterizationOrder ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & ) const = default; + auto operator<=>( PipelineRasterizationStateRasterizationOrderAMD const & ) const = default; #else - bool operator==( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassShading == rhs.subpassShading ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rasterizationOrder == rhs.rasterizationOrder ); # endif } - bool operator!=( PhysicalDeviceSubpassShadingFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassShadingFeaturesHUAWEI; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 subpassShading = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceSubpassShadingFeaturesHUAWEI; + using Type = PipelineRasterizationStateRasterizationOrderAMD; }; - struct PhysicalDeviceSubpassShadingPropertiesHUAWEI + struct PipelineRasterizationStateStreamCreateInfoEXT { - using NativeType = VkPhysicalDeviceSubpassShadingPropertiesHUAWEI; + using NativeType = VkPipelineRasterizationStateStreamCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - 
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI( uint32_t maxSubpassShadingWorkgroupSizeAspectRatio_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxSubpassShadingWorkgroupSizeAspectRatio( maxSubpassShadingWorkgroupSizeAspectRatio_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {}, + uint32_t rasterizationStream_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , rasterizationStream{ rasterizationStream_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + PipelineRasterizationStateStreamCreateInfoEXT( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceSubpassShadingPropertiesHUAWEI( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceSubpassShadingPropertiesHUAWEI( *reinterpret_cast( &rhs ) ) + PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationStateStreamCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceSubpassShadingPropertiesHUAWEI & operator=( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineRasterizationStateStreamCreateInfoEXT & operator=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceSubpassShadingPropertiesHUAWEI & operator=( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationStateStreamCreateInfoEXT & operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceSubpassShadingPropertiesHUAWEI &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & setRasterizationStream( uint32_t rasterizationStream_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationStream = rasterizationStream_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineRasterizationStateStreamCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRasterizationStateStreamCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - 
std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, maxSubpassShadingWorkgroupSizeAspectRatio ); + return std::tie( sType, pNext, flags, rasterizationStream ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & ) const = default; + auto operator<=>( PipelineRasterizationStateStreamCreateInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSubpassShadingWorkgroupSizeAspectRatio == rhs.maxSubpassShadingWorkgroupSizeAspectRatio ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rasterizationStream == rhs.rasterizationStream ); # endif } - bool operator!=( PhysicalDeviceSubpassShadingPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubpassShadingPropertiesHUAWEI; - void * pNext = {}; - uint32_t maxSubpassShadingWorkgroupSizeAspectRatio = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags = {}; + uint32_t rasterizationStream = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceSubpassShadingPropertiesHUAWEI; + using Type = PipelineRasterizationStateStreamCreateInfoEXT; }; - struct PhysicalDeviceSurfaceInfo2KHR + struct PipelineRenderingCreateInfo { - using NativeType = VkPhysicalDeviceSurfaceInfo2KHR; + using NativeType = VkPipelineRenderingCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRenderingCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , surface( surface_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo( uint32_t viewMask_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , viewMask{ viewMask_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachmentFormats{ pColorAttachmentFormats_ } + , depthAttachmentFormat{ depthAttachmentFormat_ } + , stencilAttachmentFormat{ stencilAttachmentFormat_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( PhysicalDeviceSurfaceInfo2KHR const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo( PipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceSurfaceInfo2KHR( *reinterpret_cast( &rhs ) ) + PipelineRenderingCreateInfo( VkPipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRenderingCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceSurfaceInfo2KHR & operator=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineRenderingCreateInfo( uint32_t viewMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , viewMask( viewMask_ ) + , colorAttachmentCount( static_cast( colorAttachmentFormats_.size() ) ) + , pColorAttachmentFormats( colorAttachmentFormats_.data() ) + , depthAttachmentFormat( depthAttachmentFormat_ ) + , stencilAttachmentFormat( stencilAttachmentFormat_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceSurfaceInfo2KHR & operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRenderingCreateInfo & operator=( PipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineRenderingCreateInfo & operator=( VkPipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT { - surface = surface_; + viewMask = viewMask_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceSurfaceInfo2KHR const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + colorAttachmentCount = colorAttachmentCount_; + return *this; } - operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & + setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pColorAttachmentFormats = pColorAttachmentFormats_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineRenderingCreateInfo & setColorAttachmentFormats( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachmentFormats_.size() 
); + pColorAttachmentFormats = colorAttachmentFormats_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + { + depthAttachmentFormat = depthAttachmentFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & + setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + { + stencilAttachmentFormat = stencilAttachmentFormat_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineRenderingCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRenderingCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, surface ); + return std::tie( sType, pNext, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceSurfaceInfo2KHR const & ) const = default; + auto operator<=>( PipelineRenderingCreateInfo const & ) const = default; #else - bool operator==( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRenderingCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surface == rhs.surface ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewMask == rhs.viewMask ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && + ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) && ( depthAttachmentFormat == rhs.depthAttachmentFormat ) && + ( stencilAttachmentFormat == rhs.stencilAttachmentFormat ); # endif } - bool operator!=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRenderingCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRenderingCreateInfo; + const void * pNext = {}; + uint32_t viewMask = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {}; + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceSurfaceInfo2KHR; + using Type = PipelineRenderingCreateInfo; }; - struct PhysicalDeviceSynchronization2Features + using PipelineRenderingCreateInfoKHR = PipelineRenderingCreateInfo; + + struct PipelineRepresentativeFragmentTestStateCreateInfoNV { - using NativeType = VkPhysicalDeviceSynchronization2Features; + using NativeType = VkPipelineRepresentativeFragmentTestStateCreateInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
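Usage sketch (editor's addition, not part of the generated diff): PipelineRenderingCreateInfo, promoted above from its KHR alias, is attached to a graphics pipeline when rendering without a render pass; the formats chosen here are assumptions, and the rest of the pipeline state is omitted.

#include <array>
#include <vulkan/vulkan.hpp>

void describeDynamicRenderingPipeline()
{
  std::array<vk::Format, 1> colorFormats{ vk::Format::eB8G8R8A8Unorm };

  vk::PipelineRenderingCreateInfo renderingInfo{};
  renderingInfo.setColorAttachmentFormats( colorFormats )            // sets colorAttachmentCount and the pointer together
               .setDepthAttachmentFormat( vk::Format::eD32Sfloat );

  vk::GraphicsPipelineCreateInfo pipelineInfo{};
  pipelineInfo.setPNext( &renderingInfo );   // renderPass stays VK_NULL_HANDLE with dynamic rendering
  // remaining pipeline state and the pipeline-creation call are omitted from this sketch
}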
StructureType::ePhysicalDeviceSynchronization2Features; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , synchronization2( synchronization2_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , representativeFragmentTestEnable{ representativeFragmentTestEnable_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features( PhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + PipelineRepresentativeFragmentTestStateCreateInfoNV( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceSynchronization2Features( VkPhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceSynchronization2Features( *reinterpret_cast( &rhs ) ) + PipelineRepresentativeFragmentTestStateCreateInfoNV( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRepresentativeFragmentTestStateCreateInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceSynchronization2Features & operator=( PhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineRepresentativeFragmentTestStateCreateInfoNV & + operator=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceSynchronization2Features & operator=( VkPhysicalDeviceSynchronization2Features const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSynchronization2Features & setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV & + setRepresentativeFragmentTestEnable( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT { - synchronization2 = synchronization2_; + representativeFragmentTestEnable = representativeFragmentTestEnable_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceSynchronization2Features const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return 
*reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceSynchronization2Features &() VULKAN_HPP_NOEXCEPT + operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, synchronization2 ); + return std::tie( sType, pNext, representativeFragmentTestEnable ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceSynchronization2Features const & ) const = default; + auto operator<=>( PipelineRepresentativeFragmentTestStateCreateInfoNV const & ) const = default; #else - bool operator==( PhysicalDeviceSynchronization2Features const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( synchronization2 == rhs.synchronization2 ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( representativeFragmentTestEnable == rhs.representativeFragmentTestEnable ); # endif } - bool operator!=( PhysicalDeviceSynchronization2Features const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSynchronization2Features; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 synchronization2 = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceSynchronization2Features; + using Type = PipelineRepresentativeFragmentTestStateCreateInfoNV; }; - using PhysicalDeviceSynchronization2FeaturesKHR = PhysicalDeviceSynchronization2Features; - struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT + struct PipelineRobustnessCreateInfo { - using NativeType = VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT; + using NativeType = VkPipelineRobustnessCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRobustnessCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , texelBufferAlignment( texelBufferAlignment_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineRobustnessCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior storageBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior uniformBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + 
VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior vertexInputs_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior images_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , storageBuffers{ storageBuffers_ } + , uniformBuffers{ uniformBuffers_ } + , vertexInputs{ vertexInputs_ } + , images{ images_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceTexelBufferAlignmentFeaturesEXT( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineRobustnessCreateInfo( PipelineRobustnessCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceTexelBufferAlignmentFeaturesEXT( *reinterpret_cast( &rhs ) ) + PipelineRobustnessCreateInfo( VkPipelineRobustnessCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRobustnessCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineRobustnessCreateInfo & operator=( PipelineRobustnessCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRobustnessCreateInfo & operator=( VkPipelineRobustnessCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT & - setTexelBufferAlignment( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & + setStorageBuffers( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior storageBuffers_ ) VULKAN_HPP_NOEXCEPT { - texelBufferAlignment = texelBufferAlignment_; + storageBuffers = storageBuffers_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & + setUniformBuffers( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior uniformBuffers_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + uniformBuffers = uniformBuffers_; + return *this; } - operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & + setVertexInputs( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior vertexInputs_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + vertexInputs = vertexInputs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & setImages( VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior 
images_ ) VULKAN_HPP_NOEXCEPT + { + images = images_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineRobustnessCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRobustnessCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, texelBufferAlignment ); + return std::tie( sType, pNext, storageBuffers, uniformBuffers, vertexInputs, images ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & ) const = default; + auto operator<=>( PipelineRobustnessCreateInfo const & ) const = default; #else - bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRobustnessCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( texelBufferAlignment == rhs.texelBufferAlignment ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffers == rhs.storageBuffers ) && ( uniformBuffers == rhs.uniformBuffers ) && + ( vertexInputs == rhs.vertexInputs ) && ( images == rhs.images ); # endif } - bool operator!=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRobustnessCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRobustnessCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior storageBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior uniformBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior vertexInputs = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior images = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceTexelBufferAlignmentFeaturesEXT; + using Type = PipelineRobustnessCreateInfo; }; - struct PhysicalDeviceTexelBufferAlignmentProperties + using PipelineRobustnessCreateInfoEXT = PipelineRobustnessCreateInfo; + + struct PipelineSampleLocationsStateCreateInfoEXT { - using NativeType = VkPhysicalDeviceTexelBufferAlignmentProperties; + using NativeType = VkPipelineSampleLocationsStateCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties( VULKAN_HPP_NAMESPACE::DeviceSize 
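Usage sketch (editor's addition, not part of the generated diff): the promoted PipelineRobustnessCreateInfo lets a single pipeline override robustness per resource class; the behaviours selected below are illustrative only, and the pipelineRobustness feature is assumed to be enabled.

#include <vulkan/vulkan.hpp>

void requestPerPipelineRobustness( vk::GraphicsPipelineCreateInfo & pipelineInfo,
                                   vk::PipelineRobustnessCreateInfo & robustness )
{
  robustness.setStorageBuffers( vk::PipelineRobustnessBufferBehavior::eRobustBufferAccess2 )
            .setUniformBuffers( vk::PipelineRobustnessBufferBehavior::eRobustBufferAccess )
            .setVertexInputs( vk::PipelineRobustnessBufferBehavior::eDeviceDefault )
            .setImages( vk::PipelineRobustnessImageBehavior::eRobustImageAccess );
  pipelineInfo.setPNext( &robustness );   // caller keeps robustness alive until pipeline creation
}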
storageTexelBufferOffsetAlignmentBytes_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ ) - , storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ ) - , uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ ) - , uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {}, + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , sampleLocationsEnable{ sampleLocationsEnable_ } + , sampleLocationsInfo{ sampleLocationsInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineSampleLocationsStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + PipelineSampleLocationsStateCreateInfoEXT & operator=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineSampleLocationsStateCreateInfoEXT & operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { + pNext = pNext_; + return *this; } - VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentProperties( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceTexelBufferAlignmentProperties( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceTexelBufferAlignmentProperties( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & + setSampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT { + sampleLocationsEnable = sampleLocationsEnable_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PhysicalDeviceTexelBufferAlignmentProperties & operator=( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceTexelBufferAlignmentProperties & operator=( VkPhysicalDeviceTexelBufferAlignmentProperties const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & + setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + sampleLocationsInfo = sampleLocationsInfo_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceTexelBufferAlignmentProperties const &() const 
VULKAN_HPP_NOEXCEPT + operator VkPipelineSampleLocationsStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceTexelBufferAlignmentProperties &() VULKAN_HPP_NOEXCEPT + operator VkPipelineSampleLocationsStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -66385,707 +107867,802 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - storageTexelBufferOffsetAlignmentBytes, - storageTexelBufferOffsetSingleTexelAlignment, - uniformTexelBufferOffsetAlignmentBytes, - uniformTexelBufferOffsetSingleTexelAlignment ); + return std::tie( sType, pNext, sampleLocationsEnable, sampleLocationsInfo ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceTexelBufferAlignmentProperties const & ) const = default; + auto operator<=>( PipelineSampleLocationsStateCreateInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes ) && - ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment ) && - ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes ) && - ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleLocationsEnable == rhs.sampleLocationsEnable ) && + ( sampleLocationsInfo == rhs.sampleLocationsInfo ); # endif } - bool operator!=( PhysicalDeviceTexelBufferAlignmentProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentProperties; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {}; - VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {}; - VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {}; - VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable = {}; + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceTexelBufferAlignmentProperties; + using Type = PipelineSampleLocationsStateCreateInfoEXT; }; - using PhysicalDeviceTexelBufferAlignmentPropertiesEXT = PhysicalDeviceTexelBufferAlignmentProperties; - struct PhysicalDeviceTextureCompressionASTCHDRFeatures + struct PipelineShaderStageModuleIdentifierCreateInfoEXT { - using NativeType = 
VkPhysicalDeviceTextureCompressionASTCHDRFeatures; + using NativeType = VkPipelineShaderStageModuleIdentifierCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeatures( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , textureCompressionASTC_HDR( textureCompressionASTC_HDR_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineShaderStageModuleIdentifierCreateInfoEXT( uint32_t identifierSize_ = {}, + const uint8_t * pIdentifier_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , identifierSize{ identifierSize_ } + , pIdentifier{ pIdentifier_ } { } VULKAN_HPP_CONSTEXPR - PhysicalDeviceTextureCompressionASTCHDRFeatures( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineShaderStageModuleIdentifierCreateInfoEXT( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceTextureCompressionASTCHDRFeatures( VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceTextureCompressionASTCHDRFeatures( *reinterpret_cast( &rhs ) ) + PipelineShaderStageModuleIdentifierCreateInfoEXT( VkPipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineShaderStageModuleIdentifierCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceTextureCompressionASTCHDRFeatures & operator=( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineShaderStageModuleIdentifierCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & identifier_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), identifierSize( static_cast( identifier_.size() ) ), pIdentifier( identifier_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceTextureCompressionASTCHDRFeatures & operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineShaderStageModuleIdentifierCreateInfoEXT & operator=( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineShaderStageModuleIdentifierCreateInfoEXT & operator=( VkPipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeatures & - 
setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setIdentifierSize( uint32_t identifierSize_ ) VULKAN_HPP_NOEXCEPT { - textureCompressionASTC_HDR = textureCompressionASTC_HDR_; + identifierSize = identifierSize_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setPIdentifier( const uint8_t * pIdentifier_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pIdentifier = pIdentifier_; + return *this; } - operator VkPhysicalDeviceTextureCompressionASTCHDRFeatures &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineShaderStageModuleIdentifierCreateInfoEXT & + setIdentifier( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & identifier_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + identifierSize = static_cast( identifier_.size() ); + pIdentifier = identifier_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineShaderStageModuleIdentifierCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineShaderStageModuleIdentifierCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, textureCompressionASTC_HDR ); + return std::tie( sType, pNext, identifierSize, pIdentifier ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceTextureCompressionASTCHDRFeatures const & ) const = default; + auto operator<=>( PipelineShaderStageModuleIdentifierCreateInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( identifierSize == rhs.identifierSize ) && ( pIdentifier == rhs.pIdentifier ); # endif } - bool operator!=( PhysicalDeviceTextureCompressionASTCHDRFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT; + const void * pNext = {}; + uint32_t identifierSize = {}; + const uint8_t * pIdentifier = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceTextureCompressionASTCHDRFeatures; + using Type = PipelineShaderStageModuleIdentifierCreateInfoEXT; }; - using 
PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT = PhysicalDeviceTextureCompressionASTCHDRFeatures; - struct PhysicalDeviceTilePropertiesFeaturesQCOM +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PipelineShaderStageNodeCreateInfoAMDX { - using NativeType = VkPhysicalDeviceTilePropertiesFeaturesQCOM; + using NativeType = VkPipelineShaderStageNodeCreateInfoAMDX; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageNodeCreateInfoAMDX; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceTilePropertiesFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 tileProperties_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , tileProperties( tileProperties_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineShaderStageNodeCreateInfoAMDX( const char * pName_ = {}, uint32_t index_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pName{ pName_ } + , index{ index_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceTilePropertiesFeaturesQCOM( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineShaderStageNodeCreateInfoAMDX( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceTilePropertiesFeaturesQCOM( VkPhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceTilePropertiesFeaturesQCOM( *reinterpret_cast( &rhs ) ) + PipelineShaderStageNodeCreateInfoAMDX( VkPipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineShaderStageNodeCreateInfoAMDX( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceTilePropertiesFeaturesQCOM & operator=( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineShaderStageNodeCreateInfoAMDX & operator=( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceTilePropertiesFeaturesQCOM & operator=( VkPhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineShaderStageNodeCreateInfoAMDX & operator=( VkPipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTilePropertiesFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageNodeCreateInfoAMDX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTilePropertiesFeaturesQCOM & setTileProperties( VULKAN_HPP_NAMESPACE::Bool32 tileProperties_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageNodeCreateInfoAMDX & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT { - tileProperties = tileProperties_; + pName = pName_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceTilePropertiesFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
PipelineShaderStageNodeCreateInfoAMDX & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + index = index_; + return *this; } +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceTilePropertiesFeaturesQCOM &() VULKAN_HPP_NOEXCEPT + operator VkPipelineShaderStageNodeCreateInfoAMDX const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkPipelineShaderStageNodeCreateInfoAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, tileProperties ); + return std::tie( sType, pNext, pName, index ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceTilePropertiesFeaturesQCOM const & ) const = default; -#else - bool operator==( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tileProperties == rhs.tileProperties ); + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( pName != rhs.pName ) + if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = index <=> rhs.index; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } # endif + + bool operator==( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ) && ( index == rhs.index ); } - bool operator!=( PhysicalDeviceTilePropertiesFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 tileProperties = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageNodeCreateInfoAMDX; + const void * pNext = {}; + const char * pName = {}; + uint32_t index = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceTilePropertiesFeaturesQCOM; + using Type = PipelineShaderStageNodeCreateInfoAMDX; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - struct PhysicalDeviceTimelineSemaphoreFeatures + struct PipelineShaderStageRequiredSubgroupSizeCreateInfo { - using NativeType = VkPhysicalDeviceTimelineSemaphoreFeatures; + using NativeType = VkPipelineShaderStageRequiredSubgroupSizeCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( 
VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , timelineSemaphore( timelineSemaphore_ ) - { - } - - VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo; - PhysicalDeviceTimelineSemaphoreFeatures( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceTimelineSemaphoreFeatures( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfo( uint32_t requiredSubgroupSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , requiredSubgroupSize{ requiredSubgroupSize_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceTimelineSemaphoreFeatures & operator=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + PipelineShaderStageRequiredSubgroupSizeCreateInfo( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceTimelineSemaphoreFeatures & operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineShaderStageRequiredSubgroupSizeCreateInfo( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineShaderStageRequiredSubgroupSizeCreateInfo( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + PipelineShaderStageRequiredSubgroupSizeCreateInfo & + operator=( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures & - setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT + PipelineShaderStageRequiredSubgroupSizeCreateInfo & operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - timelineSemaphore = timelineSemaphore_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceTimelineSemaphoreFeatures const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceTimelineSemaphoreFeatures &() VULKAN_HPP_NOEXCEPT + operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, timelineSemaphore ); + return std::tie( sType, pNext, requiredSubgroupSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceTimelineSemaphoreFeatures const & ) const = default; + auto operator<=>( 
PipelineShaderStageRequiredSubgroupSizeCreateInfo const & ) const = default; #else - bool operator==( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timelineSemaphore == rhs.timelineSemaphore ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( requiredSubgroupSize == rhs.requiredSubgroupSize ); # endif } - bool operator!=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo; + void * pNext = {}; + uint32_t requiredSubgroupSize = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceTimelineSemaphoreFeatures; + using Type = PipelineShaderStageRequiredSubgroupSizeCreateInfo; }; - using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures; - struct PhysicalDeviceTimelineSemaphoreProperties + using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo; + using ShaderRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo; + + struct PipelineTessellationDomainOriginStateCreateInfo { - using NativeType = VkPhysicalDeviceTimelineSemaphoreProperties; + using NativeType = VkPipelineTessellationDomainOriginStateCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( uint64_t maxTimelineSemaphoreValueDifference_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( + VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , domainOrigin{ domainOrigin_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + PipelineTessellationDomainOriginStateCreateInfo( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceTimelineSemaphoreProperties( *reinterpret_cast( &rhs ) ) + PipelineTessellationDomainOriginStateCreateInfo( 
VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineTessellationDomainOriginStateCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceTimelineSemaphoreProperties & operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineTessellationDomainOriginStateCreateInfo & operator=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceTimelineSemaphoreProperties & operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineTessellationDomainOriginStateCreateInfo & operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceTimelineSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPhysicalDeviceTimelineSemaphoreProperties &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo & + setDomainOrigin( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + domainOrigin = domainOrigin_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineTessellationDomainOriginStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineTessellationDomainOriginStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, maxTimelineSemaphoreValueDifference ); + return std::tie( sType, pNext, domainOrigin ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceTimelineSemaphoreProperties const & ) const = default; + auto operator<=>( PipelineTessellationDomainOriginStateCreateInfo const & ) const = default; #else - bool operator==( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( domainOrigin == rhs.domainOrigin ); # endif } - bool operator!=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties; - void * pNext = {}; - uint64_t maxTimelineSemaphoreValueDifference = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
+    const void *                                   pNext        = {};
+    VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft;
   };

   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreProperties>
+  struct CppType<StructureType, StructureType::ePipelineTessellationDomainOriginStateCreateInfo>
   {
-    using Type = PhysicalDeviceTimelineSemaphoreProperties;
+    using Type = PipelineTessellationDomainOriginStateCreateInfo;
   };

-  using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties;
-  struct PhysicalDeviceToolProperties
+  using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo;
+
+  struct VertexInputBindingDivisorDescription
   {
-    using NativeType = VkPhysicalDeviceToolProperties;
+    using NativeType = VkVertexInputBindingDivisorDescription;

-    static const bool                    allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceToolProperties;
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescription( uint32_t binding_ = {}, uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT
+      : binding{ binding_ }
+      , divisor{ divisor_ }
+    {
+    }

-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties( std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & name_ = {},
-                                                          std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & version_ = {},
-                                                          VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes_ = {},
-                                                          std::array<char, VK_MAX_DESCRIPTION_SIZE> const & description_ = {},
-                                                          std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const & layer_ = {},
-                                                          void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
-      : pNext( pNext_ )
-      , name( name_ )
-      , version( version_ )
-      , purposes( purposes_ )
-      , description( description_ )
-      , layer( layer_ )
+    VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescription( VertexInputBindingDivisorDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VertexInputBindingDivisorDescription( VkVertexInputBindingDivisorDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VertexInputBindingDivisorDescription( *reinterpret_cast<VertexInputBindingDivisorDescription const *>( &rhs ) )
     {
     }

-    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolProperties( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VertexInputBindingDivisorDescription & operator=( VertexInputBindingDivisorDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

-    PhysicalDeviceToolProperties( VkPhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT
-      : PhysicalDeviceToolProperties( *reinterpret_cast<PhysicalDeviceToolProperties const *>( &rhs ) )
+    VertexInputBindingDivisorDescription & operator=( VkVertexInputBindingDivisorDescription const & rhs ) VULKAN_HPP_NOEXCEPT
     {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription const *>( &rhs );
+      return *this;
     }
-#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

-    PhysicalDeviceToolProperties & operator=( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      binding = binding_;
+      return *this;
+    }

-    PhysicalDeviceToolProperties & operator=( VkPhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescription & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties const *>( &rhs );
+      divisor = divisor_;
       return *this;
     }
+#endif /*VULKAN_HPP_NO_SETTERS*/

-    operator VkPhysicalDeviceToolProperties const &() const VULKAN_HPP_NOEXCEPT
+    operator
VkVertexInputBindingDivisorDescription const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceToolProperties &() VULKAN_HPP_NOEXCEPT + operator VkVertexInputBindingDivisorDescription &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, - VULKAN_HPP_NAMESPACE::ToolPurposeFlags const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, name, version, purposes, description, layer ); + return std::tie( binding, divisor ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceToolProperties const & ) const = default; + auto operator<=>( VertexInputBindingDivisorDescription const & ) const = default; #else - bool operator==( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VertexInputBindingDivisorDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( name == rhs.name ) && ( version == rhs.version ) && ( purposes == rhs.purposes ) && - ( description == rhs.description ) && ( layer == rhs.layer ); + return ( binding == rhs.binding ) && ( divisor == rhs.divisor ); # endif } - bool operator!=( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VertexInputBindingDivisorDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolProperties; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D version = {}; - VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D layer = {}; + uint32_t binding = {}; + uint32_t divisor = {}; }; - template <> - struct CppType - { - using Type = PhysicalDeviceToolProperties; - }; - using PhysicalDeviceToolPropertiesEXT = PhysicalDeviceToolProperties; + using VertexInputBindingDivisorDescriptionEXT = VertexInputBindingDivisorDescription; + using VertexInputBindingDivisorDescriptionKHR = VertexInputBindingDivisorDescription; - struct PhysicalDeviceTransformFeedbackFeaturesEXT + struct PipelineVertexInputDivisorStateCreateInfo { - using NativeType = VkPhysicalDeviceTransformFeedbackFeaturesEXT; + using NativeType = VkPipelineVertexInputDivisorStateCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputDivisorStateCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , transformFeedback( transformFeedback_ ) - , geometryStreams( geometryStreams_ ) +#if 
!defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineVertexInputDivisorStateCreateInfo( uint32_t vertexBindingDivisorCount_ = {}, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription * pVertexBindingDivisors_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , vertexBindingDivisorCount{ vertexBindingDivisorCount_ } + , pVertexBindingDivisors{ pVertexBindingDivisors_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfo( PipelineVertexInputDivisorStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceTransformFeedbackFeaturesEXT( *reinterpret_cast( &rhs ) ) + PipelineVertexInputDivisorStateCreateInfo( VkPipelineVertexInputDivisorStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineVertexInputDivisorStateCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputDivisorStateCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDivisors_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , vertexBindingDivisorCount( static_cast( vertexBindingDivisors_.size() ) ) + , pVertexBindingDivisors( vertexBindingDivisors_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputDivisorStateCreateInfo & operator=( PipelineVertexInputDivisorStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineVertexInputDivisorStateCreateInfo & operator=( VkPipelineVertexInputDivisorStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & - setTransformFeedback( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfo & setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ ) VULKAN_HPP_NOEXCEPT { - transformFeedback = transformFeedback_; + vertexBindingDivisorCount = vertexBindingDivisorCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & setGeometryStreams( VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfo & + setPVertexBindingDivisors( const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription * pVertexBindingDivisors_ 
) VULKAN_HPP_NOEXCEPT { - geometryStreams = geometryStreams_; + pVertexBindingDivisors = pVertexBindingDivisors_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineVertexInputDivisorStateCreateInfo & setVertexBindingDivisors( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDivisors_ ) + VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + vertexBindingDivisorCount = static_cast( vertexBindingDivisors_.size() ); + pVertexBindingDivisors = vertexBindingDivisors_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPipelineVertexInputDivisorStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkPipelineVertexInputDivisorStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, transformFeedback, geometryStreams ); + return std::tie( sType, pNext, vertexBindingDivisorCount, pVertexBindingDivisors ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceTransformFeedbackFeaturesEXT const & ) const = default; + auto operator<=>( PipelineVertexInputDivisorStateCreateInfo const & ) const = default; #else - bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineVertexInputDivisorStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transformFeedback == rhs.transformFeedback ) && ( geometryStreams == rhs.geometryStreams ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount ) && + ( pVertexBindingDivisors == rhs.pVertexBindingDivisors ); # endif } - bool operator!=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineVertexInputDivisorStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 transformFeedback = {}; - VULKAN_HPP_NAMESPACE::Bool32 geometryStreams = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfo; + const void * pNext = {}; + uint32_t vertexBindingDivisorCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription * pVertexBindingDivisors = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceTransformFeedbackFeaturesEXT; + using Type = PipelineVertexInputDivisorStateCreateInfo; }; - struct PhysicalDeviceTransformFeedbackPropertiesEXT - { - using NativeType = VkPhysicalDeviceTransformFeedbackPropertiesEXT; + using PipelineVertexInputDivisorStateCreateInfoEXT = PipelineVertexInputDivisorStateCreateInfo; + using PipelineVertexInputDivisorStateCreateInfoKHR = 
PipelineVertexInputDivisorStateCreateInfo; + + struct PipelineViewportCoarseSampleOrderStateCreateInfoNV + { + using NativeType = VkPipelineViewportCoarseSampleOrderStateCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault, + uint32_t customSampleOrderCount_ = {}, + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , sampleOrderType{ sampleOrderType_ } + , customSampleOrderCount{ customSampleOrderCount_ } + , pCustomSampleOrders{ pCustomSampleOrders_ } + { + } + + VULKAN_HPP_CONSTEXPR + PipelineViewportCoarseSampleOrderStateCreateInfoNV( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportCoarseSampleOrderStateCreateInfoNV( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportCoarseSampleOrderStateCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportCoarseSampleOrderStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & customSampleOrders_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , sampleOrderType( sampleOrderType_ ) + , customSampleOrderCount( static_cast( customSampleOrders_.size() ) ) + , pCustomSampleOrders( customSampleOrders_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineViewportCoarseSampleOrderStateCreateInfoNV & + operator=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; + PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( uint32_t maxTransformFeedbackStreams_ = {}, - uint32_t maxTransformFeedbackBuffers_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {}, - uint32_t maxTransformFeedbackStreamDataSize_ = {}, - uint32_t maxTransformFeedbackBufferDataSize_ = {}, - uint32_t maxTransformFeedbackBufferDataStride_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxTransformFeedbackStreams( maxTransformFeedbackStreams_ ) - , maxTransformFeedbackBuffers( maxTransformFeedbackBuffers_ ) - , maxTransformFeedbackBufferSize( maxTransformFeedbackBufferSize_ ) - , 
maxTransformFeedbackStreamDataSize( maxTransformFeedbackStreamDataSize_ ) - , maxTransformFeedbackBufferDataSize( maxTransformFeedbackBufferDataSize_ ) - , maxTransformFeedbackBufferDataStride( maxTransformFeedbackBufferDataStride_ ) - , transformFeedbackQueries( transformFeedbackQueries_ ) - , transformFeedbackStreamsLinesTriangles( transformFeedbackStreamsLinesTriangles_ ) - , transformFeedbackRasterizationStreamSelect( transformFeedbackRasterizationStreamSelect_ ) - , transformFeedbackDraw( transformFeedbackDraw_ ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { + pNext = pNext_; + return *this; } - VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & + setSampleOrderType( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ ) VULKAN_HPP_NOEXCEPT + { + sampleOrderType = sampleOrderType_; + return *this; + } - PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceTransformFeedbackPropertiesEXT( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & + setCustomSampleOrderCount( uint32_t customSampleOrderCount_ ) VULKAN_HPP_NOEXCEPT { + customSampleOrderCount = customSampleOrderCount_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & + setPCustomSampleOrders( const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ ) VULKAN_HPP_NOEXCEPT + { + pCustomSampleOrders = pCustomSampleOrders_; + return *this; + } - PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrders( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & customSampleOrders_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + customSampleOrderCount = static_cast( customSampleOrders_.size() ); + pCustomSampleOrders = customSampleOrders_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -67093,833 +108670,842 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - maxTransformFeedbackStreams, - 
maxTransformFeedbackBuffers, - maxTransformFeedbackBufferSize, - maxTransformFeedbackStreamDataSize, - maxTransformFeedbackBufferDataSize, - maxTransformFeedbackBufferDataStride, - transformFeedbackQueries, - transformFeedbackStreamsLinesTriangles, - transformFeedbackRasterizationStreamSelect, - transformFeedbackDraw ); + return std::tie( sType, pNext, sampleOrderType, customSampleOrderCount, pCustomSampleOrders ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceTransformFeedbackPropertiesEXT const & ) const = default; + auto operator<=>( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & ) const = default; #else - bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams ) && - ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers ) && ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize ) && - ( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize ) && - ( maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize ) && - ( maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride ) && - ( transformFeedbackQueries == rhs.transformFeedbackQueries ) && - ( transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles ) && - ( transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect ) && - ( transformFeedbackDraw == rhs.transformFeedbackDraw ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleOrderType == rhs.sampleOrderType ) && + ( customSampleOrderCount == rhs.customSampleOrderCount ) && ( pCustomSampleOrders == rhs.pCustomSampleOrders ); # endif } - bool operator!=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; - void * pNext = {}; - uint32_t maxTransformFeedbackStreams = {}; - uint32_t maxTransformFeedbackBuffers = {}; - VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize = {}; - uint32_t maxTransformFeedbackStreamDataSize = {}; - uint32_t maxTransformFeedbackBufferDataSize = {}; - uint32_t maxTransformFeedbackBufferDataStride = {}; - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries = {}; - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles = {}; - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect = {}; - VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault; + uint32_t customSampleOrderCount = {}; + const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceTransformFeedbackPropertiesEXT; 
+ using Type = PipelineViewportCoarseSampleOrderStateCreateInfoNV; }; - struct PhysicalDeviceUniformBufferStandardLayoutFeatures + struct PipelineViewportDepthClampControlCreateInfoEXT { - using NativeType = VkPhysicalDeviceUniformBufferStandardLayoutFeatures; + using NativeType = VkPipelineViewportDepthClampControlCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportDepthClampControlCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , uniformBufferStandardLayout( uniformBufferStandardLayout_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportDepthClampControlCreateInfoEXT( + VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode_ = VULKAN_HPP_NAMESPACE::DepthClampModeEXT::eViewportRange, + const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT * pDepthClampRange_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , depthClampMode{ depthClampMode_ } + , pDepthClampRange{ pDepthClampRange_ } { } VULKAN_HPP_CONSTEXPR - PhysicalDeviceUniformBufferStandardLayoutFeatures( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineViewportDepthClampControlCreateInfoEXT( PipelineViewportDepthClampControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceUniformBufferStandardLayoutFeatures( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceUniformBufferStandardLayoutFeatures( *reinterpret_cast( &rhs ) ) + PipelineViewportDepthClampControlCreateInfoEXT( VkPipelineViewportDepthClampControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportDepthClampControlCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceUniformBufferStandardLayoutFeatures & - operator=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineViewportDepthClampControlCreateInfoEXT & operator=( PipelineViewportDepthClampControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineViewportDepthClampControlCreateInfoEXT & operator=( VkPipelineViewportDepthClampControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClampControlCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & - setUniformBufferStandardLayout( 
VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClampControlCreateInfoEXT & + setDepthClampMode( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode_ ) VULKAN_HPP_NOEXCEPT { - uniformBufferStandardLayout = uniformBufferStandardLayout_; + depthClampMode = depthClampMode_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClampControlCreateInfoEXT & + setPDepthClampRange( const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT * pDepthClampRange_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pDepthClampRange = pDepthClampRange_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures &() VULKAN_HPP_NOEXCEPT + operator VkPipelineViewportDepthClampControlCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportDepthClampControlCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, uniformBufferStandardLayout ); + return std::tie( sType, pNext, depthClampMode, pDepthClampRange ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceUniformBufferStandardLayoutFeatures const & ) const = default; + auto operator<=>( PipelineViewportDepthClampControlCreateInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineViewportDepthClampControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClampMode == rhs.depthClampMode ) && ( pDepthClampRange == rhs.pDepthClampRange ); # endif } - bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineViewportDepthClampControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportDepthClampControlCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode = VULKAN_HPP_NAMESPACE::DepthClampModeEXT::eViewportRange; + const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT * pDepthClampRange = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceUniformBufferStandardLayoutFeatures; + using Type = PipelineViewportDepthClampControlCreateInfoEXT; }; - using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures; - struct PhysicalDeviceVariablePointersFeatures + struct PipelineViewportDepthClipControlCreateInfoEXT { - using 
NativeType = VkPhysicalDeviceVariablePointersFeatures; + using NativeType = VkPipelineViewportDepthClipControlCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVariablePointersFeatures; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportDepthClipControlCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , variablePointersStorageBuffer( variablePointersStorageBuffer_ ) - , variablePointers( variablePointers_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportDepthClipControlCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , negativeOneToOne{ negativeOneToOne_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + PipelineViewportDepthClipControlCreateInfoEXT( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceVariablePointersFeatures( *reinterpret_cast( &rhs ) ) + PipelineViewportDepthClipControlCreateInfoEXT( VkPipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportDepthClipControlCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceVariablePointersFeatures & operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineViewportDepthClipControlCreateInfoEXT & operator=( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceVariablePointersFeatures & operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineViewportDepthClipControlCreateInfoEXT & operator=( VkPipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClipControlCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & - setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT - { - variablePointersStorageBuffer = variablePointersStorageBuffer_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClipControlCreateInfoEXT & + 
setNegativeOneToOne( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne_ ) VULKAN_HPP_NOEXCEPT { - variablePointers = variablePointers_; + negativeOneToOne = negativeOneToOne_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceVariablePointersFeatures const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineViewportDepthClipControlCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceVariablePointersFeatures &() VULKAN_HPP_NOEXCEPT + operator VkPipelineViewportDepthClipControlCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, variablePointersStorageBuffer, variablePointers ); + return std::tie( sType, pNext, negativeOneToOne ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceVariablePointersFeatures const & ) const = default; + auto operator<=>( PipelineViewportDepthClipControlCreateInfoEXT const & ) const = default; #else - bool operator==( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) && - ( variablePointers == rhs.variablePointers ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( negativeOneToOne == rhs.negativeOneToOne ); # endif } - bool operator!=( PhysicalDeviceVariablePointersFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {}; - VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportDepthClipControlCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceVariablePointersFeatures; + using Type = PipelineViewportDepthClipControlCreateInfoEXT; }; - using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures; - using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures; - using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures; - struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT + struct PipelineViewportExclusiveScissorStateCreateInfoNV { - using NativeType = VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT; + using NativeType = VkPipelineViewportExclusiveScissorStateCreateInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ ) - , vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( uint32_t exclusiveScissorCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , exclusiveScissorCount{ exclusiveScissorCount_ } + , pExclusiveScissors{ pExclusiveScissors_ } { } VULKAN_HPP_CONSTEXPR - PhysicalDeviceVertexAttributeDivisorFeaturesEXT( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineViewportExclusiveScissorStateCreateInfoNV( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportExclusiveScissorStateCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } - PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceVertexAttributeDivisorFeaturesEXT( *reinterpret_cast( &rhs ) ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportExclusiveScissorStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & exclusiveScissors_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), exclusiveScissorCount( static_cast( exclusiveScissors_.size() ) ), pExclusiveScissors( exclusiveScissors_.data() ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineViewportExclusiveScissorStateCreateInfoNV & + operator=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT & - setVertexAttributeInstanceRateDivisor( VULKAN_HPP_NAMESPACE::Bool32 
vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissorCount( uint32_t exclusiveScissorCount_ ) VULKAN_HPP_NOEXCEPT { - vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_; + exclusiveScissorCount = exclusiveScissorCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT & - setVertexAttributeInstanceRateZeroDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & + setPExclusiveScissors( const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ ) VULKAN_HPP_NOEXCEPT { - vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_; + pExclusiveScissors = pExclusiveScissors_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportExclusiveScissorStateCreateInfoNV & + setExclusiveScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & exclusiveScissors_ ) VULKAN_HPP_NOEXCEPT + { + exclusiveScissorCount = static_cast( exclusiveScissors_.size() ); + pExclusiveScissors = exclusiveScissors_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPipelineViewportExclusiveScissorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, vertexAttributeInstanceRateDivisor, vertexAttributeInstanceRateZeroDivisor ); + return std::tie( sType, pNext, exclusiveScissorCount, pExclusiveScissors ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & ) const = default; + auto operator<=>( PipelineViewportExclusiveScissorStateCreateInfoNV const & ) const = default; #else - bool operator==( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor ) && - ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exclusiveScissorCount == rhs.exclusiveScissorCount ) && + ( pExclusiveScissors == rhs.pExclusiveScissors ); # endif } - bool operator!=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {}; - VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV; + const void * pNext = {}; + uint32_t exclusiveScissorCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceVertexAttributeDivisorFeaturesEXT; + using Type = PipelineViewportExclusiveScissorStateCreateInfoNV; }; - struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT + struct ShadingRatePaletteNV { - using NativeType = VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT; + using NativeType = VkShadingRatePaletteNV; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( uint32_t shadingRatePaletteEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ = {} ) VULKAN_HPP_NOEXCEPT + : shadingRatePaletteEntryCount{ shadingRatePaletteEntryCount_ } + , pShadingRatePaletteEntries{ pShadingRatePaletteEntries_ } + { + } -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxVertexAttribDivisor( maxVertexAttribDivisor_ ) + VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ShadingRatePaletteNV( *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceVertexAttributeDivisorPropertiesEXT( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShadingRatePaletteNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shadingRatePaletteEntries_ ) + : shadingRatePaletteEntryCount( static_cast( shadingRatePaletteEntries_.size() ) ) + , pShadingRatePaletteEntries( shadingRatePaletteEntries_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceVertexAttributeDivisorPropertiesEXT( *reinterpret_cast( &rhs ) ) + ShadingRatePaletteNV & operator=( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ShadingRatePaletteNV & operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceVertexAttributeDivisorPropertiesEXT & - operator=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT + { + shadingRatePaletteEntryCount = 
shadingRatePaletteEntryCount_; + return *this; + } - PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & + setPShadingRatePaletteEntries( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + pShadingRatePaletteEntries = pShadingRatePaletteEntries_; return *this; } - operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShadingRatePaletteNV & setShadingRatePaletteEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shadingRatePaletteEntries_ ) + VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + shadingRatePaletteEntryCount = static_cast( shadingRatePaletteEntries_.size() ); + pShadingRatePaletteEntries = shadingRatePaletteEntries_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &() VULKAN_HPP_NOEXCEPT + operator VkShadingRatePaletteNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, maxVertexAttribDivisor ); + return std::tie( shadingRatePaletteEntryCount, pShadingRatePaletteEntries ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & ) const = default; + auto operator<=>( ShadingRatePaletteNV const & ) const = default; #else - bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor ); + return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount ) && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries ); # endif } - bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT; - void * pNext = {}; - uint32_t maxVertexAttribDivisor = {}; - }; - - template <> - struct CppType - { - using Type = PhysicalDeviceVertexAttributeDivisorPropertiesEXT; + uint32_t shadingRatePaletteEntryCount = {}; + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries = {}; }; - struct PhysicalDeviceVertexInputDynamicStateFeaturesEXT + struct PipelineViewportShadingRateImageStateCreateInfoNV { - using NativeType = VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT; + using NativeType = VkPipelineViewportShadingRateImageStateCreateInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , vertexInputDynamicState( vertexInputDynamicState_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {}, + uint32_t viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shadingRateImageEnable{ shadingRateImageEnable_ } + , viewportCount{ viewportCount_ } + , pShadingRatePalettes{ pShadingRatePalettes_ } { } VULKAN_HPP_CONSTEXPR - PhysicalDeviceVertexInputDynamicStateFeaturesEXT( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineViewportShadingRateImageStateCreateInfoNV( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceVertexInputDynamicStateFeaturesEXT( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceVertexInputDynamicStateFeaturesEXT( *reinterpret_cast( &rhs ) ) + PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportShadingRateImageStateCreateInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceVertexInputDynamicStateFeaturesEXT & operator=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportShadingRateImageStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shadingRatePalettes_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , shadingRateImageEnable( shadingRateImageEnable_ ) + , viewportCount( static_cast( shadingRatePalettes_.size() ) ) + , pShadingRatePalettes( shadingRatePalettes_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceVertexInputDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineViewportShadingRateImageStateCreateInfoNV & + operator=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PipelineViewportShadingRateImageStateCreateInfoNV & operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return 
*this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexInputDynamicStateFeaturesEXT & - setVertexInputDynamicState( VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & + setShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT { - vertexInputDynamicState = vertexInputDynamicState_; + shadingRateImageEnable = shadingRateImageEnable_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + viewportCount = viewportCount_; + return *this; } - operator VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & + setPShadingRatePalettes( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pShadingRatePalettes = pShadingRatePalettes_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRatePalettes( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT + { + viewportCount = static_cast( shadingRatePalettes_.size() ); + pShadingRatePalettes = shadingRatePalettes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPipelineViewportShadingRateImageStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportShadingRateImageStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, vertexInputDynamicState ); + return std::tie( sType, pNext, shadingRateImageEnable, viewportCount, pShadingRatePalettes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & ) const = default; + auto operator<=>( PipelineViewportShadingRateImageStateCreateInfoNV const & ) const = default; #else - bool operator==( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexInputDynamicState == rhs.vertexInputDynamicState ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateImageEnable == rhs.shadingRateImageEnable ) && + ( viewportCount == rhs.viewportCount ) && ( pShadingRatePalettes == rhs.pShadingRatePalettes ); # endif } - bool operator!=( PhysicalDeviceVertexInputDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable = {}; + uint32_t viewportCount = {}; + const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceVertexInputDynamicStateFeaturesEXT; + using Type = PipelineViewportShadingRateImageStateCreateInfoNV; }; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct PhysicalDeviceVideoFormatInfoKHR + struct ViewportSwizzleNV { - using NativeType = VkPhysicalDeviceVideoFormatInfoKHR; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoFormatInfoKHR; + using NativeType = VkViewportSwizzleNV; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageUsage( imageUsage_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX ) VULKAN_HPP_NOEXCEPT + : x{ x_ } + , y{ y_ } + , z{ z_ } + , w{ w_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT : ViewportSwizzleNV( *reinterpret_cast( &rhs ) ) {} + + ViewportSwizzleNV & operator=( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceVideoFormatInfoKHR( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceVideoFormatInfoKHR( *reinterpret_cast( &rhs ) ) + ViewportSwizzleNV & operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceVideoFormatInfoKHR & operator=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setX( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ ) VULKAN_HPP_NOEXCEPT + { + x = x_; + return *this; + } - PhysicalDeviceVideoFormatInfoKHR & operator=( VkPhysicalDeviceVideoFormatInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setY( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + y = y_; return *this; } -# if !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoFormatInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setZ( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + z = z_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoFormatInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setW( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ ) VULKAN_HPP_NOEXCEPT { - imageUsage = imageUsage_; + w = w_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceVideoFormatInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkViewportSwizzleNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceVideoFormatInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkViewportSwizzleNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, imageUsage ); + return std::tie( x, y, z, w ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceVideoFormatInfoKHR const & ) const = default; -# else - bool operator==( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ViewportSwizzleNV const & ) const = default; +#else + bool operator==( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageUsage == rhs.imageUsage ); -# endif +# else + return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z ) && ( w == rhs.w ); +# endif } - bool operator!=( PhysicalDeviceVideoFormatInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoFormatInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {}; - }; - - template <> - struct CppType - { - using Type = PhysicalDeviceVideoFormatInfoKHR; + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; + VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - struct PhysicalDeviceVulkan11Features + struct PipelineViewportSwizzleStateCreateInfoNV { - using NativeType = VkPhysicalDeviceVulkan11Features; + using NativeType = VkPipelineViewportSwizzleStateCreateInfoNV; static const bool allowDuplicate = false; - static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Features; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , storageBuffer16BitAccess( storageBuffer16BitAccess_ ) - , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ) - , storagePushConstant16( storagePushConstant16_ ) - , storageInputOutput16( storageInputOutput16_ ) - , multiview( multiview_ ) - , multiviewGeometryShader( multiviewGeometryShader_ ) - , multiviewTessellationShader( multiviewTessellationShader_ ) - , variablePointersStorageBuffer( variablePointersStorageBuffer_ ) - , variablePointers( variablePointers_ ) - , protectedMemory( protectedMemory_ ) - , samplerYcbcrConversion( samplerYcbcrConversion_ ) - , shaderDrawParameters( shaderDrawParameters_ ) - { - } - - VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceVulkan11Features( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceVulkan11Features( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PhysicalDeviceVulkan11Features & operator=( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceVulkan11Features & operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & - setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT - { - storageBuffer16BitAccess = storageBuffer16BitAccess_; - return *this; - } + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV; - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & - setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {}, + uint32_t viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , viewportCount{ viewportCount_ } + , 
pViewportSwizzles{ pViewportSwizzles_ } { - uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT - { - storagePushConstant16 = storagePushConstant16_; - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportSwizzleStateCreateInfoNV( *reinterpret_cast( &rhs ) ) { - storageInputOutput16 = storageInputOutput16_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportSwizzleStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewportSwizzles_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), flags( flags_ ), viewportCount( static_cast( viewportSwizzles_.size() ) ), pViewportSwizzles( viewportSwizzles_.data() ) { - multiview = multiview_; - return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & - setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT - { - multiviewGeometryShader = multiviewGeometryShader_; - return *this; - } + PipelineViewportSwizzleStateCreateInfoNV & operator=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & - setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportSwizzleStateCreateInfoNV & operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - multiviewTessellationShader = multiviewTessellationShader_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & - setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - variablePointersStorageBuffer = variablePointersStorageBuffer_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & + setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT { - variablePointers = variablePointers_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setViewportCount( 
uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT { - protectedMemory = protectedMemory_; + viewportCount = viewportCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & - setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & + setPViewportSwizzles( const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ ) VULKAN_HPP_NOEXCEPT { - samplerYcbcrConversion = samplerYcbcrConversion_; + pViewportSwizzles = pViewportSwizzles_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportSwizzleStateCreateInfoNV & setViewportSwizzles( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewportSwizzles_ ) VULKAN_HPP_NOEXCEPT { - shaderDrawParameters = shaderDrawParameters_; + viewportCount = static_cast( viewportSwizzles_.size() ); + pViewportSwizzles = viewportSwizzles_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceVulkan11Features const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineViewportSwizzleStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceVulkan11Features &() VULKAN_HPP_NOEXCEPT + operator VkPipelineViewportSwizzleStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -67927,2593 +109513,1945 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + const void * const &, + VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - storageBuffer16BitAccess, - uniformAndStorageBuffer16BitAccess, - storagePushConstant16, - storageInputOutput16, - multiview, - multiviewGeometryShader, - multiviewTessellationShader, - variablePointersStorageBuffer, - variablePointers, - protectedMemory, - samplerYcbcrConversion, - shaderDrawParameters ); + return std::tie( sType, pNext, flags, viewportCount, pViewportSwizzles ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceVulkan11Features const & ) const = default; + auto operator<=>( PipelineViewportSwizzleStateCreateInfoNV const & ) const = default; #else - bool operator==( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) && - ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) && ( storagePushConstant16 == rhs.storagePushConstant16 ) && - ( storageInputOutput16 == rhs.storageInputOutput16 ) && ( multiview == rhs.multiview ) && - ( multiviewGeometryShader == rhs.multiviewGeometryShader ) && ( multiviewTessellationShader == rhs.multiviewTessellationShader ) && - ( variablePointersStorageBuffer == 
rhs.variablePointersStorageBuffer ) && ( variablePointers == rhs.variablePointers ) && - ( protectedMemory == rhs.protectedMemory ) && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ) && - ( shaderDrawParameters == rhs.shaderDrawParameters ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewportCount == rhs.viewportCount ) && + ( pViewportSwizzles == rhs.pViewportSwizzles ); # endif } - bool operator!=( PhysicalDeviceVulkan11Features const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Features; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 multiview = {}; - VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {}; - VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {}; - VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {}; - VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {}; - VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {}; - VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags = {}; + uint32_t viewportCount = {}; + const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceVulkan11Features; + using Type = PipelineViewportSwizzleStateCreateInfoNV; }; - struct PhysicalDeviceVulkan11Properties + struct ViewportWScalingNV { - using NativeType = VkPhysicalDeviceVulkan11Properties; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Properties; + using NativeType = VkViewportWScalingNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( - std::array const & deviceUUID_ = {}, - std::array const & driverUUID_ = {}, - std::array const & deviceLUID_ = {}, - uint32_t deviceNodeMask_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, - uint32_t subgroupSize_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ = {}, - VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ = {}, - VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, - uint32_t maxMultiviewViewCount_ = {}, - uint32_t maxMultiviewInstanceIndex_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}, - uint32_t maxPerSetDescriptors_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceUUID( deviceUUID_ ) - , driverUUID( driverUUID_ ) - , deviceLUID( deviceLUID_ ) - , deviceNodeMask( deviceNodeMask_ ) - , deviceLUIDValid( deviceLUIDValid_ ) - , subgroupSize( subgroupSize_ ) - , 
subgroupSupportedStages( subgroupSupportedStages_ ) - , subgroupSupportedOperations( subgroupSupportedOperations_ ) - , subgroupQuadOperationsInAllStages( subgroupQuadOperationsInAllStages_ ) - , pointClippingBehavior( pointClippingBehavior_ ) - , maxMultiviewViewCount( maxMultiviewViewCount_ ) - , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ ) - , protectedNoFault( protectedNoFault_ ) - , maxPerSetDescriptors( maxPerSetDescriptors_ ) - , maxMemoryAllocationSize( maxMemoryAllocationSize_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ViewportWScalingNV( float xcoeff_ = {}, float ycoeff_ = {} ) VULKAN_HPP_NOEXCEPT + : xcoeff{ xcoeff_ } + , ycoeff{ ycoeff_ } { } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ViewportWScalingNV( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceVulkan11Properties( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceVulkan11Properties( *reinterpret_cast( &rhs ) ) + ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT : ViewportWScalingNV( *reinterpret_cast( &rhs ) ) {} + + ViewportWScalingNV & operator=( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ViewportWScalingNV & operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceVulkan11Properties & operator=( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT + { + xcoeff = xcoeff_; + return *this; + } - PhysicalDeviceVulkan11Properties & operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + ycoeff = ycoeff_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceVulkan11Properties const &() const VULKAN_HPP_NOEXCEPT + operator VkViewportWScalingNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceVulkan11Properties &() VULKAN_HPP_NOEXCEPT + operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, - uint32_t const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - uint32_t const &, - VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, - VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::PointClippingBehavior const &, - uint32_t const &, - uint32_t const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - uint32_t const &, - VULKAN_HPP_NAMESPACE::DeviceSize const &> + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - deviceUUID, - driverUUID, - deviceLUID, - deviceNodeMask, - deviceLUIDValid, - subgroupSize, - 
subgroupSupportedStages, - subgroupSupportedOperations, - subgroupQuadOperationsInAllStages, - pointClippingBehavior, - maxMultiviewViewCount, - maxMultiviewInstanceIndex, - protectedNoFault, - maxPerSetDescriptors, - maxMemoryAllocationSize ); + return std::tie( xcoeff, ycoeff ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceVulkan11Properties const & ) const = default; + auto operator<=>( ViewportWScalingNV const & ) const = default; #else - bool operator==( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) && ( driverUUID == rhs.driverUUID ) && - ( deviceLUID == rhs.deviceLUID ) && ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid ) && - ( subgroupSize == rhs.subgroupSize ) && ( subgroupSupportedStages == rhs.subgroupSupportedStages ) && - ( subgroupSupportedOperations == rhs.subgroupSupportedOperations ) && - ( subgroupQuadOperationsInAllStages == rhs.subgroupQuadOperationsInAllStages ) && ( pointClippingBehavior == rhs.pointClippingBehavior ) && - ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ) && - ( protectedNoFault == rhs.protectedNoFault ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) && - ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize ); + return ( xcoeff == rhs.xcoeff ) && ( ycoeff == rhs.ycoeff ); # endif } - bool operator!=( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Properties; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceUUID = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverUUID = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceLUID = {}; - uint32_t deviceNodeMask = {}; - VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {}; - uint32_t subgroupSize = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages = {}; - VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations = {}; - VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages = {}; - VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes; - uint32_t maxMultiviewViewCount = {}; - uint32_t maxMultiviewInstanceIndex = {}; - VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {}; - uint32_t maxPerSetDescriptors = {}; - VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {}; - }; - - template <> - struct CppType - { - using Type = PhysicalDeviceVulkan11Properties; + float xcoeff = {}; + float ycoeff = {}; }; - struct PhysicalDeviceVulkan12Features + struct PipelineViewportWScalingStateCreateInfoNV { - using NativeType = VkPhysicalDeviceVulkan12Features; + using NativeType = VkPipelineViewportWScalingStateCreateInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Features; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( VULKAN_HPP_NAMESPACE::Bool32 
samplerMirrorClampToEdge_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , samplerMirrorClampToEdge( samplerMirrorClampToEdge_ ) - , drawIndirectCount( drawIndirectCount_ ) - , storageBuffer8BitAccess( storageBuffer8BitAccess_ ) - , uniformAndStorageBuffer8BitAccess( 
uniformAndStorageBuffer8BitAccess_ ) - , storagePushConstant8( storagePushConstant8_ ) - , shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ) - , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ ) - , shaderFloat16( shaderFloat16_ ) - , shaderInt8( shaderInt8_ ) - , descriptorIndexing( descriptorIndexing_ ) - , shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ) - , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ) - , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ) - , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ) - , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ) - , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ) - , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ) - , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ) - , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ) - , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ) - , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ) - , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ) - , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ) - , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ) - , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ) - , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ) - , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ) - , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ) - , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ) - , runtimeDescriptorArray( runtimeDescriptorArray_ ) - , samplerFilterMinmax( samplerFilterMinmax_ ) - , scalarBlockLayout( scalarBlockLayout_ ) - , imagelessFramebuffer( imagelessFramebuffer_ ) - , uniformBufferStandardLayout( uniformBufferStandardLayout_ ) - , shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ ) - , separateDepthStencilLayouts( separateDepthStencilLayouts_ ) - , hostQueryReset( hostQueryReset_ ) - , timelineSemaphore( timelineSemaphore_ ) - , bufferDeviceAddress( bufferDeviceAddress_ ) - , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ) - , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ) - , vulkanMemoryModel( vulkanMemoryModel_ ) - , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ) - , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ ) - , shaderOutputViewportIndex( shaderOutputViewportIndex_ ) - , shaderOutputLayer( shaderOutputLayer_ ) - , subgroupBroadcastDynamicId( subgroupBroadcastDynamicId_ ) - { - } - - VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportWScalingStateCreateInfoNV; - PhysicalDeviceVulkan12Features( VkPhysicalDeviceVulkan12Features const & rhs ) 
VULKAN_HPP_NOEXCEPT - : PhysicalDeviceVulkan12Features( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = {}, + uint32_t viewportCount_ = {}, + const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , viewportWScalingEnable{ viewportWScalingEnable_ } + , viewportCount{ viewportCount_ } + , pViewportWScalings{ pViewportWScalings_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceVulkan12Features & operator=( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceVulkan12Features & operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportWScalingStateCreateInfoNV( *reinterpret_cast( &rhs ) ) { - pNext = pNext_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setSamplerMirrorClampToEdge( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportWScalingStateCreateInfoNV( + VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewportWScalings_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , viewportWScalingEnable( viewportWScalingEnable_ ) + , viewportCount( static_cast( viewportWScalings_.size() ) ) + , pViewportWScalings( viewportWScalings_.data() ) { - samplerMirrorClampToEdge = samplerMirrorClampToEdge_; - return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDrawIndirectCount( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ ) VULKAN_HPP_NOEXCEPT - { - drawIndirectCount = drawIndirectCount_; - return *this; - } + PipelineViewportWScalingStateCreateInfoNV & operator=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT + PipelineViewportWScalingStateCreateInfoNV & operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - storageBuffer8BitAccess = storageBuffer8BitAccess_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_; + pNext = pNext_; return *this; 
} - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & + setViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT { - storagePushConstant8 = storagePushConstant8_; + viewportWScalingEnable = viewportWScalingEnable_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT { - shaderBufferInt64Atomics = shaderBufferInt64Atomics_; + viewportCount = viewportCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & + setPViewportWScalings( const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ ) VULKAN_HPP_NOEXCEPT { - shaderSharedInt64Atomics = shaderSharedInt64Atomics_; + pViewportWScalings = pViewportWScalings_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineViewportWScalingStateCreateInfoNV & setViewportWScalings( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewportWScalings_ ) VULKAN_HPP_NOEXCEPT { - shaderFloat16 = shaderFloat16_; + viewportCount = static_cast( viewportWScalings_.size() ); + pViewportWScalings = viewportWScalings_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT + operator VkPipelineViewportWScalingStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - shaderInt8 = shaderInt8_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorIndexing( VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ ) VULKAN_HPP_NOEXCEPT + operator VkPipelineViewportWScalingStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - descriptorIndexing = descriptorIndexing_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_; - return *this; + return std::tie( sType, pNext, viewportWScalingEnable, viewportCount, pViewportWScalings ); } +#endif - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportWScalingStateCreateInfoNV const & ) const = default; +#else + bool operator==( PipelineViewportWScalingStateCreateInfoNV 
const & rhs ) const VULKAN_HPP_NOEXCEPT { - shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewportWScalingEnable == rhs.viewportWScalingEnable ) && + ( viewportCount == rhs.viewportCount ) && ( pViewportWScalings == rhs.pViewportWScalings ); +# endif } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable = {}; + uint32_t viewportCount = {}; + const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings = {}; + }; + + template <> + struct CppType + { + using Type = PipelineViewportWScalingStateCreateInfoNV; + }; + +#if defined( VK_USE_PLATFORM_GGP ) + struct PresentFrameTokenGGP + { + using NativeType = VkPresentFrameTokenGGP; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentFrameTokenGGP; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( GgpFrameToken frameToken_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , frameToken{ frameToken_ } { - shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentFrameTokenGGP( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT + : PresentFrameTokenGGP( *reinterpret_cast( &rhs ) ) { - shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + PresentFrameTokenGGP & operator=( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PresentFrameTokenGGP & operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT { - shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT 
+# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP & setFrameToken( GgpFrameToken frameToken_ ) VULKAN_HPP_NOEXCEPT { - shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; + frameToken = frameToken_; return *this; } +# endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + operator VkPresentFrameTokenGGP const &() const VULKAN_HPP_NOEXCEPT { - shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT + operator VkPresentFrameTokenGGP &() VULKAN_HPP_NOEXCEPT { - shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_; - return *this; + return std::tie( sType, pNext, frameToken ); } +# endif - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT { - descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; - return *this; + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; } +# endif - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + bool operator==( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT { - descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_; - return *this; + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ) == 0 ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT { - descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; - return *this; + return !operator==( rhs ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingUniformTexelBufferUpdateAfterBind( - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentFrameTokenGGP; + const void * pNext = {}; + GgpFrameToken frameToken = {}; + }; + + template <> + struct CppType + { + using Type = PresentFrameTokenGGP; + }; +#endif /*VK_USE_PLATFORM_GGP*/ + + struct PresentIdKHR + { + using NativeType = VkPresentIdKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentIdKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentIdKHR( uint32_t swapchainCount_ = {}, const uint64_t * pPresentIds_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , swapchainCount{ swapchainCount_ } + , pPresentIds{ pPresentIds_ } { - descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setDescriptorBindingStorageTexelBufferUpdateAfterBind( - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PresentIdKHR( PresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentIdKHR( VkPresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentIdKHR( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentIdKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentIds_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), swapchainCount( static_cast( presentIds_.size() ) ), pPresentIds( presentIds_.data() ) { - descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_; - return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT + PresentIdKHR & operator=( PresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PresentIdKHR & operator=( VkPresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - 
descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT { - descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; + swapchainCount = swapchainCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setPPresentIds( const uint64_t * pPresentIds_ ) VULKAN_HPP_NOEXCEPT { - runtimeDescriptorArray = runtimeDescriptorArray_; + pPresentIds = pPresentIds_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setSamplerFilterMinmax( VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentIdKHR & setPresentIds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentIds_ ) VULKAN_HPP_NOEXCEPT { - samplerFilterMinmax = samplerFilterMinmax_; + swapchainCount = static_cast( presentIds_.size() ); + pPresentIds = presentIds_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT + operator VkPresentIdKHR const &() const VULKAN_HPP_NOEXCEPT { - scalarBlockLayout = scalarBlockLayout_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT + operator VkPresentIdKHR &() VULKAN_HPP_NOEXCEPT { - imagelessFramebuffer = imagelessFramebuffer_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - uniformBufferStandardLayout = uniformBufferStandardLayout_; - return *this; + return std::tie( sType, pNext, swapchainCount, pPresentIds ); } +#endif - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PresentIdKHR const & ) const = default; +#else + bool operator==( PresentIdKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_; - return *this; +# if defined( 
VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pPresentIds == rhs.pPresentIds ); +# endif } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( PresentIdKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - separateDepthStencilLayouts = separateDepthStencilLayouts_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentIdKHR; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const uint64_t * pPresentIds = {}; + }; + + template <> + struct CppType + { + using Type = PresentIdKHR; + }; + + struct PresentInfoKHR + { + using NativeType = VkPresentInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentInfoKHR( uint32_t waitSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, + uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ = {}, + const uint32_t * pImageIndices_ = {}, + VULKAN_HPP_NAMESPACE::Result * pResults_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , waitSemaphoreCount{ waitSemaphoreCount_ } + , pWaitSemaphores{ pWaitSemaphores_ } + , swapchainCount{ swapchainCount_ } + , pSwapchains{ pSwapchains_ } + , pImageIndices{ pImageIndices_ } + , pResults{ pResults_ } { - hostQueryReset = hostQueryReset_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PresentInfoKHR( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentInfoKHR( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & swapchains_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageIndices_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & results_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , waitSemaphoreCount( static_cast( waitSemaphores_.size() ) ) + , pWaitSemaphores( waitSemaphores_.data() ) + , swapchainCount( static_cast( swapchains_.size() ) ) + , pSwapchains( swapchains_.data() ) + , pImageIndices( imageIndices_.data() ) + , pResults( results_.data() ) { - timelineSemaphore = timelineSemaphore_; - return *this; +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( swapchains_.size() == imageIndices_.size() ); + VULKAN_HPP_ASSERT( results_.empty() || ( swapchains_.size() == results_.size() ) ); + VULKAN_HPP_ASSERT( results_.empty() || ( imageIndices_.size() == results_.size() ) ); +# else + if ( swapchains_.size() != imageIndices_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: swapchains_.size() != 
imageIndices_.size()" ); + } + if ( !results_.empty() && ( swapchains_.size() != results_.size() ) ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( swapchains_.size() != results_.size() )" ); + } + if ( !results_.empty() && ( imageIndices_.size() != results_.size() ) ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( imageIndices_.size() != results_.size() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT + PresentInfoKHR & operator=( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PresentInfoKHR & operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - bufferDeviceAddress = bufferDeviceAddress_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT { - bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; + waitSemaphoreCount = waitSemaphoreCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT { - vulkanMemoryModel = vulkanMemoryModel_; + pWaitSemaphores = pWaitSemaphores_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & + setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT { - vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_; + waitSemaphoreCount = static_cast( waitSemaphores_.size() ); + pWaitSemaphores = waitSemaphores_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT { - vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_; + swapchainCount = swapchainCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setShaderOutputViewportIndex( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ ) 
VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ ) VULKAN_HPP_NOEXCEPT { - shaderOutputViewportIndex = shaderOutputViewportIndex_; + pSwapchains = pSwapchains_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderOutputLayer( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & + setSwapchains( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & swapchains_ ) VULKAN_HPP_NOEXCEPT { - shaderOutputLayer = shaderOutputLayer_; + swapchainCount = static_cast( swapchains_.size() ); + pSwapchains = swapchains_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & - setSubgroupBroadcastDynamicId( VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPImageIndices( const uint32_t * pImageIndices_ ) VULKAN_HPP_NOEXCEPT { - subgroupBroadcastDynamicId = subgroupBroadcastDynamicId_; + pImageIndices = pImageIndices_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceVulkan12Features const &() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & setImageIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageIndices_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + swapchainCount = static_cast( imageIndices_.size() ); + pImageIndices = imageIndices_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - operator VkPhysicalDeviceVulkan12Features &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPResults( VULKAN_HPP_NAMESPACE::Result * pResults_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pResults = pResults_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, - pNext, - samplerMirrorClampToEdge, - drawIndirectCount, - storageBuffer8BitAccess, - uniformAndStorageBuffer8BitAccess, - storagePushConstant8, - shaderBufferInt64Atomics, - shaderSharedInt64Atomics, - shaderFloat16, - shaderInt8, - descriptorIndexing, - shaderInputAttachmentArrayDynamicIndexing, - shaderUniformTexelBufferArrayDynamicIndexing, - shaderStorageTexelBufferArrayDynamicIndexing, - shaderUniformBufferArrayNonUniformIndexing, - shaderSampledImageArrayNonUniformIndexing, - shaderStorageBufferArrayNonUniformIndexing, - shaderStorageImageArrayNonUniformIndexing, - shaderInputAttachmentArrayNonUniformIndexing, - shaderUniformTexelBufferArrayNonUniformIndexing, - shaderStorageTexelBufferArrayNonUniformIndexing, - descriptorBindingUniformBufferUpdateAfterBind, - descriptorBindingSampledImageUpdateAfterBind, - descriptorBindingStorageImageUpdateAfterBind, - descriptorBindingStorageBufferUpdateAfterBind, - descriptorBindingUniformTexelBufferUpdateAfterBind, - descriptorBindingStorageTexelBufferUpdateAfterBind, - descriptorBindingUpdateUnusedWhilePending, - descriptorBindingPartiallyBound, - descriptorBindingVariableDescriptorCount, - runtimeDescriptorArray, - samplerFilterMinmax, - scalarBlockLayout, - imagelessFramebuffer, - uniformBufferStandardLayout, - shaderSubgroupExtendedTypes, - separateDepthStencilLayouts, - hostQueryReset, - timelineSemaphore, - bufferDeviceAddress, - 
bufferDeviceAddressCaptureReplay, - bufferDeviceAddressMultiDevice, - vulkanMemoryModel, - vulkanMemoryModelDeviceScope, - vulkanMemoryModelAvailabilityVisibilityChains, - shaderOutputViewportIndex, - shaderOutputLayer, - subgroupBroadcastDynamicId ); +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentInfoKHR & setResults( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & results_ ) VULKAN_HPP_NOEXCEPT + { + swapchainCount = static_cast( results_.size() ); + pResults = results_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, waitSemaphoreCount, pWaitSemaphores, swapchainCount, pSwapchains, pImageIndices, pResults ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceVulkan12Features const & ) const = default; + auto operator<=>( PresentInfoKHR const & ) const = default; #else - bool operator==( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( samplerMirrorClampToEdge == rhs.samplerMirrorClampToEdge ) && - ( drawIndirectCount == rhs.drawIndirectCount ) && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) && - ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) && ( storagePushConstant8 == rhs.storagePushConstant8 ) && - ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) && ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics ) && - ( shaderFloat16 == rhs.shaderFloat16 ) && ( shaderInt8 == rhs.shaderInt8 ) && ( descriptorIndexing == rhs.descriptorIndexing ) && - ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) && - ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) && - ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) && - ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) && - ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) && - ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) && - ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) && - ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) && - ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) && - ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) && - ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) && - ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) && - ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) 
&& - ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) && - ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) && - ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) && - ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) && - ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) && - ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) && - ( runtimeDescriptorArray == rhs.runtimeDescriptorArray ) && ( samplerFilterMinmax == rhs.samplerFilterMinmax ) && - ( scalarBlockLayout == rhs.scalarBlockLayout ) && ( imagelessFramebuffer == rhs.imagelessFramebuffer ) && - ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ) && ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes ) && - ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts ) && ( hostQueryReset == rhs.hostQueryReset ) && - ( timelineSemaphore == rhs.timelineSemaphore ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) && - ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) && - ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ) && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) && - ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) && - ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ) && - ( shaderOutputViewportIndex == rhs.shaderOutputViewportIndex ) && ( shaderOutputLayer == rhs.shaderOutputLayer ) && - ( subgroupBroadcastDynamicId == rhs.subgroupBroadcastDynamicId ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( swapchainCount == rhs.swapchainCount ) && ( pSwapchains == rhs.pSwapchains ) && + ( pImageIndices == rhs.pImageIndices ) && ( pResults == rhs.pResults ); # endif } - bool operator!=( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Features; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge = {}; - VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount = {}; - VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 
shaderStorageImageArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {}; - VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {}; - VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax = {}; - VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {}; - VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {}; - VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {}; - VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {}; - VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {}; - VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {}; - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {}; - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {}; - VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {}; - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {}; - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {}; - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer = {}; - VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentInfoKHR; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains = {}; + const uint32_t * pImageIndices = {}; + VULKAN_HPP_NAMESPACE::Result * pResults = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceVulkan12Features; + using Type = PresentInfoKHR; }; - struct PhysicalDeviceVulkan12Properties + struct RectLayerKHR { - using NativeType = VkPhysicalDeviceVulkan12Properties; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Properties; + using NativeType = VkRectLayerKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties( - VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, - std::array const & driverName_ = {}, - std::array const & driverInfo_ = {}, - VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence 
roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, - uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, - uint32_t maxPerStageUpdateAfterBindResources_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, - VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, - VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, - uint64_t maxTimelineSemaphoreValueDifference_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , driverID( driverID_ ) - , driverName( driverName_ ) - , driverInfo( driverInfo_ ) - , conformanceVersion( conformanceVersion_ ) - , denormBehaviorIndependence( denormBehaviorIndependence_ ) 
- , roundingModeIndependence( roundingModeIndependence_ ) - , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ) - , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ) - , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ) - , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ) - , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ) - , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ) - , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ) - , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ) - , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ) - , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ) - , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ) - , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ) - , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ) - , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ) - , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ) - , maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ) - , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ) - , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ) - , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ) - , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ) - , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ) - , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ) - , quadDivergentImplicitLod( quadDivergentImplicitLod_ ) - , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ) - , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ) - , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ) - , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ) - , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ) - , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ) - , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ) - , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ) - , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ) - , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ) - , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ) - , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ) - , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ) - , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ) - , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ) - , supportedDepthResolveModes( supportedDepthResolveModes_ ) - , supportedStencilResolveModes( supportedStencilResolveModes_ ) - , 
independentResolveNone( independentResolveNone_ ) - , independentResolve( independentResolve_ ) - , filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ) - , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ ) - , maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ ) - , framebufferIntegerColorSampleCounts( framebufferIntegerColorSampleCounts_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RectLayerKHR( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}, uint32_t layer_ = {} ) VULKAN_HPP_NOEXCEPT + : offset{ offset_ } + , extent{ extent_ } + , layer{ layer_ } { } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RectLayerKHR( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceVulkan12Properties( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceVulkan12Properties( *reinterpret_cast( &rhs ) ) + RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT : RectLayerKHR( *reinterpret_cast( &rhs ) ) {} + + explicit RectLayerKHR( Rect2D const & rect2D, uint32_t layer_ = {} ) : offset( rect2D.offset ), extent( rect2D.extent ), layer( layer_ ) {} + + RectLayerKHR & operator=( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RectLayerKHR & operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceVulkan12Properties & operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } - PhysicalDeviceVulkan12Properties & operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + extent = extent_; return *this; } - operator VkPhysicalDeviceVulkan12Properties const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setLayer( uint32_t layer_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + layer = layer_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceVulkan12Properties &() VULKAN_HPP_NOEXCEPT + operator VkRectLayerKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkRectLayerKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, - VULKAN_HPP_NAMESPACE::ConformanceVersion const &, - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &, - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const 
&, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - uint32_t const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - uint32_t const &, - VULKAN_HPP_NAMESPACE::ResolveModeFlags const &, - VULKAN_HPP_NAMESPACE::ResolveModeFlags const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - uint64_t const &, - VULKAN_HPP_NAMESPACE::SampleCountFlags const &> + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - driverID, - driverName, - driverInfo, - conformanceVersion, - denormBehaviorIndependence, - roundingModeIndependence, - shaderSignedZeroInfNanPreserveFloat16, - shaderSignedZeroInfNanPreserveFloat32, - shaderSignedZeroInfNanPreserveFloat64, - shaderDenormPreserveFloat16, - shaderDenormPreserveFloat32, - shaderDenormPreserveFloat64, - shaderDenormFlushToZeroFloat16, - shaderDenormFlushToZeroFloat32, - shaderDenormFlushToZeroFloat64, - shaderRoundingModeRTEFloat16, - shaderRoundingModeRTEFloat32, - shaderRoundingModeRTEFloat64, - shaderRoundingModeRTZFloat16, - shaderRoundingModeRTZFloat32, - shaderRoundingModeRTZFloat64, - maxUpdateAfterBindDescriptorsInAllPools, - shaderUniformBufferArrayNonUniformIndexingNative, - shaderSampledImageArrayNonUniformIndexingNative, - shaderStorageBufferArrayNonUniformIndexingNative, - shaderStorageImageArrayNonUniformIndexingNative, - shaderInputAttachmentArrayNonUniformIndexingNative, - robustBufferAccessUpdateAfterBind, - quadDivergentImplicitLod, - maxPerStageDescriptorUpdateAfterBindSamplers, - maxPerStageDescriptorUpdateAfterBindUniformBuffers, - maxPerStageDescriptorUpdateAfterBindStorageBuffers, - maxPerStageDescriptorUpdateAfterBindSampledImages, - maxPerStageDescriptorUpdateAfterBindStorageImages, - maxPerStageDescriptorUpdateAfterBindInputAttachments, - maxPerStageUpdateAfterBindResources, - maxDescriptorSetUpdateAfterBindSamplers, - maxDescriptorSetUpdateAfterBindUniformBuffers, - maxDescriptorSetUpdateAfterBindUniformBuffersDynamic, - maxDescriptorSetUpdateAfterBindStorageBuffers, - maxDescriptorSetUpdateAfterBindStorageBuffersDynamic, - maxDescriptorSetUpdateAfterBindSampledImages, - maxDescriptorSetUpdateAfterBindStorageImages, - maxDescriptorSetUpdateAfterBindInputAttachments, - supportedDepthResolveModes, - supportedStencilResolveModes, - independentResolveNone, - independentResolve, - filterMinmaxSingleComponentFormats, - filterMinmaxImageComponentMapping, - maxTimelineSemaphoreValueDifference, - framebufferIntegerColorSampleCounts ); + return std::tie( offset, extent, layer ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( 
PhysicalDeviceVulkan12Properties const & ) const = default; + auto operator<=>( RectLayerKHR const & ) const = default; #else - bool operator==( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( driverName == rhs.driverName ) && - ( driverInfo == rhs.driverInfo ) && ( conformanceVersion == rhs.conformanceVersion ) && - ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && ( roundingModeIndependence == rhs.roundingModeIndependence ) && - ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 ) && - ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) && - ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) && - ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) && - ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) && - ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) && - ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) && - ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) && - ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) && - ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ) && - ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) && - ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative ) && - ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative ) && - ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative ) && - ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative ) && - ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) && - ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) && - ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) && - ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) && - ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) && - ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) && - ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) && - ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) && - ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) && - ( maxDescriptorSetUpdateAfterBindSamplers == 
rhs.maxDescriptorSetUpdateAfterBindSamplers ) && - ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) && - ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) && - ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) && - ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) && - ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) && - ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) && - ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments ) && - ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) && - ( independentResolveNone == rhs.independentResolveNone ) && ( independentResolve == rhs.independentResolve ) && - ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) && - ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ) && - ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference ) && - ( framebufferIntegerColorSampleCounts == rhs.framebufferIntegerColorSampleCounts ); + return ( offset == rhs.offset ) && ( extent == rhs.extent ) && ( layer == rhs.layer ); # endif } - bool operator!=( PhysicalDeviceVulkan12Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverName = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D driverInfo = {}; - VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {}; - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly; - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {}; - uint32_t maxUpdateAfterBindDescriptorsInAllPools = {}; - VULKAN_HPP_NAMESPACE::Bool32 
shaderUniformBufferArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {}; - VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {}; - uint32_t maxPerStageUpdateAfterBindResources = {}; - uint32_t maxDescriptorSetUpdateAfterBindSamplers = {}; - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {}; - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {}; - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {}; - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {}; - uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {}; - uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {}; - uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {}; - VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {}; - VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {}; - VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {}; - VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {}; - VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {}; - VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {}; - uint64_t maxTimelineSemaphoreValueDifference = {}; - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts = {}; - }; - - template <> - struct CppType - { - using Type = PhysicalDeviceVulkan12Properties; + VULKAN_HPP_NAMESPACE::Offset2D offset = {}; + VULKAN_HPP_NAMESPACE::Extent2D extent = {}; + uint32_t layer = {}; }; - struct PhysicalDeviceVulkan13Features + struct PresentRegionKHR { - using NativeType = VkPhysicalDeviceVulkan13Features; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan13Features; + using NativeType = VkPresentRegionKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 
shaderIntegerDotProduct_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , robustImageAccess( robustImageAccess_ ) - , inlineUniformBlock( inlineUniformBlock_ ) - , descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ ) - , pipelineCreationCacheControl( pipelineCreationCacheControl_ ) - , privateData( privateData_ ) - , shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ ) - , shaderTerminateInvocation( shaderTerminateInvocation_ ) - , subgroupSizeControl( subgroupSizeControl_ ) - , computeFullSubgroups( computeFullSubgroups_ ) - , synchronization2( synchronization2_ ) - , textureCompressionASTC_HDR( textureCompressionASTC_HDR_ ) - , shaderZeroInitializeWorkgroupMemory( shaderZeroInitializeWorkgroupMemory_ ) - , dynamicRendering( dynamicRendering_ ) - , shaderIntegerDotProduct( shaderIntegerDotProduct_ ) - , maintenance4( maintenance4_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentRegionKHR( uint32_t rectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ = {} ) VULKAN_HPP_NOEXCEPT + : rectangleCount{ rectangleCount_ } + , pRectangles{ pRectangles_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Features( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceVulkan13Features( VkPhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceVulkan13Features( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + VULKAN_HPP_CONSTEXPR PresentRegionKHR( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceVulkan13Features & operator=( PhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PresentRegionKHR( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentRegionKHR( *reinterpret_cast( &rhs ) ) {} - PhysicalDeviceVulkan13Features & operator=( VkPhysicalDeviceVulkan13Features const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentRegionKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & rectangles_ ) + : rectangleCount( static_cast( rectangles_.size() ) ), pRectangles( rectangles_.data() ) { - *this = *reinterpret_cast( &rhs ); - return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + PresentRegionKHR & operator=( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PresentRegionKHR & operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & setRectangleCount( uint32_t rectangleCount_ ) VULKAN_HPP_NOEXCEPT { - robustImageAccess = robustImageAccess_; + rectangleCount = rectangleCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & 
setPRectangles( const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ ) VULKAN_HPP_NOEXCEPT
    {
-      inlineUniformBlock = inlineUniformBlock_;
+      pRectangles = pRectangles_;
      return *this;
    }
-    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setDescriptorBindingInlineUniformBlockUpdateAfterBind(
-      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    PresentRegionKHR &
+      setRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RectLayerKHR> const & rectangles_ ) VULKAN_HPP_NOEXCEPT
    {
-      descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_;
+      rectangleCount = static_cast<uint32_t>( rectangles_.size() );
+      pRectangles    = rectangles_.data();
      return *this;
    }
+#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VULKAN_HPP_NO_SETTERS*/
-    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
-      setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT
+    operator VkPresentRegionKHR const &() const VULKAN_HPP_NOEXCEPT
    {
-      pipelineCreationCacheControl = pipelineCreationCacheControl_;
-      return *this;
+      return *reinterpret_cast<const VkPresentRegionKHR *>( this );
    }
-    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
+    operator VkPresentRegionKHR &() VULKAN_HPP_NOEXCEPT
    {
-      privateData = privateData_;
-      return *this;
+      return *reinterpret_cast<VkPresentRegionKHR *>( this );
    }
-    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
-      setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
+#if defined( VULKAN_HPP_USE_REFLECT )
+#  if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+#  else
+    std::tuple<uint32_t const &, const VULKAN_HPP_NAMESPACE::RectLayerKHR * const &>
+#  endif
+      reflect() const VULKAN_HPP_NOEXCEPT
    {
-      shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_;
-      return *this;
+      return std::tie( rectangleCount, pRectangles );
    }
+#endif
-    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features &
-      setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( PresentRegionKHR const & ) const = default;
+#else
+    bool operator==( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
-      shaderTerminateInvocation = shaderTerminateInvocation_;
-      return *this;
+#  if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+#  else
+      return ( rectangleCount == rhs.rectangleCount ) && ( pRectangles == rhs.pRectangles );
+#  endif
    }
-    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
+    bool operator!=( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
-      subgroupSizeControl = subgroupSizeControl_;
-      return *this;
+      return !operator==( rhs );
    }
+#endif
-    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
+  public:
+    uint32_t rectangleCount = {};
+    const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles = {};
+  };
+
+  struct PresentRegionsKHR
+  {
+    using NativeType = VkPresentRegionsKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentRegionsKHR;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+
VULKAN_HPP_CONSTEXPR PresentRegionsKHR( uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , swapchainCount{ swapchainCount_ } + , pRegions{ pRegions_ } { - computeFullSubgroups = computeFullSubgroups_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setSynchronization2( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PresentRegionsKHR( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentRegionsKHR( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentRegionsKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), swapchainCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ) { - synchronization2 = synchronization2_; - return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & - setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT + PresentRegionsKHR & operator=( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PresentRegionsKHR & operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - textureCompressionASTC_HDR = textureCompressionASTC_HDR_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & - setShaderZeroInitializeWorkgroupMemory( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setDynamicRendering( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT { - dynamicRendering = dynamicRendering_; + swapchainCount = swapchainCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & - setShaderIntegerDotProduct( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setPRegions( const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ ) VULKAN_HPP_NOEXCEPT { - shaderIntegerDotProduct = shaderIntegerDotProduct_; + pRegions = pRegions_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan13Features & setMaintenance4( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentRegionsKHR & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT { - maintenance4 = maintenance4_; + swapchainCount = static_cast( regions_.size() ); + pRegions = regions_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceVulkan13Features const &() const VULKAN_HPP_NOEXCEPT + operator VkPresentRegionsKHR const &() const 
VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceVulkan13Features &() VULKAN_HPP_NOEXCEPT + operator VkPresentRegionsKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - robustImageAccess, - inlineUniformBlock, - descriptorBindingInlineUniformBlockUpdateAfterBind, - pipelineCreationCacheControl, - privateData, - shaderDemoteToHelperInvocation, - shaderTerminateInvocation, - subgroupSizeControl, - computeFullSubgroups, - synchronization2, - textureCompressionASTC_HDR, - shaderZeroInitializeWorkgroupMemory, - dynamicRendering, - shaderIntegerDotProduct, - maintenance4 ); + return std::tie( sType, pNext, swapchainCount, pRegions ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceVulkan13Features const & ) const = default; + auto operator<=>( PresentRegionsKHR const & ) const = default; #else - bool operator==( PhysicalDeviceVulkan13Features const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustImageAccess == rhs.robustImageAccess ) && - ( inlineUniformBlock == rhs.inlineUniformBlock ) && - ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind ) && - ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl ) && ( privateData == rhs.privateData ) && - ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation ) && ( shaderTerminateInvocation == rhs.shaderTerminateInvocation ) && - ( subgroupSizeControl == rhs.subgroupSizeControl ) && ( computeFullSubgroups == rhs.computeFullSubgroups ) && - ( synchronization2 == rhs.synchronization2 ) && ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR ) && - ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory ) && ( dynamicRendering == rhs.dynamicRendering ) && - ( shaderIntegerDotProduct == rhs.shaderIntegerDotProduct ) && ( maintenance4 == rhs.maintenance4 ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pRegions == rhs.pRegions ); # endif } - bool operator!=( PhysicalDeviceVulkan13Features const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan13Features; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {}; - VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {}; - VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {}; - VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {}; - VULKAN_HPP_NAMESPACE::Bool32 privateData = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {}; - VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {}; - VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {}; - VULKAN_HPP_NAMESPACE::Bool32 synchronization2 = {}; - VULKAN_HPP_NAMESPACE::Bool32 
textureCompressionASTC_HDR = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory = {}; - VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct = {}; - VULKAN_HPP_NAMESPACE::Bool32 maintenance4 = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentRegionsKHR; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceVulkan13Features; + using Type = PresentRegionsKHR; }; - struct PhysicalDeviceVulkan13Properties + struct PresentTimeGOOGLE { - using NativeType = VkPhysicalDeviceVulkan13Properties; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan13Properties; + using NativeType = VkPresentTimeGOOGLE; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - PhysicalDeviceVulkan13Properties( uint32_t minSubgroupSize_ = {}, - uint32_t maxSubgroupSize_ = {}, - uint32_t maxComputeWorkgroupSubgroups_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {}, - uint32_t maxInlineUniformBlockSize_ = {}, - uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, - uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {}, - uint32_t maxInlineUniformTotalSize_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 
integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , minSubgroupSize( minSubgroupSize_ ) - , maxSubgroupSize( maxSubgroupSize_ ) - , maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ ) - , requiredSubgroupSizeStages( requiredSubgroupSizeStages_ ) - , maxInlineUniformBlockSize( maxInlineUniformBlockSize_ ) - , maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ ) - , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ ) - , maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ ) - , maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ ) - , maxInlineUniformTotalSize( maxInlineUniformTotalSize_ ) - , integerDotProduct8BitUnsignedAccelerated( integerDotProduct8BitUnsignedAccelerated_ ) - , integerDotProduct8BitSignedAccelerated( integerDotProduct8BitSignedAccelerated_ ) - , integerDotProduct8BitMixedSignednessAccelerated( integerDotProduct8BitMixedSignednessAccelerated_ ) - , integerDotProduct4x8BitPackedUnsignedAccelerated( integerDotProduct4x8BitPackedUnsignedAccelerated_ ) - , integerDotProduct4x8BitPackedSignedAccelerated( integerDotProduct4x8BitPackedSignedAccelerated_ ) - , integerDotProduct4x8BitPackedMixedSignednessAccelerated( integerDotProduct4x8BitPackedMixedSignednessAccelerated_ ) - , integerDotProduct16BitUnsignedAccelerated( integerDotProduct16BitUnsignedAccelerated_ ) - , integerDotProduct16BitSignedAccelerated( integerDotProduct16BitSignedAccelerated_ ) - , integerDotProduct16BitMixedSignednessAccelerated( integerDotProduct16BitMixedSignednessAccelerated_ ) - , integerDotProduct32BitUnsignedAccelerated( integerDotProduct32BitUnsignedAccelerated_ ) - , integerDotProduct32BitSignedAccelerated( integerDotProduct32BitSignedAccelerated_ ) - , integerDotProduct32BitMixedSignednessAccelerated( integerDotProduct32BitMixedSignednessAccelerated_ ) - , integerDotProduct64BitUnsignedAccelerated( integerDotProduct64BitUnsignedAccelerated_ ) - , integerDotProduct64BitSignedAccelerated( integerDotProduct64BitSignedAccelerated_ ) - , integerDotProduct64BitMixedSignednessAccelerated( integerDotProduct64BitMixedSignednessAccelerated_ ) - , 
integerDotProductAccumulatingSaturating8BitUnsignedAccelerated( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ ) - , integerDotProductAccumulatingSaturating8BitSignedAccelerated( integerDotProductAccumulatingSaturating8BitSignedAccelerated_ ) - , integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ ) - , integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ ) - , integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ ) - , integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated( - integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ ) - , integerDotProductAccumulatingSaturating16BitUnsignedAccelerated( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ ) - , integerDotProductAccumulatingSaturating16BitSignedAccelerated( integerDotProductAccumulatingSaturating16BitSignedAccelerated_ ) - , integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ ) - , integerDotProductAccumulatingSaturating32BitUnsignedAccelerated( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ ) - , integerDotProductAccumulatingSaturating32BitSignedAccelerated( integerDotProductAccumulatingSaturating32BitSignedAccelerated_ ) - , integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ ) - , integerDotProductAccumulatingSaturating64BitUnsignedAccelerated( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ ) - , integerDotProductAccumulatingSaturating64BitSignedAccelerated( integerDotProductAccumulatingSaturating64BitSignedAccelerated_ ) - , integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ ) - , storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ ) - , storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ ) - , uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ ) - , uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ ) - , maxBufferSize( maxBufferSize_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {} ) VULKAN_HPP_NOEXCEPT + : presentID{ presentID_ } + , desiredPresentTime{ desiredPresentTime_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan13Properties( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceVulkan13Properties( VkPhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceVulkan13Properties( *reinterpret_cast( &rhs ) ) + PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT : PresentTimeGOOGLE( *reinterpret_cast( &rhs ) ) {} + + PresentTimeGOOGLE & operator=( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + PresentTimeGOOGLE & operator=( VkPresentTimeGOOGLE 
const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceVulkan13Properties & operator=( PhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE & setPresentID( uint32_t presentID_ ) VULKAN_HPP_NOEXCEPT + { + presentID = presentID_; + return *this; + } - PhysicalDeviceVulkan13Properties & operator=( VkPhysicalDeviceVulkan13Properties const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE & setDesiredPresentTime( uint64_t desiredPresentTime_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + desiredPresentTime = desiredPresentTime_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceVulkan13Properties const &() const VULKAN_HPP_NOEXCEPT + operator VkPresentTimeGOOGLE const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceVulkan13Properties &() VULKAN_HPP_NOEXCEPT + operator VkPresentTimeGOOGLE &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - minSubgroupSize, - maxSubgroupSize, - maxComputeWorkgroupSubgroups, - requiredSubgroupSizeStages, - maxInlineUniformBlockSize, - maxPerStageDescriptorInlineUniformBlocks, - maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks, - maxDescriptorSetInlineUniformBlocks, - maxDescriptorSetUpdateAfterBindInlineUniformBlocks, - maxInlineUniformTotalSize, - integerDotProduct8BitUnsignedAccelerated, - integerDotProduct8BitSignedAccelerated, - integerDotProduct8BitMixedSignednessAccelerated, - integerDotProduct4x8BitPackedUnsignedAccelerated, - integerDotProduct4x8BitPackedSignedAccelerated, - integerDotProduct4x8BitPackedMixedSignednessAccelerated, - integerDotProduct16BitUnsignedAccelerated, - integerDotProduct16BitSignedAccelerated, - integerDotProduct16BitMixedSignednessAccelerated, - integerDotProduct32BitUnsignedAccelerated, - integerDotProduct32BitSignedAccelerated, - integerDotProduct32BitMixedSignednessAccelerated, - integerDotProduct64BitUnsignedAccelerated, - integerDotProduct64BitSignedAccelerated, - integerDotProduct64BitMixedSignednessAccelerated, - integerDotProductAccumulatingSaturating8BitUnsignedAccelerated, - integerDotProductAccumulatingSaturating8BitSignedAccelerated, - integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated, - integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated, - integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated, - integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated, - integerDotProductAccumulatingSaturating16BitUnsignedAccelerated, - integerDotProductAccumulatingSaturating16BitSignedAccelerated, - integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated, - integerDotProductAccumulatingSaturating32BitUnsignedAccelerated, - integerDotProductAccumulatingSaturating32BitSignedAccelerated, - integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated, - integerDotProductAccumulatingSaturating64BitUnsignedAccelerated, - integerDotProductAccumulatingSaturating64BitSignedAccelerated, - 
integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated, - storageTexelBufferOffsetAlignmentBytes, - storageTexelBufferOffsetSingleTexelAlignment, - uniformTexelBufferOffsetAlignmentBytes, - uniformTexelBufferOffsetSingleTexelAlignment, - maxBufferSize ); + return std::tie( presentID, desiredPresentTime ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceVulkan13Properties const & ) const = default; + auto operator<=>( PresentTimeGOOGLE const & ) const = default; #else - bool operator==( PhysicalDeviceVulkan13Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSubgroupSize == rhs.minSubgroupSize ) && ( maxSubgroupSize == rhs.maxSubgroupSize ) && - ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups ) && ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages ) && - ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) && - ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) && - ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) && - ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) && - ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ) && - ( maxInlineUniformTotalSize == rhs.maxInlineUniformTotalSize ) && - ( integerDotProduct8BitUnsignedAccelerated == rhs.integerDotProduct8BitUnsignedAccelerated ) && - ( integerDotProduct8BitSignedAccelerated == rhs.integerDotProduct8BitSignedAccelerated ) && - ( integerDotProduct8BitMixedSignednessAccelerated == rhs.integerDotProduct8BitMixedSignednessAccelerated ) && - ( integerDotProduct4x8BitPackedUnsignedAccelerated == rhs.integerDotProduct4x8BitPackedUnsignedAccelerated ) && - ( integerDotProduct4x8BitPackedSignedAccelerated == rhs.integerDotProduct4x8BitPackedSignedAccelerated ) && - ( integerDotProduct4x8BitPackedMixedSignednessAccelerated == rhs.integerDotProduct4x8BitPackedMixedSignednessAccelerated ) && - ( integerDotProduct16BitUnsignedAccelerated == rhs.integerDotProduct16BitUnsignedAccelerated ) && - ( integerDotProduct16BitSignedAccelerated == rhs.integerDotProduct16BitSignedAccelerated ) && - ( integerDotProduct16BitMixedSignednessAccelerated == rhs.integerDotProduct16BitMixedSignednessAccelerated ) && - ( integerDotProduct32BitUnsignedAccelerated == rhs.integerDotProduct32BitUnsignedAccelerated ) && - ( integerDotProduct32BitSignedAccelerated == rhs.integerDotProduct32BitSignedAccelerated ) && - ( integerDotProduct32BitMixedSignednessAccelerated == rhs.integerDotProduct32BitMixedSignednessAccelerated ) && - ( integerDotProduct64BitUnsignedAccelerated == rhs.integerDotProduct64BitUnsignedAccelerated ) && - ( integerDotProduct64BitSignedAccelerated == rhs.integerDotProduct64BitSignedAccelerated ) && - ( integerDotProduct64BitMixedSignednessAccelerated == rhs.integerDotProduct64BitMixedSignednessAccelerated ) && - ( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitUnsignedAccelerated ) && - ( integerDotProductAccumulatingSaturating8BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating8BitSignedAccelerated ) && - ( 
integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated == - rhs.integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated ) && - ( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated == - rhs.integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated ) && - ( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated == - rhs.integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated ) && - ( integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated == - rhs.integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated ) && - ( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitUnsignedAccelerated ) && - ( integerDotProductAccumulatingSaturating16BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating16BitSignedAccelerated ) && - ( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated == - rhs.integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated ) && - ( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitUnsignedAccelerated ) && - ( integerDotProductAccumulatingSaturating32BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating32BitSignedAccelerated ) && - ( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated == - rhs.integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated ) && - ( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitUnsignedAccelerated ) && - ( integerDotProductAccumulatingSaturating64BitSignedAccelerated == rhs.integerDotProductAccumulatingSaturating64BitSignedAccelerated ) && - ( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated == - rhs.integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated ) && - ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes ) && - ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment ) && - ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes ) && - ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment ) && ( maxBufferSize == rhs.maxBufferSize ); + return ( presentID == rhs.presentID ) && ( desiredPresentTime == rhs.desiredPresentTime ); # endif } - bool operator!=( PhysicalDeviceVulkan13Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan13Properties; - void * pNext = {}; - uint32_t minSubgroupSize = {}; - uint32_t maxSubgroupSize = {}; - uint32_t maxComputeWorkgroupSubgroups = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {}; - uint32_t maxInlineUniformBlockSize = {}; - uint32_t maxPerStageDescriptorInlineUniformBlocks = {}; - uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {}; - uint32_t maxDescriptorSetInlineUniformBlocks = {}; - uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {}; - uint32_t maxInlineUniformTotalSize = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitUnsignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitSignedAccelerated = {}; - 
VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct8BitMixedSignednessAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedUnsignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedSignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitUnsignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitSignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct16BitMixedSignednessAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitUnsignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitSignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct32BitMixedSignednessAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitUnsignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitSignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProduct64BitMixedSignednessAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated = {}; - VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated = {}; - VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {}; - VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {}; - VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {}; - VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {}; - VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize = {}; - }; - - template <> - struct CppType - { - using Type = PhysicalDeviceVulkan13Properties; + uint32_t presentID = {}; + uint64_t desiredPresentTime = {}; }; - struct PhysicalDeviceVulkanMemoryModelFeatures + struct PresentTimesInfoGOOGLE { - using NativeType = VkPhysicalDeviceVulkanMemoryModelFeatures; + using NativeType = VkPresentTimesInfoGOOGLE; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentTimesInfoGOOGLE; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , vulkanMemoryModel( vulkanMemoryModel_ ) - , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ) - , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , swapchainCount{ swapchainCount_ } + , pTimes{ pTimes_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceVulkanMemoryModelFeatures( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceVulkanMemoryModelFeatures( *reinterpret_cast( &rhs ) ) + PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + : PresentTimesInfoGOOGLE( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceVulkanMemoryModelFeatures & operator=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentTimesInfoGOOGLE( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & times_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), swapchainCount( static_cast( times_.size() ) ), pTimes( times_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PresentTimesInfoGOOGLE & operator=( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceVulkanMemoryModelFeatures & operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + PresentTimesInfoGOOGLE & operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & - setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT { - vulkanMemoryModel = vulkanMemoryModel_; + swapchainCount = swapchainCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & - setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 
vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setPTimes( const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ ) VULKAN_HPP_NOEXCEPT { - vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_; + pTimes = pTimes_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & - setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PresentTimesInfoGOOGLE & + setTimes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & times_ ) VULKAN_HPP_NOEXCEPT { - vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_; + swapchainCount = static_cast( times_.size() ); + pTimes = times_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceVulkanMemoryModelFeatures const &() const VULKAN_HPP_NOEXCEPT + operator VkPresentTimesInfoGOOGLE const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceVulkanMemoryModelFeatures &() VULKAN_HPP_NOEXCEPT + operator VkPresentTimesInfoGOOGLE &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, vulkanMemoryModel, vulkanMemoryModelDeviceScope, vulkanMemoryModelAvailabilityVisibilityChains ); + return std::tie( sType, pNext, swapchainCount, pTimes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceVulkanMemoryModelFeatures const & ) const = default; + auto operator<=>( PresentTimesInfoGOOGLE const & ) const = default; #else - bool operator==( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) && - ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) && - ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pTimes == rhs.pTimes ); # endif } - bool operator!=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {}; - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {}; - VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentTimesInfoGOOGLE; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes = {}; }; template <> - struct CppType + struct CppType { - using Type = 
PhysicalDeviceVulkanMemoryModelFeatures; + using Type = PresentTimesInfoGOOGLE; }; - using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures; - struct PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR + struct PrivateDataSlotCreateInfo { - using NativeType = VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + using NativeType = VkPrivateDataSlotCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePrivateDataSlotCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , workgroupMemoryExplicitLayout( workgroupMemoryExplicitLayout_ ) - , workgroupMemoryExplicitLayoutScalarBlockLayout( workgroupMemoryExplicitLayoutScalarBlockLayout_ ) - , workgroupMemoryExplicitLayout8BitAccess( workgroupMemoryExplicitLayout8BitAccess_ ) - , workgroupMemoryExplicitLayout16BitAccess( workgroupMemoryExplicitLayout16BitAccess_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfo( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) - VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfo( PrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( *reinterpret_cast( &rhs ) ) + PrivateDataSlotCreateInfo( VkPrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PrivateDataSlotCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & - operator=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PrivateDataSlotCreateInfo & operator=( PrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & - operator=( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PrivateDataSlotCreateInfo & operator=( VkPrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + 
VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & - setWorkgroupMemoryExplicitLayout( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ ) VULKAN_HPP_NOEXCEPT - { - workgroupMemoryExplicitLayout = workgroupMemoryExplicitLayout_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & - setWorkgroupMemoryExplicitLayoutScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT - { - workgroupMemoryExplicitLayoutScalarBlockLayout = workgroupMemoryExplicitLayoutScalarBlockLayout_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & - setWorkgroupMemoryExplicitLayout8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ ) VULKAN_HPP_NOEXCEPT - { - workgroupMemoryExplicitLayout8BitAccess = workgroupMemoryExplicitLayout8BitAccess_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & - setWorkgroupMemoryExplicitLayout16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - workgroupMemoryExplicitLayout16BitAccess = workgroupMemoryExplicitLayout16BitAccess_; + flags = flags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPrivateDataSlotCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPrivateDataSlotCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - workgroupMemoryExplicitLayout, - workgroupMemoryExplicitLayoutScalarBlockLayout, - workgroupMemoryExplicitLayout8BitAccess, - workgroupMemoryExplicitLayout16BitAccess ); + return std::tie( sType, pNext, flags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & ) const = default; + auto operator<=>( PrivateDataSlotCreateInfo const & ) const = default; #else - bool operator==( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PrivateDataSlotCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( workgroupMemoryExplicitLayout == rhs.workgroupMemoryExplicitLayout ) && - ( workgroupMemoryExplicitLayoutScalarBlockLayout == rhs.workgroupMemoryExplicitLayoutScalarBlockLayout ) && - ( workgroupMemoryExplicitLayout8BitAccess == rhs.workgroupMemoryExplicitLayout8BitAccess ) && - ( workgroupMemoryExplicitLayout16BitAccess == rhs.workgroupMemoryExplicitLayout16BitAccess ); + return ( 
sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
# endif
     }
-    bool operator!=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( PrivateDataSlotCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
#endif
   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
-    void *                              pNext = {};
-    VULKAN_HPP_NAMESPACE::Bool32        workgroupMemoryExplicitLayout = {};
-    VULKAN_HPP_NAMESPACE::Bool32        workgroupMemoryExplicitLayoutScalarBlockLayout = {};
-    VULKAN_HPP_NAMESPACE::Bool32        workgroupMemoryExplicitLayout8BitAccess = {};
-    VULKAN_HPP_NAMESPACE::Bool32        workgroupMemoryExplicitLayout16BitAccess = {};
+    VULKAN_HPP_NAMESPACE::StructureType              sType = StructureType::ePrivateDataSlotCreateInfo;
+    const void *                                     pNext = {};
+    VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags = {};
   };
   template <>
-  struct CppType<StructureType, StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>
+  struct CppType<StructureType, StructureType::ePrivateDataSlotCreateInfo>
   {
-    using Type = PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
+    using Type = PrivateDataSlotCreateInfo;
   };
-  struct PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT
+  using PrivateDataSlotCreateInfoEXT = PrivateDataSlotCreateInfo;
+
+  struct ProtectedSubmitInfo
   {
-    using NativeType = VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
+    using NativeType = VkProtectedSubmitInfo;
     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eProtectedSubmitInfo;
-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ = {},
-                                                                         void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
-      : pNext( pNext_ )
-      , ycbcr2plane444Formats( ycbcr2plane444Formats_ )
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , protectedSubmit{ protectedSubmit_ }
     {
     }
-    VULKAN_HPP_CONSTEXPR
-    PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-    PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
-      : PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( *reinterpret_cast<PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *>( &rhs ) )
+    ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ProtectedSubmitInfo( *reinterpret_cast<ProtectedSubmitInfo const *>( &rhs ) )
     {
     }
-#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
-    PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & operator=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    ProtectedSubmitInfo & operator=( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
-    PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & operator=( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    ProtectedSubmitInfo & operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const *>( &rhs );
+      *this = *reinterpret_cast<ProtectedSubmitInfo const *>( &rhs );
       return
*this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT & - setYcbcr2plane444Formats( VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo & setProtectedSubmit( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ ) VULKAN_HPP_NOEXCEPT { - ycbcr2plane444Formats = ycbcr2plane444Formats_; + protectedSubmit = protectedSubmit_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkProtectedSubmitInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkProtectedSubmitInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, ycbcr2plane444Formats ); + return std::tie( sType, pNext, protectedSubmit ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & ) const = default; + auto operator<=>( ProtectedSubmitInfo const & ) const = default; #else - bool operator==( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcr2plane444Formats == rhs.ycbcr2plane444Formats ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedSubmit == rhs.protectedSubmit ); # endif } - bool operator!=( PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eProtectedSubmitInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + using Type = ProtectedSubmitInfo; }; - struct PhysicalDeviceYcbcrImageArraysFeaturesEXT + struct PushConstantsInfo { - using NativeType = VkPhysicalDeviceYcbcrImageArraysFeaturesEXT; + using NativeType = VkPushConstantsInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePushConstantsInfo; -#if 
!defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , ycbcrImageArrays( ycbcrImageArrays_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PushConstantsInfo( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + uint32_t offset_ = {}, + uint32_t size_ = {}, + const void * pValues_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , layout{ layout_ } + , stageFlags{ stageFlags_ } + , offset{ offset_ } + , size{ size_ } + , pValues{ pValues_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PushConstantsInfo( PushConstantsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceYcbcrImageArraysFeaturesEXT( *reinterpret_cast( &rhs ) ) + PushConstantsInfo( VkPushConstantsInfo const & rhs ) VULKAN_HPP_NOEXCEPT : PushConstantsInfo( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PushConstantsInfo( VULKAN_HPP_NAMESPACE::PipelineLayout layout_, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, + uint32_t offset_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , layout( layout_ ) + , stageFlags( stageFlags_ ) + , offset( offset_ ) + , size( static_cast( values_.size() * sizeof( T ) ) ) + , pValues( values_.data() ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PushConstantsInfo & operator=( PushConstantsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PushConstantsInfo & operator=( VkPushConstantsInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT & - setYcbcrImageArrays( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT { - ycbcrImageArrays = ycbcrImageArrays_; + layout = layout_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT 
{ - return *reinterpret_cast( this ); + stageFlags = stageFlags_; + return *this; } - operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setPValues( const void * pValues_ ) VULKAN_HPP_NOEXCEPT + { + pValues = pValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PushConstantsInfo & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT + { + size = static_cast( values_.size() * sizeof( T ) ); + pValues = values_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkPushConstantsInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPushConstantsInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, ycbcrImageArrays ); + return std::tie( sType, pNext, layout, stageFlags, offset, size, pValues ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & ) const = default; + auto operator<=>( PushConstantsInfo const & ) const = default; #else - bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PushConstantsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ycbcrImageArrays == rhs.ycbcrImageArrays ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( layout == rhs.layout ) && ( stageFlags == rhs.stageFlags ) && ( offset == rhs.offset ) && + ( size == rhs.size ) && ( pValues == rhs.pValues ); # endif } - bool operator!=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PushConstantsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePushConstantsInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + uint32_t offset = {}; + uint32_t size = {}; + const void * pValues = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceYcbcrImageArraysFeaturesEXT; + using Type = PushConstantsInfo; }; - struct PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures + using PushConstantsInfoKHR = PushConstantsInfo; + + struct WriteDescriptorSet { - using NativeType = VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + using NativeType = VkWriteDescriptorSet; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSet; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderZeroInitializeWorkgroupMemory( shaderZeroInitializeWorkgroupMemory_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, + uint32_t dstBinding_ = {}, + uint32_t dstArrayElement_ = {}, + uint32_t descriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ = {}, + const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dstSet{ dstSet_ } + , dstBinding{ dstBinding_ } + , dstArrayElement{ dstArrayElement_ } + , descriptorCount{ descriptorCount_ } + , descriptorType{ descriptorType_ } + , pImageInfo{ pImageInfo_ } + , pBufferInfo{ pBufferInfo_ } + , pTexelBufferView{ pTexelBufferView_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR WriteDescriptorSet( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( *reinterpret_cast( &rhs ) ) + WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT : WriteDescriptorSet( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_, + uint32_t dstBinding_, + uint32_t dstArrayElement_, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferInfo_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & texelBufferView_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , dstSet( dstSet_ ) + , dstBinding( dstBinding_ ) + , dstArrayElement( dstArrayElement_ ) + , descriptorCount( static_cast( !imageInfo_.empty() ? imageInfo_.size() + : !bufferInfo_.empty() ? 
bufferInfo_.size() + : texelBufferView_.size() ) ) + , descriptorType( descriptorType_ ) + , pImageInfo( imageInfo_.data() ) + , pBufferInfo( bufferInfo_.data() ) + , pTexelBufferView( texelBufferView_.data() ) { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) <= 1 ); +# else + if ( 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::WriteDescriptorSet::WriteDescriptorSet: 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & - operator=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; + WriteDescriptorSet & operator=( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & operator=( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + WriteDescriptorSet & operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures & - setShaderZeroInitializeWorkgroupMemory( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT { - shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_; + dstSet = dstSet_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + dstBinding = dstBinding_; + return *this; } - operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + dstArrayElement = dstArrayElement_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = descriptorCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + { + descriptorType = descriptorType_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ ) VULKAN_HPP_NOEXCEPT + { + pImageInfo = pImageInfo_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet & + setImageInfo( 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageInfo_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( imageInfo_.size() ); + pImageInfo = imageInfo_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ ) VULKAN_HPP_NOEXCEPT + { + pBufferInfo = pBufferInfo_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet & + setBufferInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferInfo_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( bufferInfo_.size() ); + pBufferInfo = bufferInfo_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT + { + pTexelBufferView = pTexelBufferView_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSet & + setTexelBufferView( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & texelBufferView_ ) VULKAN_HPP_NOEXCEPT + { + descriptorCount = static_cast( texelBufferView_.size() ); + pTexelBufferView = texelBufferView_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkWriteDescriptorSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, shaderZeroInitializeWorkgroupMemory ); + return std::tie( sType, pNext, dstSet, dstBinding, dstArrayElement, descriptorCount, descriptorType, pImageInfo, pBufferInfo, pTexelBufferView ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & ) const = default; + auto operator<=>( WriteDescriptorSet const & ) const = default; #else - bool operator==( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dstSet == rhs.dstSet ) && ( dstBinding == rhs.dstBinding ) && + ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount ) && ( descriptorType == rhs.descriptorType ) && + ( pImageInfo == rhs.pImageInfo ) && ( pBufferInfo == rhs.pBufferInfo ) && ( pTexelBufferView == rhs.pTexelBufferView ); # endif } - bool operator!=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eWriteDescriptorSet; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {}; + uint32_t dstBinding = {}; + uint32_t dstArrayElement = {}; + uint32_t descriptorCount = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo = {}; + const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo = {}; + const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + using Type = WriteDescriptorSet; }; - using PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; - struct PipelineCacheCreateInfo + struct PushDescriptorSetInfo { - using NativeType = VkPipelineCacheCreateInfo; + using NativeType = VkPushDescriptorSetInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCacheCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePushDescriptorSetInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {}, - size_t initialDataSize_ = {}, - const void * pInitialData_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , initialDataSize( initialDataSize_ ) - , pInitialData( pInitialData_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PushDescriptorSetInfo( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + uint32_t set_ = {}, + uint32_t descriptorWriteCount_ = {}, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stageFlags{ stageFlags_ } + , layout{ layout_ } + , set{ set_ } + , descriptorWriteCount{ descriptorWriteCount_ } + , pDescriptorWrites{ pDescriptorWrites_ } { } - VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PushDescriptorSetInfo( PushDescriptorSetInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineCacheCreateInfo( *reinterpret_cast( &rhs ) ) + PushDescriptorSetInfo( VkPushDescriptorSetInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PushDescriptorSetInfo( *reinterpret_cast( &rhs ) ) { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - template - PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), flags( flags_ ), initialDataSize( initialData_.size() * sizeof( T ) ), pInitialData( initialData_.data() ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PushDescriptorSetInfo( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_, + uint32_t set_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorWrites_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , stageFlags( stageFlags_ ) + , layout( layout_ ) + , set( set_ ) + , descriptorWriteCount( static_cast( 
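// Usage sketch for vk::WriteDescriptorSet defined above: updating a single uniform-buffer
// binding. "device", "descriptorSet" and "buffer" are assumed handles from the calling code.
// The enhanced-mode array constructor shown in the struct derives descriptorCount from the
// proxy it is given; here the count is spelled out explicitly for clarity.
inline void writeUniformBuffer( vk::Device device, vk::DescriptorSet descriptorSet, vk::Buffer buffer )
{
  vk::DescriptorBufferInfo bufferInfo{ buffer, 0, VK_WHOLE_SIZE };

  vk::WriteDescriptorSet write{ descriptorSet,
                                0,           // dstBinding
                                0,           // dstArrayElement
                                1,           // descriptorCount
                                vk::DescriptorType::eUniformBuffer,
                                nullptr,     // pImageInfo
                                &bufferInfo,
                                nullptr };   // pTexelBufferView
  device.updateDescriptorSets( write, nullptr );  // single write, no copies
}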
descriptorWrites_.size() ) ) + , pDescriptorWrites( descriptorWrites_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineCacheCreateInfo & operator=( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PushDescriptorSetInfo & operator=( PushDescriptorSetInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineCacheCreateInfo & operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + PushDescriptorSetInfo & operator=( VkPushDescriptorSetInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + stageFlags = stageFlags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT { - initialDataSize = initialDataSize_; + layout = layout_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT { - pInitialData = pInitialData_; + set = set_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setDescriptorWriteCount( uint32_t descriptorWriteCount_ ) VULKAN_HPP_NOEXCEPT + { + descriptorWriteCount = descriptorWriteCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & + setPDescriptorWrites( const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites_ ) VULKAN_HPP_NOEXCEPT + { + pDescriptorWrites = pDescriptorWrites_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - template - PipelineCacheCreateInfo & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_ ) VULKAN_HPP_NOEXCEPT + PushDescriptorSetInfo & setDescriptorWrites( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorWrites_ ) VULKAN_HPP_NOEXCEPT { - initialDataSize = initialData_.size() * sizeof( T ); - pInitialData = initialData_.data(); + descriptorWriteCount = static_cast( descriptorWrites_.size() ); + pDescriptorWrites = descriptorWrites_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineCacheCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkPushDescriptorSetInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkPushDescriptorSetInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if 
defined( VULKAN_HPP_USE_REFLECT ) @@ -70522,1094 +111460,974 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::ShaderStageFlags const &, + VULKAN_HPP_NAMESPACE::PipelineLayout const &, + uint32_t const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, initialDataSize, pInitialData ); + return std::tie( sType, pNext, stageFlags, layout, set, descriptorWriteCount, pDescriptorWrites ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineCacheCreateInfo const & ) const = default; + auto operator<=>( PushDescriptorSetInfo const & ) const = default; #else - bool operator==( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PushDescriptorSetInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( initialDataSize == rhs.initialDataSize ) && - ( pInitialData == rhs.pInitialData ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageFlags == rhs.stageFlags ) && ( layout == rhs.layout ) && ( set == rhs.set ) && + ( descriptorWriteCount == rhs.descriptorWriteCount ) && ( pDescriptorWrites == rhs.pDescriptorWrites ); # endif } - bool operator!=( PipelineCacheCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PushDescriptorSetInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags = {}; - size_t initialDataSize = {}; - const void * pInitialData = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePushDescriptorSetInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + uint32_t set = {}; + uint32_t descriptorWriteCount = {}; + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineCacheCreateInfo; + using Type = PushDescriptorSetInfo; }; - struct PipelineCacheHeaderVersionOne + using PushDescriptorSetInfoKHR = PushDescriptorSetInfo; + + struct PushDescriptorSetWithTemplateInfo { - using NativeType = VkPipelineCacheHeaderVersionOne; + using NativeType = VkPushDescriptorSetWithTemplateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 - PipelineCacheHeaderVersionOne( uint32_t headerSize_ = {}, - VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion_ = VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne, - uint32_t vendorID_ = {}, - uint32_t deviceID_ = {}, - std::array const & pipelineCacheUUID_ = {} ) VULKAN_HPP_NOEXCEPT - : headerSize( headerSize_ ) - , headerVersion( headerVersion_ ) - , vendorID( vendorID_ ) - , deviceID( deviceID_ ) - , pipelineCacheUUID( pipelineCacheUUID_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePushDescriptorSetWithTemplateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PushDescriptorSetWithTemplateInfo( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate_ = 
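// Usage sketch for vk::PushDescriptorSetInfo defined above: pushing one descriptor write
// without allocating a descriptor set, via vkCmdPushDescriptorSet2(KHR) from
// VK_KHR_maintenance6. "cmd", "layout" and "buffer" are assumed handles, and the extension
// plus push-descriptor support are assumed to be enabled. dstSet in the write is left null
// because push descriptors target the set index of the bound pipeline layout instead.
inline void pushOneDescriptor( vk::CommandBuffer cmd, vk::PipelineLayout layout, vk::Buffer buffer )
{
  vk::DescriptorBufferInfo bufferInfo{ buffer, 0, VK_WHOLE_SIZE };
  vk::WriteDescriptorSet   write{ {}, 0, 0, 1, vk::DescriptorType::eUniformBuffer, nullptr, &bufferInfo };

  // Enhanced-mode constructor: descriptorWriteCount is derived from the array proxy.
  vk::PushDescriptorSetInfo pushInfo{ vk::ShaderStageFlagBits::eCompute, layout, 0 /*set*/, write };
  cmd.pushDescriptorSet2KHR( pushInfo );
}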
{}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + uint32_t set_ = {}, + const void * pData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , descriptorUpdateTemplate{ descriptorUpdateTemplate_ } + , layout{ layout_ } + , set{ set_ } + , pData{ pData_ } { } - VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PushDescriptorSetWithTemplateInfo( PushDescriptorSetWithTemplateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineCacheHeaderVersionOne( VkPipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineCacheHeaderVersionOne( *reinterpret_cast( &rhs ) ) + PushDescriptorSetWithTemplateInfo( VkPushDescriptorSetWithTemplateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PushDescriptorSetWithTemplateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineCacheHeaderVersionOne & operator=( PipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PushDescriptorSetWithTemplateInfo & operator=( PushDescriptorSetWithTemplateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineCacheHeaderVersionOne & operator=( VkPipelineCacheHeaderVersionOne const & rhs ) VULKAN_HPP_NOEXCEPT + PushDescriptorSetWithTemplateInfo & operator=( VkPushDescriptorSetWithTemplateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setHeaderSize( uint32_t headerSize_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - headerSize = headerSize_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & - setHeaderVersion( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & + setDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate_ ) VULKAN_HPP_NOEXCEPT { - headerVersion = headerVersion_; + descriptorUpdateTemplate = descriptorUpdateTemplate_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setVendorID( uint32_t vendorID_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT { - vendorID = vendorID_; + layout = layout_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setDeviceID( uint32_t deviceID_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT { - deviceID = deviceID_; + set = set_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineCacheHeaderVersionOne & setPipelineCacheUUID( std::array pipelineCacheUUID_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT { - pipelineCacheUUID = pipelineCacheUUID_; + pData = pData_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineCacheHeaderVersionOne const &() const VULKAN_HPP_NOEXCEPT + operator 
VkPushDescriptorSetWithTemplateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineCacheHeaderVersionOne &() VULKAN_HPP_NOEXCEPT + operator VkPushDescriptorSetWithTemplateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple const &> + const void * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( headerSize, headerVersion, vendorID, deviceID, pipelineCacheUUID ); + return std::tie( sType, pNext, descriptorUpdateTemplate, layout, set, pData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineCacheHeaderVersionOne const & ) const = default; + auto operator<=>( PushDescriptorSetWithTemplateInfo const & ) const = default; #else - bool operator==( PipelineCacheHeaderVersionOne const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PushDescriptorSetWithTemplateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( headerSize == rhs.headerSize ) && ( headerVersion == rhs.headerVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && - ( pipelineCacheUUID == rhs.pipelineCacheUUID ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorUpdateTemplate == rhs.descriptorUpdateTemplate ) && ( layout == rhs.layout ) && + ( set == rhs.set ) && ( pData == rhs.pData ); # endif } - bool operator!=( PipelineCacheHeaderVersionOne const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PushDescriptorSetWithTemplateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t headerSize = {}; - VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion headerVersion = VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion::eOne; - uint32_t vendorID = {}; - uint32_t deviceID = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D pipelineCacheUUID = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePushDescriptorSetWithTemplateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + uint32_t set = {}; + const void * pData = {}; }; - struct PipelineColorBlendAdvancedStateCreateInfoEXT + template <> + struct CppType { - using NativeType = VkPipelineColorBlendAdvancedStateCreateInfoEXT; + using Type = PushDescriptorSetWithTemplateInfo; + }; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; + using PushDescriptorSetWithTemplateInfoKHR = PushDescriptorSetWithTemplateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - PipelineColorBlendAdvancedStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, - VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcPremultiplied( srcPremultiplied_ ) - , dstPremultiplied( dstPremultiplied_ ) - , blendOverlap( blendOverlap_ ) - { - } + struct QueryLowLatencySupportNV + { + using NativeType = VkQueryLowLatencySupportNV; - VULKAN_HPP_CONSTEXPR 
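// Usage sketch for vk::PushDescriptorSetWithTemplateInfo defined above: the descriptor update
// template describes how the opaque pData blob is laid out, so the structure only carries a raw
// pointer. "cmd", "updateTemplate", "layout" and "bufferInfo" are assumed to exist and to match
// the template's entries; matching them correctly is entirely the caller's responsibility.
inline void pushWithTemplate( vk::CommandBuffer                cmd,
                              vk::DescriptorUpdateTemplate     updateTemplate,
                              vk::PipelineLayout               layout,
                              vk::DescriptorBufferInfo const & bufferInfo )
{
  vk::PushDescriptorSetWithTemplateInfo info{ updateTemplate, layout, 0 /*set*/, &bufferInfo };
  cmd.pushDescriptorSetWithTemplate2KHR( info );  // VK_KHR_maintenance6 path, assumed enabled
}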
PipelineColorBlendAdvancedStateCreateInfoEXT( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryLowLatencySupportNV; - PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineColorBlendAdvancedStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryLowLatencySupportNV( void * pQueriedLowLatencyData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pQueriedLowLatencyData{ pQueriedLowLatencyData_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR QueryLowLatencySupportNV( QueryLowLatencySupportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + QueryLowLatencySupportNV( VkQueryLowLatencySupportNV const & rhs ) VULKAN_HPP_NOEXCEPT + : QueryLowLatencySupportNV( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + QueryLowLatencySupportNV & operator=( QueryLowLatencySupportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & - setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT + QueryLowLatencySupportNV & operator=( VkQueryLowLatencySupportNV const & rhs ) VULKAN_HPP_NOEXCEPT { - srcPremultiplied = srcPremultiplied_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & - setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 QueryLowLatencySupportNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - dstPremultiplied = dstPremultiplied_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & - setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 QueryLowLatencySupportNV & setPQueriedLowLatencyData( void * pQueriedLowLatencyData_ ) VULKAN_HPP_NOEXCEPT { - blendOverlap = blendOverlap_; + pQueriedLowLatencyData = pQueriedLowLatencyData_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkQueryLowLatencySupportNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkQueryLowLatencySupportNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this 
); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, srcPremultiplied, dstPremultiplied, blendOverlap ); + return std::tie( sType, pNext, pQueriedLowLatencyData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineColorBlendAdvancedStateCreateInfoEXT const & ) const = default; + auto operator<=>( QueryLowLatencySupportNV const & ) const = default; #else - bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( QueryLowLatencySupportNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcPremultiplied == rhs.srcPremultiplied ) && ( dstPremultiplied == rhs.dstPremultiplied ) && - ( blendOverlap == rhs.blendOverlap ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pQueriedLowLatencyData == rhs.pQueriedLowLatencyData ); # endif } - bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( QueryLowLatencySupportNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {}; - VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {}; - VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryLowLatencySupportNV; + const void * pNext = {}; + void * pQueriedLowLatencyData = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineColorBlendAdvancedStateCreateInfoEXT; + using Type = QueryLowLatencySupportNV; }; - struct PipelineColorWriteCreateInfoEXT + struct QueryPoolCreateInfo { - using NativeType = VkPipelineColorWriteCreateInfoEXT; + using NativeType = VkQueryPoolCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorWriteCreateInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT( uint32_t attachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , attachmentCount( attachmentCount_ ) - , pColorWriteEnables( pColorWriteEnables_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion, + uint32_t queryCount_ = {}, + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , queryType{ queryType_ } + , queryCount{ queryCount_ } + , pipelineStatistics{ pipelineStatistics_ } { } - VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = 
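// Usage sketch for vk::QueryLowLatencySupportNV defined above (VK_NV_low_latency):
// pQueriedLowLatencyData points at caller-provided storage for the driver to fill.
// Chaining it into vk::SemaphoreCreateInfo and using a single uint32_t of storage are
// assumptions of this illustration; consult the extension for the exact chaining rules.
inline void queryLowLatencySupport( vk::Device device, uint32_t & storage )
{
  vk::QueryLowLatencySupportNV lowLatencyQuery{ &storage };

  vk::SemaphoreCreateInfo semaphoreCI{};
  semaphoreCI.pNext = &lowLatencyQuery;  // query is answered as a side effect of creation

  vk::Semaphore semaphore = device.createSemaphore( semaphoreCI );
  device.destroySemaphore( semaphore );
}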
default; + VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineColorWriteCreateInfoEXT( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineColorWriteCreateInfoEXT( *reinterpret_cast( &rhs ) ) + QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : QueryPoolCreateInfo( *reinterpret_cast( &rhs ) ) { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineColorWriteCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorWriteEnables_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), attachmentCount( static_cast( colorWriteEnables_.size() ) ), pColorWriteEnables( colorWriteEnables_.data() ) + QueryPoolCreateInfo & operator=( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + QueryPoolCreateInfo & operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PipelineColorWriteCreateInfoEXT & operator=( PipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineColorWriteCreateInfoEXT & operator=( VkPipelineColorWriteCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + pNext = pNext_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT { - attachmentCount = attachmentCount_; + queryType = queryType_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineColorWriteCreateInfoEXT & - setPColorWriteEnables( const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT { - pColorWriteEnables = pColorWriteEnables_; + queryCount = queryCount_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineColorWriteCreateInfoEXT & - setColorWriteEnables( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorWriteEnables_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & + setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT { - attachmentCount = static_cast( colorWriteEnables_.size() ); - pColorWriteEnables = colorWriteEnables_.data(); + pipelineStatistics = pipelineStatistics_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineColorWriteCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkQueryPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - 
operator VkPipelineColorWriteCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, attachmentCount, pColorWriteEnables ); + return std::tie( sType, pNext, flags, queryType, queryCount, pipelineStatistics ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineColorWriteCreateInfoEXT const & ) const = default; + auto operator<=>( QueryPoolCreateInfo const & ) const = default; #else - bool operator==( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) && ( pColorWriteEnables == rhs.pColorWriteEnables ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queryType == rhs.queryType ) && ( queryCount == rhs.queryCount ) && + ( pipelineStatistics == rhs.pipelineStatistics ); # endif } - bool operator!=( PipelineColorWriteCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorWriteCreateInfoEXT; - const void * pNext = {}; - uint32_t attachmentCount = {}; - const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::QueryType queryType = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion; + uint32_t queryCount = {}; + VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineColorWriteCreateInfoEXT; + using Type = QueryPoolCreateInfo; }; - struct PipelineCompilerControlCreateInfoAMD + struct QueryPoolPerformanceCreateInfoKHR { - using NativeType = VkPipelineCompilerControlCreateInfoAMD; + using NativeType = VkQueryPoolPerformanceCreateInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCompilerControlCreateInfoAMD; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceCreateInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , compilerControlFlags( compilerControlFlags_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_ = {}, + uint32_t counterIndexCount_ = {}, + const uint32_t * pCounterIndices_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , queueFamilyIndex{ queueFamilyIndex_ } + , counterIndexCount{ counterIndexCount_ } + , pCounterIndices{ 
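// Usage sketch for vk::QueryPoolCreateInfo defined above: a two-slot timestamp pool for GPU
// timing. "device" is an assumed vk::Device; pipelineStatistics is left at its default because
// it only applies to pools of type ePipelineStatistics.
inline vk::QueryPool createTimestampPool( vk::Device device )
{
  vk::QueryPoolCreateInfo queryPoolCI{ {},                         // flags
                                       vk::QueryType::eTimestamp,
                                       2 };                        // queryCount
  return device.createQueryPool( queryPoolCI );  // throws on failure in the default exception mode
}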
pCounterIndices_ } { } - VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineCompilerControlCreateInfoAMD( *reinterpret_cast( &rhs ) ) + QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : QueryPoolPerformanceCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineCompilerControlCreateInfoAMD & operator=( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & counterIndices_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , queueFamilyIndex( queueFamilyIndex_ ) + , counterIndexCount( static_cast( counterIndices_.size() ) ) + , pCounterIndices( counterIndices_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PipelineCompilerControlCreateInfoAMD & operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceCreateInfoKHR & operator=( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + QueryPoolPerformanceCreateInfoKHR & operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD & - setCompilerControlFlags( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT { - compilerControlFlags = compilerControlFlags_; + queueFamilyIndex = queueFamilyIndex_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPipelineCompilerControlCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + counterIndexCount = counterIndexCount_; + return *this; } - operator VkPipelineCompilerControlCreateInfoAMD &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t * pCounterIndices_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pCounterIndices = pCounterIndices_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + QueryPoolPerformanceCreateInfoKHR & + setCounterIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & counterIndices_ ) VULKAN_HPP_NOEXCEPT + { + counterIndexCount = static_cast( counterIndices_.size() ); 
+ pCounterIndices = counterIndices_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkQueryPoolPerformanceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, compilerControlFlags ); + return std::tie( sType, pNext, queueFamilyIndex, counterIndexCount, pCounterIndices ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineCompilerControlCreateInfoAMD const & ) const = default; + auto operator<=>( QueryPoolPerformanceCreateInfoKHR const & ) const = default; #else - bool operator==( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compilerControlFlags == rhs.compilerControlFlags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && + ( counterIndexCount == rhs.counterIndexCount ) && ( pCounterIndices == rhs.pCounterIndices ); # endif } - bool operator!=( PipelineCompilerControlCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCompilerControlCreateInfoAMD; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR; + const void * pNext = {}; + uint32_t queueFamilyIndex = {}; + uint32_t counterIndexCount = {}; + const uint32_t * pCounterIndices = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineCompilerControlCreateInfoAMD; + using Type = QueryPoolPerformanceCreateInfoKHR; }; - struct PipelineCoverageModulationStateCreateInfoNV + struct QueryPoolPerformanceQueryCreateInfoINTEL { - using NativeType = VkPipelineCoverageModulationStateCreateInfoNV; + using NativeType = VkQueryPoolPerformanceQueryCreateInfoINTEL; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageModulationStateCreateInfoNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( - VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {}, - VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone, - VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {}, - uint32_t coverageModulationTableCount_ = {}, - const float * pCoverageModulationTable_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , coverageModulationMode( coverageModulationMode_ ) - , 
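// Usage sketch for vk::QueryPoolPerformanceCreateInfoKHR defined above
// (VK_KHR_performance_query): it is chained into QueryPoolCreateInfo when the query type is
// ePerformanceQueryKHR. The counter indices below are placeholders; real indices come from
// enumerating the queue family's performance counters, which is omitted here.
inline vk::QueryPool createPerformancePool( vk::Device device, uint32_t queueFamilyIndex )
{
  uint32_t counterIndices[] = { 0, 1 };  // placeholder counter indices (assumption)

  // Enhanced-mode constructor derives counterIndexCount from the array proxy.
  vk::QueryPoolPerformanceCreateInfoKHR perfCI{ queueFamilyIndex, counterIndices };

  vk::QueryPoolCreateInfo queryPoolCI{ {}, vk::QueryType::ePerformanceQueryKHR, 1 };
  queryPoolCI.pNext = &perfCI;
  return device.createQueryPool( queryPoolCI );
}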
coverageModulationTableEnable( coverageModulationTableEnable_ ) - , coverageModulationTableCount( coverageModulationTableCount_ ) - , pCoverageModulationTable( pCoverageModulationTable_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , performanceCountersSampling{ performanceCountersSampling_ } { } - VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineCoverageModulationStateCreateInfoNV( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineCoverageModulationStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_, - VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_, - VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & coverageModulationTable_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , coverageModulationMode( coverageModulationMode_ ) - , coverageModulationTableEnable( coverageModulationTableEnable_ ) - , coverageModulationTableCount( static_cast( coverageModulationTable_.size() ) ) - , pCoverageModulationTable( coverageModulationTable_.data() ) + QueryPoolPerformanceQueryCreateInfoINTEL( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + : QueryPoolPerformanceQueryCreateInfoINTEL( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineCoverageModulationStateCreateInfoNV & operator=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + QueryPoolPerformanceQueryCreateInfoINTEL & operator=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineCoverageModulationStateCreateInfoNV & operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + QueryPoolPerformanceQueryCreateInfoINTEL & operator=( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & - setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 
PipelineCoverageModulationStateCreateInfoNV & - setCoverageModulationMode( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ ) VULKAN_HPP_NOEXCEPT - { - coverageModulationMode = coverageModulationMode_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & - setCoverageModulationTableEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT - { - coverageModulationTableEnable = coverageModulationTableEnable_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & - setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) VULKAN_HPP_NOEXCEPT - { - coverageModulationTableCount = coverageModulationTableCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & - setPCoverageModulationTable( const float * pCoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT - { - pCoverageModulationTable = pCoverageModulationTable_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineCoverageModulationStateCreateInfoNV & - setCoverageModulationTable( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & coverageModulationTable_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL & + setPerformanceCountersSampling( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT { - coverageModulationTableCount = static_cast( coverageModulationTable_.size() ); - pCoverageModulationTable = coverageModulationTable_.data(); + performanceCountersSampling = performanceCountersSampling_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineCoverageModulationStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + operator VkQueryPoolPerformanceQueryCreateInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineCoverageModulationStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + operator VkQueryPoolPerformanceQueryCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, coverageModulationMode, coverageModulationTableEnable, coverageModulationTableCount, pCoverageModulationTable ); + return std::tie( sType, pNext, performanceCountersSampling ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineCoverageModulationStateCreateInfoNV const & ) const = default; + auto operator<=>( QueryPoolPerformanceQueryCreateInfoINTEL const & ) const = default; #else - bool operator==( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( coverageModulationMode == rhs.coverageModulationMode ) && - ( coverageModulationTableEnable == rhs.coverageModulationTableEnable ) && ( coverageModulationTableCount == rhs.coverageModulationTableCount ) && - ( pCoverageModulationTable == 
rhs.pCoverageModulationTable ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( performanceCountersSampling == rhs.performanceCountersSampling ); # endif } - bool operator!=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags = {}; - VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone; - VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable = {}; - uint32_t coverageModulationTableCount = {}; - const float * pCoverageModulationTable = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual; }; template <> - struct CppType + struct CppType { - using Type = PipelineCoverageModulationStateCreateInfoNV; + using Type = QueryPoolPerformanceQueryCreateInfoINTEL; }; - struct PipelineCoverageReductionStateCreateInfoNV + using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL; + + struct QueryPoolVideoEncodeFeedbackCreateInfoKHR { - using NativeType = VkPipelineCoverageReductionStateCreateInfoNV; + using NativeType = VkQueryPoolVideoEncodeFeedbackCreateInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageReductionStateCreateInfoNV; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( - VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {}, - VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , coverageReductionMode( coverageReductionMode_ ) - { - } - - VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolVideoEncodeFeedbackCreateInfoKHR; - PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineCoverageReductionStateCreateInfoNV( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueryPoolVideoEncodeFeedbackCreateInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR encodeFeedbackFlags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , encodeFeedbackFlags{ encodeFeedbackFlags_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineCoverageReductionStateCreateInfoNV & operator=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR QueryPoolVideoEncodeFeedbackCreateInfoKHR( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - 
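// Usage sketch for vk::QueryPoolPerformanceQueryCreateInfoINTEL defined above
// (VK_INTEL_performance_query, with the legacy QueryPoolCreateInfoINTEL alias): it is chained
// into QueryPoolCreateInfo; using ePerformanceQueryINTEL as the query type and manual sampling
// are the assumptions of this sketch. "device" is an assumed handle.
inline vk::QueryPool createIntelPerformancePool( vk::Device device )
{
  vk::QueryPoolPerformanceQueryCreateInfoINTEL intelCI{ vk::QueryPoolSamplingModeINTEL::eManual };

  vk::QueryPoolCreateInfo queryPoolCI{ {}, vk::QueryType::ePerformanceQueryINTEL, 1, {}, &intelCI };
  return device.createQueryPool( queryPoolCI );
}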
PipelineCoverageReductionStateCreateInfoNV & operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + QueryPoolVideoEncodeFeedbackCreateInfoKHR( VkQueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : QueryPoolVideoEncodeFeedbackCreateInfoKHR( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + QueryPoolVideoEncodeFeedbackCreateInfoKHR & operator=( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + QueryPoolVideoEncodeFeedbackCreateInfoKHR & operator=( VkQueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & - setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 QueryPoolVideoEncodeFeedbackCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & - setCoverageReductionMode( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 QueryPoolVideoEncodeFeedbackCreateInfoKHR & + setEncodeFeedbackFlags( VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR encodeFeedbackFlags_ ) VULKAN_HPP_NOEXCEPT { - coverageReductionMode = coverageReductionMode_; + encodeFeedbackFlags = encodeFeedbackFlags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineCoverageReductionStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + operator VkQueryPoolVideoEncodeFeedbackCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineCoverageReductionStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + operator VkQueryPoolVideoEncodeFeedbackCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, coverageReductionMode ); + return std::tie( sType, pNext, encodeFeedbackFlags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineCoverageReductionStateCreateInfoNV const & ) const = default; + auto operator<=>( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & ) const = default; #else - bool operator==( PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( coverageReductionMode == rhs.coverageReductionMode ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( encodeFeedbackFlags == rhs.encodeFeedbackFlags ); # endif } - bool operator!=( 
PipelineCoverageReductionStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( QueryPoolVideoEncodeFeedbackCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags = {}; - VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolVideoEncodeFeedbackCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR encodeFeedbackFlags = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineCoverageReductionStateCreateInfoNV; + using Type = QueryPoolVideoEncodeFeedbackCreateInfoKHR; }; - struct PipelineCoverageToColorStateCreateInfoNV + struct QueueFamilyCheckpointProperties2NV { - using NativeType = VkPipelineCoverageToColorStateCreateInfoNV; + using NativeType = VkQueueFamilyCheckpointProperties2NV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageToColorStateCreateInfoNV; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {}, - uint32_t coverageToColorLocation_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , coverageToColorEnable( coverageToColorEnable_ ) - , coverageToColorLocation( coverageToColorLocation_ ) - { - } - - VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineCoverageToColorStateCreateInfoNV( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PipelineCoverageToColorStateCreateInfoNV & operator=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointProperties2NV; - PipelineCoverageToColorStateCreateInfoNV & operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , checkpointExecutionStageMask{ checkpointExecutionStageMask_ } { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & - setFlags( 
VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + QueueFamilyCheckpointProperties2NV( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyCheckpointProperties2NV( *reinterpret_cast( &rhs ) ) { - flags = flags_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & - setCoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT - { - coverageToColorEnable = coverageToColorEnable_; - return *this; - } + QueueFamilyCheckpointProperties2NV & operator=( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorLocation( uint32_t coverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT + QueueFamilyCheckpointProperties2NV & operator=( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT { - coverageToColorLocation = coverageToColorLocation_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPipelineCoverageToColorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + operator VkQueueFamilyCheckpointProperties2NV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineCoverageToColorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + operator VkQueueFamilyCheckpointProperties2NV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, coverageToColorEnable, coverageToColorLocation ); + return std::tie( sType, pNext, checkpointExecutionStageMask ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineCoverageToColorStateCreateInfoNV const & ) const = default; + auto operator<=>( QueueFamilyCheckpointProperties2NV const & ) const = default; #else - bool operator==( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( coverageToColorEnable == rhs.coverageToColorEnable ) && - ( coverageToColorLocation == rhs.coverageToColorLocation ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask ); # endif } - bool operator!=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags = {}; - VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable = {}; - uint32_t coverageToColorLocation = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointProperties2NV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask = {}; }; 
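// Usage sketch (illustrative only, not part of the generated header above): the
// QueueFamilyCheckpointProperties2NV structure added here is typically filled by
// chaining one instance per queue family into VkQueueFamilyProperties2 before calling
// vkGetPhysicalDeviceQueueFamilyProperties2. A minimal sketch against the C API,
// assuming a valid `physicalDevice` and support for VK_NV_device_diagnostic_checkpoints;
// the function name is a placeholder chosen for this example.
#include <vector>
#include <vulkan/vulkan.h>

inline std::vector<VkQueueFamilyProperties2>
  queryCheckpointQueueProperties( VkPhysicalDevice physicalDevice, std::vector<VkQueueFamilyCheckpointProperties2NV> & checkpointProps )
{
  uint32_t count = 0;
  vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, &count, nullptr );

  VkQueueFamilyCheckpointProperties2NV checkpointTemplate{};
  checkpointTemplate.sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV;
  checkpointProps.assign( count, checkpointTemplate );  // caller keeps this vector alive; pNext points into it

  VkQueueFamilyProperties2 propertiesTemplate{};
  propertiesTemplate.sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2;
  std::vector<VkQueueFamilyProperties2> properties( count, propertiesTemplate );

  for ( uint32_t i = 0; i < count; ++i )
    properties[i].pNext = &checkpointProps[i];  // extend each element through the pNext chain

  vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, &count, properties.data() );
  return properties;  // checkpointExecutionStageMask is now filled per queue family
}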
template <> - struct CppType + struct CppType { - using Type = PipelineCoverageToColorStateCreateInfoNV; + using Type = QueueFamilyCheckpointProperties2NV; }; - struct PipelineCreationFeedback + struct QueueFamilyCheckpointPropertiesNV { - using NativeType = VkPipelineCreationFeedback; + using NativeType = VkQueueFamilyCheckpointPropertiesNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags_ = {}, - uint64_t duration_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , duration( duration_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointPropertiesNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , checkpointExecutionStageMask{ checkpointExecutionStageMask_ } { } - VULKAN_HPP_CONSTEXPR PipelineCreationFeedback( PipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineCreationFeedback( VkPipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineCreationFeedback( *reinterpret_cast( &rhs ) ) + QueueFamilyCheckpointPropertiesNV( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyCheckpointPropertiesNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineCreationFeedback & operator=( PipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT = default; + QueueFamilyCheckpointPropertiesNV & operator=( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineCreationFeedback & operator=( VkPipelineCreationFeedback const & rhs ) VULKAN_HPP_NOEXCEPT + QueueFamilyCheckpointPropertiesNV & operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPipelineCreationFeedback const &() const VULKAN_HPP_NOEXCEPT + operator VkQueueFamilyCheckpointPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineCreationFeedback &() VULKAN_HPP_NOEXCEPT + operator VkQueueFamilyCheckpointPropertiesNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( flags, duration ); + return std::tie( sType, pNext, checkpointExecutionStageMask ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineCreationFeedback const & ) const = default; + auto operator<=>( QueueFamilyCheckpointPropertiesNV const & ) const = default; #else - bool operator==( PipelineCreationFeedback const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( flags 
== rhs.flags ) && ( duration == rhs.duration ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask ); # endif } - bool operator!=( PipelineCreationFeedback const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags = {}; - uint64_t duration = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask = {}; }; - using PipelineCreationFeedbackEXT = PipelineCreationFeedback; - struct PipelineCreationFeedbackCreateInfo + template <> + struct CppType { - using NativeType = VkPipelineCreationFeedbackCreateInfo; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreationFeedbackCreateInfo; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_ = {}, - uint32_t pipelineStageCreationFeedbackCount_ = {}, - VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pPipelineCreationFeedback( pPipelineCreationFeedback_ ) - , pipelineStageCreationFeedbackCount( pipelineStageCreationFeedbackCount_ ) - , pPipelineStageCreationFeedbacks( pPipelineStageCreationFeedbacks_ ) - { - } - - VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfo( PipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PipelineCreationFeedbackCreateInfo( VkPipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineCreationFeedbackCreateInfo( *reinterpret_cast( &rhs ) ) - { - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineCreationFeedbackCreateInfo( - VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineStageCreationFeedbacks_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , pPipelineCreationFeedback( pPipelineCreationFeedback_ ) - , pipelineStageCreationFeedbackCount( static_cast( pipelineStageCreationFeedbacks_.size() ) ) - , pPipelineStageCreationFeedbacks( pipelineStageCreationFeedbacks_.data() ) - { - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + using Type = QueueFamilyCheckpointPropertiesNV; + }; - PipelineCreationFeedbackCreateInfo & operator=( PipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + struct QueueFamilyGlobalPriorityProperties + { + using NativeType = VkQueueFamilyGlobalPriorityProperties; - PipelineCreationFeedbackCreateInfo & operator=( VkPipelineCreationFeedbackCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyGlobalPriorityProperties; -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityProperties( uint32_t priorityCount_ = {}, + std::array const & + priorities_ = { { VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow } }, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , priorityCount{ priorityCount_ } + , priorities{ priorities_ } { - pNext = pNext_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & - setPPipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT - { - pPipelineCreationFeedback = pPipelineCreationFeedback_; - return *this; - } + VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityProperties( QueueFamilyGlobalPriorityProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & - setPipelineStageCreationFeedbackCount( uint32_t pipelineStageCreationFeedbackCount_ ) VULKAN_HPP_NOEXCEPT + QueueFamilyGlobalPriorityProperties( VkQueueFamilyGlobalPriorityProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyGlobalPriorityProperties( *reinterpret_cast( &rhs ) ) { - pipelineStageCreationFeedbackCount = pipelineStageCreationFeedbackCount_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfo & - setPPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT - { - pPipelineStageCreationFeedbacks = pPipelineStageCreationFeedbacks_; - return *this; - } + QueueFamilyGlobalPriorityProperties & operator=( QueueFamilyGlobalPriorityProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineCreationFeedbackCreateInfo & setPipelineStageCreationFeedbacks( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineStageCreationFeedbacks_ ) - VULKAN_HPP_NOEXCEPT + QueueFamilyGlobalPriorityProperties & operator=( VkQueueFamilyGlobalPriorityProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - pipelineStageCreationFeedbackCount = static_cast( pipelineStageCreationFeedbacks_.size() ); - pPipelineStageCreationFeedbacks = pipelineStageCreationFeedbacks_.data(); + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPipelineCreationFeedbackCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkQueueFamilyGlobalPriorityProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineCreationFeedbackCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkQueueFamilyGlobalPriorityProperties &() 
VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -71617,638 +112435,590 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pPipelineCreationFeedback, pipelineStageCreationFeedbackCount, pPipelineStageCreationFeedbacks ); + return std::tie( sType, pNext, priorityCount, priorities ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineCreationFeedbackCreateInfo const & ) const = default; -#else - bool operator==( PipelineCreationFeedbackCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + std::strong_ordering operator<=>( QueueFamilyGlobalPriorityProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pPipelineCreationFeedback == rhs.pPipelineCreationFeedback ) && - ( pipelineStageCreationFeedbackCount == rhs.pipelineStageCreationFeedbackCount ) && - ( pPipelineStageCreationFeedbacks == rhs.pPipelineStageCreationFeedbacks ); -# endif + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = priorityCount <=> rhs.priorityCount; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < priorityCount; ++i ) + { + if ( auto cmp = priorities[i] <=> rhs.priorities[i]; cmp != 0 ) + return cmp; + } + + return std::strong_ordering::equivalent; } +#endif - bool operator!=( PipelineCreationFeedbackCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( QueueFamilyGlobalPriorityProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priorityCount == rhs.priorityCount ) && + ( memcmp( priorities, rhs.priorities, priorityCount * sizeof( VULKAN_HPP_NAMESPACE::QueueGlobalPriority ) ) == 0 ); + } + + bool operator!=( QueueFamilyGlobalPriorityProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineCreationFeedback = {}; - uint32_t pipelineStageCreationFeedbackCount = {}; - VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyGlobalPriorityProperties; + void * pNext = {}; + uint32_t priorityCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D priorities = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineCreationFeedbackCreateInfo; + using Type = QueueFamilyGlobalPriorityProperties; }; - using PipelineCreationFeedbackCreateInfoEXT = PipelineCreationFeedbackCreateInfo; - struct PipelineDiscardRectangleStateCreateInfoEXT - { - using NativeType = VkPipelineDiscardRectangleStateCreateInfoEXT; + using QueueFamilyGlobalPriorityPropertiesEXT = QueueFamilyGlobalPriorityProperties; + using QueueFamilyGlobalPriorityPropertiesKHR = QueueFamilyGlobalPriorityProperties; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT; + struct QueueFamilyProperties + { + using NativeType = 
VkQueueFamilyProperties; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( - VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive, - uint32_t discardRectangleCount_ = {}, - const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , discardRectangleMode( discardRectangleMode_ ) - , discardRectangleCount( discardRectangleCount_ ) - , pDiscardRectangles( pDiscardRectangles_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueueFamilyProperties( VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {}, + uint32_t queueCount_ = {}, + uint32_t timestampValidBits_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {} ) VULKAN_HPP_NOEXCEPT + : queueFlags{ queueFlags_ } + , queueCount{ queueCount_ } + , timestampValidBits{ timestampValidBits_ } + , minImageTransferGranularity{ minImageTransferGranularity_ } { } - VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR QueueFamilyProperties( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineDiscardRectangleStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) + QueueFamilyProperties( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyProperties( *reinterpret_cast( &rhs ) ) { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineDiscardRectangleStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_, - VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & discardRectangles_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , discardRectangleMode( discardRectangleMode_ ) - , discardRectangleCount( static_cast( discardRectangles_.size() ) ) - , pDiscardRectangles( discardRectangles_.data() ) + QueueFamilyProperties & operator=( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + QueueFamilyProperties & operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineDiscardRectangleStateCreateInfoEXT & operator=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + operator VkQueueFamilyProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } - PipelineDiscardRectangleStateCreateInfoEXT & operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); - return *this; + return *reinterpret_cast( this ); } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= 
VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return std::tie( queueFlags, queueCount, timestampValidBits, minImageTransferGranularity ); } +#endif - VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & - setFlags( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( QueueFamilyProperties const & ) const = default; +#else + bool operator==( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - flags = flags_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( queueFlags == rhs.queueFlags ) && ( queueCount == rhs.queueCount ) && ( timestampValidBits == rhs.timestampValidBits ) && + ( minImageTransferGranularity == rhs.minImageTransferGranularity ); +# endif } - VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & - setDiscardRectangleMode( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - discardRectangleMode = discardRectangleMode_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleCount( uint32_t discardRectangleCount_ ) VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::QueueFlags queueFlags = {}; + uint32_t queueCount = {}; + uint32_t timestampValidBits = {}; + VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity = {}; + }; + + struct QueueFamilyProperties2 + { + using NativeType = VkQueueFamilyProperties2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyProperties2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , queueFamilyProperties{ queueFamilyProperties_ } { - discardRectangleCount = discardRectangleCount_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & - setPDiscardRectangles( const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyProperties2( *reinterpret_cast( &rhs ) ) { - pDiscardRectangles = pDiscardRectangles_; - return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineDiscardRectangleStateCreateInfoEXT & - setDiscardRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & discardRectangles_ ) VULKAN_HPP_NOEXCEPT + QueueFamilyProperties2 & operator=( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + QueueFamilyProperties2 & operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT { - discardRectangleCount = static_cast( discardRectangles_.size() ); - pDiscardRectangles = discardRectangles_.data(); + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - 
operator VkPipelineDiscardRectangleStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkQueueFamilyProperties2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineDiscardRectangleStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, discardRectangleMode, discardRectangleCount, pDiscardRectangles ); + return std::tie( sType, pNext, queueFamilyProperties ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineDiscardRectangleStateCreateInfoEXT const & ) const = default; + auto operator<=>( QueueFamilyProperties2 const & ) const = default; #else - bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( discardRectangleMode == rhs.discardRectangleMode ) && - ( discardRectangleCount == rhs.discardRectangleCount ) && ( pDiscardRectangles == rhs.pDiscardRectangles ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyProperties == rhs.queueFamilyProperties ); # endif } - bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags = {}; - VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive; - uint32_t discardRectangleCount = {}; - const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineDiscardRectangleStateCreateInfoEXT; + using Type = QueueFamilyProperties2; }; - struct PipelineExecutableInfoKHR + using QueueFamilyProperties2KHR = QueueFamilyProperties2; + + struct QueueFamilyQueryResultStatusPropertiesKHR { - using NativeType = VkPipelineExecutableInfoKHR; + using NativeType = VkQueueFamilyQueryResultStatusPropertiesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInfoKHR; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, - uint32_t executableIndex_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pipeline( pipeline_ ) - , executableIndex( executableIndex_ ) - { - } - - VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyQueryResultStatusPropertiesKHR; - PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineExecutableInfoKHR( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueueFamilyQueryResultStatusPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 queryResultStatusSupport_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , queryResultStatusSupport{ queryResultStatusSupport_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PipelineExecutableInfoKHR & operator=( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineExecutableInfoKHR & operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + VULKAN_HPP_CONSTEXPR QueueFamilyQueryResultStatusPropertiesKHR( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + QueueFamilyQueryResultStatusPropertiesKHR( VkQueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyQueryResultStatusPropertiesKHR( *reinterpret_cast( &rhs ) ) { - pNext = pNext_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT - { - pipeline = pipeline_; - return *this; - } + QueueFamilyQueryResultStatusPropertiesKHR & operator=( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & setExecutableIndex( uint32_t executableIndex_ ) VULKAN_HPP_NOEXCEPT + QueueFamilyQueryResultStatusPropertiesKHR & operator=( VkQueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - executableIndex = executableIndex_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPipelineExecutableInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkQueueFamilyQueryResultStatusPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkQueueFamilyQueryResultStatusPropertiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pipeline, executableIndex ); + return std::tie( sType, pNext, queryResultStatusSupport ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineExecutableInfoKHR const & ) const = default; + auto operator<=>( QueueFamilyQueryResultStatusPropertiesKHR const & ) const = default; #else - bool operator==( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == 
rhs.pipeline ) && ( executableIndex == rhs.executableIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queryResultStatusSupport == rhs.queryResultStatusSupport ); # endif } - bool operator!=( PipelineExecutableInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; - uint32_t executableIndex = {}; +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyQueryResultStatusPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 queryResultStatusSupport = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineExecutableInfoKHR; + using Type = QueueFamilyQueryResultStatusPropertiesKHR; }; - struct PipelineExecutableInternalRepresentationKHR + struct QueueFamilyVideoPropertiesKHR { - using NativeType = VkPipelineExecutableInternalRepresentationKHR; + using NativeType = VkQueueFamilyVideoPropertiesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInternalRepresentationKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyVideoPropertiesKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR( std::array const & name_ = {}, - std::array const & description_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 isText_ = {}, - size_t dataSize_ = {}, - void * pData_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , name( name_ ) - , description( description_ ) - , isText( isText_ ) - , dataSize( dataSize_ ) - , pData( pData_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR QueueFamilyVideoPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , videoCodecOperations{ videoCodecOperations_ } { } - VULKAN_HPP_CONSTEXPR_14 - PipelineExecutableInternalRepresentationKHR( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineExecutableInternalRepresentationKHR( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR QueueFamilyVideoPropertiesKHR( QueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - template - PipelineExecutableInternalRepresentationKHR( std::array const & name_, - std::array const & description_, - VULKAN_HPP_NAMESPACE::Bool32 isText_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_, - void * pNext_ = nullptr ) - : pNext( pNext_ ), name( name_ ), description( description_ ), isText( isText_ ), dataSize( data_.size() * sizeof( T ) ), pData( data_.data() ) + QueueFamilyVideoPropertiesKHR( VkQueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyVideoPropertiesKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - 
PipelineExecutableInternalRepresentationKHR & operator=( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + QueueFamilyVideoPropertiesKHR & operator=( QueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineExecutableInternalRepresentationKHR & operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT + QueueFamilyVideoPropertiesKHR & operator=( VkQueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPipelineExecutableInternalRepresentationKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkQueueFamilyVideoPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT + operator VkQueueFamilyVideoPropertiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, - VULKAN_HPP_NAMESPACE::Bool32 const &, - size_t const &, - void * const &> + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, name, description, isText, dataSize, pData ); + return std::tie( sType, pNext, videoCodecOperations ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineExecutableInternalRepresentationKHR const & ) const = default; + auto operator<=>( QueueFamilyVideoPropertiesKHR const & ) const = default; #else - bool operator==( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( QueueFamilyVideoPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( name == rhs.name ) && ( description == rhs.description ) && ( isText == rhs.isText ) && - ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoCodecOperations == rhs.videoCodecOperations ); # endif } - bool operator!=( PipelineExecutableInternalRepresentationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( QueueFamilyVideoPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; - VULKAN_HPP_NAMESPACE::Bool32 isText = {}; - size_t dataSize = {}; - void * pData = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyVideoPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineExecutableInternalRepresentationKHR; + using Type = QueueFamilyVideoPropertiesKHR; }; - struct PipelineExecutablePropertiesKHR + struct RayTracingPipelineClusterAccelerationStructureCreateInfoNV { - using NativeType = VkPipelineExecutablePropertiesKHR; + using NativeType = 
VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutablePropertiesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineClusterAccelerationStructureCreateInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {}, - std::array const & name_ = {}, - std::array const & description_ = {}, - uint32_t subgroupSize_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stages( stages_ ) - , name( name_ ) - , description( description_ ) - , subgroupSize( subgroupSize_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingPipelineClusterAccelerationStructureCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 allowClusterAccelerationStructure_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , allowClusterAccelerationStructure{ allowClusterAccelerationStructure_ } { } - VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RayTracingPipelineClusterAccelerationStructureCreateInfoNV( RayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; - PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineExecutablePropertiesKHR( *reinterpret_cast( &rhs ) ) + RayTracingPipelineClusterAccelerationStructureCreateInfoNV( VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingPipelineClusterAccelerationStructureCreateInfoNV( + *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineExecutablePropertiesKHR & operator=( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RayTracingPipelineClusterAccelerationStructureCreateInfoNV & + operator=( RayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineExecutablePropertiesKHR & operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + RayTracingPipelineClusterAccelerationStructureCreateInfoNV & + operator=( VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPipelineExecutablePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineClusterAccelerationStructureCreateInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineClusterAccelerationStructureCreateInfoNV & + setAllowClusterAccelerationStructure( VULKAN_HPP_NAMESPACE::Bool32 allowClusterAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + allowClusterAccelerationStructure = allowClusterAccelerationStructure_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator 
VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineClusterAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, - uint32_t const &> + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, stages, name, description, subgroupSize ); + return std::tie( sType, pNext, allowClusterAccelerationStructure ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineExecutablePropertiesKHR const & ) const = default; + auto operator<=>( RayTracingPipelineClusterAccelerationStructureCreateInfoNV const & ) const = default; #else - bool operator==( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stages == rhs.stages ) && ( name == rhs.name ) && ( description == rhs.description ) && - ( subgroupSize == rhs.subgroupSize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allowClusterAccelerationStructure == rhs.allowClusterAccelerationStructure ); # endif } - bool operator!=( PipelineExecutablePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RayTracingPipelineClusterAccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; - uint32_t subgroupSize = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineClusterAccelerationStructureCreateInfoNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 allowClusterAccelerationStructure = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineExecutablePropertiesKHR; + using Type = RayTracingPipelineClusterAccelerationStructureCreateInfoNV; }; - union PipelineExecutableStatisticValueKHR + struct RayTracingShaderGroupCreateInfoKHR { - using NativeType = VkPipelineExecutableStatisticValueKHR; -#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) - - VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::Bool32 b32_ = {} ) : b32( b32_ ) {} + using NativeType = VkRayTracingShaderGroupCreateInfoKHR; - VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( int64_t i64_ ) : i64( i64_ ) {} + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoKHR; - VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( uint64_t u64_ ) : u64( u64_ ) {} +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, + uint32_t generalShader_ = 
VULKAN_HPP_NAMESPACE::ShaderUnusedKHR, + uint32_t closestHitShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR, + uint32_t anyHitShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR, + uint32_t intersectionShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR, + const void * pShaderGroupCaptureReplayHandle_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + , generalShader{ generalShader_ } + , closestHitShader{ closestHitShader_ } + , anyHitShader{ anyHitShader_ } + , intersectionShader{ intersectionShader_ } + , pShaderGroupCaptureReplayHandle{ pShaderGroupCaptureReplayHandle_ } + { + } - VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR( double f64_ ) : f64( f64_ ) {} -#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ + VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setB32( VULKAN_HPP_NAMESPACE::Bool32 b32_ ) VULKAN_HPP_NOEXCEPT + RayTracingShaderGroupCreateInfoKHR( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingShaderGroupCreateInfoKHR( *reinterpret_cast( &rhs ) ) { - b32 = b32_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setI64( int64_t i64_ ) VULKAN_HPP_NOEXCEPT + RayTracingShaderGroupCreateInfoKHR & operator=( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RayTracingShaderGroupCreateInfoKHR & operator=( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - i64 = i64_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setU64( uint64_t u64_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - u64 = u64_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticValueKHR & setF64( double f64_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT { - f64 = f64_; + type = type_; return *this; } -#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ - operator VkPipelineExecutableStatisticValueKHR const &() const + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + generalShader = generalShader_; + return *this; } - operator VkPipelineExecutableStatisticValueKHR &() + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + closestHitShader = closestHitShader_; + return *this; } -#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS - VULKAN_HPP_NAMESPACE::Bool32 b32; - int64_t i64; - uint64_t u64; - double f64; -#else - VkBool32 b32; - int64_t i64; - uint64_t u64; - double f64; -#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ - }; - - struct PipelineExecutableStatisticKHR - { - using NativeType = VkPipelineExecutableStatisticKHR; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableStatisticKHR; - -#if !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticKHR( - std::array const & name_ = {}, - std::array const & description_ = {}, - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32, - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , name( name_ ) - , description( description_ ) - , format( format_ ) - , value( value_ ) + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT { + anyHitShader = anyHitShader_; + return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineExecutableStatisticKHR( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineExecutableStatisticKHR( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT { + intersectionShader = intersectionShader_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineExecutableStatisticKHR & operator=( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PipelineExecutableStatisticKHR & operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & + setPShaderGroupCaptureReplayHandle( const void * pShaderGroupCaptureReplayHandle_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + pShaderGroupCaptureReplayHandle = pShaderGroupCaptureReplayHandle_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineExecutableStatisticKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkRayTracingShaderGroupCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT + operator VkRayTracingShaderGroupCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -72256,225 +113026,361 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR const &, - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const &> + const void * const &, + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + const void * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, name, description, format, value ); + return std::tie( sType, pNext, type, generalShader, closestHitShader, anyHitShader, intersectionShader, pShaderGroupCaptureReplayHandle ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RayTracingShaderGroupCreateInfoKHR const & ) const = default; +#else + bool operator==( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( generalShader == rhs.generalShader ) && + ( closestHitShader == 
rhs.closestHitShader ) && ( anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader ) && + ( pShaderGroupCaptureReplayHandle == rhs.pShaderGroupCaptureReplayHandle ); +# endif + } + + bool operator!=( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D name = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32; - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; + uint32_t generalShader = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR; + uint32_t closestHitShader = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR; + uint32_t anyHitShader = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR; + uint32_t intersectionShader = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR; + const void * pShaderGroupCaptureReplayHandle = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineExecutableStatisticKHR; + using Type = RayTracingShaderGroupCreateInfoKHR; }; - struct PipelineFragmentShadingRateEnumStateCreateInfoNV + struct RayTracingPipelineInterfaceCreateInfoKHR { - using NativeType = VkPipelineFragmentShadingRateEnumStateCreateInfoNV; + using NativeType = VkRayTracingPipelineInterfaceCreateInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV( - VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ = VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize, - VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ = VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel, - std::array const & - combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } }, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shadingRateType( shadingRateType_ ) - , shadingRate( shadingRate_ ) - , combinerOps( combinerOps_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( uint32_t maxPipelineRayPayloadSize_ = {}, + uint32_t maxPipelineRayHitAttributeSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxPipelineRayPayloadSize{ maxPipelineRayPayloadSize_ } + , maxPipelineRayHitAttributeSize{ maxPipelineRayHitAttributeSize_ } { } - VULKAN_HPP_CONSTEXPR_14 - PipelineFragmentShadingRateEnumStateCreateInfoNV( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; - PipelineFragmentShadingRateEnumStateCreateInfoNV( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineFragmentShadingRateEnumStateCreateInfoNV( *reinterpret_cast( &rhs ) ) + RayTracingPipelineInterfaceCreateInfoKHR( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingPipelineInterfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineFragmentShadingRateEnumStateCreateInfoNV & operator=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RayTracingPipelineInterfaceCreateInfoKHR & operator=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineFragmentShadingRateEnumStateCreateInfoNV & operator=( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + RayTracingPipelineInterfaceCreateInfoKHR & operator=( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & - setShadingRateType( VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ ) VULKAN_HPP_NOEXCEPT - { - shadingRateType = shadingRateType_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & - setShadingRate( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & setMaxPipelineRayPayloadSize( uint32_t maxPipelineRayPayloadSize_ ) VULKAN_HPP_NOEXCEPT { - shadingRate = shadingRate_; + maxPipelineRayPayloadSize = maxPipelineRayPayloadSize_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & - setCombinerOps( std::array combinerOps_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & + setMaxPipelineRayHitAttributeSize( uint32_t maxPipelineRayHitAttributeSize_ ) VULKAN_HPP_NOEXCEPT { - combinerOps = combinerOps_; + maxPipelineRayHitAttributeSize = maxPipelineRayHitAttributeSize_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + operator VkRayTracingPipelineInterfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + operator VkRayTracingPipelineInterfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple const &> + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, shadingRateType, shadingRate, combinerOps 
); + return std::tie( sType, pNext, maxPipelineRayPayloadSize, maxPipelineRayHitAttributeSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineFragmentShadingRateEnumStateCreateInfoNV const & ) const = default; + auto operator<=>( RayTracingPipelineInterfaceCreateInfoKHR const & ) const = default; #else - bool operator==( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateType == rhs.shadingRateType ) && ( shadingRate == rhs.shadingRate ) && - ( combinerOps == rhs.combinerOps ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPipelineRayPayloadSize == rhs.maxPipelineRayPayloadSize ) && + ( maxPipelineRayHitAttributeSize == rhs.maxPipelineRayHitAttributeSize ); # endif } - bool operator!=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType = VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize; - VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate = VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D combinerOps = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR; + const void * pNext = {}; + uint32_t maxPipelineRayPayloadSize = {}; + uint32_t maxPipelineRayHitAttributeSize = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineFragmentShadingRateEnumStateCreateInfoNV; + using Type = RayTracingPipelineInterfaceCreateInfoKHR; }; - struct PipelineFragmentShadingRateStateCreateInfoKHR + struct RayTracingPipelineCreateInfoKHR { - using NativeType = VkPipelineFragmentShadingRateStateCreateInfoKHR; + using NativeType = VkRayTracingPipelineCreateInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR( - VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {}, - std::array const & - combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } }, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fragmentSize( fragmentSize_ ) - , combinerOps( combinerOps_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + uint32_t groupCount_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR 
* pGroups_ = {}, + uint32_t maxPipelineRayRecursionDepth_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , stageCount{ stageCount_ } + , pStages{ pStages_ } + , groupCount{ groupCount_ } + , pGroups{ pGroups_ } + , maxPipelineRayRecursionDepth{ maxPipelineRayRecursionDepth_ } + , pLibraryInfo{ pLibraryInfo_ } + , pLibraryInterface{ pLibraryInterface_ } + , pDynamicState{ pDynamicState_ } + , layout{ layout_ } + , basePipelineHandle{ basePipelineHandle_ } + , basePipelineIndex{ basePipelineIndex_ } { } - VULKAN_HPP_CONSTEXPR_14 - PipelineFragmentShadingRateStateCreateInfoKHR( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineFragmentShadingRateStateCreateInfoKHR( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineFragmentShadingRateStateCreateInfoKHR( *reinterpret_cast( &rhs ) ) + RayTracingPipelineCreateInfoKHR( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingPipelineCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineFragmentShadingRateStateCreateInfoKHR & operator=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoKHR( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ = {}, + uint32_t maxPipelineRayRecursionDepth_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , stageCount( static_cast( stages_.size() ) ) + , pStages( stages_.data() ) + , groupCount( static_cast( groups_.size() ) ) + , pGroups( groups_.data() ) + , maxPipelineRayRecursionDepth( maxPipelineRayRecursionDepth_ ) + , pLibraryInfo( pLibraryInfo_ ) + , pLibraryInterface( pLibraryInterface_ ) + , pDynamicState( pDynamicState_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PipelineFragmentShadingRateStateCreateInfoKHR & operator=( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + RayTracingPipelineCreateInfoKHR & operator=( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RayTracingPipelineCreateInfoKHR & operator=( VkRayTracingPipelineCreateInfoKHR const & 
rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & - setFragmentSize( VULKAN_HPP_NAMESPACE::Extent2D const & fragmentSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoKHR & + setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = groupCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ ) VULKAN_HPP_NOEXCEPT + { + pGroups = pGroups_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RayTracingPipelineCreateInfoKHR & setGroups( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ ) VULKAN_HPP_NOEXCEPT + { + groupCount = static_cast( groups_.size() ); + pGroups = groups_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setMaxPipelineRayRecursionDepth( uint32_t maxPipelineRayRecursionDepth_ ) VULKAN_HPP_NOEXCEPT + { + maxPipelineRayRecursionDepth = maxPipelineRayRecursionDepth_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + setPLibraryInfo( const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ ) VULKAN_HPP_NOEXCEPT + { + pLibraryInfo = pLibraryInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + setPLibraryInterface( const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ ) VULKAN_HPP_NOEXCEPT + { + pLibraryInterface = pLibraryInterface_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & + setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT + { + pDynamicState = pDynamicState_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setBasePipelineHandle( 
VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT { - fragmentSize = fragmentSize_; + basePipelineHandle = basePipelineHandle_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & - setCombinerOps( std::array combinerOps_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT { - combinerOps = combinerOps_; + basePipelineIndex = basePipelineIndex_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineFragmentShadingRateStateCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkRayTracingPipelineCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineFragmentShadingRateStateCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkRayTracingPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -72483,353 +113389,386 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple const &> + VULKAN_HPP_NAMESPACE::PipelineCreateFlags const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * const &, + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * const &, + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * const &, + VULKAN_HPP_NAMESPACE::PipelineLayout const &, + VULKAN_HPP_NAMESPACE::Pipeline const &, + int32_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, fragmentSize, combinerOps ); + return std::tie( sType, + pNext, + flags, + stageCount, + pStages, + groupCount, + pGroups, + maxPipelineRayRecursionDepth, + pLibraryInfo, + pLibraryInterface, + pDynamicState, + layout, + basePipelineHandle, + basePipelineIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineFragmentShadingRateStateCreateInfoKHR const & ) const = default; + auto operator<=>( RayTracingPipelineCreateInfoKHR const & ) const = default; #else - bool operator==( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentSize == rhs.fragmentSize ) && ( combinerOps == rhs.combinerOps ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && + ( groupCount == rhs.groupCount ) && ( pGroups == rhs.pGroups ) && ( maxPipelineRayRecursionDepth == rhs.maxPipelineRayRecursionDepth ) && + ( pLibraryInfo == rhs.pLibraryInfo ) && ( pLibraryInterface == rhs.pLibraryInterface ) && ( pDynamicState == rhs.pDynamicState ) && + ( layout == rhs.layout ) && ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex ); # endif } - bool operator!=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RayTracingPipelineCreateInfoKHR const & rhs ) const 
VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D combinerOps = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + uint32_t groupCount = {}; + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups = {}; + uint32_t maxPipelineRayRecursionDepth = {}; + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo = {}; + const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface = {}; + const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineFragmentShadingRateStateCreateInfoKHR; + using Type = RayTracingPipelineCreateInfoKHR; }; - struct PipelineInfoKHR + struct RayTracingShaderGroupCreateInfoNV { - using NativeType = VkPipelineInfoKHR; + using NativeType = VkRayTracingShaderGroupCreateInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pipeline( pipeline_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, + uint32_t generalShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedNV, + uint32_t closestHitShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedNV, + uint32_t anyHitShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedNV, + uint32_t intersectionShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedNV, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + , generalShader{ generalShader_ } + , closestHitShader{ closestHitShader_ } + , anyHitShader{ anyHitShader_ } + , intersectionShader{ intersectionShader_ } { } - VULKAN_HPP_CONSTEXPR PipelineInfoKHR( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PipelineInfoKHR( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingShaderGroupCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } - PipelineInfoKHR & operator=( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RayTracingShaderGroupCreateInfoNV & operator=( RayTracingShaderGroupCreateInfoNV const & 
rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineInfoKHR & operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + RayTracingShaderGroupCreateInfoNV & operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT { - pipeline = pipeline_; + type = type_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkPipelineInfoKHR const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, pipeline ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineInfoKHR const & ) const = default; -#else - bool operator==( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline ); -# endif - } - - bool operator!=( PipelineInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; - }; - - template <> - struct CppType - { - using Type = PipelineInfoKHR; - }; - using PipelineInfoEXT = PipelineInfoKHR; - - struct PushConstantRange - { - using NativeType = VkPushConstantRange; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - PushConstantRange( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, uint32_t offset_ = {}, uint32_t size_ = {} ) VULKAN_HPP_NOEXCEPT - : stageFlags( stageFlags_ ) - , offset( offset_ ) - , size( size_ ) - { - } - - VULKAN_HPP_CONSTEXPR PushConstantRange( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT : PushConstantRange( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PushConstantRange & operator=( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PushConstantRange & operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + generalShader = generalShader_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) 
VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT { - stageFlags = stageFlags_; + closestHitShader = closestHitShader_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT { - offset = offset_; + anyHitShader = anyHitShader_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT { - size = size_; + intersectionShader = intersectionShader_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPushConstantRange const &() const VULKAN_HPP_NOEXCEPT + operator VkRayTracingShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT + operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( stageFlags, offset, size ); + return std::tie( sType, pNext, type, generalShader, closestHitShader, anyHitShader, intersectionShader ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PushConstantRange const & ) const = default; + auto operator<=>( RayTracingShaderGroupCreateInfoNV const & ) const = default; #else - bool operator==( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( stageFlags == rhs.stageFlags ) && ( offset == rhs.offset ) && ( size == rhs.size ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( generalShader == rhs.generalShader ) && + ( closestHitShader == rhs.closestHitShader ) && ( anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader ); # endif } - bool operator!=( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; - uint32_t offset = {}; - uint32_t size = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; + uint32_t generalShader = VULKAN_HPP_NAMESPACE::ShaderUnusedNV; + uint32_t closestHitShader = VULKAN_HPP_NAMESPACE::ShaderUnusedNV; + uint32_t anyHitShader = VULKAN_HPP_NAMESPACE::ShaderUnusedNV; + uint32_t intersectionShader = VULKAN_HPP_NAMESPACE::ShaderUnusedNV; }; - struct PipelineLayoutCreateInfo + template <> + struct CppType { - using NativeType = VkPipelineLayoutCreateInfo; + using Type = RayTracingShaderGroupCreateInfoNV; + }; + + struct RayTracingPipelineCreateInfoNV + { + using 
NativeType = VkRayTracingPipelineCreateInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLayoutCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {}, - uint32_t setLayoutCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, - uint32_t pushConstantRangeCount_ = {}, - const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , setLayoutCount( setLayoutCount_ ) - , pSetLayouts( pSetLayouts_ ) - , pushConstantRangeCount( pushConstantRangeCount_ ) - , pPushConstantRanges( pPushConstantRanges_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + uint32_t groupCount_ = {}, + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ = {}, + uint32_t maxRecursionDepth_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , stageCount{ stageCount_ } + , pStages{ pStages_ } + , groupCount{ groupCount_ } + , pGroups{ pGroups_ } + , maxRecursionDepth{ maxRecursionDepth_ } + , layout{ layout_ } + , basePipelineHandle{ basePipelineHandle_ } + , basePipelineIndex{ basePipelineIndex_ } { } - VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineLayoutCreateInfo( *reinterpret_cast( &rhs ) ) + RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : RayTracingPipelineCreateInfoNV( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ = {}, - const void * pNext_ = nullptr ) + RayTracingPipelineCreateInfoNV( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ = {}, + uint32_t maxRecursionDepth_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) - , setLayoutCount( static_cast( setLayouts_.size() ) ) - , pSetLayouts( setLayouts_.data() ) - , pushConstantRangeCount( static_cast( pushConstantRanges_.size() ) ) - , pPushConstantRanges( pushConstantRanges_.data() ) + , stageCount( static_cast( stages_.size() ) 
) + , pStages( stages_.data() ) + , groupCount( static_cast( groups_.size() ) ) + , pGroups( groups_.data() ) + , maxRecursionDepth( maxRecursionDepth_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineLayoutCreateInfo & operator=( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RayTracingPipelineCreateInfoNV & operator=( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineLayoutCreateInfo & operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + RayTracingPipelineCreateInfoNV & operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT { - setLayoutCount = setLayoutCount_; + stageCount = stageCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT { - pSetLayouts = pSetLayouts_; + pStages = pStages_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineLayoutCreateInfo & - setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_ ) VULKAN_HPP_NOEXCEPT + RayTracingPipelineCreateInfoNV & + setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT { - setLayoutCount = static_cast( setLayouts_.size() ); - pSetLayouts = setLayouts_.data(); + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT { - pushConstantRangeCount = pushConstantRangeCount_; + groupCount = groupCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & - setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & + setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ ) 
VULKAN_HPP_NOEXCEPT { - pPushConstantRanges = pPushConstantRanges_; + pGroups = pGroups_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineLayoutCreateInfo & setPushConstantRanges( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT + RayTracingPipelineCreateInfoNV & setGroups( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ ) VULKAN_HPP_NOEXCEPT { - pushConstantRangeCount = static_cast( pushConstantRanges_.size() ); - pPushConstantRanges = pushConstantRanges_.data(); + groupCount = static_cast( groups_.size() ); + pGroups = groups_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPipelineLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + maxRecursionDepth = maxRecursionDepth_; + return *this; } - operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + layout = layout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineIndex = basePipelineIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRayTracingPipelineCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -72838,350 +113777,312 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::PipelineLayout const &, + VULKAN_HPP_NAMESPACE::Pipeline const &, + int32_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, setLayoutCount, pSetLayouts, pushConstantRangeCount, pPushConstantRanges ); + return std::tie( sType, pNext, flags, stageCount, pStages, groupCount, pGroups, maxRecursionDepth, layout, basePipelineHandle, basePipelineIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineLayoutCreateInfo const & ) const = default; + auto operator<=>( RayTracingPipelineCreateInfoNV const & ) const = default; #else - bool operator==( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( setLayoutCount == rhs.setLayoutCount ) && - ( pSetLayouts == rhs.pSetLayouts ) && ( pushConstantRangeCount == rhs.pushConstantRangeCount ) && - ( pPushConstantRanges == rhs.pPushConstantRanges ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages 
) && + ( groupCount == rhs.groupCount ) && ( pGroups == rhs.pGroups ) && ( maxRecursionDepth == rhs.maxRecursionDepth ) && ( layout == rhs.layout ) && + ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex ); # endif } - bool operator!=( PipelineLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLayoutCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags = {}; - uint32_t setLayoutCount = {}; - const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; - uint32_t pushConstantRangeCount = {}; - const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + uint32_t groupCount = {}; + const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups = {}; + uint32_t maxRecursionDepth = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineLayoutCreateInfo; + using Type = RayTracingPipelineCreateInfoNV; }; - struct PipelineLibraryCreateInfoKHR + struct RefreshCycleDurationGOOGLE { - using NativeType = VkPipelineLibraryCreateInfoKHR; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLibraryCreateInfoKHR; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( uint32_t libraryCount_ = {}, - const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , libraryCount( libraryCount_ ) - , pLibraries( pLibraries_ ) - { - } - - VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineLibraryCreateInfoKHR( *reinterpret_cast( &rhs ) ) - { - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineLibraryCreateInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & libraries_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), libraryCount( static_cast( libraries_.size() ) ), pLibraries( libraries_.data() ) - { - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PipelineLibraryCreateInfoKHR & operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + using NativeType = VkRefreshCycleDurationGOOGLE; - PipelineLibraryCreateInfoKHR & operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( uint64_t refreshDuration_ = {} ) VULKAN_HPP_NOEXCEPT : refreshDuration{ refreshDuration_ } {} -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 
PipelineLibraryCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setLibraryCount( uint32_t libraryCount_ ) VULKAN_HPP_NOEXCEPT + RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + : RefreshCycleDurationGOOGLE( *reinterpret_cast( &rhs ) ) { - libraryCount = libraryCount_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ ) VULKAN_HPP_NOEXCEPT - { - pLibraries = pLibraries_; - return *this; - } + RefreshCycleDurationGOOGLE & operator=( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineLibraryCreateInfoKHR & - setLibraries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & libraries_ ) VULKAN_HPP_NOEXCEPT + RefreshCycleDurationGOOGLE & operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT { - libraryCount = static_cast( libraries_.size() ); - pLibraries = libraries_.data(); + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPipelineLibraryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkRefreshCycleDurationGOOGLE const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, libraryCount, pLibraries ); + return std::tie( refreshDuration ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineLibraryCreateInfoKHR const & ) const = default; + auto operator<=>( RefreshCycleDurationGOOGLE const & ) const = default; #else - bool operator==( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( libraryCount == rhs.libraryCount ) && ( pLibraries == rhs.pLibraries ); + return ( refreshDuration == rhs.refreshDuration ); # endif } - bool operator!=( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLibraryCreateInfoKHR; - const void * pNext = {}; - uint32_t libraryCount = {}; - const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries = {}; - }; - - template <> - struct CppType - { - using Type = PipelineLibraryCreateInfoKHR; + uint64_t refreshDuration = {}; }; - struct PipelinePropertiesIdentifierEXT + struct ReleaseCapturedPipelineDataInfoKHR { - using NativeType = VkPipelinePropertiesIdentifierEXT; + using NativeType = 
VkReleaseCapturedPipelineDataInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelinePropertiesIdentifierEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eReleaseCapturedPipelineDataInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 PipelinePropertiesIdentifierEXT( std::array const & pipelineIdentifier_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pipelineIdentifier( pipelineIdentifier_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ReleaseCapturedPipelineDataInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipeline{ pipeline_ } { } - VULKAN_HPP_CONSTEXPR_14 PipelinePropertiesIdentifierEXT( PipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ReleaseCapturedPipelineDataInfoKHR( ReleaseCapturedPipelineDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelinePropertiesIdentifierEXT( VkPipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelinePropertiesIdentifierEXT( *reinterpret_cast( &rhs ) ) + ReleaseCapturedPipelineDataInfoKHR( VkReleaseCapturedPipelineDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ReleaseCapturedPipelineDataInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelinePropertiesIdentifierEXT & operator=( PipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ReleaseCapturedPipelineDataInfoKHR & operator=( ReleaseCapturedPipelineDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelinePropertiesIdentifierEXT & operator=( VkPipelinePropertiesIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ReleaseCapturedPipelineDataInfoKHR & operator=( VkReleaseCapturedPipelineDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelinePropertiesIdentifierEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ReleaseCapturedPipelineDataInfoKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelinePropertiesIdentifierEXT & setPipelineIdentifier( std::array pipelineIdentifier_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ReleaseCapturedPipelineDataInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT { - pipelineIdentifier = pipelineIdentifier_; + pipeline = pipeline_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelinePropertiesIdentifierEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkReleaseCapturedPipelineDataInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelinePropertiesIdentifierEXT &() VULKAN_HPP_NOEXCEPT + operator VkReleaseCapturedPipelineDataInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple const &> + 
std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pipelineIdentifier ); + return std::tie( sType, pNext, pipeline ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelinePropertiesIdentifierEXT const & ) const = default; + auto operator<=>( ReleaseCapturedPipelineDataInfoKHR const & ) const = default; #else - bool operator==( PipelinePropertiesIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ReleaseCapturedPipelineDataInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineIdentifier == rhs.pipelineIdentifier ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline ); # endif } - bool operator!=( PipelinePropertiesIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ReleaseCapturedPipelineDataInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelinePropertiesIdentifierEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D pipelineIdentifier = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eReleaseCapturedPipelineDataInfoKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelinePropertiesIdentifierEXT; + using Type = ReleaseCapturedPipelineDataInfoKHR; }; - struct PipelineRasterizationConservativeStateCreateInfoEXT + struct ReleaseSwapchainImagesInfoEXT { - using NativeType = VkPipelineRasterizationConservativeStateCreateInfoEXT; + using NativeType = VkReleaseSwapchainImagesInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eReleaseSwapchainImagesInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( - VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled, - float extraPrimitiveOverestimationSize_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , conservativeRasterizationMode( conservativeRasterizationMode_ ) - , extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ReleaseSwapchainImagesInfoEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, + uint32_t imageIndexCount_ = {}, + const uint32_t * pImageIndices_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , swapchain{ swapchain_ } + , imageIndexCount{ imageIndexCount_ } + , pImageIndices{ pImageIndices_ } { } - VULKAN_HPP_CONSTEXPR - PipelineRasterizationConservativeStateCreateInfoEXT( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ReleaseSwapchainImagesInfoEXT( ReleaseSwapchainImagesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - 
PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineRasterizationConservativeStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) + ReleaseSwapchainImagesInfoEXT( VkReleaseSwapchainImagesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ReleaseSwapchainImagesInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineRasterizationConservativeStateCreateInfoEXT & - operator=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ReleaseSwapchainImagesInfoEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageIndices_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), swapchain( swapchain_ ), imageIndexCount( static_cast( imageIndices_.size() ) ), pImageIndices( imageIndices_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PipelineRasterizationConservativeStateCreateInfoEXT & operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ReleaseSwapchainImagesInfoEXT & operator=( ReleaseSwapchainImagesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ReleaseSwapchainImagesInfoEXT & operator=( VkReleaseSwapchainImagesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & - setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoEXT & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + swapchain = swapchain_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & - setConservativeRasterizationMode( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoEXT & setImageIndexCount( uint32_t imageIndexCount_ ) VULKAN_HPP_NOEXCEPT { - conservativeRasterizationMode = conservativeRasterizationMode_; + imageIndexCount = imageIndexCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & - setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ReleaseSwapchainImagesInfoEXT & setPImageIndices( const uint32_t * pImageIndices_ ) VULKAN_HPP_NOEXCEPT { - extraPrimitiveOverestimationSize = extraPrimitiveOverestimationSize_; + pImageIndices = pImageIndices_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPipelineRasterizationConservativeStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ReleaseSwapchainImagesInfoEXT & setImageIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries 
const & imageIndices_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + imageIndexCount = static_cast( imageIndices_.size() ); + pImageIndices = imageIndices_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineRasterizationConservativeStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkReleaseSwapchainImagesInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkReleaseSwapchainImagesInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -73190,242 +114091,285 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::SwapchainKHR const &, + uint32_t const &, + const uint32_t * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, conservativeRasterizationMode, extraPrimitiveOverestimationSize ); + return std::tie( sType, pNext, swapchain, imageIndexCount, pImageIndices ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineRasterizationConservativeStateCreateInfoEXT const & ) const = default; + auto operator<=>( ReleaseSwapchainImagesInfoEXT const & ) const = default; #else - bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ReleaseSwapchainImagesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && - ( conservativeRasterizationMode == rhs.conservativeRasterizationMode ) && - ( extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && ( imageIndexCount == rhs.imageIndexCount ) && + ( pImageIndices == rhs.pImageIndices ); # endif } - bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ReleaseSwapchainImagesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags = {}; - VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled; - float extraPrimitiveOverestimationSize = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eReleaseSwapchainImagesInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; + uint32_t imageIndexCount = {}; + const uint32_t * pImageIndices = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineRasterizationConservativeStateCreateInfoEXT; + using Type = ReleaseSwapchainImagesInfoEXT; }; - struct PipelineRasterizationDepthClipStateCreateInfoEXT + struct RenderPassAttachmentBeginInfo { - using NativeType = VkPipelineRasterizationDepthClipStateCreateInfoEXT; + using NativeType = VkRenderPassAttachmentBeginInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassAttachmentBeginInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , depthClipEnable( depthClipEnable_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , attachmentCount{ attachmentCount_ } + , pAttachments{ pAttachments_ } { } - VULKAN_HPP_CONSTEXPR - PipelineRasterizationDepthClipStateCreateInfoEXT( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineRasterizationDepthClipStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) + RenderPassAttachmentBeginInfo( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassAttachmentBeginInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassAttachmentBeginInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), attachmentCount( static_cast( attachments_.size() ) ), pAttachments( attachments_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassAttachmentBeginInfo & operator=( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderPassAttachmentBeginInfo & operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & - setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + attachmentCount = attachmentCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & - setDepthClipEnable( 
VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT { - depthClipEnable = depthClipEnable_; + pAttachments = pAttachments_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassAttachmentBeginInfo & + setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineRasterizationDepthClipStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkRenderPassAttachmentBeginInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkRenderPassAttachmentBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, depthClipEnable ); + return std::tie( sType, pNext, attachmentCount, pAttachments ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineRasterizationDepthClipStateCreateInfoEXT const & ) const = default; + auto operator<=>( RenderPassAttachmentBeginInfo const & ) const = default; #else - bool operator==( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( depthClipEnable == rhs.depthClipEnable ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ); # endif } - bool operator!=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags = {}; - VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassAttachmentBeginInfo; + const void * pNext = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineRasterizationDepthClipStateCreateInfoEXT; + using Type = RenderPassAttachmentBeginInfo; }; - struct PipelineRasterizationLineStateCreateInfoEXT + using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo; + + struct RenderPassBeginInfo { - using NativeType = VkPipelineRasterizationLineStateCreateInfoEXT; + using NativeType = VkRenderPassBeginInfo; static const bool allowDuplicate = false; - static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassBeginInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( - VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault, - VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {}, - uint32_t lineStippleFactor_ = {}, - uint16_t lineStipplePattern_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , lineRasterizationMode( lineRasterizationMode_ ) - , stippledLineEnable( stippledLineEnable_ ) - , lineStippleFactor( lineStippleFactor_ ) - , lineStipplePattern( lineStipplePattern_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, + VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, + uint32_t clearValueCount_ = {}, + const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , renderPass{ renderPass_ } + , framebuffer{ framebuffer_ } + , renderArea{ renderArea_ } + , clearValueCount{ clearValueCount_ } + , pClearValues{ pClearValues_ } { } - VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT : RenderPassBeginInfo( *reinterpret_cast( &rhs ) ) + { + } - PipelineRasterizationLineStateCreateInfoEXT( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineRasterizationLineStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_, + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_, + VULKAN_HPP_NAMESPACE::Rect2D renderArea_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & clearValues_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , renderPass( renderPass_ ) + , framebuffer( framebuffer_ ) + , renderArea( renderArea_ ) + , clearValueCount( static_cast( clearValues_.size() ) ) + , pClearValues( clearValues_.data() ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PipelineRasterizationLineStateCreateInfoEXT & operator=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RenderPassBeginInfo & operator=( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineRasterizationLineStateCreateInfoEXT & operator=( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassBeginInfo & operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( 
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & - setLineRasterizationMode( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT { - lineRasterizationMode = lineRasterizationMode_; + renderPass = renderPass_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & - setStippledLineEnable( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT { - stippledLineEnable = stippledLineEnable_; + framebuffer = framebuffer_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & setLineStippleFactor( uint32_t lineStippleFactor_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT { - lineStippleFactor = lineStippleFactor_; + renderArea = renderArea_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & setLineStipplePattern( uint16_t lineStipplePattern_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT { - lineStipplePattern = lineStipplePattern_; + clearValueCount = clearValueCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPipelineRasterizationLineStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ ) VULKAN_HPP_NOEXCEPT + { + pClearValues = pClearValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassBeginInfo & + setClearValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & clearValues_ ) VULKAN_HPP_NOEXCEPT + { + clearValueCount = static_cast( clearValues_.size() ); + pClearValues = clearValues_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineRasterizationLineStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -73434,832 +114378,865 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + const VULKAN_HPP_NAMESPACE::ClearValue * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, lineRasterizationMode, stippledLineEnable, lineStippleFactor, lineStipplePattern ); + return std::tie( sType, pNext, renderPass, framebuffer, renderArea, clearValueCount, pClearValues ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineRasterizationLineStateCreateInfoEXT const & ) const = default; + auto operator<=>( RenderPassBeginInfo const & ) const = default; #else - bool operator==( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) const 
VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( lineRasterizationMode == rhs.lineRasterizationMode ) && - ( stippledLineEnable == rhs.stippledLineEnable ) && ( lineStippleFactor == rhs.lineStippleFactor ) && - ( lineStipplePattern == rhs.lineStipplePattern ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && ( framebuffer == rhs.framebuffer ) && + ( renderArea == rhs.renderArea ) && ( clearValueCount == rhs.clearValueCount ) && ( pClearValues == rhs.pClearValues ); # endif } - bool operator!=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault; - VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable = {}; - uint32_t lineStippleFactor = {}; - uint16_t lineStipplePattern = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {}; + VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; + uint32_t clearValueCount = {}; + const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineRasterizationLineStateCreateInfoEXT; + using Type = RenderPassBeginInfo; }; - struct PipelineRasterizationProvokingVertexStateCreateInfoEXT + struct SubpassDescription { - using NativeType = VkPipelineRasterizationProvokingVertexStateCreateInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT; + using NativeType = VkSubpassDescription; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT( - VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ = VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , provokingVertexMode( provokingVertexMode_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + uint32_t inputAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {}, + uint32_t preserveAttachmentCount_ = {}, + const uint32_t * pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT + : flags{ flags_ } + , pipelineBindPoint{ pipelineBindPoint_ } + , inputAttachmentCount{ 
inputAttachmentCount_ } + , pInputAttachments{ pInputAttachments_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachments{ pColorAttachments_ } + , pResolveAttachments{ pResolveAttachments_ } + , pDepthStencilAttachment{ pDepthStencilAttachment_ } + , preserveAttachmentCount{ preserveAttachmentCount_ } + , pPreserveAttachments{ pPreserveAttachments_ } { } - VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) - VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SubpassDescription( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineRasterizationProvokingVertexStateCreateInfoEXT( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineRasterizationProvokingVertexStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) + SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDescription( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & inputAttachments_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & resolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ = {} ) + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , inputAttachmentCount( static_cast( inputAttachments_.size() ) ) + , pInputAttachments( inputAttachments_.data() ) + , colorAttachmentCount( static_cast( colorAttachments_.size() ) ) + , pColorAttachments( colorAttachments_.data() ) + , pResolveAttachments( resolveAttachments_.data() ) + , pDepthStencilAttachment( pDepthStencilAttachment_ ) + , preserveAttachmentCount( static_cast( preserveAttachments_.size() ) ) + , pPreserveAttachments( preserveAttachments_.data() ) { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) ); +# else + if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::SubpassDescription::SubpassDescription: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PipelineRasterizationProvokingVertexStateCreateInfoEXT & - operator=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SubpassDescription & operator=( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineRasterizationProvokingVertexStateCreateInfoEXT & - operator=( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SubpassDescription & operator=( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT & setPNext( const 
void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationProvokingVertexStateCreateInfoEXT & - setProvokingVertexMode( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT { - provokingVertexMode = provokingVertexMode_; + pipelineBindPoint = pipelineBindPoint_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + inputAttachmentCount = inputAttachmentCount_; + return *this; } - operator VkPipelineRasterizationProvokingVertexStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & + setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pInputAttachments = pInputAttachments_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription & setInputAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, provokingVertexMode ); + inputAttachmentCount = static_cast( inputAttachments_.size() ); + pInputAttachments = inputAttachments_.data(); + return *this; } -#endif +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & ) const = default; -#else - bool operator==( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexMode == rhs.provokingVertexMode ); -# endif + colorAttachmentCount = colorAttachmentCount_; + return *this; } - bool operator!=( PipelineRasterizationProvokingVertexStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & + setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + pColorAttachments = pColorAttachments_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationProvokingVertexStateCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode = VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex; - }; - - template <> - struct CppType - { - using Type = PipelineRasterizationProvokingVertexStateCreateInfoEXT; - }; - - struct 
PipelineRasterizationStateRasterizationOrderAMD - { - using NativeType = VkPipelineRasterizationStateRasterizationOrderAMD; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( - VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , rasterizationOrder( rasterizationOrder_ ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription & setColorAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT { + colorAttachmentCount = static_cast( colorAttachments_.size() ); + pColorAttachments = colorAttachments_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR - PipelineRasterizationStateRasterizationOrderAMD( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & + setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pResolveAttachments = pResolveAttachments_; + return *this; + } - PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineRasterizationStateRasterizationOrderAMD( *reinterpret_cast( &rhs ) ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription & setResolveAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT { + colorAttachmentCount = static_cast( resolveAttachments_.size() ); + pResolveAttachments = resolveAttachments_.data(); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PipelineRasterizationStateRasterizationOrderAMD & operator=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & + setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pDepthStencilAttachment = pDepthStencilAttachment_; + return *this; + } - PipelineRasterizationStateRasterizationOrderAMD & operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + preserveAttachmentCount = preserveAttachmentCount_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + pPreserveAttachments = pPreserveAttachments_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD & - setRasterizationOrder( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription & + 
setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT { - rasterizationOrder = rasterizationOrder_; + preserveAttachmentCount = static_cast( preserveAttachments_.size() ); + pPreserveAttachments = preserveAttachments_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineRasterizationStateRasterizationOrderAMD const &() const VULKAN_HPP_NOEXCEPT + operator VkSubpassDescription const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineRasterizationStateRasterizationOrderAMD &() VULKAN_HPP_NOEXCEPT + operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, rasterizationOrder ); + return std::tie( flags, + pipelineBindPoint, + inputAttachmentCount, + pInputAttachments, + colorAttachmentCount, + pColorAttachments, + pResolveAttachments, + pDepthStencilAttachment, + preserveAttachmentCount, + pPreserveAttachments ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineRasterizationStateRasterizationOrderAMD const & ) const = default; + auto operator<=>( SubpassDescription const & ) const = default; #else - bool operator==( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rasterizationOrder == rhs.rasterizationOrder ); + return ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( inputAttachmentCount == rhs.inputAttachmentCount ) && + ( pInputAttachments == rhs.pInputAttachments ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && + ( pColorAttachments == rhs.pColorAttachments ) && ( pResolveAttachments == rhs.pResolveAttachments ) && + ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) && ( preserveAttachmentCount == rhs.preserveAttachmentCount ) && + ( pPreserveAttachments == rhs.pPreserveAttachments ); # endif } - bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + uint32_t inputAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment = {}; + uint32_t preserveAttachmentCount = {}; + const uint32_t * pPreserveAttachments = {}; + }; + + struct SubpassDependency + { + using NativeType = VkSubpassDependency; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDependency( uint32_t srcSubpass_ = {}, + uint32_t dstSubpass_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubpass{ srcSubpass_ } + , dstSubpass{ dstSubpass_ } + , srcStageMask{ srcStageMask_ } + , dstStageMask{ dstStageMask_ } + , srcAccessMask{ srcAccessMask_ } + , dstAccessMask{ dstAccessMask_ } + , dependencyFlags{ dependencyFlags_ } { - return !operator==( rhs ); } -#endif - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict; - }; - - template <> - struct CppType - { - using Type = PipelineRasterizationStateRasterizationOrderAMD; - }; + VULKAN_HPP_CONSTEXPR SubpassDependency( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default; - struct PipelineRasterizationStateStreamCreateInfoEXT - { - using NativeType = VkPipelineRasterizationStateStreamCreateInfoEXT; + SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDependency( *reinterpret_cast( &rhs ) ) {} - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT; + SubpassDependency & operator=( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {}, - uint32_t rasterizationStream_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , rasterizationStream( rasterizationStream_ ) + SubpassDependency & operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } - VULKAN_HPP_CONSTEXPR - PipelineRasterizationStateStreamCreateInfoEXT( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT + { + srcSubpass = srcSubpass_; + return *this; + } - PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineRasterizationStateStreamCreateInfoEXT( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT { + dstSubpass = dstSubpass_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineRasterizationStateStreamCreateInfoEXT & operator=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } - PipelineRasterizationStateStreamCreateInfoEXT & operator=( 
VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + dstStageMask = dstStageMask_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + srcAccessMask = srcAccessMask_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & - setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + dstAccessMask = dstAccessMask_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & setRasterizationStream( uint32_t rasterizationStream_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT { - rasterizationStream = rasterizationStream_; + dependencyFlags = dependencyFlags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineRasterizationStateStreamCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkSubpassDependency const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineRasterizationStateStreamCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, rasterizationStream ); + return std::tie( srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineRasterizationStateStreamCreateInfoEXT const & ) const = default; + auto operator<=>( SubpassDependency const & ) const = default; #else - bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rasterizationStream == rhs.rasterizationStream ); + return ( srcSubpass == rhs.srcSubpass ) && ( dstSubpass == rhs.dstSubpass ) && ( srcStageMask == rhs.srcStageMask ) && + ( dstStageMask == rhs.dstStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) && + ( dependencyFlags == rhs.dependencyFlags ); # endif } - bool operator!=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePipelineRasterizationStateStreamCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags = {}; - uint32_t rasterizationStream = {}; - }; - - template <> - struct CppType - { - using Type = PipelineRasterizationStateStreamCreateInfoEXT; + uint32_t srcSubpass = {}; + uint32_t dstSubpass = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; }; - struct PipelineRenderingCreateInfo + struct RenderPassCreateInfo { - using NativeType = VkPipelineRenderingCreateInfo; + using NativeType = VkRenderPassCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRenderingCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo( uint32_t viewMask_ = {}, - uint32_t colorAttachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {}, - VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , viewMask( viewMask_ ) - , colorAttachmentCount( colorAttachmentCount_ ) - , pColorAttachmentFormats( pColorAttachmentFormats_ ) - , depthAttachmentFormat( depthAttachmentFormat_ ) - , stencilAttachmentFormat( stencilAttachmentFormat_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ = {}, + uint32_t subpassCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ = {}, + uint32_t dependencyCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , attachmentCount{ attachmentCount_ } + , pAttachments{ pAttachments_ } + , subpassCount{ subpassCount_ } + , pSubpasses{ pSubpasses_ } + , dependencyCount{ dependencyCount_ } + , pDependencies{ pDependencies_ } { } - VULKAN_HPP_CONSTEXPR PipelineRenderingCreateInfo( PipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineRenderingCreateInfo( VkPipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineRenderingCreateInfo( *reinterpret_cast( &rhs ) ) + RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassCreateInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineRenderingCreateInfo( uint32_t viewMask_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_, - VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = 
VULKAN_HPP_NAMESPACE::Format::eUndefined, - const void * pNext_ = nullptr ) + RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & subpasses_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dependencies_ = {}, + const void * pNext_ = nullptr ) : pNext( pNext_ ) - , viewMask( viewMask_ ) - , colorAttachmentCount( static_cast( colorAttachmentFormats_.size() ) ) - , pColorAttachmentFormats( colorAttachmentFormats_.data() ) - , depthAttachmentFormat( depthAttachmentFormat_ ) - , stencilAttachmentFormat( stencilAttachmentFormat_ ) + , flags( flags_ ) + , attachmentCount( static_cast( attachments_.size() ) ) + , pAttachments( attachments_.data() ) + , subpassCount( static_cast( subpasses_.size() ) ) + , pSubpasses( subpasses_.data() ) + , dependencyCount( static_cast( dependencies_.size() ) ) + , pDependencies( dependencies_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineRenderingCreateInfo & operator=( PipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RenderPassCreateInfo & operator=( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineRenderingCreateInfo & operator=( VkPipelineRenderingCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassCreateInfo & operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - viewMask = viewMask_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT { - colorAttachmentCount = colorAttachmentCount_; + attachmentCount = attachmentCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & - setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ ) VULKAN_HPP_NOEXCEPT { - pColorAttachmentFormats = pColorAttachmentFormats_; + pAttachments = pAttachments_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineRenderingCreateInfo & setColorAttachmentFormats( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + RenderPassCreateInfo & setAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT { - colorAttachmentCount = static_cast( colorAttachmentFormats_.size() ); - pColorAttachmentFormats = 
colorAttachmentFormats_.data(); + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT { - depthAttachmentFormat = depthAttachmentFormat_; + subpassCount = subpassCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRenderingCreateInfo & - setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ ) VULKAN_HPP_NOEXCEPT { - stencilAttachmentFormat = stencilAttachmentFormat_; + pSubpasses = pSubpasses_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkPipelineRenderingCreateInfo const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPipelineRenderingCreateInfo &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineRenderingCreateInfo const & ) const = default; -#else - bool operator==( PipelineRenderingCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewMask == rhs.viewMask ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && - ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) && ( depthAttachmentFormat == rhs.depthAttachmentFormat ) && - ( stencilAttachmentFormat == rhs.stencilAttachmentFormat ); -# endif - } - - bool operator!=( PipelineRenderingCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRenderingCreateInfo; - const void * pNext = {}; - uint32_t viewMask = {}; - uint32_t colorAttachmentCount = {}; - const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {}; - VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; - }; - - template <> - struct CppType - { - using Type = PipelineRenderingCreateInfo; - }; - using PipelineRenderingCreateInfoKHR = PipelineRenderingCreateInfo; - - struct PipelineRepresentativeFragmentTestStateCreateInfoNV - { - using NativeType = VkPipelineRepresentativeFragmentTestStateCreateInfoNV; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( 
pNext_ ) - , representativeFragmentTestEnable( representativeFragmentTestEnable_ ) - { - } - - VULKAN_HPP_CONSTEXPR - PipelineRepresentativeFragmentTestStateCreateInfoNV( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineRepresentativeFragmentTestStateCreateInfoNV( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineRepresentativeFragmentTestStateCreateInfoNV( *reinterpret_cast( &rhs ) ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo & + setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & subpasses_ ) VULKAN_HPP_NOEXCEPT { + subpassCount = static_cast( subpasses_.size() ); + pSubpasses = subpasses_.data(); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PipelineRepresentativeFragmentTestStateCreateInfoNV & - operator=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + dependencyCount = dependencyCount_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + pDependencies = pDependencies_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV & - setRepresentativeFragmentTestEnable( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo & + setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dependencies_ ) VULKAN_HPP_NOEXCEPT { - representativeFragmentTestEnable = representativeFragmentTestEnable_; + dependencyCount = static_cast( dependencies_.size() ); + pDependencies = dependencies_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + operator VkRenderPassCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, representativeFragmentTestEnable ); + return std::tie( sType, pNext, flags, attachmentCount, pAttachments, subpassCount, pSubpasses, dependencyCount, pDependencies ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineRepresentativeFragmentTestStateCreateInfoNV const & ) const = default; + auto operator<=>( RenderPassCreateInfo const & ) const = default; 
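  // --- Editorial usage sketch (not part of the generated header or of this patch) ---
  // The enhanced-mode ArrayProxyNoTemporaries constructor and set*() overloads shown above
  // derive attachmentCount / subpassCount / dependencyCount from the proxy sizes, so the
  // explicit count setters are only needed when passing raw pointers and counts directly.
  // A minimal sketch, assuming the default `vk` namespace alias and an already created
  // vk::Device named `device` (both are assumptions, not shown in this patch):
  //
  //   std::vector<vk::AttachmentDescription> attachments  = { /* ... */ };
  //   std::vector<vk::SubpassDescription>    subpasses    = { /* ... */ };
  //   std::vector<vk::SubpassDependency>     dependencies = { /* ... */ };
  //
  //   vk::RenderPassCreateInfo createInfo( {}, attachments, subpasses, dependencies );
  //   vk::RenderPass renderPass = device.createRenderPass( createInfo );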
#else - bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( representativeFragmentTestEnable == rhs.representativeFragmentTestEnable ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( attachmentCount == rhs.attachmentCount ) && + ( pAttachments == rhs.pAttachments ) && ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) && + ( dependencyCount == rhs.dependencyCount ) && ( pDependencies == rhs.pDependencies ); # endif } - bool operator!=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments = {}; + uint32_t subpassCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses = {}; + uint32_t dependencyCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineRepresentativeFragmentTestStateCreateInfoNV; + using Type = RenderPassCreateInfo; }; - struct PipelineRobustnessCreateInfoEXT + struct SubpassDescription2 { - using NativeType = VkPipelineRobustnessCreateInfoEXT; + using NativeType = VkSubpassDescription2; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRobustnessCreateInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescription2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineRobustnessCreateInfoEXT( - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT storageBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT uniformBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT vertexInputs_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, - VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT images_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT::eDeviceDefault, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , storageBuffers( storageBuffers_ ) - , uniformBuffers( uniformBuffers_ ) - , vertexInputs( vertexInputs_ ) - , images( images_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + uint32_t viewMask_ = {}, + 
uint32_t inputAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {}, + uint32_t preserveAttachmentCount_ = {}, + const uint32_t * pPreserveAttachments_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , pipelineBindPoint{ pipelineBindPoint_ } + , viewMask{ viewMask_ } + , inputAttachmentCount{ inputAttachmentCount_ } + , pInputAttachments{ pInputAttachments_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachments{ pColorAttachments_ } + , pResolveAttachments{ pResolveAttachments_ } + , pDepthStencilAttachment{ pDepthStencilAttachment_ } + , preserveAttachmentCount{ preserveAttachmentCount_ } + , pPreserveAttachments{ pPreserveAttachments_ } { } - VULKAN_HPP_CONSTEXPR PipelineRobustnessCreateInfoEXT( PipelineRobustnessCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SubpassDescription2( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassDescription2( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDescription2( *reinterpret_cast( &rhs ) ) + { + } - PipelineRobustnessCreateInfoEXT( VkPipelineRobustnessCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineRobustnessCreateInfoEXT( *reinterpret_cast( &rhs ) ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, + uint32_t viewMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & inputAttachments_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & resolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , viewMask( viewMask_ ) + , inputAttachmentCount( static_cast( inputAttachments_.size() ) ) + , pInputAttachments( inputAttachments_.data() ) + , colorAttachmentCount( static_cast( colorAttachments_.size() ) ) + , pColorAttachments( colorAttachments_.data() ) + , pResolveAttachments( resolveAttachments_.data() ) + , pDepthStencilAttachment( pDepthStencilAttachment_ ) + , preserveAttachmentCount( static_cast( preserveAttachments_.size() ) ) + , pPreserveAttachments( preserveAttachments_.data() ) { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) ); +# else + if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::SubpassDescription2::SubpassDescription2: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PipelineRobustnessCreateInfoEXT & operator=( PipelineRobustnessCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + 
SubpassDescription2 & operator=( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineRobustnessCreateInfoEXT & operator=( VkPipelineRobustnessCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SubpassDescription2 & operator=( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT & - setStorageBuffers( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT storageBuffers_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT { - storageBuffers = storageBuffers_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT & - setUniformBuffers( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT uniformBuffers_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT { - uniformBuffers = uniformBuffers_; + pipelineBindPoint = pipelineBindPoint_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT & - setVertexInputs( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT vertexInputs_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT { - vertexInputs = vertexInputs_; + viewMask = viewMask_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT & setImages( VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT images_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT { - images = images_; + inputAttachmentCount = inputAttachmentCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPipelineRobustnessCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & + setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pInputAttachments = pInputAttachments_; + return *this; } - operator VkPipelineRobustnessCreateInfoEXT &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2 & setInputAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + inputAttachmentCount = static_cast( inputAttachments_.size() ); + pInputAttachments = inputAttachments_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, storageBuffers, uniformBuffers, vertexInputs, 
images ); + colorAttachmentCount = colorAttachmentCount_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineRobustnessCreateInfoEXT const & ) const = default; -#else - bool operator==( PipelineRobustnessCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & + setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffers == rhs.storageBuffers ) && ( uniformBuffers == rhs.uniformBuffers ) && - ( vertexInputs == rhs.vertexInputs ) && ( images == rhs.images ); -# endif + pColorAttachments = pColorAttachments_; + return *this; } - bool operator!=( PipelineRobustnessCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2 & setColorAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + colorAttachmentCount = static_cast( colorAttachments_.size() ); + pColorAttachments = colorAttachments_.data(); + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRobustnessCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT storageBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault; - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT uniformBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault; - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT vertexInputs = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault; - VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT images = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT::eDeviceDefault; - }; - - template <> - struct CppType - { - using Type = PipelineRobustnessCreateInfoEXT; - }; - - struct PipelineSampleLocationsStateCreateInfoEXT - { - using NativeType = VkPipelineSampleLocationsStateCreateInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {}, - VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , sampleLocationsEnable( sampleLocationsEnable_ ) - , sampleLocationsInfo( sampleLocationsInfo_ ) + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & + setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT { + pResolveAttachments = pResolveAttachments_; + return *this; } - VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineSampleLocationsStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + 
SubpassDescription2 & setResolveAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT { + colorAttachmentCount = static_cast( resolveAttachments_.size() ); + pResolveAttachments = resolveAttachments_.data(); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PipelineSampleLocationsStateCreateInfoEXT & operator=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PipelineSampleLocationsStateCreateInfoEXT & operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & + setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + pDepthStencilAttachment = pDepthStencilAttachment_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + preserveAttachmentCount = preserveAttachmentCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & - setSampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT { - sampleLocationsEnable = sampleLocationsEnable_; + pPreserveAttachments = pPreserveAttachments_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & - setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassDescription2 & + setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT { - sampleLocationsInfo = sampleLocationsInfo_; + preserveAttachmentCount = static_cast( preserveAttachments_.size() ); + pPreserveAttachments = preserveAttachments_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineSampleLocationsStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkSubpassDescription2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineSampleLocationsStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -74268,1159 +115245,1138 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags const &, + VULKAN_HPP_NAMESPACE::PipelineBindPoint const &, + uint32_t const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &, + uint32_t const &, + const uint32_t * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( 
sType, pNext, sampleLocationsEnable, sampleLocationsInfo ); + return std::tie( sType, + pNext, + flags, + pipelineBindPoint, + viewMask, + inputAttachmentCount, + pInputAttachments, + colorAttachmentCount, + pColorAttachments, + pResolveAttachments, + pDepthStencilAttachment, + preserveAttachmentCount, + pPreserveAttachments ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineSampleLocationsStateCreateInfoEXT const & ) const = default; + auto operator<=>( SubpassDescription2 const & ) const = default; #else - bool operator==( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleLocationsEnable == rhs.sampleLocationsEnable ) && - ( sampleLocationsInfo == rhs.sampleLocationsInfo ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && + ( viewMask == rhs.viewMask ) && ( inputAttachmentCount == rhs.inputAttachmentCount ) && ( pInputAttachments == rhs.pInputAttachments ) && + ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachments == rhs.pColorAttachments ) && + ( pResolveAttachments == rhs.pResolveAttachments ) && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) && + ( preserveAttachmentCount == rhs.preserveAttachmentCount ) && ( pPreserveAttachments == rhs.pPreserveAttachments ); # endif } - bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable = {}; - VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + uint32_t viewMask = {}; + uint32_t inputAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment = {}; + uint32_t preserveAttachmentCount = {}; + const uint32_t * pPreserveAttachments = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineSampleLocationsStateCreateInfoEXT; + using Type = SubpassDescription2; }; - struct PipelineShaderStageModuleIdentifierCreateInfoEXT + using SubpassDescription2KHR = SubpassDescription2; + + struct SubpassDependency2 { - using NativeType = VkPipelineShaderStageModuleIdentifierCreateInfoEXT; + using NativeType = VkSubpassDependency2; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType 
= StructureType::eSubpassDependency2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineShaderStageModuleIdentifierCreateInfoEXT( uint32_t identifierSize_ = {}, - const uint8_t * pIdentifier_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , identifierSize( identifierSize_ ) - , pIdentifier( pIdentifier_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassDependency2( uint32_t srcSubpass_ = {}, + uint32_t dstSubpass_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, + int32_t viewOffset_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcSubpass{ srcSubpass_ } + , dstSubpass{ dstSubpass_ } + , srcStageMask{ srcStageMask_ } + , dstStageMask{ dstStageMask_ } + , srcAccessMask{ srcAccessMask_ } + , dstAccessMask{ dstAccessMask_ } + , dependencyFlags{ dependencyFlags_ } + , viewOffset{ viewOffset_ } { } - VULKAN_HPP_CONSTEXPR - PipelineShaderStageModuleIdentifierCreateInfoEXT( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PipelineShaderStageModuleIdentifierCreateInfoEXT( VkPipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineShaderStageModuleIdentifierCreateInfoEXT( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR SubpassDependency2( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineShaderStageModuleIdentifierCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & identifier_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), identifierSize( static_cast( identifier_.size() ) ), pIdentifier( identifier_.data() ) - { - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + SubpassDependency2( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDependency2( *reinterpret_cast( &rhs ) ) {} - PipelineShaderStageModuleIdentifierCreateInfoEXT & operator=( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SubpassDependency2 & operator=( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineShaderStageModuleIdentifierCreateInfoEXT & operator=( VkPipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SubpassDependency2 & operator=( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setIdentifierSize( uint32_t identifierSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcSubpass( uint32_t srcSubpass_ ) 
VULKAN_HPP_NOEXCEPT { - identifierSize = identifierSize_; + srcSubpass = srcSubpass_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageModuleIdentifierCreateInfoEXT & setPIdentifier( const uint8_t * pIdentifier_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT { - pIdentifier = pIdentifier_; + dstSubpass = dstSubpass_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineShaderStageModuleIdentifierCreateInfoEXT & - setIdentifier( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & identifier_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT { - identifierSize = static_cast( identifier_.size() ); - pIdentifier = identifier_.data(); + srcStageMask = srcStageMask_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkPipelineShaderStageModuleIdentifierCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPipelineShaderStageModuleIdentifierCreateInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, identifierSize, pIdentifier ); - } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineShaderStageModuleIdentifierCreateInfoEXT const & ) const = default; -#else - bool operator==( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( identifierSize == rhs.identifierSize ) && ( pIdentifier == rhs.pIdentifier ); -# endif + dstStageMask = dstStageMask_; + return *this; } - bool operator!=( PipelineShaderStageModuleIdentifierCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + srcAccessMask = srcAccessMask_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT; - const void * pNext = {}; - uint32_t identifierSize = {}; - const uint8_t * pIdentifier = {}; - }; - - template <> - struct CppType - { - using Type = PipelineShaderStageModuleIdentifierCreateInfoEXT; - }; - - struct PipelineShaderStageRequiredSubgroupSizeCreateInfo - { - using NativeType = VkPipelineShaderStageRequiredSubgroupSizeCreateInfo; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfo( uint32_t requiredSubgroupSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , requiredSubgroupSize( requiredSubgroupSize_ ) + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstAccessMask( 
VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT { + dstAccessMask = dstAccessMask_; + return *this; } - VULKAN_HPP_CONSTEXPR - PipelineShaderStageRequiredSubgroupSizeCreateInfo( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PipelineShaderStageRequiredSubgroupSizeCreateInfo( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineShaderStageRequiredSubgroupSizeCreateInfo( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT { + dependencyFlags = dependencyFlags_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PipelineShaderStageRequiredSubgroupSizeCreateInfo & - operator=( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineShaderStageRequiredSubgroupSizeCreateInfo & operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + viewOffset = viewOffset_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkSubpassDependency2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, requiredSubgroupSize ); + return std::tie( sType, pNext, srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags, viewOffset ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & ) const = default; + auto operator<=>( SubpassDependency2 const & ) const = default; #else - bool operator==( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( requiredSubgroupSize == rhs.requiredSubgroupSize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubpass == rhs.srcSubpass ) && ( dstSubpass == rhs.dstSubpass ) && + ( srcStageMask == rhs.srcStageMask ) && ( dstStageMask == rhs.dstStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) && + ( dstAccessMask == rhs.dstAccessMask ) && ( dependencyFlags == rhs.dependencyFlags ) && ( viewOffset == rhs.viewOffset ); # endif } - bool operator!=( PipelineShaderStageRequiredSubgroupSizeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfo; - void * pNext = {}; - uint32_t requiredSubgroupSize = {}; + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2; + const void * pNext = {}; + uint32_t srcSubpass = {}; + uint32_t dstSubpass = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; + int32_t viewOffset = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineShaderStageRequiredSubgroupSizeCreateInfo; + using Type = SubpassDependency2; }; - using PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = PipelineShaderStageRequiredSubgroupSizeCreateInfo; - struct PipelineTessellationDomainOriginStateCreateInfo + using SubpassDependency2KHR = SubpassDependency2; + + struct RenderPassCreateInfo2 { - using NativeType = VkPipelineTessellationDomainOriginStateCreateInfo; + using NativeType = VkRenderPassCreateInfo2; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( - VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , domainOrigin( domainOrigin_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ = {}, + uint32_t subpassCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ = {}, + uint32_t dependencyCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ = {}, + uint32_t correlatedViewMaskCount_ = {}, + const uint32_t * pCorrelatedViewMasks_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , attachmentCount{ attachmentCount_ } + , pAttachments{ pAttachments_ } + , subpassCount{ subpassCount_ } + , pSubpasses{ pSubpasses_ } + , dependencyCount{ dependencyCount_ } + , pDependencies{ pDependencies_ } + , correlatedViewMaskCount{ correlatedViewMaskCount_ } + , pCorrelatedViewMasks{ pCorrelatedViewMasks_ } { } - VULKAN_HPP_CONSTEXPR - PipelineTessellationDomainOriginStateCreateInfo( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineTessellationDomainOriginStateCreateInfo( *reinterpret_cast( &rhs ) ) + RenderPassCreateInfo2( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassCreateInfo2( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineTessellationDomainOriginStateCreateInfo & operator=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( 
VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & subpasses_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dependencies_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlatedViewMasks_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , attachmentCount( static_cast( attachments_.size() ) ) + , pAttachments( attachments_.data() ) + , subpassCount( static_cast( subpasses_.size() ) ) + , pSubpasses( subpasses_.data() ) + , dependencyCount( static_cast( dependencies_.size() ) ) + , pDependencies( dependencies_.data() ) + , correlatedViewMaskCount( static_cast( correlatedViewMasks_.size() ) ) + , pCorrelatedViewMasks( correlatedViewMasks_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PipelineTessellationDomainOriginStateCreateInfo & operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassCreateInfo2 & operator=( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderPassCreateInfo2 & operator=( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo & - setDomainOrigin( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - domainOrigin = domainOrigin_; + flags = flags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPipelineTessellationDomainOriginStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + attachmentCount = attachmentCount_; + return *this; } - operator VkPipelineTessellationDomainOriginStateCreateInfo &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pAttachments = pAttachments_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2 & setAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, domainOrigin ); + attachmentCount = static_cast( attachments_.size() ); + pAttachments = attachments_.data(); + return *this; } -#endif +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( 
PipelineTessellationDomainOriginStateCreateInfo const & ) const = default; -#else - bool operator==( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( domainOrigin == rhs.domainOrigin ); -# endif + subpassCount = subpassCount_; + return *this; } - bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + pSubpasses = pSubpasses_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft; - }; - - template <> - struct CppType - { - using Type = PipelineTessellationDomainOriginStateCreateInfo; - }; - using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo; - - struct VertexInputBindingDivisorDescriptionEXT - { - using NativeType = VkVertexInputBindingDivisorDescriptionEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( uint32_t binding_ = {}, uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT - : binding( binding_ ) - , divisor( divisor_ ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2 & + setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & subpasses_ ) VULKAN_HPP_NOEXCEPT { + subpassCount = static_cast( subpasses_.size() ); + pSubpasses = subpasses_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( VertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = dependencyCount_; + return *this; + } - VertexInputBindingDivisorDescriptionEXT( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VertexInputBindingDivisorDescriptionEXT( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ ) VULKAN_HPP_NOEXCEPT { + pDependencies = pDependencies_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VertexInputBindingDivisorDescriptionEXT & operator=( VertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2 & + setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dependencies_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = static_cast( dependencies_.size() ); + pDependencies = dependencies_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VertexInputBindingDivisorDescriptionEXT & operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setCorrelatedViewMaskCount( 
uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + correlatedViewMaskCount = correlatedViewMaskCount_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescriptionEXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPCorrelatedViewMasks( const uint32_t * pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT { - binding = binding_; + pCorrelatedViewMasks = pCorrelatedViewMasks_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescriptionEXT & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassCreateInfo2 & + setCorrelatedViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlatedViewMasks_ ) VULKAN_HPP_NOEXCEPT { - divisor = divisor_; + correlatedViewMaskCount = static_cast( correlatedViewMasks_.size() ); + pCorrelatedViewMasks = correlatedViewMasks_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVertexInputBindingDivisorDescriptionEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkRenderPassCreateInfo2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVertexInputBindingDivisorDescriptionEXT &() VULKAN_HPP_NOEXCEPT + operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( binding, divisor ); + return std::tie( sType, + pNext, + flags, + attachmentCount, + pAttachments, + subpassCount, + pSubpasses, + dependencyCount, + pDependencies, + correlatedViewMaskCount, + pCorrelatedViewMasks ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VertexInputBindingDivisorDescriptionEXT const & ) const = default; + auto operator<=>( RenderPassCreateInfo2 const & ) const = default; #else - bool operator==( VertexInputBindingDivisorDescriptionEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( binding == rhs.binding ) && ( divisor == rhs.divisor ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( attachmentCount == rhs.attachmentCount ) && + ( pAttachments == rhs.pAttachments ) && ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) && + ( dependencyCount == rhs.dependencyCount ) && ( pDependencies == rhs.pDependencies ) && + ( correlatedViewMaskCount == rhs.correlatedViewMaskCount ) && ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks ); # endif } - bool operator!=( VertexInputBindingDivisorDescriptionEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t binding = {}; - uint32_t divisor = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {}; + uint32_t attachmentCount = {}; + const 
VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments = {}; + uint32_t subpassCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses = {}; + uint32_t dependencyCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies = {}; + uint32_t correlatedViewMaskCount = {}; + const uint32_t * pCorrelatedViewMasks = {}; }; - struct PipelineVertexInputDivisorStateCreateInfoEXT + template <> + struct CppType { - using NativeType = VkPipelineVertexInputDivisorStateCreateInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - PipelineVertexInputDivisorStateCreateInfoEXT( uint32_t vertexBindingDivisorCount_ = {}, - const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , vertexBindingDivisorCount( vertexBindingDivisorCount_ ) - , pVertexBindingDivisors( pVertexBindingDivisors_ ) - { - } - - VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + using Type = RenderPassCreateInfo2; + }; - PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineVertexInputDivisorStateCreateInfoEXT( *reinterpret_cast( &rhs ) ) - { - } + using RenderPassCreateInfo2KHR = RenderPassCreateInfo2; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineVertexInputDivisorStateCreateInfoEXT( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDivisors_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , vertexBindingDivisorCount( static_cast( vertexBindingDivisors_.size() ) ) - , pVertexBindingDivisors( vertexBindingDivisors_.data() ) - { - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + struct RenderPassCreationControlEXT + { + using NativeType = VkRenderPassCreationControlEXT; - PipelineVertexInputDivisorStateCreateInfoEXT & operator=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreationControlEXT; - PipelineVertexInputDivisorStateCreateInfoEXT & operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassCreationControlEXT( VULKAN_HPP_NAMESPACE::Bool32 disallowMerging_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , disallowMerging{ disallowMerging_ } { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR RenderPassCreationControlEXT( RenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassCreationControlEXT( VkRenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassCreationControlEXT( *reinterpret_cast( &rhs ) ) { - pNext = pNext_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 
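// ---------------------------------------------------------------------------------
// Editorial usage sketch (not part of the generated header, and not diff content):
// a minimal example of how the SubpassDescription2, SubpassDependency2 and
// RenderPassCreateInfo2 wrappers defined above are typically combined. It assumes
// the default `vk` alias for VULKAN_HPP_NAMESPACE, that the enhanced-mode
// ArrayProxy setters and unique handles are enabled, and a valid vk::Device; the
// include path, function name and attachment configuration are illustrative only.
#include <vulkan/vulkan.hpp>

inline vk::UniqueRenderPass makeColorOnlyRenderPass2( vk::Device device, vk::Format colorFormat )
{
  // Single color attachment, cleared on load and kept for presentation.
  vk::AttachmentDescription2 color{};
  color.format        = colorFormat;
  color.samples       = vk::SampleCountFlagBits::e1;
  color.loadOp        = vk::AttachmentLoadOp::eClear;
  color.storeOp       = vk::AttachmentStoreOp::eStore;
  color.initialLayout = vk::ImageLayout::eUndefined;
  color.finalLayout   = vk::ImageLayout::ePresentSrcKHR;

  // AttachmentReference2( attachment, layout, aspectMask, pNext )
  vk::AttachmentReference2 colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal );

  vk::SubpassDescription2 subpass{};
  subpass.pipelineBindPoint = vk::PipelineBindPoint::eGraphics;
  subpass.setColorAttachments( colorRef );  // ArrayProxy setter fills colorAttachmentCount and pColorAttachments

  // Wait for the previous use of the attachment before writing color output.
  vk::SubpassDependency2 dependency{};
  dependency.srcSubpass    = VK_SUBPASS_EXTERNAL;
  dependency.dstSubpass    = 0;
  dependency.srcStageMask  = vk::PipelineStageFlagBits::eColorAttachmentOutput;
  dependency.dstStageMask  = vk::PipelineStageFlagBits::eColorAttachmentOutput;
  dependency.dstAccessMask = vk::AccessFlagBits::eColorAttachmentWrite;

  vk::RenderPassCreateInfo2 createInfo{};
  createInfo.setAttachments( color );        // single lvalues are accepted as one-element array proxies
  createInfo.setSubpasses( subpass );
  createInfo.setDependencies( dependency );

  return device.createRenderPass2Unique( createInfo );
}
// ---------------------------------------------------------------------------------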
PipelineVertexInputDivisorStateCreateInfoEXT & - setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ ) VULKAN_HPP_NOEXCEPT + RenderPassCreationControlEXT & operator=( RenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderPassCreationControlEXT & operator=( VkRenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - vertexBindingDivisorCount = vertexBindingDivisorCount_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoEXT & - setPVertexBindingDivisors( const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassCreationControlEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pVertexBindingDivisors = pVertexBindingDivisors_; + pNext = pNext_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineVertexInputDivisorStateCreateInfoEXT & setVertexBindingDivisors( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDivisors_ ) - VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassCreationControlEXT & setDisallowMerging( VULKAN_HPP_NAMESPACE::Bool32 disallowMerging_ ) VULKAN_HPP_NOEXCEPT { - vertexBindingDivisorCount = static_cast( vertexBindingDivisors_.size() ); - pVertexBindingDivisors = vertexBindingDivisors_.data(); + disallowMerging = disallowMerging_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineVertexInputDivisorStateCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkRenderPassCreationControlEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineVertexInputDivisorStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkRenderPassCreationControlEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, vertexBindingDivisorCount, pVertexBindingDivisors ); + return std::tie( sType, pNext, disallowMerging ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineVertexInputDivisorStateCreateInfoEXT const & ) const = default; + auto operator<=>( RenderPassCreationControlEXT const & ) const = default; #else - bool operator==( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassCreationControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount ) && - ( pVertexBindingDivisors == rhs.pVertexBindingDivisors ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( disallowMerging == rhs.disallowMerging ); # endif } - bool operator!=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassCreationControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT; - const void * pNext = {}; - uint32_t vertexBindingDivisorCount = {}; - const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT * pVertexBindingDivisors = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreationControlEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 disallowMerging = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineVertexInputDivisorStateCreateInfoEXT; + using Type = RenderPassCreationControlEXT; }; - struct PipelineViewportCoarseSampleOrderStateCreateInfoNV + struct RenderPassCreationFeedbackInfoEXT { - using NativeType = VkPipelineViewportCoarseSampleOrderStateCreateInfoNV; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( - VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault, - uint32_t customSampleOrderCount_ = {}, - const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , sampleOrderType( sampleOrderType_ ) - , customSampleOrderCount( customSampleOrderCount_ ) - , pCustomSampleOrders( pCustomSampleOrders_ ) - { - } - - VULKAN_HPP_CONSTEXPR - PipelineViewportCoarseSampleOrderStateCreateInfoNV( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PipelineViewportCoarseSampleOrderStateCreateInfoNV( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineViewportCoarseSampleOrderStateCreateInfoNV( *reinterpret_cast( &rhs ) ) - { - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineViewportCoarseSampleOrderStateCreateInfoNV( - VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & customSampleOrders_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , sampleOrderType( sampleOrderType_ ) - , customSampleOrderCount( static_cast( customSampleOrders_.size() ) ) - , pCustomSampleOrders( customSampleOrders_.data() ) - { - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PipelineViewportCoarseSampleOrderStateCreateInfoNV & - operator=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + using NativeType = VkRenderPassCreationFeedbackInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackInfoEXT( uint32_t postMergeSubpassCount_ = {} ) VULKAN_HPP_NOEXCEPT + : postMergeSubpassCount{ postMergeSubpassCount_ } { - pNext = pNext_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & - 
setSampleOrderType( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ ) VULKAN_HPP_NOEXCEPT - { - sampleOrderType = sampleOrderType_; - return *this; - } + VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackInfoEXT( RenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & - setCustomSampleOrderCount( uint32_t customSampleOrderCount_ ) VULKAN_HPP_NOEXCEPT + RenderPassCreationFeedbackInfoEXT( VkRenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassCreationFeedbackInfoEXT( *reinterpret_cast( &rhs ) ) { - customSampleOrderCount = customSampleOrderCount_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & - setPCustomSampleOrders( const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ ) VULKAN_HPP_NOEXCEPT - { - pCustomSampleOrders = pCustomSampleOrders_; - return *this; - } + RenderPassCreationFeedbackInfoEXT & operator=( RenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrders( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & customSampleOrders_ ) VULKAN_HPP_NOEXCEPT + RenderPassCreationFeedbackInfoEXT & operator=( VkRenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - customSampleOrderCount = static_cast( customSampleOrders_.size() ); - pCustomSampleOrders = customSampleOrders_.data(); + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + operator VkRenderPassCreationFeedbackInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + operator VkRenderPassCreationFeedbackInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, sampleOrderType, customSampleOrderCount, pCustomSampleOrders ); + return std::tie( postMergeSubpassCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & ) const = default; + auto operator<=>( RenderPassCreationFeedbackInfoEXT const & ) const = default; #else - bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassCreationFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleOrderType == rhs.sampleOrderType ) && - ( customSampleOrderCount == rhs.customSampleOrderCount ) && ( pCustomSampleOrders == rhs.pCustomSampleOrders ); + return ( postMergeSubpassCount == rhs.postMergeSubpassCount ); # endif } - bool operator!=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassCreationFeedbackInfoEXT const 
& rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault; - uint32_t customSampleOrderCount = {}; - const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders = {}; - }; - - template <> - struct CppType - { - using Type = PipelineViewportCoarseSampleOrderStateCreateInfoNV; + uint32_t postMergeSubpassCount = {}; }; - struct PipelineViewportDepthClipControlCreateInfoEXT + struct RenderPassCreationFeedbackCreateInfoEXT { - using NativeType = VkPipelineViewportDepthClipControlCreateInfoEXT; + using NativeType = VkRenderPassCreationFeedbackCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportDepthClipControlCreateInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreationFeedbackCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineViewportDepthClipControlCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , negativeOneToOne( negativeOneToOne_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pRenderPassFeedback{ pRenderPassFeedback_ } { } - VULKAN_HPP_CONSTEXPR - PipelineViewportDepthClipControlCreateInfoEXT( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackCreateInfoEXT( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineViewportDepthClipControlCreateInfoEXT( VkPipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineViewportDepthClipControlCreateInfoEXT( *reinterpret_cast( &rhs ) ) + RenderPassCreationFeedbackCreateInfoEXT( VkRenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassCreationFeedbackCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineViewportDepthClipControlCreateInfoEXT & operator=( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RenderPassCreationFeedbackCreateInfoEXT & operator=( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineViewportDepthClipControlCreateInfoEXT & operator=( VkPipelineViewportDepthClipControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassCreationFeedbackCreateInfoEXT & operator=( VkRenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClipControlCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + 
VULKAN_HPP_CONSTEXPR_14 RenderPassCreationFeedbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClipControlCreateInfoEXT & - setNegativeOneToOne( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassCreationFeedbackCreateInfoEXT & + setPRenderPassFeedback( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback_ ) VULKAN_HPP_NOEXCEPT { - negativeOneToOne = negativeOneToOne_; + pRenderPassFeedback = pRenderPassFeedback_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineViewportDepthClipControlCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkRenderPassCreationFeedbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineViewportDepthClipControlCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkRenderPassCreationFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, negativeOneToOne ); + return std::tie( sType, pNext, pRenderPassFeedback ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineViewportDepthClipControlCreateInfoEXT const & ) const = default; + auto operator<=>( RenderPassCreationFeedbackCreateInfoEXT const & ) const = default; #else - bool operator==( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( negativeOneToOne == rhs.negativeOneToOne ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pRenderPassFeedback == rhs.pRenderPassFeedback ); # endif } - bool operator!=( PipelineViewportDepthClipControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportDepthClipControlCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreationFeedbackCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineViewportDepthClipControlCreateInfoEXT; + using Type = RenderPassCreationFeedbackCreateInfoEXT; }; - struct PipelineViewportExclusiveScissorStateCreateInfoNV + struct RenderPassFragmentDensityMapCreateInfoEXT { - using NativeType = VkPipelineViewportExclusiveScissorStateCreateInfoNV; + using NativeType = VkRenderPassFragmentDensityMapCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eRenderPassFragmentDensityMapCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( uint32_t exclusiveScissorCount_ = {}, - const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , exclusiveScissorCount( exclusiveScissorCount_ ) - , pExclusiveScissors( pExclusiveScissors_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fragmentDensityMapAttachment{ fragmentDensityMapAttachment_ } { } - VULKAN_HPP_CONSTEXPR - PipelineViewportExclusiveScissorStateCreateInfoNV( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineViewportExclusiveScissorStateCreateInfoNV( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineViewportExclusiveScissorStateCreateInfoNV( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & exclusiveScissors_, const void * pNext_ = nullptr ) - : pNext( pNext_ ), exclusiveScissorCount( static_cast( exclusiveScissors_.size() ) ), pExclusiveScissors( exclusiveScissors_.data() ) + RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassFragmentDensityMapCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineViewportExclusiveScissorStateCreateInfoNV & - operator=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RenderPassFragmentDensityMapCreateInfoEXT & operator=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassFragmentDensityMapCreateInfoEXT & operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissorCount( uint32_t exclusiveScissorCount_ ) VULKAN_HPP_NOEXCEPT - { - exclusiveScissorCount = exclusiveScissorCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & - setPExclusiveScissors( const VULKAN_HPP_NAMESPACE::Rect2D * 
pExclusiveScissors_ ) VULKAN_HPP_NOEXCEPT - { - pExclusiveScissors = pExclusiveScissors_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineViewportExclusiveScissorStateCreateInfoNV & - setExclusiveScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & exclusiveScissors_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT & + setFragmentDensityMapAttachment( VULKAN_HPP_NAMESPACE::AttachmentReference const & fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT { - exclusiveScissorCount = static_cast( exclusiveScissors_.size() ); - pExclusiveScissors = exclusiveScissors_.data(); + fragmentDensityMapAttachment = fragmentDensityMapAttachment_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + operator VkRenderPassFragmentDensityMapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineViewportExclusiveScissorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + operator VkRenderPassFragmentDensityMapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, exclusiveScissorCount, pExclusiveScissors ); + return std::tie( sType, pNext, fragmentDensityMapAttachment ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineViewportExclusiveScissorStateCreateInfoNV const & ) const = default; + auto operator<=>( RenderPassFragmentDensityMapCreateInfoEXT const & ) const = default; #else - bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exclusiveScissorCount == rhs.exclusiveScissorCount ) && - ( pExclusiveScissors == rhs.pExclusiveScissors ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMapAttachment == rhs.fragmentDensityMapAttachment ); # endif } - bool operator!=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV; - const void * pNext = {}; - uint32_t exclusiveScissorCount = {}; - const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineViewportExclusiveScissorStateCreateInfoNV; + using Type = RenderPassFragmentDensityMapCreateInfoEXT; }; - struct ShadingRatePaletteNV + struct RenderPassInputAttachmentAspectCreateInfo { - using NativeType = VkShadingRatePaletteNV; + 
using NativeType = VkRenderPassInputAttachmentAspectCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( uint32_t shadingRatePaletteEntryCount_ = {}, - const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ = {} ) VULKAN_HPP_NOEXCEPT - : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ ) - , pShadingRatePaletteEntries( pShadingRatePaletteEntries_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassInputAttachmentAspectCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( uint32_t aspectReferenceCount_ = {}, + const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , aspectReferenceCount{ aspectReferenceCount_ } + , pAspectReferences{ pAspectReferences_ } { } - VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT - : ShadingRatePaletteNV( *reinterpret_cast( &rhs ) ) + RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassInputAttachmentAspectCreateInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ShadingRatePaletteNV( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shadingRatePaletteEntries_ ) - : shadingRatePaletteEntryCount( static_cast( shadingRatePaletteEntries_.size() ) ) - , pShadingRatePaletteEntries( shadingRatePaletteEntries_.data() ) + RenderPassInputAttachmentAspectCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & aspectReferences_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), aspectReferenceCount( static_cast( aspectReferences_.size() ) ), pAspectReferences( aspectReferences_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ShadingRatePaletteNV & operator=( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RenderPassInputAttachmentAspectCreateInfo & operator=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ShadingRatePaletteNV & operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassInputAttachmentAspectCreateInfo & operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & - setPShadingRatePaletteEntries( 
const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setAspectReferenceCount( uint32_t aspectReferenceCount_ ) VULKAN_HPP_NOEXCEPT { - pShadingRatePaletteEntries = pShadingRatePaletteEntries_; + aspectReferenceCount = aspectReferenceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & + setPAspectReferences( const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ ) VULKAN_HPP_NOEXCEPT + { + pAspectReferences = pAspectReferences_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ShadingRatePaletteNV & setShadingRatePaletteEntries( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shadingRatePaletteEntries_ ) - VULKAN_HPP_NOEXCEPT + RenderPassInputAttachmentAspectCreateInfo & setAspectReferences( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & aspectReferences_ ) VULKAN_HPP_NOEXCEPT { - shadingRatePaletteEntryCount = static_cast( shadingRatePaletteEntries_.size() ); - pShadingRatePaletteEntries = shadingRatePaletteEntries_.data(); + aspectReferenceCount = static_cast( aspectReferences_.size() ); + pAspectReferences = aspectReferences_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkShadingRatePaletteNV const &() const VULKAN_HPP_NOEXCEPT + operator VkRenderPassInputAttachmentAspectCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT + operator VkRenderPassInputAttachmentAspectCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( shadingRatePaletteEntryCount, pShadingRatePaletteEntries ); + return std::tie( sType, pNext, aspectReferenceCount, pAspectReferences ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ShadingRatePaletteNV const & ) const = default; + auto operator<=>( RenderPassInputAttachmentAspectCreateInfo const & ) const = default; #else - bool operator==( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount ) && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aspectReferenceCount == rhs.aspectReferenceCount ) && + ( pAspectReferences == rhs.pAspectReferences ); # endif } - bool operator!=( ShadingRatePaletteNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t shadingRatePaletteEntryCount = {}; - const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo; + const void * pNext = {}; + uint32_t aspectReferenceCount = {}; + const 
VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences = {}; }; - struct PipelineViewportShadingRateImageStateCreateInfoNV + template <> + struct CppType { - using NativeType = VkPipelineViewportShadingRateImageStateCreateInfoNV; + using Type = RenderPassInputAttachmentAspectCreateInfo; + }; + + using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo; + + struct RenderPassMultiviewCreateInfo + { + using NativeType = VkRenderPassMultiviewCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassMultiviewCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {}, - uint32_t viewportCount_ = {}, - const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shadingRateImageEnable( shadingRateImageEnable_ ) - , viewportCount( viewportCount_ ) - , pShadingRatePalettes( pShadingRatePalettes_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( uint32_t subpassCount_ = {}, + const uint32_t * pViewMasks_ = {}, + uint32_t dependencyCount_ = {}, + const int32_t * pViewOffsets_ = {}, + uint32_t correlationMaskCount_ = {}, + const uint32_t * pCorrelationMasks_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , subpassCount{ subpassCount_ } + , pViewMasks{ pViewMasks_ } + , dependencyCount{ dependencyCount_ } + , pViewOffsets{ pViewOffsets_ } + , correlationMaskCount{ correlationMaskCount_ } + , pCorrelationMasks{ pCorrelationMasks_ } { } - VULKAN_HPP_CONSTEXPR - PipelineViewportShadingRateImageStateCreateInfoNV( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineViewportShadingRateImageStateCreateInfoNV( *reinterpret_cast( &rhs ) ) + RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassMultiviewCreateInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineViewportShadingRateImageStateCreateInfoNV( - VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shadingRatePalettes_, - const void * pNext_ = nullptr ) + RenderPassMultiviewCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewMasks_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewOffsets_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlationMasks_ = {}, + const void * pNext_ = nullptr ) : pNext( pNext_ ) - , shadingRateImageEnable( shadingRateImageEnable_ ) - , viewportCount( static_cast( shadingRatePalettes_.size() ) ) - , pShadingRatePalettes( shadingRatePalettes_.data() ) + , subpassCount( static_cast( viewMasks_.size() ) ) + , pViewMasks( viewMasks_.data() ) + , dependencyCount( static_cast( 
viewOffsets_.size() ) ) + , pViewOffsets( viewOffsets_.data() ) + , correlationMaskCount( static_cast( correlationMasks_.size() ) ) + , pCorrelationMasks( correlationMasks_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineViewportShadingRateImageStateCreateInfoNV & - operator=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RenderPassMultiviewCreateInfo & operator=( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineViewportShadingRateImageStateCreateInfoNV & operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassMultiviewCreateInfo & operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & - setShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT { - shadingRateImageEnable = shadingRateImageEnable_; + subpassCount = subpassCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPViewMasks( const uint32_t * pViewMasks_ ) VULKAN_HPP_NOEXCEPT { - viewportCount = viewportCount_; + pViewMasks = pViewMasks_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & - setPShadingRatePalettes( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassMultiviewCreateInfo & setViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewMasks_ ) VULKAN_HPP_NOEXCEPT { - pShadingRatePalettes = pShadingRatePalettes_; + subpassCount = static_cast( viewMasks_.size() ); + pViewMasks = viewMasks_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = dependencyCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPViewOffsets( const int32_t * pViewOffsets_ ) VULKAN_HPP_NOEXCEPT + { + pViewOffsets = pViewOffsets_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRatePalettes( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT + RenderPassMultiviewCreateInfo & setViewOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewOffsets_ ) VULKAN_HPP_NOEXCEPT { - viewportCount = static_cast( shadingRatePalettes_.size() ); - pShadingRatePalettes = shadingRatePalettes_.data(); + 
dependencyCount = static_cast( viewOffsets_.size() ); + pViewOffsets = viewOffsets_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setCorrelationMaskCount( uint32_t correlationMaskCount_ ) VULKAN_HPP_NOEXCEPT + { + correlationMaskCount = correlationMaskCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPCorrelationMasks( const uint32_t * pCorrelationMasks_ ) VULKAN_HPP_NOEXCEPT + { + pCorrelationMasks = pCorrelationMasks_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassMultiviewCreateInfo & + setCorrelationMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlationMasks_ ) VULKAN_HPP_NOEXCEPT + { + correlationMaskCount = static_cast( correlationMasks_.size() ); + pCorrelationMasks = correlationMasks_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineViewportShadingRateImageStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + operator VkRenderPassMultiviewCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineViewportShadingRateImageStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + operator VkRenderPassMultiviewCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -75429,246 +116385,264 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + const uint32_t * const &, + uint32_t const &, + const int32_t * const &, + uint32_t const &, + const uint32_t * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, shadingRateImageEnable, viewportCount, pShadingRatePalettes ); + return std::tie( sType, pNext, subpassCount, pViewMasks, dependencyCount, pViewOffsets, correlationMaskCount, pCorrelationMasks ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineViewportShadingRateImageStateCreateInfoNV const & ) const = default; + auto operator<=>( RenderPassMultiviewCreateInfo const & ) const = default; #else - bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shadingRateImageEnable == rhs.shadingRateImageEnable ) && - ( viewportCount == rhs.viewportCount ) && ( pShadingRatePalettes == rhs.pShadingRatePalettes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassCount == rhs.subpassCount ) && ( pViewMasks == rhs.pViewMasks ) && + ( dependencyCount == rhs.dependencyCount ) && ( pViewOffsets == rhs.pViewOffsets ) && ( correlationMaskCount == rhs.correlationMaskCount ) && + ( pCorrelationMasks == rhs.pCorrelationMasks ); # endif } - bool operator!=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable = 
{}; - uint32_t viewportCount = {}; - const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo; + const void * pNext = {}; + uint32_t subpassCount = {}; + const uint32_t * pViewMasks = {}; + uint32_t dependencyCount = {}; + const int32_t * pViewOffsets = {}; + uint32_t correlationMaskCount = {}; + const uint32_t * pCorrelationMasks = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineViewportShadingRateImageStateCreateInfoNV; + using Type = RenderPassMultiviewCreateInfo; }; - struct ViewportSwizzleNV + using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo; + + struct SubpassSampleLocationsEXT { - using NativeType = VkViewportSwizzleNV; + using NativeType = VkSubpassSampleLocationsEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( - VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, - VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, - VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, - VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX ) VULKAN_HPP_NOEXCEPT - : x( x_ ) - , y( y_ ) - , z( z_ ) - , w( w_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( uint32_t subpassIndex_ = {}, + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : subpassIndex{ subpassIndex_ } + , sampleLocationsInfo{ sampleLocationsInfo_ } { } - VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT : ViewportSwizzleNV( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - ViewportSwizzleNV & operator=( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ViewportSwizzleNV & operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT + SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassSampleLocationsEXT( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setX( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ ) VULKAN_HPP_NOEXCEPT - { - x = x_; - return *this; - } + SubpassSampleLocationsEXT & operator=( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setY( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ ) VULKAN_HPP_NOEXCEPT + SubpassSampleLocationsEXT & operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - y = y_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setZ( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT & 
setSubpassIndex( uint32_t subpassIndex_ ) VULKAN_HPP_NOEXCEPT { - z = z_; + subpassIndex = subpassIndex_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & setW( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT & + setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT { - w = w_; + sampleLocationsInfo = sampleLocationsInfo_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkViewportSwizzleNV const &() const VULKAN_HPP_NOEXCEPT + operator VkSubpassSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkViewportSwizzleNV &() VULKAN_HPP_NOEXCEPT + operator VkSubpassSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( x, y, z, w ); + return std::tie( subpassIndex, sampleLocationsInfo ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ViewportSwizzleNV const & ) const = default; + auto operator<=>( SubpassSampleLocationsEXT const & ) const = default; #else - bool operator==( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z ) && ( w == rhs.w ); + return ( subpassIndex == rhs.subpassIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); # endif } - bool operator!=( ViewportSwizzleNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; - VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; - VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; - VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX; + uint32_t subpassIndex = {}; + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; }; - struct PipelineViewportSwizzleStateCreateInfoNV + struct RenderPassSampleLocationsBeginInfoEXT { - using NativeType = VkPipelineViewportSwizzleStateCreateInfoNV; + using NativeType = VkRenderPassSampleLocationsBeginInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassSampleLocationsBeginInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {}, - uint32_t viewportCount_ = {}, - const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ 
) - , flags( flags_ ) - , viewportCount( viewportCount_ ) - , pViewportSwizzles( pViewportSwizzles_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RenderPassSampleLocationsBeginInfoEXT( uint32_t attachmentInitialSampleLocationsCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ = {}, + uint32_t postSubpassSampleLocationsCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , attachmentInitialSampleLocationsCount{ attachmentInitialSampleLocationsCount_ } + , pAttachmentInitialSampleLocations{ pAttachmentInitialSampleLocations_ } + , postSubpassSampleLocationsCount{ postSubpassSampleLocationsCount_ } + , pPostSubpassSampleLocations{ pPostSubpassSampleLocations_ } { } - VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineViewportSwizzleStateCreateInfoNV( *reinterpret_cast( &rhs ) ) + RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassSampleLocationsBeginInfoEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineViewportSwizzleStateCreateInfoNV( - VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewportSwizzles_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), flags( flags_ ), viewportCount( static_cast( viewportSwizzles_.size() ) ), pViewportSwizzles( viewportSwizzles_.data() ) + RenderPassSampleLocationsBeginInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachmentInitialSampleLocations_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & postSubpassSampleLocations_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , attachmentInitialSampleLocationsCount( static_cast( attachmentInitialSampleLocations_.size() ) ) + , pAttachmentInitialSampleLocations( attachmentInitialSampleLocations_.data() ) + , postSubpassSampleLocationsCount( static_cast( postSubpassSampleLocations_.size() ) ) + , pPostSubpassSampleLocations( postSubpassSampleLocations_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineViewportSwizzleStateCreateInfoNV & operator=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RenderPassSampleLocationsBeginInfoEXT & operator=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineViewportSwizzleStateCreateInfoNV & operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassSampleLocationsBeginInfoEXT & operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setPNext( const void * 
pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & - setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & + setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & + setPAttachmentInitialSampleLocations( const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT { - viewportCount = viewportCount_; + pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & - setPViewportSwizzles( const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocations( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachmentInitialSampleLocations_ ) + VULKAN_HPP_NOEXCEPT { - pViewportSwizzles = pViewportSwizzles_; + attachmentInitialSampleLocationsCount = static_cast( attachmentInitialSampleLocations_.size() ); + pAttachmentInitialSampleLocations = attachmentInitialSampleLocations_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & + setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT + { + postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & + setPPostSubpassSampleLocations( const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + pPostSubpassSampleLocations = pPostSubpassSampleLocations_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineViewportSwizzleStateCreateInfoNV & setViewportSwizzles( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewportSwizzles_ ) VULKAN_HPP_NOEXCEPT + RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocations( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & postSubpassSampleLocations_ ) + VULKAN_HPP_NOEXCEPT { - viewportCount = static_cast( viewportSwizzles_.size() ); - pViewportSwizzles = viewportSwizzles_.data(); + postSubpassSampleLocationsCount = static_cast( postSubpassSampleLocations_.size() ); + pPostSubpassSampleLocations = postSubpassSampleLocations_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineViewportSwizzleStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + operator VkRenderPassSampleLocationsBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator 
VkPipelineViewportSwizzleStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + operator VkRenderPassSampleLocationsBeginInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -75677,1511 +116651,1634 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, viewportCount, pViewportSwizzles ); + return std::tie( + sType, pNext, attachmentInitialSampleLocationsCount, pAttachmentInitialSampleLocations, postSubpassSampleLocationsCount, pPostSubpassSampleLocations ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineViewportSwizzleStateCreateInfoNV const & ) const = default; + auto operator<=>( RenderPassSampleLocationsBeginInfoEXT const & ) const = default; #else - bool operator==( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewportCount == rhs.viewportCount ) && - ( pViewportSwizzles == rhs.pViewportSwizzles ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount ) && + ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations ) && + ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount ) && ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations ); # endif } - bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags = {}; - uint32_t viewportCount = {}; - const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT; + const void * pNext = {}; + uint32_t attachmentInitialSampleLocationsCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations = {}; + uint32_t postSubpassSampleLocationsCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineViewportSwizzleStateCreateInfoNV; + using Type = RenderPassSampleLocationsBeginInfoEXT; }; - struct ViewportWScalingNV + struct RenderPassStripeInfoARM { - using NativeType = VkViewportWScalingNV; + using NativeType = VkRenderPassStripeInfoARM; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ViewportWScalingNV( float xcoeff_ = {}, float ycoeff_ = {} ) VULKAN_HPP_NOEXCEPT - : xcoeff( xcoeff_ ) - , ycoeff( ycoeff_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassStripeInfoARM; + +#if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassStripeInfoARM( VULKAN_HPP_NAMESPACE::Rect2D stripeArea_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stripeArea{ stripeArea_ } { } - VULKAN_HPP_CONSTEXPR ViewportWScalingNV( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RenderPassStripeInfoARM( RenderPassStripeInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT : ViewportWScalingNV( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + RenderPassStripeInfoARM( VkRenderPassStripeInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassStripeInfoARM( *reinterpret_cast( &rhs ) ) + { + } - ViewportWScalingNV & operator=( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RenderPassStripeInfoARM & operator=( RenderPassStripeInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ViewportWScalingNV & operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassStripeInfoARM & operator=( VkRenderPassStripeInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassStripeInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - xcoeff = xcoeff_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassStripeInfoARM & setStripeArea( VULKAN_HPP_NAMESPACE::Rect2D const & stripeArea_ ) VULKAN_HPP_NOEXCEPT { - ycoeff = ycoeff_; + stripeArea = stripeArea_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkViewportWScalingNV const &() const VULKAN_HPP_NOEXCEPT + operator VkRenderPassStripeInfoARM const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT + operator VkRenderPassStripeInfoARM &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( xcoeff, ycoeff ); + return std::tie( sType, pNext, stripeArea ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ViewportWScalingNV const & ) const = default; + auto operator<=>( RenderPassStripeInfoARM const & ) const = default; #else - bool operator==( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassStripeInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( xcoeff == rhs.xcoeff ) && ( ycoeff == rhs.ycoeff ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stripeArea == rhs.stripeArea ); # endif } - bool operator!=( ViewportWScalingNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassStripeInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return 
!operator==( rhs ); } #endif public: - float xcoeff = {}; - float ycoeff = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassStripeInfoARM; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Rect2D stripeArea = {}; }; - struct PipelineViewportWScalingStateCreateInfoNV + template <> + struct CppType { - using NativeType = VkPipelineViewportWScalingStateCreateInfoNV; + using Type = RenderPassStripeInfoARM; + }; + + struct RenderPassStripeBeginInfoARM + { + using NativeType = VkRenderPassStripeBeginInfoARM; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportWScalingStateCreateInfoNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassStripeBeginInfoARM; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = {}, - uint32_t viewportCount_ = {}, - const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , viewportWScalingEnable( viewportWScalingEnable_ ) - , viewportCount( viewportCount_ ) - , pViewportWScalings( pViewportWScalings_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassStripeBeginInfoARM( uint32_t stripeInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM * pStripeInfos_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stripeInfoCount{ stripeInfoCount_ } + , pStripeInfos{ pStripeInfos_ } { } - VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RenderPassStripeBeginInfoARM( RenderPassStripeBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineViewportWScalingStateCreateInfoNV( *reinterpret_cast( &rhs ) ) + RenderPassStripeBeginInfoARM( VkRenderPassStripeBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassStripeBeginInfoARM( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineViewportWScalingStateCreateInfoNV( - VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewportWScalings_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , viewportWScalingEnable( viewportWScalingEnable_ ) - , viewportCount( static_cast( viewportWScalings_.size() ) ) - , pViewportWScalings( viewportWScalings_.data() ) + RenderPassStripeBeginInfoARM( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stripeInfos_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), stripeInfoCount( static_cast( stripeInfos_.size() ) ), pStripeInfos( stripeInfos_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineViewportWScalingStateCreateInfoNV & operator=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RenderPassStripeBeginInfoARM & operator=( RenderPassStripeBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PipelineViewportWScalingStateCreateInfoNV & operator=( VkPipelineViewportWScalingStateCreateInfoNV 
const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassStripeBeginInfoARM & operator=( VkRenderPassStripeBeginInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassStripeBeginInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & - setViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT - { - viewportWScalingEnable = viewportWScalingEnable_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassStripeBeginInfoARM & setStripeInfoCount( uint32_t stripeInfoCount_ ) VULKAN_HPP_NOEXCEPT { - viewportCount = viewportCount_; + stripeInfoCount = stripeInfoCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & - setPViewportWScalings( const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassStripeBeginInfoARM & + setPStripeInfos( const VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM * pStripeInfos_ ) VULKAN_HPP_NOEXCEPT { - pViewportWScalings = pViewportWScalings_; + pStripeInfos = pStripeInfos_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineViewportWScalingStateCreateInfoNV & setViewportWScalings( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewportWScalings_ ) VULKAN_HPP_NOEXCEPT + RenderPassStripeBeginInfoARM & setStripeInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stripeInfos_ ) VULKAN_HPP_NOEXCEPT { - viewportCount = static_cast( viewportWScalings_.size() ); - pViewportWScalings = viewportWScalings_.data(); + stripeInfoCount = static_cast( stripeInfos_.size() ); + pStripeInfos = stripeInfos_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPipelineViewportWScalingStateCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + operator VkRenderPassStripeBeginInfoARM const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineViewportWScalingStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + operator VkRenderPassStripeBeginInfoARM &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std:: + tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, viewportWScalingEnable, viewportCount, pViewportWScalings ); + return std::tie( sType, pNext, stripeInfoCount, pStripeInfos ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineViewportWScalingStateCreateInfoNV const & ) const = default; + auto operator<=>( RenderPassStripeBeginInfoARM const & ) const = default; #else - bool operator==( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassStripeBeginInfoARM const & rhs ) 
const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewportWScalingEnable == rhs.viewportWScalingEnable ) && - ( viewportCount == rhs.viewportCount ) && ( pViewportWScalings == rhs.pViewportWScalings ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stripeInfoCount == rhs.stripeInfoCount ) && ( pStripeInfos == rhs.pStripeInfos ); # endif } - bool operator!=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassStripeBeginInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable = {}; - uint32_t viewportCount = {}; - const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassStripeBeginInfoARM; + const void * pNext = {}; + uint32_t stripeInfoCount = {}; + const VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM * pStripeInfos = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineViewportWScalingStateCreateInfoNV; + using Type = RenderPassStripeBeginInfoARM; }; -#if defined( VK_USE_PLATFORM_GGP ) - struct PresentFrameTokenGGP + struct SemaphoreSubmitInfo { - using NativeType = VkPresentFrameTokenGGP; + using NativeType = VkSemaphoreSubmitInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentFrameTokenGGP; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSubmitInfo; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( GgpFrameToken frameToken_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , frameToken( frameToken_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + uint64_t value_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ = {}, + uint32_t deviceIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , value{ value_ } + , stageMask{ stageMask_ } + , deviceIndex{ deviceIndex_ } { } - VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo( SemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PresentFrameTokenGGP( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT - : PresentFrameTokenGGP( *reinterpret_cast( &rhs ) ) + SemaphoreSubmitInfo( VkSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreSubmitInfo( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PresentFrameTokenGGP & operator=( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SemaphoreSubmitInfo & operator=( SemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PresentFrameTokenGGP & operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT + SemaphoreSubmitInfo & operator=( VkSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( 
&rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP & setFrameToken( GgpFrameToken frameToken_ ) VULKAN_HPP_NOEXCEPT - { - frameToken = frameToken_; - return *this; - } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkPresentFrameTokenGGP const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPresentFrameTokenGGP &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, frameToken ); - } -# endif - -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - std::strong_ordering operator<=>( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT - { - if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) - return cmp; - if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) - return cmp; - if ( auto cmp = memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ); cmp != 0 ) - return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; - - return std::strong_ordering::equivalent; - } -# endif - - bool operator==( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ) == 0 ); - } - - bool operator!=( PresentFrameTokenGGP const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentFrameTokenGGP; - const void * pNext = {}; - GgpFrameToken frameToken = {}; - }; - - template <> - struct CppType - { - using Type = PresentFrameTokenGGP; - }; -#endif /*VK_USE_PLATFORM_GGP*/ - - struct PresentIdKHR - { - using NativeType = VkPresentIdKHR; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentIdKHR; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PresentIdKHR( uint32_t swapchainCount_ = {}, const uint64_t * pPresentIds_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , swapchainCount( swapchainCount_ ) - , pPresentIds( pPresentIds_ ) - { - } - - VULKAN_HPP_CONSTEXPR PresentIdKHR( PresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PresentIdKHR( VkPresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentIdKHR( *reinterpret_cast( &rhs ) ) {} - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PresentIdKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentIds_, const void * pNext_ = nullptr ) - : pNext( pNext_ ), swapchainCount( static_cast( presentIds_.size() ) ), pPresentIds( presentIds_.data() ) - { - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PresentIdKHR & operator=( PresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PresentIdKHR & operator=( VkPresentIdKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + semaphore = semaphore_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT { - swapchainCount = swapchainCount_; + value = value_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PresentIdKHR & setPPresentIds( const uint64_t * pPresentIds_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ ) VULKAN_HPP_NOEXCEPT { - pPresentIds = pPresentIds_; + stageMask = stageMask_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PresentIdKHR & setPresentIds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentIds_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setDeviceIndex( uint32_t deviceIndex_ ) VULKAN_HPP_NOEXCEPT { - swapchainCount = static_cast( presentIds_.size() ); - pPresentIds = presentIds_.data(); + deviceIndex = deviceIndex_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPresentIdKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPresentIdKHR &() VULKAN_HPP_NOEXCEPT + operator VkSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, swapchainCount, pPresentIds ); + return std::tie( sType, pNext, semaphore, value, stageMask, deviceIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PresentIdKHR const & ) const = default; + auto operator<=>( SemaphoreSubmitInfo const & ) const = default; #else - bool operator==( PresentIdKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pPresentIds == rhs.pPresentIds ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( value == rhs.value ) && ( stageMask == rhs.stageMask ) && + ( deviceIndex == rhs.deviceIndex ); # endif } - bool operator!=( PresentIdKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentIdKHR; - const void * pNext = {}; - uint32_t swapchainCount = {}; - const uint64_t * pPresentIds = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSubmitInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + uint64_t value = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask = {}; + uint32_t deviceIndex = {}; }; 
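  // A minimal usage sketch for the struct above, assuming an application-created timeline
  // semaphore (timelineSemaphore), a vk::Queue (queue), and the enhanced-mode ArrayProxy
  // setters; these names and the signal value 1 are illustrative, not required by the API.
  //
  //   VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo signalInfo;
  //   signalInfo.setSemaphore( timelineSemaphore )
  //             .setValue( 1 )
  //             .setStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits2::eAllCommands );
  //
  //   VULKAN_HPP_NAMESPACE::SubmitInfo2 submit;
  //   submit.setSignalSemaphoreInfos( signalInfo );  // single element accepted via ArrayProxyNoTemporaries
  //   queue.submit2( submit );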
template <> - struct CppType + struct CppType { - using Type = PresentIdKHR; + using Type = SemaphoreSubmitInfo; }; - struct PresentInfoKHR + using SemaphoreSubmitInfoKHR = SemaphoreSubmitInfo; + + struct RenderPassStripeSubmitInfoARM { - using NativeType = VkPresentInfoKHR; + using NativeType = VkRenderPassStripeSubmitInfoARM; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassStripeSubmitInfoARM; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PresentInfoKHR( uint32_t waitSemaphoreCount_ = {}, - const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, - uint32_t swapchainCount_ = {}, - const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ = {}, - const uint32_t * pImageIndices_ = {}, - VULKAN_HPP_NAMESPACE::Result * pResults_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , waitSemaphoreCount( waitSemaphoreCount_ ) - , pWaitSemaphores( pWaitSemaphores_ ) - , swapchainCount( swapchainCount_ ) - , pSwapchains( pSwapchains_ ) - , pImageIndices( pImageIndices_ ) - , pResults( pResults_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassStripeSubmitInfoARM( uint32_t stripeSemaphoreInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pStripeSemaphoreInfos_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stripeSemaphoreInfoCount{ stripeSemaphoreInfoCount_ } + , pStripeSemaphoreInfos{ pStripeSemaphoreInfos_ } { } - VULKAN_HPP_CONSTEXPR PresentInfoKHR( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RenderPassStripeSubmitInfoARM( RenderPassStripeSubmitInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentInfoKHR( *reinterpret_cast( &rhs ) ) {} + RenderPassStripeSubmitInfoARM( VkRenderPassStripeSubmitInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassStripeSubmitInfoARM( *reinterpret_cast( &rhs ) ) + { + } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & swapchains_ = {}, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageIndices_ = {}, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & results_ = {}, - const void * pNext_ = nullptr ) + RenderPassStripeSubmitInfoARM( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stripeSemaphoreInfos_, + const void * pNext_ = nullptr ) : pNext( pNext_ ) - , waitSemaphoreCount( static_cast( waitSemaphores_.size() ) ) - , pWaitSemaphores( waitSemaphores_.data() ) - , swapchainCount( static_cast( swapchains_.size() ) ) - , pSwapchains( swapchains_.data() ) - , pImageIndices( imageIndices_.data() ) - , pResults( results_.data() ) + , stripeSemaphoreInfoCount( static_cast( stripeSemaphoreInfos_.size() ) ) + , pStripeSemaphoreInfos( stripeSemaphoreInfos_.data() ) { -# ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( swapchains_.size() == imageIndices_.size() ); - VULKAN_HPP_ASSERT( results_.empty() || ( swapchains_.size() == results_.size() ) ); - VULKAN_HPP_ASSERT( results_.empty() || ( imageIndices_.size() == results_.size() ) ); -# else - if ( swapchains_.size() != imageIndices_.size() ) - { - throw 
LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: swapchains_.size() != imageIndices_.size()" ); - } - if ( !results_.empty() && ( swapchains_.size() != results_.size() ) ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( swapchains_.size() != results_.size() )" ); - } - if ( !results_.empty() && ( imageIndices_.size() != results_.size() ) ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( imageIndices_.size() != results_.size() )" ); - } -# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PresentInfoKHR & operator=( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RenderPassStripeSubmitInfoARM & operator=( RenderPassStripeSubmitInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PresentInfoKHR & operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassStripeSubmitInfoARM & operator=( VkRenderPassStripeSubmitInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassStripeSubmitInfoARM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassStripeSubmitInfoARM & setStripeSemaphoreInfoCount( uint32_t stripeSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT { - waitSemaphoreCount = waitSemaphoreCount_; + stripeSemaphoreInfoCount = stripeSemaphoreInfoCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassStripeSubmitInfoARM & + setPStripeSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pStripeSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT { - pWaitSemaphores = pWaitSemaphores_; + pStripeSemaphoreInfos = pStripeSemaphoreInfos_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PresentInfoKHR & - setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT + RenderPassStripeSubmitInfoARM & setStripeSemaphoreInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stripeSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT { - waitSemaphoreCount = static_cast( waitSemaphores_.size() ); - pWaitSemaphores = waitSemaphores_.data(); + stripeSemaphoreInfoCount = static_cast( stripeSemaphoreInfos_.size() ); + pStripeSemaphoreInfos = stripeSemaphoreInfos_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + operator VkRenderPassStripeSubmitInfoARM const &() const VULKAN_HPP_NOEXCEPT { - swapchainCount = swapchainCount_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains_ ) VULKAN_HPP_NOEXCEPT + operator 
VkRenderPassStripeSubmitInfoARM &() VULKAN_HPP_NOEXCEPT { - pSwapchains = pSwapchains_; - return *this; + return *reinterpret_cast( this ); } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PresentInfoKHR & - setSwapchains( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & swapchains_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - swapchainCount = static_cast( swapchains_.size() ); - pSwapchains = swapchains_.data(); - return *this; + return std::tie( sType, pNext, stripeSemaphoreInfoCount, pStripeSemaphoreInfos ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif - VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPImageIndices( const uint32_t * pImageIndices_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderPassStripeSubmitInfoARM const & ) const = default; +#else + bool operator==( RenderPassStripeSubmitInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { - pImageIndices = pImageIndices_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stripeSemaphoreInfoCount == rhs.stripeSemaphoreInfoCount ) && + ( pStripeSemaphoreInfos == rhs.pStripeSemaphoreInfos ); +# endif } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PresentInfoKHR & setImageIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageIndices_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassStripeSubmitInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { - swapchainCount = static_cast( imageIndices_.size() ); - pImageIndices = imageIndices_.data(); - return *this; + return !operator==( rhs ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif - VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & setPResults( VULKAN_HPP_NAMESPACE::Result * pResults_ ) VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassStripeSubmitInfoARM; + const void * pNext = {}; + uint32_t stripeSemaphoreInfoCount = {}; + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pStripeSemaphoreInfos = {}; + }; + + template <> + struct CppType + { + using Type = RenderPassStripeSubmitInfoARM; + }; + + struct RenderPassSubpassFeedbackInfoEXT + { + using NativeType = VkRenderPassSubpassFeedbackInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 + RenderPassSubpassFeedbackInfoEXT( VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT subpassMergeStatus_ = VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT::eMerged, + std::array const & description_ = {}, + uint32_t postMergeIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : subpassMergeStatus{ subpassMergeStatus_ } + , description{ description_ } + , postMergeIndex{ postMergeIndex_ } { - pResults = pResults_; - return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PresentInfoKHR & setResults( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & results_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackInfoEXT( RenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderPassSubpassFeedbackInfoEXT( VkRenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassSubpassFeedbackInfoEXT( *reinterpret_cast( &rhs ) ) { - swapchainCount = static_cast( results_.size() ); - pResults = results_.data(); + } + + 
RenderPassSubpassFeedbackInfoEXT & operator=( RenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderPassSubpassFeedbackInfoEXT & operator=( VkRenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkRenderPassSubpassFeedbackInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkRenderPassSubpassFeedbackInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std:: + tuple const &, uint32_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, waitSemaphoreCount, pWaitSemaphores, swapchainCount, pSwapchains, pImageIndices, pResults ); + return std::tie( subpassMergeStatus, description, postMergeIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PresentInfoKHR const & ) const = default; -#else - bool operator==( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + std::strong_ordering operator<=>( RenderPassSubpassFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && - ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( swapchainCount == rhs.swapchainCount ) && ( pSwapchains == rhs.pSwapchains ) && - ( pImageIndices == rhs.pImageIndices ) && ( pResults == rhs.pResults ); -# endif + if ( auto cmp = subpassMergeStatus <=> rhs.subpassMergeStatus; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = postMergeIndex <=> rhs.postMergeIndex; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; } +#endif - bool operator!=( PresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassSubpassFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( subpassMergeStatus == rhs.subpassMergeStatus ) && ( strcmp( description, rhs.description ) == 0 ) && ( postMergeIndex == rhs.postMergeIndex ); + } + + bool operator!=( RenderPassSubpassFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentInfoKHR; - const void * pNext = {}; - uint32_t waitSemaphoreCount = {}; - const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {}; - uint32_t swapchainCount = {}; - const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains = {}; - const uint32_t * pImageIndices = {}; - VULKAN_HPP_NAMESPACE::Result * pResults = {}; + VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT subpassMergeStatus = VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT::eMerged; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; + uint32_t postMergeIndex = {}; }; - template <> - struct CppType + struct RenderPassSubpassFeedbackCreateInfoEXT { - using Type = PresentInfoKHR; - }; + using NativeType = VkRenderPassSubpassFeedbackCreateInfoEXT; - struct RectLayerKHR - { - using NativeType = VkRectLayerKHR; + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassSubpassFeedbackCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - RectLayerKHR( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}, uint32_t layer_ = {} ) VULKAN_HPP_NOEXCEPT - : offset( offset_ ) - , extent( extent_ ) - , layer( layer_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pSubpassFeedback{ pSubpassFeedback_ } { } - VULKAN_HPP_CONSTEXPR RectLayerKHR( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT : RectLayerKHR( *reinterpret_cast( &rhs ) ) {} - - explicit RectLayerKHR( Rect2D const & rect2D, uint32_t layer_ = {} ) : offset( rect2D.offset ), extent( rect2D.extent ), layer( layer_ ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - RectLayerKHR & operator=( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - RectLayerKHR & operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassSubpassFeedbackCreateInfoEXT( VkRenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassSubpassFeedbackCreateInfoEXT( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT + RenderPassSubpassFeedbackCreateInfoEXT & operator=( RenderPassSubpassFeedbackCreateInfoEXT const & 
rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderPassSubpassFeedbackCreateInfoEXT & operator=( VkRenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - offset = offset_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - extent = extent_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & setLayer( uint32_t layer_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT & + setPSubpassFeedback( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback_ ) VULKAN_HPP_NOEXCEPT { - layer = layer_; + pSubpassFeedback = pSubpassFeedback_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkRectLayerKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkRenderPassSubpassFeedbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkRectLayerKHR &() VULKAN_HPP_NOEXCEPT + operator VkRenderPassSubpassFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( offset, extent, layer ); + return std::tie( sType, pNext, pSubpassFeedback ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RectLayerKHR const & ) const = default; + auto operator<=>( RenderPassSubpassFeedbackCreateInfoEXT const & ) const = default; #else - bool operator==( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( offset == rhs.offset ) && ( extent == rhs.extent ) && ( layer == rhs.layer ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pSubpassFeedback == rhs.pSubpassFeedback ); # endif } - bool operator!=( RectLayerKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::Offset2D offset = {}; - VULKAN_HPP_NAMESPACE::Extent2D extent = {}; - uint32_t layer = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSubpassFeedbackCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback = {}; }; - struct PresentRegionKHR + template <> + struct CppType { - using NativeType = VkPresentRegionKHR; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PresentRegionKHR( uint32_t rectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ = {} ) VULKAN_HPP_NOEXCEPT - : rectangleCount( rectangleCount_ ) - , pRectangles( pRectangles_ ) - { - } + using Type = RenderPassSubpassFeedbackCreateInfoEXT; + }; - VULKAN_HPP_CONSTEXPR PresentRegionKHR( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + struct 
RenderPassTransformBeginInfoQCOM + { + using NativeType = VkRenderPassTransformBeginInfoQCOM; - PresentRegionKHR( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentRegionKHR( *reinterpret_cast( &rhs ) ) {} + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassTransformBeginInfoQCOM; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PresentRegionKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & rectangles_ ) - : rectangleCount( static_cast( rectangles_.size() ) ), pRectangles( rectangles_.data() ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , transform{ transform_ } { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PresentRegionKHR & operator=( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PresentRegionKHR & operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + RenderPassTransformBeginInfoQCOM( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderPassTransformBeginInfoQCOM( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & setRectangleCount( uint32_t rectangleCount_ ) VULKAN_HPP_NOEXCEPT + RenderPassTransformBeginInfoQCOM & operator=( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + RenderPassTransformBeginInfoQCOM & operator=( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { - rectangleCount = rectangleCount_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & setPRectangles( const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pRectangles = pRectangles_; + pNext = pNext_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PresentRegionKHR & - setRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & rectangles_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT { - rectangleCount = static_cast( rectangles_.size() ); - pRectangles = rectangles_.data(); + transform = transform_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPresentRegionKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkRenderPassTransformBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPresentRegionKHR &() VULKAN_HPP_NOEXCEPT + operator VkRenderPassTransformBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( 
this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( rectangleCount, pRectangles ); + return std::tie( sType, pNext, transform ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PresentRegionKHR const & ) const = default; + auto operator<=>( RenderPassTransformBeginInfoQCOM const & ) const = default; #else - bool operator==( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( rectangleCount == rhs.rectangleCount ) && ( pRectangles == rhs.pRectangles ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ); # endif } - bool operator!=( PresentRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t rectangleCount = {}; - const VULKAN_HPP_NAMESPACE::RectLayerKHR * pRectangles = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassTransformBeginInfoQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; }; - struct PresentRegionsKHR + template <> + struct CppType { - using NativeType = VkPresentRegionsKHR; + using Type = RenderPassTransformBeginInfoQCOM; + }; + + struct RenderingAreaInfo + { + using NativeType = VkRenderingAreaInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentRegionsKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingAreaInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PresentRegionsKHR( uint32_t swapchainCount_ = {}, - const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , swapchainCount( swapchainCount_ ) - , pRegions( pRegions_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderingAreaInfo( uint32_t viewMask_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , viewMask{ viewMask_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachmentFormats{ pColorAttachmentFormats_ } + , depthAttachmentFormat{ depthAttachmentFormat_ } + , stencilAttachmentFormat{ stencilAttachmentFormat_ } { } - VULKAN_HPP_CONSTEXPR PresentRegionsKHR( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RenderingAreaInfo( RenderingAreaInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PresentRegionsKHR( *reinterpret_cast( &rhs ) ) {} + RenderingAreaInfo( VkRenderingAreaInfo const & rhs ) VULKAN_HPP_NOEXCEPT : RenderingAreaInfo( *reinterpret_cast( &rhs ) ) {} # if !defined( 
VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PresentRegionsKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), swapchainCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ) + RenderingAreaInfo( uint32_t viewMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , viewMask( viewMask_ ) + , colorAttachmentCount( static_cast( colorAttachmentFormats_.size() ) ) + , pColorAttachmentFormats( colorAttachmentFormats_.data() ) + , depthAttachmentFormat( depthAttachmentFormat_ ) + , stencilAttachmentFormat( stencilAttachmentFormat_ ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PresentRegionsKHR & operator=( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RenderingAreaInfo & operator=( RenderingAreaInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PresentRegionsKHR & operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT + RenderingAreaInfo & operator=( VkRenderingAreaInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT { - swapchainCount = swapchainCount_; + viewMask = viewMask_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & setPRegions( const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT { - pRegions = pRegions_; + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachmentFormats = pColorAttachmentFormats_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PresentRegionsKHR & - setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + RenderingAreaInfo & setColorAttachmentFormats( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT { - swapchainCount = static_cast( regions_.size() ); - pRegions = regions_.data(); + colorAttachmentCount = static_cast( colorAttachmentFormats_.size() ); + pColorAttachmentFormats = colorAttachmentFormats_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPresentRegionsKHR const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format 
depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + depthAttachmentFormat = depthAttachmentFormat_; + return *this; } - operator VkPresentRegionsKHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + stencilAttachmentFormat = stencilAttachmentFormat_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderingAreaInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderingAreaInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, swapchainCount, pRegions ); + return std::tie( sType, pNext, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PresentRegionsKHR const & ) const = default; + auto operator<=>( RenderingAreaInfo const & ) const = default; #else - bool operator==( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderingAreaInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pRegions == rhs.pRegions ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewMask == rhs.viewMask ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && + ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) && ( depthAttachmentFormat == rhs.depthAttachmentFormat ) && + ( stencilAttachmentFormat == rhs.stencilAttachmentFormat ); # endif } - bool operator!=( PresentRegionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderingAreaInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentRegionsKHR; - const void * pNext = {}; - uint32_t swapchainCount = {}; - const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingAreaInfo; + const void * pNext = {}; + uint32_t viewMask = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {}; + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; }; template <> - struct CppType + struct CppType { - using Type = PresentRegionsKHR; + using Type = RenderingAreaInfo; }; - struct PresentTimeGOOGLE + using RenderingAreaInfoKHR = RenderingAreaInfo; + + struct RenderingAttachmentInfo { - using NativeType = VkPresentTimeGOOGLE; + using NativeType = VkRenderingAttachmentInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {} ) VULKAN_HPP_NOEXCEPT - : presentID( presentID_ ) - , desiredPresentTime( desiredPresentTime_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eRenderingAttachmentInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, + VULKAN_HPP_NAMESPACE::ImageView resolveImageView_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageView{ imageView_ } + , imageLayout{ imageLayout_ } + , resolveMode{ resolveMode_ } + , resolveImageView{ resolveImageView_ } + , resolveImageLayout{ resolveImageLayout_ } + , loadOp{ loadOp_ } + , storeOp{ storeOp_ } + , clearValue{ clearValue_ } { } - VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo( RenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT : PresentTimeGOOGLE( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + RenderingAttachmentInfo( VkRenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderingAttachmentInfo( *reinterpret_cast( &rhs ) ) + { + } - PresentTimeGOOGLE & operator=( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RenderingAttachmentInfo & operator=( RenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PresentTimeGOOGLE & operator=( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + RenderingAttachmentInfo & operator=( VkRenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE & setPresentID( uint32_t presentID_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - presentID = presentID_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE & setDesiredPresentTime( uint64_t desiredPresentTime_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT { - desiredPresentTime = desiredPresentTime_; + imageView = imageView_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPresentTimeGOOGLE const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + imageLayout = imageLayout_; + return *this; } - operator VkPresentTimeGOOGLE &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_ ) VULKAN_HPP_NOEXCEPT { - return 
*reinterpret_cast( this ); + resolveMode = resolveMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveImageView( VULKAN_HPP_NAMESPACE::ImageView resolveImageView_ ) VULKAN_HPP_NOEXCEPT + { + resolveImageView = resolveImageView_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + resolveImageLayout = resolveImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT + { + loadOp = loadOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT + { + storeOp = storeOp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT + { + clearValue = clearValue_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderingAttachmentInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderingAttachmentInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( presentID, desiredPresentTime ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PresentTimeGOOGLE const & ) const = default; -#else - bool operator==( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( presentID == rhs.presentID ) && ( desiredPresentTime == rhs.desiredPresentTime ); -# endif - } - - bool operator!=( PresentTimeGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); + return std::tie( sType, pNext, imageView, imageLayout, resolveMode, resolveImageView, resolveImageLayout, loadOp, storeOp, clearValue ); } #endif public: - uint32_t presentID = {}; - uint64_t desiredPresentTime = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingAttachmentInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone; + VULKAN_HPP_NAMESPACE::ImageView resolveImageView = {}; + VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::ClearValue clearValue = {}; }; - struct PresentTimesInfoGOOGLE + template <> + struct CppType { - using NativeType = VkPresentTimesInfoGOOGLE; + using Type = RenderingAttachmentInfo; + }; + + using RenderingAttachmentInfoKHR = RenderingAttachmentInfo; + + struct RenderingAttachmentLocationInfo + { + using NativeType = VkRenderingAttachmentLocationInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentTimesInfoGOOGLE; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingAttachmentLocationInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = {}, - const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , swapchainCount( swapchainCount_ ) - , pTimes( pTimes_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderingAttachmentLocationInfo( uint32_t colorAttachmentCount_ = {}, + const uint32_t * pColorAttachmentLocations_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachmentLocations{ pColorAttachmentLocations_ } { } - VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RenderingAttachmentLocationInfo( RenderingAttachmentLocationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT - : PresentTimesInfoGOOGLE( *reinterpret_cast( &rhs ) ) + RenderingAttachmentLocationInfo( VkRenderingAttachmentLocationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderingAttachmentLocationInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PresentTimesInfoGOOGLE( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & times_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), swapchainCount( static_cast( times_.size() ) ), pTimes( times_.data() ) + RenderingAttachmentLocationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentLocations_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , colorAttachmentCount( static_cast( colorAttachmentLocations_.size() ) ) + , pColorAttachmentLocations( colorAttachmentLocations_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PresentTimesInfoGOOGLE & operator=( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RenderingAttachmentLocationInfo & operator=( RenderingAttachmentLocationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PresentTimesInfoGOOGLE & operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + RenderingAttachmentLocationInfo & operator=( VkRenderingAttachmentLocationInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentLocationInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentLocationInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT { - swapchainCount = swapchainCount_; + colorAttachmentCount = colorAttachmentCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & setPTimes( const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ ) VULKAN_HPP_NOEXCEPT + 
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentLocationInfo & setPColorAttachmentLocations( const uint32_t * pColorAttachmentLocations_ ) VULKAN_HPP_NOEXCEPT { - pTimes = pTimes_; + pColorAttachmentLocations = pColorAttachmentLocations_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PresentTimesInfoGOOGLE & - setTimes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & times_ ) VULKAN_HPP_NOEXCEPT + RenderingAttachmentLocationInfo & + setColorAttachmentLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentLocations_ ) VULKAN_HPP_NOEXCEPT { - swapchainCount = static_cast( times_.size() ); - pTimes = times_.data(); + colorAttachmentCount = static_cast( colorAttachmentLocations_.size() ); + pColorAttachmentLocations = colorAttachmentLocations_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPresentTimesInfoGOOGLE const &() const VULKAN_HPP_NOEXCEPT + operator VkRenderingAttachmentLocationInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPresentTimesInfoGOOGLE &() VULKAN_HPP_NOEXCEPT + operator VkRenderingAttachmentLocationInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, swapchainCount, pTimes ); + return std::tie( sType, pNext, colorAttachmentCount, pColorAttachmentLocations ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PresentTimesInfoGOOGLE const & ) const = default; + auto operator<=>( RenderingAttachmentLocationInfo const & ) const = default; #else - bool operator==( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderingAttachmentLocationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pTimes == rhs.pTimes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && + ( pColorAttachmentLocations == rhs.pColorAttachmentLocations ); # endif } - bool operator!=( PresentTimesInfoGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderingAttachmentLocationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentTimesInfoGOOGLE; - const void * pNext = {}; - uint32_t swapchainCount = {}; - const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingAttachmentLocationInfo; + const void * pNext = {}; + uint32_t colorAttachmentCount = {}; + const uint32_t * pColorAttachmentLocations = {}; }; template <> - struct CppType + struct CppType { - using Type = PresentTimesInfoGOOGLE; + using Type = RenderingAttachmentLocationInfo; }; - struct PrivateDataSlotCreateInfo + using RenderingAttachmentLocationInfoKHR = RenderingAttachmentLocationInfo; + + struct RenderingFragmentDensityMapAttachmentInfoEXT { - using NativeType = VkPrivateDataSlotCreateInfo; + using NativeType = VkRenderingFragmentDensityMapAttachmentInfoEXT; static 
const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePrivateDataSlotCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfo( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RenderingFragmentDensityMapAttachmentInfoEXT( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageView{ imageView_ } + , imageLayout{ imageLayout_ } { } - VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfo( PrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RenderingFragmentDensityMapAttachmentInfoEXT( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PrivateDataSlotCreateInfo( VkPrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : PrivateDataSlotCreateInfo( *reinterpret_cast( &rhs ) ) + RenderingFragmentDensityMapAttachmentInfoEXT( VkRenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderingFragmentDensityMapAttachmentInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PrivateDataSlotCreateInfo & operator=( PrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RenderingFragmentDensityMapAttachmentInfoEXT & operator=( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - PrivateDataSlotCreateInfo & operator=( VkPrivateDataSlotCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + RenderingFragmentDensityMapAttachmentInfoEXT & operator=( VkRenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + imageView = imageView_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPrivateDataSlotCreateInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + imageLayout = imageLayout_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkPrivateDataSlotCreateInfo &() VULKAN_HPP_NOEXCEPT + operator 
VkRenderingFragmentDensityMapAttachmentInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkRenderingFragmentDensityMapAttachmentInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags ); + return std::tie( sType, pNext, imageView, imageLayout ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PrivateDataSlotCreateInfo const & ) const = default; + auto operator<=>( RenderingFragmentDensityMapAttachmentInfoEXT const & ) const = default; #else - bool operator==( PrivateDataSlotCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout ); # endif } - bool operator!=( PrivateDataSlotCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePrivateDataSlotCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; }; template <> - struct CppType + struct CppType { - using Type = PrivateDataSlotCreateInfo; + using Type = RenderingFragmentDensityMapAttachmentInfoEXT; }; - using PrivateDataSlotCreateInfoEXT = PrivateDataSlotCreateInfo; - struct ProtectedSubmitInfo + struct RenderingFragmentShadingRateAttachmentInfoKHR { - using NativeType = VkProtectedSubmitInfo; + using NativeType = VkRenderingFragmentShadingRateAttachmentInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eProtectedSubmitInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , protectedSubmit( protectedSubmit_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + RenderingFragmentShadingRateAttachmentInfoKHR( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , imageView{ imageView_ } + , imageLayout{ imageLayout_ } + , shadingRateAttachmentTexelSize{ shadingRateAttachmentTexelSize_ } { } - VULKAN_HPP_CONSTEXPR 
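A minimal sketch of how the RenderingFragmentDensityMapAttachmentInfoEXT struct above is chained into a vk::RenderingInfo (defined further below); the helper name and the `fdmView` image view are assumptions, and VK_EXT_fragment_density_map must be enabled.

#include <vulkan/vulkan.hpp>

// Hypothetical helper: attach a fragment density map to a dynamic rendering pass via pNext.
void attachFragmentDensityMap( vk::RenderingInfo &                                renderingInfo,
                               vk::RenderingFragmentDensityMapAttachmentInfoEXT & densityMap,
                               vk::ImageView                                      fdmView )
{
  densityMap.setImageView( fdmView ).setImageLayout( vk::ImageLayout::eFragmentDensityMapOptimalEXT );
  renderingInfo.setPNext( &densityMap );  // densityMap must outlive the vkCmdBeginRendering call
}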
ProtectedSubmitInfo( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + RenderingFragmentShadingRateAttachmentInfoKHR( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ProtectedSubmitInfo( *reinterpret_cast( &rhs ) ) + RenderingFragmentShadingRateAttachmentInfoKHR( VkRenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderingFragmentShadingRateAttachmentInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ProtectedSubmitInfo & operator=( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RenderingFragmentShadingRateAttachmentInfoKHR & operator=( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ProtectedSubmitInfo & operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + RenderingFragmentShadingRateAttachmentInfoKHR & operator=( VkRenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo & setProtectedSubmit( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT { - protectedSubmit = protectedSubmit_; + imageView = imageView_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkProtectedSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + imageLayout = imageLayout_; + return *this; } - operator VkProtectedSubmitInfo &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & + setShadingRateAttachmentTexelSize( VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderingFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderingFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, protectedSubmit ); + return std::tie( sType, pNext, imageView, imageLayout, shadingRateAttachmentTexelSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ProtectedSubmitInfo const & ) const = default; + auto operator<=>( RenderingFragmentShadingRateAttachmentInfoKHR 
const & ) const = default; #else - bool operator==( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedSubmit == rhs.protectedSubmit ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout ) && + ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize ); # endif } - bool operator!=( ProtectedSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eProtectedSubmitInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize = {}; }; template <> - struct CppType + struct CppType { - using Type = ProtectedSubmitInfo; + using Type = RenderingFragmentShadingRateAttachmentInfoKHR; }; - struct QueryPoolCreateInfo + struct RenderingInfo { - using NativeType = VkQueryPoolCreateInfo; + using NativeType = VkRenderingInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion, - uint32_t queryCount_ = {}, - VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , queryType( queryType_ ) - , queryCount( queryCount_ ) - , pipelineStatistics( pipelineStatistics_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 RenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, + uint32_t layerCount_ = {}, + uint32_t viewMask_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ = {}, + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , renderArea{ renderArea_ } + , layerCount{ layerCount_ } + , viewMask{ viewMask_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachments{ pColorAttachments_ } + , pDepthAttachment{ pDepthAttachment_ } + , pStencilAttachment{ pStencilAttachment_ } { } - VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 RenderingInfo( 
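A similar sketch for the RenderingFragmentShadingRateAttachmentInfoKHR struct above, assuming VK_KHR_fragment_shading_rate is enabled; the helper name, the `srView` image view, and the 16x16 texel size (which must be one the implementation reports as supported) are assumptions.

#include <vulkan/vulkan.hpp>

// Hypothetical helper: attach a fragment shading rate image to a dynamic rendering pass via pNext.
void attachShadingRateImage( vk::RenderingInfo &                                 renderingInfo,
                             vk::RenderingFragmentShadingRateAttachmentInfoKHR & shadingRate,
                             vk::ImageView                                       srView )
{
  shadingRate.setImageView( srView )
    .setImageLayout( vk::ImageLayout::eFragmentShadingRateAttachmentOptimalKHR )
    .setShadingRateAttachmentTexelSize( vk::Extent2D{ 16, 16 } );
  renderingInfo.setPNext( &shadingRate );  // shadingRate must outlive the vkCmdBeginRendering call
}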
RenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : QueryPoolCreateInfo( *reinterpret_cast( &rhs ) ) + RenderingInfo( VkRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT : RenderingInfo( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_, + VULKAN_HPP_NAMESPACE::Rect2D renderArea_, + uint32_t layerCount_, + uint32_t viewMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_, + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ = {}, + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , renderArea( renderArea_ ) + , layerCount( layerCount_ ) + , viewMask( viewMask_ ) + , colorAttachmentCount( static_cast( colorAttachments_.size() ) ) + , pColorAttachments( colorAttachments_.data() ) + , pDepthAttachment( pDepthAttachment_ ) + , pStencilAttachment( pStencilAttachment_ ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - QueryPoolCreateInfo & operator=( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RenderingInfo & operator=( RenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - QueryPoolCreateInfo & operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + RenderingInfo & operator=( VkRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT { - queryType = queryType_; + renderArea = renderArea_; return *this; } - VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT { - queryCount = queryCount_; + layerCount = layerCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & - setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT { - pipelineStatistics = pipelineStatistics_; + viewMask = viewMask_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkQueryPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this 
); + colorAttachmentCount = colorAttachmentCount_; + return *this; } - operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPColorAttachments( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pColorAttachments = pColorAttachments_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderingInfo & setColorAttachments( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachments_.size() ); + pColorAttachments = colorAttachments_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPDepthAttachment( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pDepthAttachment = pDepthAttachment_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingInfo & + setPStencilAttachment( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pStencilAttachment = pStencilAttachment_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkRenderingInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderingInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -77190,534 +118287,573 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + uint32_t const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * const &, + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * const &, + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, queryType, queryCount, pipelineStatistics ); + return std::tie( sType, pNext, flags, renderArea, layerCount, viewMask, colorAttachmentCount, pColorAttachments, pDepthAttachment, pStencilAttachment ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( QueryPoolCreateInfo const & ) const = default; + auto operator<=>( RenderingInfo const & ) const = default; #else - bool operator==( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queryType == rhs.queryType ) && ( queryCount == rhs.queryCount ) && - ( pipelineStatistics == rhs.pipelineStatistics ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( renderArea == rhs.renderArea ) && + ( layerCount == rhs.layerCount ) && ( viewMask == rhs.viewMask ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && + ( pColorAttachments == rhs.pColorAttachments ) && ( pDepthAttachment == rhs.pDepthAttachment ) && ( pStencilAttachment == rhs.pStencilAttachment ); # endif } - bool operator!=( QueryPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::QueryType queryType = 
VULKAN_HPP_NAMESPACE::QueryType::eOcclusion; - uint32_t queryCount = {}; - VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderingFlags flags = {}; + VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; + uint32_t layerCount = {}; + uint32_t viewMask = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments = {}; + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment = {}; + const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment = {}; }; template <> - struct CppType + struct CppType { - using Type = QueryPoolCreateInfo; + using Type = RenderingInfo; }; - struct QueryPoolPerformanceCreateInfoKHR + using RenderingInfoKHR = RenderingInfo; + + struct RenderingInputAttachmentIndexInfo { - using NativeType = VkQueryPoolPerformanceCreateInfoKHR; + using NativeType = VkRenderingInputAttachmentIndexInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceCreateInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingInputAttachmentIndexInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_ = {}, - uint32_t counterIndexCount_ = {}, - const uint32_t * pCounterIndices_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , queueFamilyIndex( queueFamilyIndex_ ) - , counterIndexCount( counterIndexCount_ ) - , pCounterIndices( pCounterIndices_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderingInputAttachmentIndexInfo( uint32_t colorAttachmentCount_ = {}, + const uint32_t * pColorAttachmentInputIndices_ = {}, + const uint32_t * pDepthInputAttachmentIndex_ = {}, + const uint32_t * pStencilInputAttachmentIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachmentInputIndices{ pColorAttachmentInputIndices_ } + , pDepthInputAttachmentIndex{ pDepthInputAttachmentIndex_ } + , pStencilInputAttachmentIndex{ pStencilInputAttachmentIndex_ } { } - VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RenderingInputAttachmentIndexInfo( RenderingInputAttachmentIndexInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : QueryPoolPerformanceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + RenderingInputAttachmentIndexInfo( VkRenderingInputAttachmentIndexInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderingInputAttachmentIndexInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & counterIndices_, - const void * pNext_ = nullptr ) + RenderingInputAttachmentIndexInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentInputIndices_, + const uint32_t * pDepthInputAttachmentIndex_ = {}, + const uint32_t * pStencilInputAttachmentIndex_ = {}, + const void * pNext_ = nullptr ) : pNext( pNext_ 
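Usage sketch for the RenderingInfo struct above, assuming a Vulkan 1.3+ device; the helper name and the `cmd`, `colorView`, and `extent` arguments are assumptions.

#include <vulkan/vulkan.hpp>

// Hypothetical helper: begin dynamic rendering with a single cleared color attachment.
void beginSimpleRendering( vk::CommandBuffer cmd, vk::ImageView colorView, vk::Extent2D extent )
{
  vk::RenderingAttachmentInfo colorAttachment{};
  colorAttachment.setImageView( colorView )
    .setImageLayout( vk::ImageLayout::eColorAttachmentOptimal )
    .setLoadOp( vk::AttachmentLoadOp::eClear )
    .setStoreOp( vk::AttachmentStoreOp::eStore )
    .setClearValue( vk::ClearValue{ vk::ClearColorValue{ 0.0f, 0.0f, 0.0f, 1.0f } } );

  vk::RenderingInfo renderingInfo{};
  renderingInfo.setRenderArea( vk::Rect2D{ { 0, 0 }, extent } )
    .setLayerCount( 1 )
    .setColorAttachments( colorAttachment );  // enhanced-mode setter fills colorAttachmentCount + pColorAttachments

  cmd.beginRendering( renderingInfo );
  // ... record draws ...
  cmd.endRendering();
}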
) - , queueFamilyIndex( queueFamilyIndex_ ) - , counterIndexCount( static_cast( counterIndices_.size() ) ) - , pCounterIndices( counterIndices_.data() ) + , colorAttachmentCount( static_cast( colorAttachmentInputIndices_.size() ) ) + , pColorAttachmentInputIndices( colorAttachmentInputIndices_.data() ) + , pDepthInputAttachmentIndex( pDepthInputAttachmentIndex_ ) + , pStencilInputAttachmentIndex( pStencilInputAttachmentIndex_ ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - QueryPoolPerformanceCreateInfoKHR & operator=( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RenderingInputAttachmentIndexInfo & operator=( RenderingInputAttachmentIndexInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - QueryPoolPerformanceCreateInfoKHR & operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + RenderingInputAttachmentIndexInfo & operator=( VkRenderingInputAttachmentIndexInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT { - queueFamilyIndex = queueFamilyIndex_; + colorAttachmentCount = colorAttachmentCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo & + setPColorAttachmentInputIndices( const uint32_t * pColorAttachmentInputIndices_ ) VULKAN_HPP_NOEXCEPT { - counterIndexCount = counterIndexCount_; + pColorAttachmentInputIndices = pColorAttachmentInputIndices_; return *this; } - VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t * pCounterIndices_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderingInputAttachmentIndexInfo & + setColorAttachmentInputIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentInputIndices_ ) VULKAN_HPP_NOEXCEPT { - pCounterIndices = pCounterIndices_; + colorAttachmentCount = static_cast( colorAttachmentInputIndices_.size() ); + pColorAttachmentInputIndices = colorAttachmentInputIndices_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - QueryPoolPerformanceCreateInfoKHR & - setCounterIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & counterIndices_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo & + setPDepthInputAttachmentIndex( const uint32_t * pDepthInputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT { - counterIndexCount = static_cast( counterIndices_.size() ); - pCounterIndices = counterIndices_.data(); + pDepthInputAttachmentIndex = pDepthInputAttachmentIndex_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 
-#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkQueryPoolPerformanceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo & + setPStencilInputAttachmentIndex( const uint32_t * pStencilInputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pStencilInputAttachmentIndex = pStencilInputAttachmentIndex_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkRenderingInputAttachmentIndexInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkRenderingInputAttachmentIndexInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, queueFamilyIndex, counterIndexCount, pCounterIndices ); + return std::tie( sType, pNext, colorAttachmentCount, pColorAttachmentInputIndices, pDepthInputAttachmentIndex, pStencilInputAttachmentIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( QueryPoolPerformanceCreateInfoKHR const & ) const = default; + auto operator<=>( RenderingInputAttachmentIndexInfo const & ) const = default; #else - bool operator==( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderingInputAttachmentIndexInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && - ( counterIndexCount == rhs.counterIndexCount ) && ( pCounterIndices == rhs.pCounterIndices ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && + ( pColorAttachmentInputIndices == rhs.pColorAttachmentInputIndices ) && ( pDepthInputAttachmentIndex == rhs.pDepthInputAttachmentIndex ) && + ( pStencilInputAttachmentIndex == rhs.pStencilInputAttachmentIndex ); # endif } - bool operator!=( QueryPoolPerformanceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderingInputAttachmentIndexInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR; - const void * pNext = {}; - uint32_t queueFamilyIndex = {}; - uint32_t counterIndexCount = {}; - const uint32_t * pCounterIndices = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingInputAttachmentIndexInfo; + const void * pNext = {}; + uint32_t colorAttachmentCount = {}; + const uint32_t * pColorAttachmentInputIndices = {}; + const uint32_t * pDepthInputAttachmentIndex = {}; + const uint32_t * pStencilInputAttachmentIndex = {}; }; template <> - struct CppType + struct CppType { - using Type = QueryPoolPerformanceCreateInfoKHR; + using Type = RenderingInputAttachmentIndexInfo; }; - struct QueryPoolPerformanceQueryCreateInfoINTEL + using RenderingInputAttachmentIndexInfoKHR = RenderingInputAttachmentIndexInfo; + + struct ResolveImageInfo2 { - using NativeType = VkQueryPoolPerformanceQueryCreateInfoINTEL; + using NativeType = VkResolveImageInfo2; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
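Usage sketch for the RenderingInputAttachmentIndexInfo struct above (dynamic rendering local read): the helper name, the `cmd` handle, and the availability of the CommandBuffer::setRenderingInputAttachmentIndices wrapper for vkCmdSetRenderingInputAttachmentIndices are assumptions.

#include <array>
#include <vulkan/vulkan.hpp>

// Hypothetical helper: expose color attachments 0 and 1 as input attachment indices 0 and 1;
// depth/stencil stay unmapped because the pointer members are left null.
void mapInputAttachmentIndices( vk::CommandBuffer cmd )
{
  std::array<uint32_t, 2> inputIndices = { 0, 1 };
  vk::RenderingInputAttachmentIndexInfo info{};
  info.setColorAttachmentInputIndices( inputIndices );  // fills colorAttachmentCount + pColorAttachmentInputIndices
  cmd.setRenderingInputAttachmentIndices( info );       // assumed wrapper for vkCmdSetRenderingInputAttachmentIndices
}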
StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eResolveImageInfo2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( - VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , performanceCountersSampling( performanceCountersSampling_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ResolveImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , srcImage{ srcImage_ } + , srcImageLayout{ srcImageLayout_ } + , dstImage{ dstImage_ } + , dstImageLayout{ dstImageLayout_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } { } - VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ResolveImageInfo2( ResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - QueryPoolPerformanceQueryCreateInfoINTEL( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT - : QueryPoolPerformanceQueryCreateInfoINTEL( *reinterpret_cast( &rhs ) ) + ResolveImageInfo2( VkResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : ResolveImageInfo2( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ResolveImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - QueryPoolPerformanceQueryCreateInfoINTEL & operator=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ResolveImageInfo2 & operator=( ResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - QueryPoolPerformanceQueryCreateInfoINTEL & operator=( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + ResolveImageInfo2 & operator=( VkResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = 
pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL & - setPerformanceCountersSampling( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT { - performanceCountersSampling = performanceCountersSampling_; + srcImage = srcImage_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkQueryPoolPerformanceQueryCreateInfoINTEL const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkQueryPoolPerformanceQueryCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, performanceCountersSampling ); + srcImageLayout = srcImageLayout_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( QueryPoolPerformanceQueryCreateInfoINTEL const & ) const = default; -#else - bool operator==( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( performanceCountersSampling == rhs.performanceCountersSampling ); -# endif + dstImage = dstImage_; + return *this; } - bool operator!=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + dstImageLayout = dstImageLayout_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual; - }; - - template <> - struct CppType - { - using Type = QueryPoolPerformanceQueryCreateInfoINTEL; - }; - using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL; - - struct QueueFamilyCheckpointProperties2NV - { - using NativeType = VkQueueFamilyCheckpointProperties2NV; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointProperties2NV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , checkpointExecutionStageMask( checkpointExecutionStageMask_ ) + VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT { + regionCount = regionCount_; + return *this; } - VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - QueueFamilyCheckpointProperties2NV( 
VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT - : QueueFamilyCheckpointProperties2NV( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions_ ) VULKAN_HPP_NOEXCEPT { + pRegions = pRegions_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - QueueFamilyCheckpointProperties2NV & operator=( QueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - QueueFamilyCheckpointProperties2NV & operator=( VkQueueFamilyCheckpointProperties2NV const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ResolveImageInfo2 & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkQueueFamilyCheckpointProperties2NV const &() const VULKAN_HPP_NOEXCEPT + operator VkResolveImageInfo2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkQueueFamilyCheckpointProperties2NV &() VULKAN_HPP_NOEXCEPT + operator VkResolveImageInfo2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, checkpointExecutionStageMask ); + return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( QueueFamilyCheckpointProperties2NV const & ) const = default; + auto operator<=>( ResolveImageInfo2 const & ) const = default; #else - bool operator==( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ResolveImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) && + ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); # endif } - bool operator!=( QueueFamilyCheckpointProperties2NV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ResolveImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointProperties2NV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eResolveImageInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions 
= {}; }; template <> - struct CppType + struct CppType { - using Type = QueueFamilyCheckpointProperties2NV; + using Type = ResolveImageInfo2; }; - struct QueueFamilyCheckpointPropertiesNV + using ResolveImageInfo2KHR = ResolveImageInfo2; + + struct SamplerBlockMatchWindowCreateInfoQCOM { - using NativeType = VkQueueFamilyCheckpointPropertiesNV; + using NativeType = VkSamplerBlockMatchWindowCreateInfoQCOM; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointPropertiesNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerBlockMatchWindowCreateInfoQCOM; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , checkpointExecutionStageMask( checkpointExecutionStageMask_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerBlockMatchWindowCreateInfoQCOM( + VULKAN_HPP_NAMESPACE::Extent2D windowExtent_ = {}, + VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM windowCompareMode_ = VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM::eMin, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , windowExtent{ windowExtent_ } + , windowCompareMode{ windowCompareMode_ } { } - VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SamplerBlockMatchWindowCreateInfoQCOM( SamplerBlockMatchWindowCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; - QueueFamilyCheckpointPropertiesNV( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : QueueFamilyCheckpointPropertiesNV( *reinterpret_cast( &rhs ) ) + SamplerBlockMatchWindowCreateInfoQCOM( VkSamplerBlockMatchWindowCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerBlockMatchWindowCreateInfoQCOM( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - QueueFamilyCheckpointPropertiesNV & operator=( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SamplerBlockMatchWindowCreateInfoQCOM & operator=( SamplerBlockMatchWindowCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - QueueFamilyCheckpointPropertiesNV & operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + SamplerBlockMatchWindowCreateInfoQCOM & operator=( VkSamplerBlockMatchWindowCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkQueueFamilyCheckpointPropertiesNV const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerBlockMatchWindowCreateInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkQueueFamilyCheckpointPropertiesNV &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SamplerBlockMatchWindowCreateInfoQCOM & setWindowExtent( VULKAN_HPP_NAMESPACE::Extent2D const & windowExtent_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + windowExtent = windowExtent_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
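Usage sketch for the ResolveImageInfo2 struct above (Vulkan 1.3+ vkCmdResolveImage2); the helper name and the handles passed in are assumptions, and both images are assumed to already be in the transfer layouts used below.

#include <vulkan/vulkan.hpp>

// Hypothetical helper: resolve a single-layer multisample color image into a single-sample image.
void resolveColorImage( vk::CommandBuffer cmd, vk::Image msaaImage, vk::Image resolvedImage, vk::Extent3D extent )
{
  vk::ImageSubresourceLayers layers{ vk::ImageAspectFlagBits::eColor, 0, 0, 1 };
  vk::ImageResolve2          region{ layers, vk::Offset3D{}, layers, vk::Offset3D{}, extent };

  vk::ResolveImageInfo2 resolveInfo{};
  resolveInfo.setSrcImage( msaaImage )
    .setSrcImageLayout( vk::ImageLayout::eTransferSrcOptimal )
    .setDstImage( resolvedImage )
    .setDstImageLayout( vk::ImageLayout::eTransferDstOptimal )
    .setRegions( region );  // enhanced-mode setter fills regionCount + pRegions
  cmd.resolveImage2( resolveInfo );
}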
SamplerBlockMatchWindowCreateInfoQCOM & + setWindowCompareMode( VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM windowCompareMode_ ) VULKAN_HPP_NOEXCEPT + { + windowCompareMode = windowCompareMode_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSamplerBlockMatchWindowCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerBlockMatchWindowCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, checkpointExecutionStageMask ); + return std::tie( sType, pNext, windowExtent, windowCompareMode ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( QueueFamilyCheckpointPropertiesNV const & ) const = default; + auto operator<=>( SamplerBlockMatchWindowCreateInfoQCOM const & ) const = default; #else - bool operator==( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SamplerBlockMatchWindowCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( windowExtent == rhs.windowExtent ) && ( windowCompareMode == rhs.windowCompareMode ); # endif } - bool operator!=( QueueFamilyCheckpointPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SamplerBlockMatchWindowCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerBlockMatchWindowCreateInfoQCOM; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D windowExtent = {}; + VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM windowCompareMode = VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM::eMin; }; template <> - struct CppType + struct CppType { - using Type = QueueFamilyCheckpointPropertiesNV; + using Type = SamplerBlockMatchWindowCreateInfoQCOM; }; - struct QueueFamilyGlobalPriorityPropertiesKHR + struct SamplerBorderColorComponentMappingCreateInfoEXT { - using NativeType = VkQueueFamilyGlobalPriorityPropertiesKHR; + using NativeType = VkSamplerBorderColorComponentMappingCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyGlobalPriorityPropertiesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 - QueueFamilyGlobalPriorityPropertiesKHR( uint32_t priorityCount_ = {}, - std::array const & - priorities_ = { { VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - 
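Usage sketch for the SamplerBlockMatchWindowCreateInfoQCOM struct above (from the QCOM image-processing block match extension): the helper name and the 5x5 window extent are assumptions; the struct is chained into vk::SamplerCreateInfo::pNext.

#include <vulkan/vulkan.hpp>

// Hypothetical helper: create a sampler configured with a 5x5 block match window.
vk::Sampler createBlockMatchSampler( vk::Device device )
{
  vk::SamplerBlockMatchWindowCreateInfoQCOM windowInfo{ vk::Extent2D{ 5, 5 },
                                                        vk::BlockMatchWindowCompareModeQCOM::eMin };
  vk::SamplerCreateInfo samplerInfo{};
  samplerInfo.setPNext( &windowInfo );
  return device.createSampler( samplerInfo );  // enhanced mode: throws on failure
}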
VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow } }, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , priorityCount( priorityCount_ ) - , priorities( priorities_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerBorderColorComponentMappingCreateInfoEXT( VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 srgb_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , components{ components_ } + , srgb{ srgb_ } { } - VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + SamplerBorderColorComponentMappingCreateInfoEXT( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - QueueFamilyGlobalPriorityPropertiesKHR( VkQueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : QueueFamilyGlobalPriorityPropertiesKHR( *reinterpret_cast( &rhs ) ) + SamplerBorderColorComponentMappingCreateInfoEXT( VkSamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerBorderColorComponentMappingCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - QueueFamilyGlobalPriorityPropertiesKHR & operator=( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SamplerBorderColorComponentMappingCreateInfoEXT & operator=( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - QueueFamilyGlobalPriorityPropertiesKHR & operator=( VkQueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + SamplerBorderColorComponentMappingCreateInfoEXT & operator=( VkSamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR & setPriorityCount( uint32_t priorityCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & + setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT { - priorityCount = priorityCount_; + components = components_; return *this; } - VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR & - setPriorities( std::array priorities_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
SamplerBorderColorComponentMappingCreateInfoEXT & setSrgb( VULKAN_HPP_NAMESPACE::Bool32 srgb_ ) VULKAN_HPP_NOEXCEPT { - priorities = priorities_; + srgb = srgb_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkQueueFamilyGlobalPriorityPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkSamplerBorderColorComponentMappingCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkQueueFamilyGlobalPriorityPropertiesKHR &() VULKAN_HPP_NOEXCEPT + operator VkSamplerBorderColorComponentMappingCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -77725,2218 +118861,2469 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple const &> + const void * const &, + VULKAN_HPP_NAMESPACE::ComponentMapping const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, priorityCount, priorities ); + return std::tie( sType, pNext, components, srgb ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( QueueFamilyGlobalPriorityPropertiesKHR const & ) const = default; + auto operator<=>( SamplerBorderColorComponentMappingCreateInfoEXT const & ) const = default; #else - bool operator==( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priorityCount == rhs.priorityCount ) && ( priorities == rhs.priorities ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( components == rhs.components ) && ( srgb == rhs.srgb ); # endif } - bool operator!=( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyGlobalPriorityPropertiesKHR; - void * pNext = {}; - uint32_t priorityCount = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D priorities = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; + VULKAN_HPP_NAMESPACE::Bool32 srgb = {}; }; template <> - struct CppType + struct CppType { - using Type = QueueFamilyGlobalPriorityPropertiesKHR; + using Type = SamplerBorderColorComponentMappingCreateInfoEXT; }; - using QueueFamilyGlobalPriorityPropertiesEXT = QueueFamilyGlobalPriorityPropertiesKHR; - struct QueueFamilyProperties + struct SamplerCaptureDescriptorDataInfoEXT { - using NativeType = VkQueueFamilyProperties; + using NativeType = VkSamplerCaptureDescriptorDataInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR QueueFamilyProperties( VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {}, - uint32_t queueCount_ = {}, - uint32_t timestampValidBits_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {} ) VULKAN_HPP_NOEXCEPT - : queueFlags( queueFlags_ ) - , queueCount( queueCount_ ) - , timestampValidBits( timestampValidBits_ ) - , minImageTransferGranularity( 
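Usage sketch for the SamplerBorderColorComponentMappingCreateInfoEXT struct above (VK_EXT_border_color_swizzle): the helper name and the BGRA mapping are assumptions. The struct is chained into vk::SamplerCreateInfo::pNext when the sampled image view uses a swizzled component mapping together with a custom border color.

#include <vulkan/vulkan.hpp>

// Hypothetical helper: describe a BGRA border color component mapping for a non-sRGB view format.
vk::SamplerBorderColorComponentMappingCreateInfoEXT makeBorderColorMapping()
{
  vk::ComponentMapping bgra{ vk::ComponentSwizzle::eB, vk::ComponentSwizzle::eG,
                             vk::ComponentSwizzle::eR, vk::ComponentSwizzle::eA };
  return vk::SamplerBorderColorComponentMappingCreateInfoEXT{}.setComponents( bgra ).setSrgb( VK_FALSE );
}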
minImageTransferGranularity_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCaptureDescriptorDataInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerCaptureDescriptorDataInfoEXT( VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , sampler{ sampler_ } { } - VULKAN_HPP_CONSTEXPR QueueFamilyProperties( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SamplerCaptureDescriptorDataInfoEXT( SamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - QueueFamilyProperties( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : QueueFamilyProperties( *reinterpret_cast( &rhs ) ) + SamplerCaptureDescriptorDataInfoEXT( VkSamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerCaptureDescriptorDataInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - QueueFamilyProperties & operator=( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SamplerCaptureDescriptorDataInfoEXT & operator=( SamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - QueueFamilyProperties & operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT + SamplerCaptureDescriptorDataInfoEXT & operator=( VkSamplerCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkQueueFamilyProperties const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SamplerCaptureDescriptorDataInfoEXT & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + sampler = sampler_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSamplerCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( queueFlags, queueCount, timestampValidBits, minImageTransferGranularity ); + return std::tie( sType, pNext, sampler ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( QueueFamilyProperties const & ) const = default; + auto operator<=>( SamplerCaptureDescriptorDataInfoEXT const & ) const = default; #else - bool operator==( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SamplerCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( queueFlags == rhs.queueFlags ) && ( queueCount == rhs.queueCount ) && ( timestampValidBits == rhs.timestampValidBits ) && - ( minImageTransferGranularity == 
rhs.minImageTransferGranularity ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampler == rhs.sampler ); # endif } - bool operator!=( QueueFamilyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SamplerCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::QueueFlags queueFlags = {}; - uint32_t queueCount = {}; - uint32_t timestampValidBits = {}; - VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCaptureDescriptorDataInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Sampler sampler = {}; }; - struct QueueFamilyProperties2 + template <> + struct CppType { - using NativeType = VkQueueFamilyProperties2; + using Type = SamplerCaptureDescriptorDataInfoEXT; + }; + + struct SamplerCreateInfo + { + using NativeType = VkSamplerCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyProperties2; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , queueFamilyProperties( queueFamilyProperties_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerCreateInfo( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, + VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, + VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest, + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, + float mipLodBias_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {}, + float maxAnisotropy_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {}, + VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, + float minLod_ = {}, + float maxLod_ = {}, + VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack, + VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , magFilter{ magFilter_ } + , minFilter{ minFilter_ } + , mipmapMode{ mipmapMode_ } + , addressModeU{ addressModeU_ } + , addressModeV{ addressModeV_ } + , addressModeW{ addressModeW_ } + , mipLodBias{ mipLodBias_ } + , anisotropyEnable{ anisotropyEnable_ } + , maxAnisotropy{ maxAnisotropy_ } + , compareEnable{ compareEnable_ } + , compareOp{ compareOp_ } + , minLod{ minLod_ } + , maxLod{ maxLod_ } + , borderColor{ borderColor_ } + , unnormalizedCoordinates{ unnormalizedCoordinates_ } { } - VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateInfo const & rhs ) 
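Usage sketch for the SamplerCaptureDescriptorDataInfoEXT struct above (VK_EXT_descriptor_buffer capture/replay): the helper name, the `dataSize` argument (assumed to come from vk::PhysicalDeviceDescriptorBufferPropertiesEXT::samplerCaptureReplayDescriptorDataSize), and the use of the pointer-style getSamplerOpaqueCaptureDescriptorDataEXT overload are assumptions.

#include <cstddef>
#include <cstdint>
#include <vector>
#include <vulkan/vulkan.hpp>

// Hypothetical helper: query the opaque capture data of a sampler created with the
// descriptor-buffer capture/replay bit, for later replay of its descriptor.
std::vector<uint8_t> captureSamplerDescriptorData( vk::Device device, vk::Sampler sampler, std::size_t dataSize )
{
  vk::SamplerCaptureDescriptorDataInfoEXT info{ sampler };
  std::vector<uint8_t>                    data( dataSize );
  vk::Result result = device.getSamplerOpaqueCaptureDescriptorDataEXT( &info, data.data() );
  return ( result == vk::Result::eSuccess ) ? data : std::vector<uint8_t>{};
}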
VULKAN_HPP_NOEXCEPT = default; - QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - : QueueFamilyProperties2( *reinterpret_cast( &rhs ) ) + SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SamplerCreateInfo( *reinterpret_cast( &rhs ) ) {} + + SamplerCreateInfo & operator=( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SamplerCreateInfo & operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - QueueFamilyProperties2 & operator=( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - QueueFamilyProperties2 & operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + pNext = pNext_; return *this; } - operator VkQueueFamilyProperties2 const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + flags = flags_; + return *this; } - operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + magFilter = magFilter_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, queueFamilyProperties ); + minFilter = minFilter_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( QueueFamilyProperties2 const & ) const = default; -#else - bool operator==( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyProperties == rhs.queueFamilyProperties ); -# endif + mipmapMode = mipmapMode_; + return *this; } - bool operator!=( QueueFamilyProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + addressModeU = addressModeU_; + return *this; } -#endif - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyProperties2; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {}; - }; + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeV( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT + { + addressModeV = addressModeV_; + return *this; + } - template <> - struct CppType - { - using Type = QueueFamilyProperties2; - }; - using QueueFamilyProperties2KHR = QueueFamilyProperties2; + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeW( 
VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT + { + addressModeW = addressModeW_; + return *this; + } -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct QueueFamilyQueryResultStatusPropertiesKHR - { - using NativeType = VkQueueFamilyQueryResultStatusPropertiesKHR; + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) VULKAN_HPP_NOEXCEPT + { + mipLodBias = mipLodBias_; + return *this; + } - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyQueryResultStatusPropertiesKHR; + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT + { + anisotropyEnable = anisotropyEnable_; + return *this; + } -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR QueueFamilyQueryResultStatusPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 queryResultStatusSupport_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , queryResultStatusSupport( queryResultStatusSupport_ ) + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) VULKAN_HPP_NOEXCEPT { + maxAnisotropy = maxAnisotropy_; + return *this; } - VULKAN_HPP_CONSTEXPR QueueFamilyQueryResultStatusPropertiesKHR( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT + { + compareEnable = compareEnable_; + return *this; + } - QueueFamilyQueryResultStatusPropertiesKHR( VkQueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : QueueFamilyQueryResultStatusPropertiesKHR( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT { + compareOp = compareOp_; + return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - QueueFamilyQueryResultStatusPropertiesKHR & operator=( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT + { + minLod = minLod_; + return *this; + } - QueueFamilyQueryResultStatusPropertiesKHR & operator=( VkQueueFamilyQueryResultStatusPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxLod( float maxLod_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + maxLod = maxLod_; return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 QueueFamilyQueryResultStatusPropertiesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + borderColor = borderColor_; return *this; } - VULKAN_HPP_CONSTEXPR_14 QueueFamilyQueryResultStatusPropertiesKHR & - setQueryResultStatusSupport( VULKAN_HPP_NAMESPACE::Bool32 queryResultStatusSupport_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT { - queryResultStatusSupport = queryResultStatusSupport_; + unnormalizedCoordinates = unnormalizedCoordinates_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator 
VkQueueFamilyQueryResultStatusPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkSamplerCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkQueueFamilyQueryResultStatusPropertiesKHR &() VULKAN_HPP_NOEXCEPT + operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, queryResultStatusSupport ); + return std::tie( sType, + pNext, + flags, + magFilter, + minFilter, + mipmapMode, + addressModeU, + addressModeV, + addressModeW, + mipLodBias, + anisotropyEnable, + maxAnisotropy, + compareEnable, + compareOp, + minLod, + maxLod, + borderColor, + unnormalizedCoordinates ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( QueueFamilyQueryResultStatusPropertiesKHR const & ) const = default; -# else - bool operator==( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerCreateInfo const & ) const = default; +#else + bool operator==( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queryResultStatusSupport == rhs.queryResultStatusSupport ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( magFilter == rhs.magFilter ) && ( minFilter == rhs.minFilter ) && + ( mipmapMode == rhs.mipmapMode ) && ( addressModeU == rhs.addressModeU ) && ( addressModeV == rhs.addressModeV ) && + ( addressModeW == rhs.addressModeW ) && ( mipLodBias == rhs.mipLodBias ) && ( anisotropyEnable == rhs.anisotropyEnable ) && + ( maxAnisotropy == rhs.maxAnisotropy ) && ( compareEnable == rhs.compareEnable ) && ( compareOp == rhs.compareOp ) && ( minLod == rhs.minLod ) && + ( maxLod == rhs.maxLod ) && ( borderColor == rhs.borderColor ) && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates ); +# endif } - bool operator!=( QueueFamilyQueryResultStatusPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyQueryResultStatusPropertiesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 queryResultStatusSupport = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; + VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; + VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest; + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; + 
VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; + float mipLodBias = {}; + VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {}; + float maxAnisotropy = {}; + VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {}; + VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; + float minLod = {}; + float maxLod = {}; + VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack; + VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {}; }; template <> - struct CppType + struct CppType { - using Type = QueueFamilyQueryResultStatusPropertiesKHR; + using Type = SamplerCreateInfo; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct QueueFamilyVideoPropertiesKHR + struct SamplerCubicWeightsCreateInfoQCOM { - using NativeType = VkQueueFamilyVideoPropertiesKHR; + using NativeType = VkSamplerCubicWeightsCreateInfoQCOM; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyVideoPropertiesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCubicWeightsCreateInfoQCOM; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR QueueFamilyVideoPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , videoCodecOperations( videoCodecOperations_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SamplerCubicWeightsCreateInfoQCOM( VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights_ = VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM::eCatmullRom, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cubicWeights{ cubicWeights_ } { } - VULKAN_HPP_CONSTEXPR QueueFamilyVideoPropertiesKHR( QueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SamplerCubicWeightsCreateInfoQCOM( SamplerCubicWeightsCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; - QueueFamilyVideoPropertiesKHR( VkQueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : QueueFamilyVideoPropertiesKHR( *reinterpret_cast( &rhs ) ) + SamplerCubicWeightsCreateInfoQCOM( VkSamplerCubicWeightsCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerCubicWeightsCreateInfoQCOM( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - QueueFamilyVideoPropertiesKHR & operator=( QueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SamplerCubicWeightsCreateInfoQCOM & operator=( SamplerCubicWeightsCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - QueueFamilyVideoPropertiesKHR & operator=( VkQueueFamilyVideoPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + SamplerCubicWeightsCreateInfoQCOM & operator=( VkSamplerCubicWeightsCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 QueueFamilyVideoPropertiesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerCubicWeightsCreateInfoQCOM & setPNext( const void * pNext_ ) 
VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 QueueFamilyVideoPropertiesKHR & - setVideoCodecOperations( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SamplerCubicWeightsCreateInfoQCOM & + setCubicWeights( VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights_ ) VULKAN_HPP_NOEXCEPT { - videoCodecOperations = videoCodecOperations_; + cubicWeights = cubicWeights_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkQueueFamilyVideoPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkSamplerCubicWeightsCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkQueueFamilyVideoPropertiesKHR &() VULKAN_HPP_NOEXCEPT + operator VkSamplerCubicWeightsCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, videoCodecOperations ); + return std::tie( sType, pNext, cubicWeights ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( QueueFamilyVideoPropertiesKHR const & ) const = default; -# else - bool operator==( QueueFamilyVideoPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerCubicWeightsCreateInfoQCOM const & ) const = default; +#else + bool operator==( SamplerCubicWeightsCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoCodecOperations == rhs.videoCodecOperations ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cubicWeights == rhs.cubicWeights ); +# endif } - bool operator!=( QueueFamilyVideoPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SamplerCubicWeightsCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyVideoPropertiesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCubicWeightsCreateInfoQCOM; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights = VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM::eCatmullRom; }; template <> - struct CppType + struct CppType { - using Type = QueueFamilyVideoPropertiesKHR; + using Type = SamplerCubicWeightsCreateInfoQCOM; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - struct RayTracingShaderGroupCreateInfoKHR + struct SamplerCustomBorderColorCreateInfoEXT { - using NativeType = VkRayTracingShaderGroupCreateInfoKHR; + using NativeType = VkSamplerCustomBorderColorCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eSamplerCustomBorderColorCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( - VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, - uint32_t generalShader_ = {}, - uint32_t closestHitShader_ = {}, - uint32_t anyHitShader_ = {}, - uint32_t intersectionShader_ = {}, - const void * pShaderGroupCaptureReplayHandle_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , type( type_ ) - , generalShader( generalShader_ ) - , closestHitShader( closestHitShader_ ) - , anyHitShader( anyHitShader_ ) - , intersectionShader( intersectionShader_ ) - , pShaderGroupCaptureReplayHandle( pShaderGroupCaptureReplayHandle_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT( VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , customBorderColor{ customBorderColor_ } + , format{ format_ } { } - VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - RayTracingShaderGroupCreateInfoKHR( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : RayTracingShaderGroupCreateInfoKHR( *reinterpret_cast( &rhs ) ) + SamplerCustomBorderColorCreateInfoEXT( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerCustomBorderColorCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - RayTracingShaderGroupCreateInfoKHR & operator=( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SamplerCustomBorderColorCreateInfoEXT & operator=( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - RayTracingShaderGroupCreateInfoKHR & operator=( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + SamplerCustomBorderColorCreateInfoEXT & operator=( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & + setCustomBorderColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & customBorderColor_ ) VULKAN_HPP_NOEXCEPT { - type = type_; + customBorderColor = customBorderColor_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
SamplerCustomBorderColorCreateInfoEXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT { - generalShader = generalShader_; + format = format_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT + operator VkSamplerCustomBorderColorCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - closestHitShader = closestHitShader_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT + operator VkSamplerCustomBorderColorCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - anyHitShader = anyHitShader_; + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, customBorderColor, format ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCustomBorderColorCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + }; + + template <> + struct CppType + { + using Type = SamplerCustomBorderColorCreateInfoEXT; + }; + + struct SamplerReductionModeCreateInfo + { + using NativeType = VkSamplerReductionModeCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerReductionModeCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SamplerReductionModeCreateInfo( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , reductionMode{ reductionMode_ } + { + } + + VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerReductionModeCreateInfo( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerReductionModeCreateInfo( *reinterpret_cast( &rhs ) ) + { + } + + SamplerReductionModeCreateInfo & operator=( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SamplerReductionModeCreateInfo & operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - intersectionShader = intersectionShader_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & - setPShaderGroupCaptureReplayHandle( const void * pShaderGroupCaptureReplayHandle_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & setReductionMode( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ ) VULKAN_HPP_NOEXCEPT { - pShaderGroupCaptureReplayHandle = pShaderGroupCaptureReplayHandle_; + reductionMode = 
reductionMode_;
       return *this;
     }
-#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+#endif /*VULKAN_HPP_NO_SETTERS*/

-    operator VkRayTracingShaderGroupCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    operator VkSamplerReductionModeCreateInfo const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkRayTracingShaderGroupCreateInfoKHR *>( this );
+      return *reinterpret_cast<const VkSamplerReductionModeCreateInfo *>( this );
     }

-    operator VkRayTracingShaderGroupCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    operator VkSamplerReductionModeCreateInfo &() VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkRayTracingShaderGroupCreateInfoKHR *>( this );
+      return *reinterpret_cast<VkSamplerReductionModeCreateInfo *>( this );
     }

 #if defined( VULKAN_HPP_USE_REFLECT )
 # if 14 <= VULKAN_HPP_CPP_VERSION
     auto
 # else
-    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, const void * const &>
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SamplerReductionMode const &>
 # endif
     reflect() const VULKAN_HPP_NOEXCEPT
     {
-      return std::tie( sType, pNext, type, generalShader, closestHitShader, anyHitShader, intersectionShader, pShaderGroupCaptureReplayHandle );
+      return std::tie( sType, pNext, reductionMode );
     }
 #endif

 #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
-    auto operator<=>( RayTracingShaderGroupCreateInfoKHR const & ) const = default;
+    auto operator<=>( SamplerReductionModeCreateInfo const & ) const = default;
 #else
-    bool operator==( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
 # if defined( VULKAN_HPP_USE_REFLECT )
       return this->reflect() == rhs.reflect();
 # else
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( generalShader == rhs.generalShader ) &&
-             ( closestHitShader == rhs.closestHitShader ) && ( anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader ) &&
-             ( pShaderGroupCaptureReplayHandle == rhs.pShaderGroupCaptureReplayHandle );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( reductionMode == rhs.reductionMode );
 # endif
     }

-    bool operator!=( RayTracingShaderGroupCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
     {
       return !operator==( rhs );
     }
 #endif

   public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoKHR;
-    const void * pNext = {};
-    VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral;
-    uint32_t generalShader = {};
-    uint32_t closestHitShader = {};
-    uint32_t anyHitShader = {};
-    uint32_t intersectionShader = {};
-    const void * pShaderGroupCaptureReplayHandle = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage;
   };

   template <>
-  struct CppType<StructureType, StructureType::eRayTracingShaderGroupCreateInfoKHR>
+  struct CppType<StructureType, StructureType::eSamplerReductionModeCreateInfo>
   {
-    using Type = RayTracingShaderGroupCreateInfoKHR;
+    using Type = SamplerReductionModeCreateInfo;
   };

-  struct RayTracingPipelineInterfaceCreateInfoKHR
+  using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo;
+
+  struct SamplerYcbcrConversionCreateInfo
   {
-    using NativeType = VkRayTracingPipelineInterfaceCreateInfoKHR;
+    using NativeType = VkSamplerYcbcrConversionCreateInfo;

     static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionCreateInfo;

-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
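As an aside on the sampler structures introduced above: SamplerCreateInfo, SamplerCustomBorderColorCreateInfoEXT and SamplerReductionModeCreateInfo all follow the generated pattern of defaulted members, chainable set*() methods, and a pNext pointer for extension chaining. The snippet below is only an illustrative sketch of how client code might combine them, not part of the generated header; the helper name makeClampedSampler is made up, and it assumes an exceptions-enabled vulkan.hpp build, a valid vk::Device, and that the custom-border-color and filter-minmax features are enabled on that device.

#include <array>
#include <vulkan/vulkan.hpp>

// Hypothetical helper: build a border-clamped sampler from the structs defined above.
vk::Sampler makeClampedSampler( vk::Device device )
{
  // Min/max filtering reduction (core in Vulkan 1.2; the EXT alias above maps to the same struct).
  vk::SamplerReductionModeCreateInfo reductionInfo{};
  reductionInfo.setReductionMode( vk::SamplerReductionMode::eMax );

  // Custom border color, with the reduction-mode struct chained behind it via setPNext().
  vk::SamplerCustomBorderColorCreateInfoEXT customBorder{};
  customBorder.setCustomBorderColor( vk::ClearColorValue( std::array<float, 4>{ 1.0f, 0.0f, 0.0f, 1.0f } ) )
    .setFormat( vk::Format::eR8G8B8A8Unorm )
    .setPNext( &reductionInfo );

  // Base create info; any member not set keeps the defaulted value shown in the struct definition.
  vk::SamplerCreateInfo samplerInfo{};
  samplerInfo.setMagFilter( vk::Filter::eLinear )
    .setMinFilter( vk::Filter::eLinear )
    .setMipmapMode( vk::SamplerMipmapMode::eLinear )
    .setAddressModeU( vk::SamplerAddressMode::eClampToBorder )
    .setAddressModeV( vk::SamplerAddressMode::eClampToBorder )
    .setAddressModeW( vk::SamplerAddressMode::eClampToBorder )
    .setBorderColor( vk::BorderColor::eFloatCustomEXT )  // selects the custom border color chained in pNext
    .setMaxLod( VK_LOD_CLAMP_NONE )
    .setPNext( &customBorder );

  // createSampler() is the usual vulkan.hpp entry point; error handling is omitted in this sketch.
  return device.createSampler( samplerInfo );
}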
- VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( uint32_t maxPipelineRayPayloadSize_ = {}, - uint32_t maxPipelineRayHitAttributeSize_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxPipelineRayPayloadSize( maxPipelineRayPayloadSize_ ) - , maxPipelineRayHitAttributeSize( maxPipelineRayHitAttributeSize_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, + VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, + VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, + VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , format{ format_ } + , ycbcrModel{ ycbcrModel_ } + , ycbcrRange{ ycbcrRange_ } + , components{ components_ } + , xChromaOffset{ xChromaOffset_ } + , yChromaOffset{ yChromaOffset_ } + , chromaFilter{ chromaFilter_ } + , forceExplicitReconstruction{ forceExplicitReconstruction_ } { } - VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - RayTracingPipelineInterfaceCreateInfoKHR( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : RayTracingPipelineInterfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) + SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerYcbcrConversionCreateInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - RayTracingPipelineInterfaceCreateInfoKHR & operator=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SamplerYcbcrConversionCreateInfo & operator=( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - RayTracingPipelineInterfaceCreateInfoKHR & operator=( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + SamplerYcbcrConversionCreateInfo & operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & setMaxPipelineRayPayloadSize( uint32_t maxPipelineRayPayloadSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
SamplerYcbcrConversionCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT { - maxPipelineRayPayloadSize = maxPipelineRayPayloadSize_; + format = format_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & - setMaxPipelineRayHitAttributeSize( uint32_t maxPipelineRayHitAttributeSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & + setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT { - maxPipelineRayHitAttributeSize = maxPipelineRayHitAttributeSize_; + ycbcrModel = ycbcrModel_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkRayTracingPipelineInterfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + ycbcrRange = ycbcrRange_; + return *this; } - operator VkRayTracingPipelineInterfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + components = components_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT + { + xChromaOffset = xChromaOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT + { + yChromaOffset = yChromaOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT + { + chromaFilter = chromaFilter_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & + setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT + { + forceExplicitReconstruction = forceExplicitReconstruction_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSamplerYcbcrConversionCreateInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, maxPipelineRayPayloadSize, maxPipelineRayHitAttributeSize ); + return std::tie( sType, pNext, format, ycbcrModel, ycbcrRange, components, xChromaOffset, yChromaOffset, chromaFilter, forceExplicitReconstruction ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RayTracingPipelineInterfaceCreateInfoKHR const & ) const = default; + auto operator<=>( SamplerYcbcrConversionCreateInfo const & ) const = default; #else - bool operator==( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPipelineRayPayloadSize == rhs.maxPipelineRayPayloadSize ) && - ( 
maxPipelineRayHitAttributeSize == rhs.maxPipelineRayHitAttributeSize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( ycbcrModel == rhs.ycbcrModel ) && + ( ycbcrRange == rhs.ycbcrRange ) && ( components == rhs.components ) && ( xChromaOffset == rhs.xChromaOffset ) && + ( yChromaOffset == rhs.yChromaOffset ) && ( chromaFilter == rhs.chromaFilter ) && + ( forceExplicitReconstruction == rhs.forceExplicitReconstruction ); # endif } - bool operator!=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR; - const void * pNext = {}; - uint32_t maxPipelineRayPayloadSize = {}; - uint32_t maxPipelineRayHitAttributeSize = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; + VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; + VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; + VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {}; }; template <> - struct CppType + struct CppType { - using Type = RayTracingPipelineInterfaceCreateInfoKHR; + using Type = SamplerYcbcrConversionCreateInfo; }; - struct RayTracingPipelineCreateInfoKHR + using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo; + + struct SamplerYcbcrConversionImageFormatProperties { - using NativeType = VkRayTracingPipelineCreateInfoKHR; + using NativeType = VkSamplerYcbcrConversionImageFormatProperties; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionImageFormatProperties; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, - uint32_t stageCount_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, - uint32_t groupCount_ = {}, - const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ = {}, - uint32_t maxPipelineRayRecursionDepth_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, - const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, - int32_t basePipelineIndex_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , stageCount( stageCount_ ) - , pStages( pStages_ ) - , groupCount( 
groupCount_ ) - , pGroups( pGroups_ ) - , maxPipelineRayRecursionDepth( maxPipelineRayRecursionDepth_ ) - , pLibraryInfo( pLibraryInfo_ ) - , pLibraryInterface( pLibraryInterface_ ) - , pDynamicState( pDynamicState_ ) - , layout( layout_ ) - , basePipelineHandle( basePipelineHandle_ ) - , basePipelineIndex( basePipelineIndex_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( uint32_t combinedImageSamplerDescriptorCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , combinedImageSamplerDescriptorCount{ combinedImageSamplerDescriptorCount_ } { } - VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - RayTracingPipelineCreateInfoKHR( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : RayTracingPipelineCreateInfoKHR( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RayTracingPipelineCreateInfoKHR( - VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ = {}, - uint32_t maxPipelineRayRecursionDepth_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, - const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, - int32_t basePipelineIndex_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , stageCount( static_cast( stages_.size() ) ) - , pStages( stages_.data() ) - , groupCount( static_cast( groups_.size() ) ) - , pGroups( groups_.data() ) - , maxPipelineRayRecursionDepth( maxPipelineRayRecursionDepth_ ) - , pLibraryInfo( pLibraryInfo_ ) - , pLibraryInterface( pLibraryInterface_ ) - , pDynamicState( pDynamicState_ ) - , layout( layout_ ) - , basePipelineHandle( basePipelineHandle_ ) - , basePipelineIndex( basePipelineIndex_ ) + SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerYcbcrConversionImageFormatProperties( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - RayTracingPipelineCreateInfoKHR & operator=( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - RayTracingPipelineCreateInfoKHR & operator=( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + SamplerYcbcrConversionImageFormatProperties & operator=( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + SamplerYcbcrConversionImageFormatProperties & operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + *this = *reinterpret_cast( &rhs 
); return *this; } - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + operator VkSamplerYcbcrConversionImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT { - flags = flags_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + operator VkSamplerYcbcrConversionImageFormatProperties &() VULKAN_HPP_NOEXCEPT { - stageCount = stageCount_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & - setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - pStages = pStages_; - return *this; + return std::tie( sType, pNext, combinedImageSamplerDescriptorCount ); } +#endif -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RayTracingPipelineCreateInfoKHR & - setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SamplerYcbcrConversionImageFormatProperties const & ) const = default; +#else + bool operator==( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - stageCount = static_cast( stages_.size() ); - pStages = stages_.data(); - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount ); +# endif } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - groupCount = groupCount_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & - setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups_ ) VULKAN_HPP_NOEXCEPT - { - pGroups = pGroups_; - return *this; - } + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties; + void * pNext = {}; + uint32_t combinedImageSamplerDescriptorCount = {}; + }; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RayTracingPipelineCreateInfoKHR & setGroups( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ ) VULKAN_HPP_NOEXCEPT - { - groupCount = static_cast( groups_.size() ); - pGroups = groups_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template <> + struct CppType + { + using Type = SamplerYcbcrConversionImageFormatProperties; + }; - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setMaxPipelineRayRecursionDepth( uint32_t maxPipelineRayRecursionDepth_ ) VULKAN_HPP_NOEXCEPT - { - maxPipelineRayRecursionDepth = maxPipelineRayRecursionDepth_; - return *this; - } + using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties; - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & - setPLibraryInfo( const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ ) VULKAN_HPP_NOEXCEPT - { - pLibraryInfo = 
pLibraryInfo_; - return *this; - } + struct SamplerYcbcrConversionInfo + { + using NativeType = VkSamplerYcbcrConversionInfo; - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & - setPLibraryInterface( const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface_ ) VULKAN_HPP_NOEXCEPT + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , conversion{ conversion_ } { - pLibraryInterface = pLibraryInterface_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & - setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerYcbcrConversionInfo( *reinterpret_cast( &rhs ) ) { - pDynamicState = pDynamicState_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + SamplerYcbcrConversionInfo & operator=( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SamplerYcbcrConversionInfo & operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - layout = layout_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - basePipelineHandle = basePipelineHandle_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & setConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT { - basePipelineIndex = basePipelineIndex_; + conversion = conversion_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkRayTracingPipelineCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkSamplerYcbcrConversionInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkRayTracingPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkSamplerYcbcrConversionInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - flags, - stageCount, - pStages, - groupCount, - pGroups, - maxPipelineRayRecursionDepth, - pLibraryInfo, - pLibraryInterface, - pDynamicState, - layout, - basePipelineHandle, - basePipelineIndex ); + return 
std::tie( sType, pNext, conversion ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RayTracingPipelineCreateInfoKHR const & ) const = default; + auto operator<=>( SamplerYcbcrConversionInfo const & ) const = default; #else - bool operator==( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && - ( groupCount == rhs.groupCount ) && ( pGroups == rhs.pGroups ) && ( maxPipelineRayRecursionDepth == rhs.maxPipelineRayRecursionDepth ) && - ( pLibraryInfo == rhs.pLibraryInfo ) && ( pLibraryInterface == rhs.pLibraryInterface ) && ( pDynamicState == rhs.pDynamicState ) && - ( layout == rhs.layout ) && ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conversion == rhs.conversion ); # endif } - bool operator!=( RayTracingPipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; - uint32_t stageCount = {}; - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; - uint32_t groupCount = {}; - const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR * pGroups = {}; - uint32_t maxPipelineRayRecursionDepth = {}; - const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo = {}; - const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR * pLibraryInterface = {}; - const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo * pDynamicState = {}; - VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; - int32_t basePipelineIndex = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion = {}; }; template <> - struct CppType + struct CppType { - using Type = RayTracingPipelineCreateInfoKHR; + using Type = SamplerYcbcrConversionInfo; }; - struct RayTracingShaderGroupCreateInfoNV + using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo; + + struct SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM { - using NativeType = VkRayTracingShaderGroupCreateInfoNV; + using NativeType = VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( - VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, - uint32_t generalShader_ = {}, - uint32_t closestHitShader_ = {}, - uint32_t anyHitShader_ = {}, - uint32_t intersectionShader_ = {}, - const void 
* pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , type( type_ ) - , generalShader( generalShader_ ) - , closestHitShader( closestHitShader_ ) - , anyHitShader( anyHitShader_ ) - , intersectionShader( intersectionShader_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM( VULKAN_HPP_NAMESPACE::Bool32 enableYDegamma_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 enableCbCrDegamma_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , enableYDegamma{ enableYDegamma_ } + , enableCbCrDegamma{ enableCbCrDegamma_ } { } - VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; - RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : RayTracingShaderGroupCreateInfoNV( *reinterpret_cast( &rhs ) ) + SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM( VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - RayTracingShaderGroupCreateInfoNV & operator=( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & operator=( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - RayTracingShaderGroupCreateInfoNV & operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & operator=( VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT - { - generalShader = generalShader_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT - { - closestHitShader = closestHitShader_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & + setEnableYDegamma( VULKAN_HPP_NAMESPACE::Bool32 enableYDegamma_ ) VULKAN_HPP_NOEXCEPT { - anyHitShader = anyHitShader_; + enableYDegamma = enableYDegamma_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & setIntersectionShader( uint32_t 
intersectionShader_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM & + setEnableCbCrDegamma( VULKAN_HPP_NAMESPACE::Bool32 enableCbCrDegamma_ ) VULKAN_HPP_NOEXCEPT { - intersectionShader = intersectionShader_; + enableCbCrDegamma = enableCbCrDegamma_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkRayTracingShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + operator VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT + operator VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, type, generalShader, closestHitShader, anyHitShader, intersectionShader ); + return std::tie( sType, pNext, enableYDegamma, enableCbCrDegamma ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RayTracingShaderGroupCreateInfoNV const & ) const = default; + auto operator<=>( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & ) const = default; #else - bool operator==( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( generalShader == rhs.generalShader ) && - ( closestHitShader == rhs.closestHitShader ) && ( anyHitShader == rhs.anyHitShader ) && ( intersectionShader == rhs.intersectionShader ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( enableYDegamma == rhs.enableYDegamma ) && ( enableCbCrDegamma == rhs.enableCbCrDegamma ); # endif } - bool operator!=( RayTracingShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral; - uint32_t generalShader = {}; - uint32_t closestHitShader = {}; - uint32_t anyHitShader = {}; - uint32_t intersectionShader = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 enableYDegamma = {}; + VULKAN_HPP_NAMESPACE::Bool32 enableCbCrDegamma = {}; }; template <> - struct CppType + struct CppType { - using Type = RayTracingShaderGroupCreateInfoNV; + using Type = SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM; }; - struct RayTracingPipelineCreateInfoNV +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + struct ScreenBufferFormatPropertiesQNX { - using NativeType = VkRayTracingPipelineCreateInfoNV; + using NativeType = VkScreenBufferFormatPropertiesQNX; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType 
structureType = StructureType::eRayTracingPipelineCreateInfoNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eScreenBufferFormatPropertiesQNX; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, - uint32_t stageCount_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, - uint32_t groupCount_ = {}, - const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ = {}, - uint32_t maxRecursionDepth_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, - int32_t basePipelineIndex_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , stageCount( stageCount_ ) - , pStages( pStages_ ) - , groupCount( groupCount_ ) - , pGroups( pGroups_ ) - , maxRecursionDepth( maxRecursionDepth_ ) - , layout( layout_ ) - , basePipelineHandle( basePipelineHandle_ ) - , basePipelineIndex( basePipelineIndex_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ScreenBufferFormatPropertiesQNX( + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint64_t externalFormat_ = {}, + uint64_t screenUsage_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , format{ format_ } + , externalFormat{ externalFormat_ } + , screenUsage{ screenUsage_ } + , formatFeatures{ formatFeatures_ } + , samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ } + , suggestedYcbcrModel{ suggestedYcbcrModel_ } + , suggestedYcbcrRange{ suggestedYcbcrRange_ } + , suggestedXChromaOffset{ suggestedXChromaOffset_ } + , suggestedYChromaOffset{ suggestedYChromaOffset_ } { } - VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ScreenBufferFormatPropertiesQNX( ScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; - RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT - : RayTracingPipelineCreateInfoNV( *reinterpret_cast( &rhs ) ) + ScreenBufferFormatPropertiesQNX( VkScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT + : ScreenBufferFormatPropertiesQNX( *reinterpret_cast( &rhs ) ) { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RayTracingPipelineCreateInfoNV( - VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ = {}, - uint32_t maxRecursionDepth_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, - int32_t 
basePipelineIndex_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , stageCount( static_cast( stages_.size() ) ) - , pStages( stages_.data() ) - , groupCount( static_cast( groups_.size() ) ) - , pGroups( groups_.data() ) - , maxRecursionDepth( maxRecursionDepth_ ) - , layout( layout_ ) - , basePipelineHandle( basePipelineHandle_ ) - , basePipelineIndex( basePipelineIndex_ ) + ScreenBufferFormatPropertiesQNX & operator=( ScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ScreenBufferFormatPropertiesQNX & operator=( VkScreenBufferFormatPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - RayTracingPipelineCreateInfoNV & operator=( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + operator VkScreenBufferFormatPropertiesQNX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } - RayTracingPipelineCreateInfoNV & operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkScreenBufferFormatPropertiesQNX &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); - return *this; + return *reinterpret_cast( this ); } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return std::tie( sType, + pNext, + format, + externalFormat, + screenUsage, + formatFeatures, + samplerYcbcrConversionComponents, + suggestedYcbcrModel, + suggestedYcbcrRange, + suggestedXChromaOffset, + suggestedYChromaOffset ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ScreenBufferFormatPropertiesQNX const & ) const = default; +# else + bool operator==( ScreenBufferFormatPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( externalFormat == rhs.externalFormat ) && + ( screenUsage == rhs.screenUsage ) && ( formatFeatures == rhs.formatFeatures ) && + ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) && ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && + ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) && ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && + ( suggestedYChromaOffset == rhs.suggestedYChromaOffset ); +# endif } - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( ScreenBufferFormatPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT { - flags = flags_; - return *this; + return !operator==( rhs ); } +# endif - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eScreenBufferFormatPropertiesQNX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint64_t externalFormat = {}; + uint64_t screenUsage = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags 
formatFeatures = {}; + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {}; + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + }; + + template <> + struct CppType + { + using Type = ScreenBufferFormatPropertiesQNX; + }; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + struct ScreenBufferPropertiesQNX + { + using NativeType = VkScreenBufferPropertiesQNX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eScreenBufferPropertiesQNX; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ScreenBufferPropertiesQNX( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, + uint32_t memoryTypeBits_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , allocationSize{ allocationSize_ } + , memoryTypeBits{ memoryTypeBits_ } { - stageCount = stageCount_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & - setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ScreenBufferPropertiesQNX( ScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ScreenBufferPropertiesQNX( VkScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT + : ScreenBufferPropertiesQNX( *reinterpret_cast( &rhs ) ) { - pStages = pStages_; - return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RayTracingPipelineCreateInfoNV & - setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT + ScreenBufferPropertiesQNX & operator=( ScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ScreenBufferPropertiesQNX & operator=( VkScreenBufferPropertiesQNX const & rhs ) VULKAN_HPP_NOEXCEPT { - stageCount = static_cast( stages_.size() ); - pStages = stages_.data(); + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT + operator VkScreenBufferPropertiesQNX const &() const VULKAN_HPP_NOEXCEPT { - groupCount = groupCount_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & - setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups_ ) VULKAN_HPP_NOEXCEPT + operator VkScreenBufferPropertiesQNX &() VULKAN_HPP_NOEXCEPT { - pGroups = pGroups_; - return *this; + return *reinterpret_cast( this ); } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RayTracingPipelineCreateInfoNV & setGroups( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & groups_ ) VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - groupCount = static_cast( groups_.size() ); - pGroups = groups_.data(); + return std::tie( sType, 
pNext, allocationSize, memoryTypeBits ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ScreenBufferPropertiesQNX const & ) const = default; +# else + bool operator==( ScreenBufferPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && ( memoryTypeBits == rhs.memoryTypeBits ); +# endif + } + + bool operator!=( ScreenBufferPropertiesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eScreenBufferPropertiesQNX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {}; + uint32_t memoryTypeBits = {}; + }; + + template <> + struct CppType + { + using Type = ScreenBufferPropertiesQNX; + }; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + struct ScreenSurfaceCreateInfoQNX + { + using NativeType = VkScreenSurfaceCreateInfoQNX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eScreenSurfaceCreateInfoQNX; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ = {}, + struct _screen_context * context_ = {}, + struct _screen_window * window_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , context{ context_ } + , window{ window_ } + { + } + + VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ScreenSurfaceCreateInfoQNX( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT + : ScreenSurfaceCreateInfoQNX( *reinterpret_cast( &rhs ) ) + { + } + + ScreenSurfaceCreateInfoQNX & operator=( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ScreenSurfaceCreateInfoQNX & operator=( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - maxRecursionDepth = maxRecursionDepth_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setFlags( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ ) VULKAN_HPP_NOEXCEPT { - layout = layout_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setContext( struct _screen_context * context_ ) VULKAN_HPP_NOEXCEPT { - basePipelineHandle = basePipelineHandle_; + context = context_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & 
setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setWindow( struct _screen_window * window_ ) VULKAN_HPP_NOEXCEPT { - basePipelineIndex = basePipelineIndex_; + window = window_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkRayTracingPipelineCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + operator VkScreenSurfaceCreateInfoQNX const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT + operator VkScreenSurfaceCreateInfoQNX &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX const &, + struct _screen_context * const &, + struct _screen_window * const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, stageCount, pStages, groupCount, pGroups, maxRecursionDepth, layout, basePipelineHandle, basePipelineIndex ); + return std::tie( sType, pNext, flags, context, window ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RayTracingPipelineCreateInfoNV const & ) const = default; -#else - bool operator==( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ScreenSurfaceCreateInfoQNX const & ) const = default; +# else + bool operator==( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && - ( groupCount == rhs.groupCount ) && ( pGroups == rhs.pGroups ) && ( maxRecursionDepth == rhs.maxRecursionDepth ) && ( layout == rhs.layout ) && - ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( context == rhs.context ) && ( window == rhs.window ); +# endif } - bool operator!=( RayTracingPipelineCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; - uint32_t stageCount = {}; - const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; - uint32_t groupCount = {}; - const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV * pGroups = {}; - uint32_t maxRecursionDepth = {}; - VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; - VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; - int32_t basePipelineIndex = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eScreenSurfaceCreateInfoQNX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags = {}; + struct _screen_context * context = {}; + 
struct _screen_window * window = {};
  };

  template <>
-  struct CppType<StructureType, StructureType::eRayTracingPipelineCreateInfoNV>
+  struct CppType<StructureType, StructureType::eScreenSurfaceCreateInfoQNX>
  {
-    using Type = RayTracingPipelineCreateInfoNV;
+    using Type = ScreenSurfaceCreateInfoQNX;
  };
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/

-  struct RefreshCycleDurationGOOGLE
+  struct SemaphoreCreateInfo
  {
-    using NativeType = VkRefreshCycleDurationGOOGLE;
+    using NativeType = VkSemaphoreCreateInfo;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreCreateInfo;

-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( uint64_t refreshDuration_ = {} ) VULKAN_HPP_NOEXCEPT : refreshDuration( refreshDuration_ ) {}
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , flags{ flags_ }
+    {
+    }

-    VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-    RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
-      : RefreshCycleDurationGOOGLE( *reinterpret_cast<RefreshCycleDurationGOOGLE const *>( &rhs ) )
+    SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreCreateInfo( *reinterpret_cast<SemaphoreCreateInfo const *>( &rhs ) )
    {
    }
-#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

-    RefreshCycleDurationGOOGLE & operator=( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    SemaphoreCreateInfo & operator=( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

-    RefreshCycleDurationGOOGLE & operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    SemaphoreCreateInfo & operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const *>( &rhs );
      return *this;
    }

-    operator VkRefreshCycleDurationGOOGLE const &() const VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<const VkRefreshCycleDurationGOOGLE *>( this );
+      pNext = pNext_;
+      return *this;
    }

-    operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_SETTERS*/
+
+    operator VkSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSemaphoreCreateInfo *>( this );
+    }
+
+    operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( this );
+      return *reinterpret_cast<VkSemaphoreCreateInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
-    std::tuple<uint64_t const &>
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
-      return std::tie( refreshDuration );
+      return std::tie( sType, pNext, flags );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
-    auto operator<=>( RefreshCycleDurationGOOGLE const & ) const = default;
+    auto operator<=>( SemaphoreCreateInfo const & ) const = default;
#else
-    bool operator==( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
-      return ( refreshDuration == rhs.refreshDuration );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
# endif
    }

-    bool operator!=( RefreshCycleDurationGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
-    uint64_t refreshDuration = {};
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {};
  };

-  struct RenderPassAttachmentBeginInfo
+  template <>
+  struct CppType<StructureType, StructureType::eSemaphoreCreateInfo>
  {
-    using NativeType = VkRenderPassAttachmentBeginInfo;
+    using Type = SemaphoreCreateInfo;
+  };
+
+  struct SemaphoreGetFdInfoKHR
+  {
+    using NativeType = VkSemaphoreGetFdInfoKHR;

    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassAttachmentBeginInfo;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetFdInfoKHR;

-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( uint32_t attachmentCount_ = {},
-                                                        const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {},
-                                                        const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
-      : pNext( pNext_ )
-      , attachmentCount( attachmentCount_ )
-      , pAttachments( pAttachments_ )
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR(
+      VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd,
+      const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , semaphore{ semaphore_ }
+      , handleType{ handleType_ }
    {
    }

-    VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    RenderPassAttachmentBeginInfo( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
-      : RenderPassAttachmentBeginInfo( *reinterpret_cast<RenderPassAttachmentBeginInfo const *>( &rhs ) )
-    {
-    }
+    VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-#  if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
-    RenderPassAttachmentBeginInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_,
-                                   const void * pNext_ = nullptr )
-      : pNext( pNext_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() )
+    SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SemaphoreGetFdInfoKHR( *reinterpret_cast<SemaphoreGetFdInfoKHR const *>( &rhs ) )
    {
    }
-#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

-    RenderPassAttachmentBeginInfo & operator=( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    SemaphoreGetFdInfoKHR & operator=( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

-    RenderPassAttachmentBeginInfo & operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    SemaphoreGetFdInfoKHR & operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR const *>( &rhs );
      return *this;
    }

-#if !defined(
VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT - { - attachmentCount = attachmentCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT { - pAttachments = pAttachments_; + semaphore = semaphore_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderPassAttachmentBeginInfo & - setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { - attachmentCount = static_cast( attachments_.size() ); - pAttachments = attachments_.data(); + handleType = handleType_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkRenderPassAttachmentBeginInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkSemaphoreGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkRenderPassAttachmentBeginInfo &() VULKAN_HPP_NOEXCEPT + operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, attachmentCount, pAttachments ); + return std::tie( sType, pNext, semaphore, handleType ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RenderPassAttachmentBeginInfo const & ) const = default; + auto operator<=>( SemaphoreGetFdInfoKHR const & ) const = default; #else - bool operator==( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( handleType == rhs.handleType ); # endif } - bool operator!=( RenderPassAttachmentBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassAttachmentBeginInfo; - const void * pNext = {}; - uint32_t attachmentCount = {}; - const VULKAN_HPP_NAMESPACE::ImageView * pAttachments = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore 
semaphore = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; }; template <> - struct CppType + struct CppType { - using Type = RenderPassAttachmentBeginInfo; + using Type = SemaphoreGetFdInfoKHR; }; - using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo; - struct RenderPassBeginInfo +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct SemaphoreGetWin32HandleInfoKHR { - using NativeType = VkRenderPassBeginInfo; + using NativeType = VkSemaphoreGetWin32HandleInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassBeginInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetWin32HandleInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, - VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, - VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, - uint32_t clearValueCount_ = {}, - const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , renderPass( renderPass_ ) - , framebuffer( framebuffer_ ) - , renderArea( renderArea_ ) - , clearValueCount( clearValueCount_ ) - , pClearValues( pClearValues_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( + VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , handleType{ handleType_ } { } - VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT : RenderPassBeginInfo( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_, - VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_, - VULKAN_HPP_NAMESPACE::Rect2D renderArea_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & clearValues_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , renderPass( renderPass_ ) - , framebuffer( framebuffer_ ) - , renderArea( renderArea_ ) - , clearValueCount( static_cast( clearValues_.size() ) ) - , pClearValues( clearValues_.data() ) + SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreGetWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - RenderPassBeginInfo & operator=( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SemaphoreGetWin32HandleInfoKHR & operator=( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - RenderPassBeginInfo & operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SemaphoreGetWin32HandleInfoKHR & operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) 
VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT { - renderPass = renderPass_; + semaphore = semaphore_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { - framebuffer = framebuffer_; + handleType = handleType_; return *this; } +# endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT + operator VkSemaphoreGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - renderArea = renderArea_; + return *reinterpret_cast( this ); + } + + operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, semaphore, handleType ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreGetWin32HandleInfoKHR const & ) const = default; +# else + bool operator==( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( handleType == rhs.handleType ); +# endif + } + + bool operator!=( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + }; + + template <> + struct CppType + { + using Type = SemaphoreGetWin32HandleInfoKHR; + }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + struct SemaphoreGetZirconHandleInfoFUCHSIA + { + using NativeType = VkSemaphoreGetZirconHandleInfoFUCHSIA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA( + VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = 
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , handleType{ handleType_ } + { + } + + VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreGetZirconHandleInfoFUCHSIA( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreGetZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + { + } + + SemaphoreGetZirconHandleInfoFUCHSIA & operator=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SemaphoreGetZirconHandleInfoFUCHSIA & operator=( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - clearValueCount = clearValueCount_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT { - pClearValues = pClearValues_; + semaphore = semaphore_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderPassBeginInfo & - setClearValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & clearValues_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & + setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT { - clearValueCount = static_cast( clearValues_.size() ); - pClearValues = clearValues_.data(); + handleType = handleType_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkSemaphoreGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT + operator VkSemaphoreGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + VULKAN_HPP_NAMESPACE::Semaphore const &, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, renderPass, framebuffer, renderArea, clearValueCount, pClearValues ); + return std::tie( sType, pNext, semaphore, handleType ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RenderPassBeginInfo const & ) const = default; -#else - bool operator==( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreGetZirconHandleInfoFUCHSIA const & ) 
const = default; +# else + bool operator==( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && ( framebuffer == rhs.framebuffer ) && - ( renderArea == rhs.renderArea ) && ( clearValueCount == rhs.clearValueCount ) && ( pClearValues == rhs.pClearValues ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( handleType == rhs.handleType ); +# endif } - bool operator!=( RenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; - VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {}; - VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; - uint32_t clearValueCount = {}; - const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; }; template <> - struct CppType + struct CppType { - using Type = RenderPassBeginInfo; + using Type = SemaphoreGetZirconHandleInfoFUCHSIA; }; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - struct SubpassDescription + struct SemaphoreSignalInfo { - using NativeType = VkSubpassDescription; + using NativeType = VkSemaphoreSignalInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, - uint32_t inputAttachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ = {}, - uint32_t colorAttachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {}, - uint32_t preserveAttachmentCount_ = {}, - const uint32_t * pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , inputAttachmentCount( inputAttachmentCount_ ) - , pInputAttachments( pInputAttachments_ ) - , colorAttachmentCount( colorAttachmentCount_ ) - , pColorAttachments( pColorAttachments_ ) - , pResolveAttachments( pResolveAttachments_ ) - , pDepthStencilAttachment( pDepthStencilAttachment_ ) - , preserveAttachmentCount( preserveAttachmentCount_ ) - , pPreserveAttachments( pPreserveAttachments_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSignalInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SemaphoreSignalInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, uint64_t value_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : 
pNext{ pNext_ } + , semaphore{ semaphore_ } + , value{ value_ } { } - VULKAN_HPP_CONSTEXPR SubpassDescription( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDescription( *reinterpret_cast( &rhs ) ) {} + VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & inputAttachments_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_ = {}, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & resolveAttachments_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {}, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ = {} ) - : flags( flags_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , inputAttachmentCount( static_cast( inputAttachments_.size() ) ) - , pInputAttachments( inputAttachments_.data() ) - , colorAttachmentCount( static_cast( colorAttachments_.size() ) ) - , pColorAttachments( colorAttachments_.data() ) - , pResolveAttachments( resolveAttachments_.data() ) - , pDepthStencilAttachment( pDepthStencilAttachment_ ) - , preserveAttachmentCount( static_cast( preserveAttachments_.size() ) ) - , pPreserveAttachments( preserveAttachments_.data() ) + SemaphoreSignalInfo( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreSignalInfo( *reinterpret_cast( &rhs ) ) { -# ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) ); -# else - if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) ) - { - throw LogicError( - VULKAN_HPP_NAMESPACE_STRING - "::SubpassDescription::SubpassDescription: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" ); - } -# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SubpassDescription & operator=( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SemaphoreSignalInfo & operator=( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SubpassDescription & operator=( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT + SemaphoreSignalInfo & operator=( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT { - pipelineBindPoint = pipelineBindPoint_; + semaphore = semaphore_; return *this; } 
- VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT { - inputAttachmentCount = inputAttachmentCount_; + value = value_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 SubpassDescription & - setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT + operator VkSemaphoreSignalInfo const &() const VULKAN_HPP_NOEXCEPT { - pInputAttachments = pInputAttachments_; - return *this; + return *reinterpret_cast( this ); } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SubpassDescription & setInputAttachments( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT + operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT { - inputAttachmentCount = static_cast( inputAttachments_.size() ); - pInputAttachments = inputAttachments_.data(); - return *this; + return *reinterpret_cast( this ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - colorAttachmentCount = colorAttachmentCount_; - return *this; + return std::tie( sType, pNext, semaphore, value ); } +#endif - VULKAN_HPP_CONSTEXPR_14 SubpassDescription & - setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SemaphoreSignalInfo const & ) const = default; +#else + bool operator==( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - pColorAttachments = pColorAttachments_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( value == rhs.value ); +# endif } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SubpassDescription & setColorAttachments( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - colorAttachmentCount = static_cast( colorAttachments_.size() ); - pColorAttachments = colorAttachments_.data(); - return *this; + return !operator==( rhs ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif - VULKAN_HPP_CONSTEXPR_14 SubpassDescription & - setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSignalInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + uint64_t value = {}; + }; + + template <> + struct CppType + { + using Type = SemaphoreSignalInfo; + }; + + using SemaphoreSignalInfoKHR = SemaphoreSignalInfo; + + struct SemaphoreTypeCreateInfo + { + using NativeType = VkSemaphoreTypeCreateInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreTypeCreateInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( 
VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary, + uint64_t initialValue_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , semaphoreType{ semaphoreType_ } + , initialValue{ initialValue_ } { - pResolveAttachments = pResolveAttachments_; - return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SubpassDescription & setResolveAttachments( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SemaphoreTypeCreateInfo( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : SemaphoreTypeCreateInfo( *reinterpret_cast( &rhs ) ) { - colorAttachmentCount = static_cast( resolveAttachments_.size() ); - pResolveAttachments = resolveAttachments_.data(); - return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 SubpassDescription & - setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT + SemaphoreTypeCreateInfo & operator=( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SemaphoreTypeCreateInfo & operator=( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - pDepthStencilAttachment = pDepthStencilAttachment_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - preserveAttachmentCount = preserveAttachmentCount_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassDescription & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setSemaphoreType( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ ) VULKAN_HPP_NOEXCEPT { - pPreserveAttachments = pPreserveAttachments_; + semaphoreType = semaphoreType_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SubpassDescription & - setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setInitialValue( uint64_t initialValue_ ) VULKAN_HPP_NOEXCEPT { - preserveAttachmentCount = static_cast( preserveAttachments_.size() ); - pPreserveAttachments = preserveAttachments_.data(); + initialValue = initialValue_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSubpassDescription const &() const VULKAN_HPP_NOEXCEPT + operator VkSemaphoreTypeCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT + operator VkSemaphoreTypeCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( flags, - pipelineBindPoint, - inputAttachmentCount, - 
pInputAttachments, - colorAttachmentCount, - pColorAttachments, - pResolveAttachments, - pDepthStencilAttachment, - preserveAttachmentCount, - pPreserveAttachments ); + return std::tie( sType, pNext, semaphoreType, initialValue ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SubpassDescription const & ) const = default; + auto operator<=>( SemaphoreTypeCreateInfo const & ) const = default; #else - bool operator==( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( inputAttachmentCount == rhs.inputAttachmentCount ) && - ( pInputAttachments == rhs.pInputAttachments ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && - ( pColorAttachments == rhs.pColorAttachments ) && ( pResolveAttachments == rhs.pResolveAttachments ) && - ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) && ( preserveAttachmentCount == rhs.preserveAttachmentCount ) && - ( pPreserveAttachments == rhs.pPreserveAttachments ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphoreType == rhs.semaphoreType ) && ( initialValue == rhs.initialValue ); # endif } - bool operator!=( SubpassDescription const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; - uint32_t inputAttachmentCount = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference * pInputAttachments = {}; - uint32_t colorAttachmentCount = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference * pColorAttachments = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference * pResolveAttachments = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment = {}; - uint32_t preserveAttachmentCount = {}; - const uint32_t * pPreserveAttachments = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreTypeCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary; + uint64_t initialValue = {}; }; - struct SubpassDependency + template <> + struct CppType { - using NativeType = VkSubpassDependency; + using Type = SemaphoreTypeCreateInfo; + }; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SubpassDependency( uint32_t srcSubpass_ = {}, - uint32_t dstSubpass_ = {}, - VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, - VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {} ) VULKAN_HPP_NOEXCEPT - : srcSubpass( srcSubpass_ ) - , dstSubpass( dstSubpass_ ) - , srcStageMask( srcStageMask_ ) - , dstStageMask( dstStageMask_ ) - , srcAccessMask( srcAccessMask_ ) - , dstAccessMask( dstAccessMask_ ) - , dependencyFlags( dependencyFlags_ ) + using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo; + + struct SemaphoreWaitInfo + { + using NativeType = VkSemaphoreWaitInfo; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
StructureType structureType = StructureType::eSemaphoreWaitInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ = {}, + uint32_t semaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ = {}, + const uint64_t * pValues_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , semaphoreCount{ semaphoreCount_ } + , pSemaphores{ pSemaphores_ } + , pValues{ pValues_ } { } - VULKAN_HPP_CONSTEXPR SubpassDependency( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDependency( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + SemaphoreWaitInfo( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreWaitInfo( *reinterpret_cast( &rhs ) ) {} - SubpassDependency & operator=( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & semaphores_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , semaphoreCount( static_cast( semaphores_.size() ) ) + , pSemaphores( semaphores_.data() ) + , pValues( values_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( semaphores_.size() == values_.size() ); +# else + if ( semaphores_.size() != values_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SemaphoreWaitInfo::SemaphoreWaitInfo: semaphores_.size() != values_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - SubpassDependency & operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT + SemaphoreWaitInfo & operator=( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SemaphoreWaitInfo & operator=( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - srcSubpass = srcSubpass_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ ) VULKAN_HPP_NOEXCEPT { - dstSubpass = dstSubpass_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setSemaphoreCount( uint32_t semaphoreCount_ ) VULKAN_HPP_NOEXCEPT { - srcStageMask = srcStageMask_; + semaphoreCount = semaphoreCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags 
dstStageMask_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ ) VULKAN_HPP_NOEXCEPT { - dstStageMask = dstStageMask_; + pSemaphores = pSemaphores_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SemaphoreWaitInfo & + setSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & semaphores_ ) VULKAN_HPP_NOEXCEPT { - srcAccessMask = srcAccessMask_; + semaphoreCount = static_cast( semaphores_.size() ); + pSemaphores = semaphores_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPValues( const uint64_t * pValues_ ) VULKAN_HPP_NOEXCEPT { - dstAccessMask = dstAccessMask_; + pValues = pValues_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassDependency & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SemaphoreWaitInfo & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT { - dependencyFlags = dependencyFlags_; + semaphoreCount = static_cast( values_.size() ); + pValues = values_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSubpassDependency const &() const VULKAN_HPP_NOEXCEPT + operator VkSemaphoreWaitInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT + operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + const VULKAN_HPP_NAMESPACE::Semaphore * const &, + const uint64_t * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags ); + return std::tie( sType, pNext, flags, semaphoreCount, pSemaphores, pValues ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SubpassDependency const & ) const = default; + auto operator<=>( SemaphoreWaitInfo const & ) const = default; #else - bool operator==( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( srcSubpass == rhs.srcSubpass ) && ( dstSubpass == rhs.dstSubpass ) && ( srcStageMask == rhs.srcStageMask ) && - ( dstStageMask == rhs.dstStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) && - ( dependencyFlags == rhs.dependencyFlags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( semaphoreCount == rhs.semaphoreCount ) && + ( pSemaphores == rhs.pSemaphores ) && ( pValues == rhs.pValues ); # endif } - bool operator!=( SubpassDependency const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return 
!operator==( rhs ); } #endif public: - uint32_t srcSubpass = {}; - uint32_t dstSubpass = {}; - VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {}; - VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; - VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreWaitInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags = {}; + uint32_t semaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores = {}; + const uint64_t * pValues = {}; }; - struct RenderPassCreateInfo + template <> + struct CppType { - using NativeType = VkRenderPassCreateInfo; + using Type = SemaphoreWaitInfo; + }; + + using SemaphoreWaitInfoKHR = SemaphoreWaitInfo; + + struct SetDescriptorBufferOffsetsInfoEXT + { + using NativeType = VkSetDescriptorBufferOffsetsInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSetDescriptorBufferOffsetsInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, - uint32_t attachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ = {}, - uint32_t subpassCount_ = {}, - const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ = {}, - uint32_t dependencyCount_ = {}, - const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , attachmentCount( attachmentCount_ ) - , pAttachments( pAttachments_ ) - , subpassCount( subpassCount_ ) - , pSubpasses( pSubpasses_ ) - , dependencyCount( dependencyCount_ ) - , pDependencies( pDependencies_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SetDescriptorBufferOffsetsInfoEXT( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + uint32_t firstSet_ = {}, + uint32_t setCount_ = {}, + const uint32_t * pBufferIndices_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stageFlags{ stageFlags_ } + , layout{ layout_ } + , firstSet{ firstSet_ } + , setCount{ setCount_ } + , pBufferIndices{ pBufferIndices_ } + , pOffsets{ pOffsets_ } { } - VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SetDescriptorBufferOffsetsInfoEXT( SetDescriptorBufferOffsetsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : RenderPassCreateInfo( *reinterpret_cast( &rhs ) ) + SetDescriptorBufferOffsetsInfoEXT( VkSetDescriptorBufferOffsetsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SetDescriptorBufferOffsetsInfoEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & subpasses_ = {}, - 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dependencies_ = {}, - const void * pNext_ = nullptr ) + SetDescriptorBufferOffsetsInfoEXT( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_, + uint32_t firstSet_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferIndices_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & offsets_ = {}, + const void * pNext_ = nullptr ) : pNext( pNext_ ) - , flags( flags_ ) - , attachmentCount( static_cast( attachments_.size() ) ) - , pAttachments( attachments_.data() ) - , subpassCount( static_cast( subpasses_.size() ) ) - , pSubpasses( subpasses_.data() ) - , dependencyCount( static_cast( dependencies_.size() ) ) - , pDependencies( dependencies_.data() ) + , stageFlags( stageFlags_ ) + , layout( layout_ ) + , firstSet( firstSet_ ) + , setCount( static_cast( bufferIndices_.size() ) ) + , pBufferIndices( bufferIndices_.data() ) + , pOffsets( offsets_.data() ) { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( bufferIndices_.size() == offsets_.size() ); +# else + if ( bufferIndices_.size() != offsets_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::SetDescriptorBufferOffsetsInfoEXT::SetDescriptorBufferOffsetsInfoEXT: bufferIndices_.size() != offsets_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - RenderPassCreateInfo & operator=( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SetDescriptorBufferOffsetsInfoEXT & operator=( SetDescriptorBufferOffsetsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - RenderPassCreateInfo & operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SetDescriptorBufferOffsetsInfoEXT & operator=( VkSetDescriptorBufferOffsetsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT { - attachmentCount = attachmentCount_; + stageFlags = stageFlags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT { - pAttachments = pAttachments_; + layout = layout_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderPassCreateInfo & setAttachments( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setFirstSet( uint32_t 
firstSet_ ) VULKAN_HPP_NOEXCEPT { - attachmentCount = static_cast( attachments_.size() ); - pAttachments = attachments_.data(); + firstSet = firstSet_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setSetCount( uint32_t setCount_ ) VULKAN_HPP_NOEXCEPT { - subpassCount = subpassCount_; + setCount = setCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setPBufferIndices( const uint32_t * pBufferIndices_ ) VULKAN_HPP_NOEXCEPT { - pSubpasses = pSubpasses_; + pBufferIndices = pBufferIndices_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderPassCreateInfo & - setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & subpasses_ ) VULKAN_HPP_NOEXCEPT + SetDescriptorBufferOffsetsInfoEXT & + setBufferIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferIndices_ ) VULKAN_HPP_NOEXCEPT { - subpassCount = static_cast( subpasses_.size() ); - pSubpasses = subpasses_.data(); + setCount = static_cast( bufferIndices_.size() ); + pBufferIndices = bufferIndices_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT - { - dependencyCount = dependencyCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SetDescriptorBufferOffsetsInfoEXT & setPOffsets( const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets_ ) VULKAN_HPP_NOEXCEPT { - pDependencies = pDependencies_; + pOffsets = pOffsets_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderPassCreateInfo & - setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dependencies_ ) VULKAN_HPP_NOEXCEPT + SetDescriptorBufferOffsetsInfoEXT & + setOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & offsets_ ) VULKAN_HPP_NOEXCEPT { - dependencyCount = static_cast( dependencies_.size() ); - pDependencies = dependencies_.data(); + setCount = static_cast( offsets_.size() ); + pOffsets = offsets_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkRenderPassCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkSetDescriptorBufferOffsetsInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkSetDescriptorBufferOffsetsInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -79945,459 +121332,437 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + const uint32_t * const &, + const VULKAN_HPP_NAMESPACE::DeviceSize * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, attachmentCount, pAttachments, subpassCount, pSubpasses, dependencyCount, pDependencies ); + return std::tie( sType, pNext, stageFlags, layout, firstSet, setCount, pBufferIndices, pOffsets ); } #endif 
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RenderPassCreateInfo const & ) const = default; + auto operator<=>( SetDescriptorBufferOffsetsInfoEXT const & ) const = default; #else - bool operator==( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SetDescriptorBufferOffsetsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( attachmentCount == rhs.attachmentCount ) && - ( pAttachments == rhs.pAttachments ) && ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) && - ( dependencyCount == rhs.dependencyCount ) && ( pDependencies == rhs.pDependencies ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageFlags == rhs.stageFlags ) && ( layout == rhs.layout ) && ( firstSet == rhs.firstSet ) && + ( setCount == rhs.setCount ) && ( pBufferIndices == rhs.pBufferIndices ) && ( pOffsets == rhs.pOffsets ); # endif } - bool operator!=( RenderPassCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SetDescriptorBufferOffsetsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {}; - uint32_t attachmentCount = {}; - const VULKAN_HPP_NAMESPACE::AttachmentDescription * pAttachments = {}; - uint32_t subpassCount = {}; - const VULKAN_HPP_NAMESPACE::SubpassDescription * pSubpasses = {}; - uint32_t dependencyCount = {}; - const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSetDescriptorBufferOffsetsInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + uint32_t firstSet = {}; + uint32_t setCount = {}; + const uint32_t * pBufferIndices = {}; + const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets = {}; }; template <> - struct CppType + struct CppType { - using Type = RenderPassCreateInfo; + using Type = SetDescriptorBufferOffsetsInfoEXT; }; - struct SubpassDescription2 + struct SetLatencyMarkerInfoNV { - using NativeType = VkSubpassDescription2; + using NativeType = VkSetLatencyMarkerInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescription2; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSetLatencyMarkerInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, - uint32_t viewMask_ = {}, - uint32_t inputAttachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ = {}, - uint32_t colorAttachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {}, - uint32_t preserveAttachmentCount_ = {}, - const uint32_t * pPreserveAttachments_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : 
pNext( pNext_ ) - , flags( flags_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , viewMask( viewMask_ ) - , inputAttachmentCount( inputAttachmentCount_ ) - , pInputAttachments( pInputAttachments_ ) - , colorAttachmentCount( colorAttachmentCount_ ) - , pColorAttachments( pColorAttachments_ ) - , pResolveAttachments( pResolveAttachments_ ) - , pDepthStencilAttachment( pDepthStencilAttachment_ ) - , preserveAttachmentCount( preserveAttachmentCount_ ) - , pPreserveAttachments( pPreserveAttachments_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SetLatencyMarkerInfoNV( uint64_t presentID_ = {}, + VULKAN_HPP_NAMESPACE::LatencyMarkerNV marker_ = VULKAN_HPP_NAMESPACE::LatencyMarkerNV::eSimulationStart, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentID{ presentID_ } + , marker{ marker_ } { } - VULKAN_HPP_CONSTEXPR SubpassDescription2( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - SubpassDescription2( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDescription2( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR SetLatencyMarkerInfoNV( SetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, - uint32_t viewMask_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & inputAttachments_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_ = {}, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & resolveAttachments_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ = {}, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , viewMask( viewMask_ ) - , inputAttachmentCount( static_cast( inputAttachments_.size() ) ) - , pInputAttachments( inputAttachments_.data() ) - , colorAttachmentCount( static_cast( colorAttachments_.size() ) ) - , pColorAttachments( colorAttachments_.data() ) - , pResolveAttachments( resolveAttachments_.data() ) - , pDepthStencilAttachment( pDepthStencilAttachment_ ) - , preserveAttachmentCount( static_cast( preserveAttachments_.size() ) ) - , pPreserveAttachments( preserveAttachments_.data() ) + SetLatencyMarkerInfoNV( VkSetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : SetLatencyMarkerInfoNV( *reinterpret_cast( &rhs ) ) { -# ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) ); -# else - if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) ) - { - throw LogicError( - VULKAN_HPP_NAMESPACE_STRING - "::SubpassDescription2::SubpassDescription2: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" ); - } -# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SubpassDescription2 & operator=( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SetLatencyMarkerInfoNV & operator=( SetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SubpassDescription2 & operator=( VkSubpassDescription2 const & rhs ) 
VULKAN_HPP_NOEXCEPT + SetLatencyMarkerInfoNV & operator=( VkSetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SetLatencyMarkerInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SetLatencyMarkerInfoNV & setPresentID( uint64_t presentID_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + presentID = presentID_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SetLatencyMarkerInfoNV & setMarker( VULKAN_HPP_NAMESPACE::LatencyMarkerNV marker_ ) VULKAN_HPP_NOEXCEPT { - pipelineBindPoint = pipelineBindPoint_; + marker = marker_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT + operator VkSetLatencyMarkerInfoNV const &() const VULKAN_HPP_NOEXCEPT { - viewMask = viewMask_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + operator VkSetLatencyMarkerInfoNV &() VULKAN_HPP_NOEXCEPT { - inputAttachmentCount = inputAttachmentCount_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & - setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - pInputAttachments = pInputAttachments_; - return *this; + return std::tie( sType, pNext, presentID, marker ); } +#endif -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SubpassDescription2 & setInputAttachments( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SetLatencyMarkerInfoNV const & ) const = default; +#else + bool operator==( SetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - inputAttachmentCount = static_cast( inputAttachments_.size() ); - pInputAttachments = inputAttachments_.data(); - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentID == rhs.presentID ) && ( marker == rhs.marker ); +# endif } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( SetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - colorAttachmentCount = colorAttachmentCount_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & - setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT - { - 
pColorAttachments = pColorAttachments_; - return *this; - } + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSetLatencyMarkerInfoNV; + const void * pNext = {}; + uint64_t presentID = {}; + VULKAN_HPP_NAMESPACE::LatencyMarkerNV marker = VULKAN_HPP_NAMESPACE::LatencyMarkerNV::eSimulationStart; + }; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SubpassDescription2 & setColorAttachments( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT - { - colorAttachmentCount = static_cast( colorAttachments_.size() ); - pColorAttachments = colorAttachments_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + template <> + struct CppType + { + using Type = SetLatencyMarkerInfoNV; + }; - VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & - setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT - { - pResolveAttachments = pResolveAttachments_; - return *this; - } + struct SetStateFlagsIndirectCommandNV + { + using NativeType = VkSetStateFlagsIndirectCommandNV; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SubpassDescription2 & setResolveAttachments( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT - { - colorAttachmentCount = static_cast( resolveAttachments_.size() ); - pResolveAttachments = resolveAttachments_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( uint32_t data_ = {} ) VULKAN_HPP_NOEXCEPT : data{ data_ } {} - VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & - setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT - { - pDepthStencilAttachment = pDepthStencilAttachment_; - return *this; - } + VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + SetStateFlagsIndirectCommandNV( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : SetStateFlagsIndirectCommandNV( *reinterpret_cast( &rhs ) ) { - preserveAttachmentCount = preserveAttachmentCount_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & setPPreserveAttachments( const uint32_t * pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT - { - pPreserveAttachments = pPreserveAttachments_; - return *this; - } + SetStateFlagsIndirectCommandNV & operator=( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SubpassDescription2 & - setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT + SetStateFlagsIndirectCommandNV & operator=( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { - preserveAttachmentCount = static_cast( preserveAttachments_.size() ); - pPreserveAttachments = preserveAttachments_.data(); + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkSubpassDescription2 const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SetStateFlagsIndirectCommandNV & setData( uint32_t data_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + data = data_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT + operator VkSetStateFlagsIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple + operator VkSetStateFlagsIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - flags, - pipelineBindPoint, - viewMask, - inputAttachmentCount, - pInputAttachments, - colorAttachmentCount, - pColorAttachments, - pResolveAttachments, - pDepthStencilAttachment, - preserveAttachmentCount, - pPreserveAttachments ); + return std::tie( data ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SubpassDescription2 const & ) const = default; + auto operator<=>( SetStateFlagsIndirectCommandNV const & ) const = default; #else - bool operator==( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && - ( viewMask == rhs.viewMask ) && ( inputAttachmentCount == rhs.inputAttachmentCount ) && ( pInputAttachments == rhs.pInputAttachments ) && - ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachments == rhs.pColorAttachments ) && - ( pResolveAttachments == rhs.pResolveAttachments ) && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) && - ( preserveAttachmentCount == rhs.preserveAttachmentCount ) && ( pPreserveAttachments == rhs.pPreserveAttachments ); + return ( data == rhs.data ); # endif } - bool operator!=( SubpassDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; - VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; - uint32_t viewMask = {}; - uint32_t inputAttachmentCount = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pInputAttachments = {}; - uint32_t colorAttachmentCount = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pColorAttachments = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pResolveAttachments = {}; - const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilAttachment = {}; - uint32_t preserveAttachmentCount = {}; - const uint32_t * pPreserveAttachments = {}; - }; - - template <> - struct CppType - { - using Type = SubpassDescription2; + uint32_t data = {}; }; - using SubpassDescription2KHR = SubpassDescription2; - struct SubpassDependency2 + struct ShaderCreateInfoEXT { - using NativeType = VkSubpassDependency2; + using NativeType = VkShaderCreateInfoEXT; 
static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDependency2; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SubpassDependency2( uint32_t srcSubpass_ = {}, - uint32_t dstSubpass_ = {}, - VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, - VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, - VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, - int32_t viewOffset_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcSubpass( srcSubpass_ ) - , dstSubpass( dstSubpass_ ) - , srcStageMask( srcStageMask_ ) - , dstStageMask( dstStageMask_ ) - , srcAccessMask( srcAccessMask_ ) - , dstAccessMask( dstAccessMask_ ) - , dependencyFlags( dependencyFlags_ ) - , viewOffset( viewOffset_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderCreateInfoEXT( VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, + VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage_ = {}, + VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType_ = VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT::eBinary, + size_t codeSize_ = {}, + const void * pCode_ = {}, + const char * pName_ = {}, + uint32_t setLayoutCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, + uint32_t pushConstantRangeCount_ = {}, + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {}, + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , stage{ stage_ } + , nextStage{ nextStage_ } + , codeType{ codeType_ } + , codeSize{ codeSize_ } + , pCode{ pCode_ } + , pName{ pName_ } + , setLayoutCount{ setLayoutCount_ } + , pSetLayouts{ pSetLayouts_ } + , pushConstantRangeCount{ pushConstantRangeCount_ } + , pPushConstantRanges{ pPushConstantRanges_ } + , pSpecializationInfo{ pSpecializationInfo_ } { } - VULKAN_HPP_CONSTEXPR SubpassDependency2( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ShaderCreateInfoEXT( ShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SubpassDependency2( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassDependency2( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + ShaderCreateInfoEXT( VkShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ShaderCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } - SubpassDependency2 & operator=( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + ShaderCreateInfoEXT( VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_, + VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage_, + VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & code_, + const char * pName_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ = {}, + const 
VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , stage( stage_ ) + , nextStage( nextStage_ ) + , codeType( codeType_ ) + , codeSize( code_.size() * sizeof( T ) ) + , pCode( code_.data() ) + , pName( pName_ ) + , setLayoutCount( static_cast( setLayouts_.size() ) ) + , pSetLayouts( setLayouts_.data() ) + , pushConstantRangeCount( static_cast( pushConstantRanges_.size() ) ) + , pPushConstantRanges( pushConstantRanges_.data() ) + , pSpecializationInfo( pSpecializationInfo_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - SubpassDependency2 & operator=( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT + ShaderCreateInfoEXT & operator=( ShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ShaderCreateInfoEXT & operator=( VkShaderCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { - srcSubpass = srcSubpass_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT { - dstSubpass = dstSubpass_; + stage = stage_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setNextStage( VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage_ ) VULKAN_HPP_NOEXCEPT { - srcStageMask = srcStageMask_; + nextStage = nextStage_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setCodeType( VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType_ ) VULKAN_HPP_NOEXCEPT { - dstStageMask = dstStageMask_; + codeType = codeType_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT { - srcAccessMask = srcAccessMask_; + codeSize = codeSize_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPCode( const void * pCode_ ) VULKAN_HPP_NOEXCEPT { - dstAccessMask = dstAccessMask_; + pCode = pCode_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + ShaderCreateInfoEXT & 
setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & code_ ) VULKAN_HPP_NOEXCEPT { - dependencyFlags = dependencyFlags_; + codeSize = code_.size() * sizeof( T ); + pCode = code_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT { - viewOffset = viewOffset_; + pName = pName_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkSubpassDependency2 const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + setLayoutCount = setLayoutCount_; + return *this; } - operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pSetLayouts = pSetLayouts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShaderCreateInfoEXT & + setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_ ) VULKAN_HPP_NOEXCEPT + { + setLayoutCount = static_cast( setLayouts_.size() ); + pSetLayouts = setLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT + { + pushConstantRangeCount = pushConstantRangeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & + setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT + { + pPushConstantRanges = pPushConstantRanges_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ShaderCreateInfoEXT & setPushConstantRanges( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT + { + pushConstantRangeCount = static_cast( pushConstantRanges_.size() ); + pPushConstantRanges = pushConstantRanges_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 ShaderCreateInfoEXT & + setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT + { + pSpecializationInfo = pSpecializationInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkShaderCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkShaderCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -80406,239 +121771,290 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + const VULKAN_HPP_NAMESPACE::PushConstantRange * const &, + const VULKAN_HPP_NAMESPACE::SpecializationInfo * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, srcSubpass, dstSubpass, srcStageMask, dstStageMask, srcAccessMask, dstAccessMask, dependencyFlags, viewOffset ); + return std::tie( sType, + pNext, + flags, + stage, + nextStage, + codeType, + codeSize, + pCode, + pName, + setLayoutCount, + pSetLayouts, + pushConstantRangeCount, + pPushConstantRanges, + pSpecializationInfo ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SubpassDependency2 const & ) const 
= default; -#else - bool operator==( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT + std::strong_ordering operator<=>( ShaderCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubpass == rhs.srcSubpass ) && ( dstSubpass == rhs.dstSubpass ) && - ( srcStageMask == rhs.srcStageMask ) && ( dstStageMask == rhs.dstStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) && - ( dstAccessMask == rhs.dstAccessMask ) && ( dependencyFlags == rhs.dependencyFlags ) && ( viewOffset == rhs.viewOffset ); -# endif + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = stage <=> rhs.stage; cmp != 0 ) + return cmp; + if ( auto cmp = nextStage <=> rhs.nextStage; cmp != 0 ) + return cmp; + if ( auto cmp = codeType <=> rhs.codeType; cmp != 0 ) + return cmp; + if ( auto cmp = codeSize <=> rhs.codeSize; cmp != 0 ) + return cmp; + if ( auto cmp = pCode <=> rhs.pCode; cmp != 0 ) + return cmp; + if ( pName != rhs.pName ) + if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = setLayoutCount <=> rhs.setLayoutCount; cmp != 0 ) + return cmp; + if ( auto cmp = pSetLayouts <=> rhs.pSetLayouts; cmp != 0 ) + return cmp; + if ( auto cmp = pushConstantRangeCount <=> rhs.pushConstantRangeCount; cmp != 0 ) + return cmp; + if ( auto cmp = pPushConstantRanges <=> rhs.pPushConstantRanges; cmp != 0 ) + return cmp; + if ( auto cmp = pSpecializationInfo <=> rhs.pSpecializationInfo; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( ShaderCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( nextStage == rhs.nextStage ) && + ( codeType == rhs.codeType ) && ( codeSize == rhs.codeSize ) && ( pCode == rhs.pCode ) && + ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ) && ( setLayoutCount == rhs.setLayoutCount ) && + ( pSetLayouts == rhs.pSetLayouts ) && ( pushConstantRangeCount == rhs.pushConstantRangeCount ) && + ( pPushConstantRanges == rhs.pPushConstantRanges ) && ( pSpecializationInfo == rhs.pSpecializationInfo ); } - bool operator!=( SubpassDependency2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ShaderCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2; - const void * pNext = {}; - uint32_t srcSubpass = {}; - uint32_t dstSubpass = {}; - VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {}; - VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; - VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; - VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; - int32_t viewOffset = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex; + VULKAN_HPP_NAMESPACE::ShaderStageFlags nextStage = {}; + 
VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT codeType = VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT::eBinary; + size_t codeSize = {}; + const void * pCode = {}; + const char * pName = {}; + uint32_t setLayoutCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; + uint32_t pushConstantRangeCount = {}; + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {}; + const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo = {}; }; template <> - struct CppType + struct CppType { - using Type = SubpassDependency2; + using Type = ShaderCreateInfoEXT; }; - using SubpassDependency2KHR = SubpassDependency2; - struct RenderPassCreateInfo2 + struct ShaderModuleCreateInfo { - using NativeType = VkRenderPassCreateInfo2; + using NativeType = VkShaderModuleCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo2; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleCreateInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, - uint32_t attachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ = {}, - uint32_t subpassCount_ = {}, - const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ = {}, - uint32_t dependencyCount_ = {}, - const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ = {}, - uint32_t correlatedViewMaskCount_ = {}, - const uint32_t * pCorrelatedViewMasks_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , attachmentCount( attachmentCount_ ) - , pAttachments( pAttachments_ ) - , subpassCount( subpassCount_ ) - , pSubpasses( pSubpasses_ ) - , dependencyCount( dependencyCount_ ) - , pDependencies( pDependencies_ ) - , correlatedViewMaskCount( correlatedViewMaskCount_ ) - , pCorrelatedViewMasks( pCorrelatedViewMasks_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {}, + size_t codeSize_ = {}, + const uint32_t * pCode_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , codeSize{ codeSize_ } + , pCode{ pCode_ } { } - VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - RenderPassCreateInfo2( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT - : RenderPassCreateInfo2( *reinterpret_cast( &rhs ) ) + ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : ShaderModuleCreateInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & subpasses_ = {}, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dependencies_ = {}, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlatedViewMasks_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , attachmentCount( static_cast( attachments_.size() ) ) - , pAttachments( attachments_.data() ) - 
, subpassCount( static_cast( subpasses_.size() ) ) - , pSubpasses( subpasses_.data() ) - , dependencyCount( static_cast( dependencies_.size() ) ) - , pDependencies( dependencies_.data() ) - , correlatedViewMaskCount( static_cast( correlatedViewMasks_.size() ) ) - , pCorrelatedViewMasks( correlatedViewMasks_.data() ) + ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & code_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), flags( flags_ ), codeSize( code_.size() * 4 ), pCode( code_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - RenderPassCreateInfo2 & operator=( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ShaderModuleCreateInfo & operator=( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - RenderPassCreateInfo2 & operator=( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + ShaderModuleCreateInfo & operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT { - attachmentCount = attachmentCount_; + codeSize = codeSize_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setPCode( const uint32_t * pCode_ ) VULKAN_HPP_NOEXCEPT { - pAttachments = pAttachments_; + pCode = pCode_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderPassCreateInfo2 & setAttachments( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT + ShaderModuleCreateInfo & setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & code_ ) VULKAN_HPP_NOEXCEPT { - attachmentCount = static_cast( attachments_.size() ); - pAttachments = attachments_.data(); + codeSize = code_.size() * 4; + pCode = code_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT + operator VkShaderModuleCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - subpassCount = subpassCount_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses_ ) VULKAN_HPP_NOEXCEPT + operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT { - 
pSubpasses = pSubpasses_; - return *this; + return *reinterpret_cast( this ); } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderPassCreateInfo2 & - setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & subpasses_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - subpassCount = static_cast( subpasses_.size() ); - pSubpasses = subpasses_.data(); - return *this; + return std::tie( sType, pNext, flags, codeSize, pCode ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif - VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ShaderModuleCreateInfo const & ) const = default; +#else + bool operator==( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - dependencyCount = dependencyCount_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( codeSize == rhs.codeSize ) && ( pCode == rhs.pCode ); +# endif } - VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { - pDependencies = pDependencies_; - return *this; + return !operator==( rhs ); } +#endif -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderPassCreateInfo2 & - setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dependencies_ ) VULKAN_HPP_NOEXCEPT - { - dependencyCount = static_cast( dependencies_.size() ); - pDependencies = dependencies_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags = {}; + size_t codeSize = {}; + const uint32_t * pCode = {}; + }; - VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT + template <> + struct CppType + { + using Type = ShaderModuleCreateInfo; + }; + + struct ShaderModuleIdentifierEXT + { + using NativeType = VkShaderModuleIdentifierEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleIdentifierEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 ShaderModuleIdentifierEXT( uint32_t identifierSize_ = {}, + std::array const & identifier_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , identifierSize{ identifierSize_ } + , identifier{ identifier_ } { - correlatedViewMaskCount = correlatedViewMaskCount_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & setPCorrelatedViewMasks( const uint32_t * pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ShaderModuleIdentifierEXT( ShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ShaderModuleIdentifierEXT( VkShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ShaderModuleIdentifierEXT( *reinterpret_cast( &rhs ) ) { - pCorrelatedViewMasks = pCorrelatedViewMasks_; - return *this; } -# if 
!defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderPassCreateInfo2 & - setCorrelatedViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlatedViewMasks_ ) VULKAN_HPP_NOEXCEPT + ShaderModuleIdentifierEXT & operator=( ShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ShaderModuleIdentifierEXT & operator=( VkShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - correlatedViewMaskCount = static_cast( correlatedViewMasks_.size() ); - pCorrelatedViewMasks = correlatedViewMasks_.data(); + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkRenderPassCreateInfo2 const &() const VULKAN_HPP_NOEXCEPT + operator VkShaderModuleIdentifierEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT + operator VkShaderModuleIdentifierEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -80646,970 +122062,1038 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - flags, - attachmentCount, - pAttachments, - subpassCount, - pSubpasses, - dependencyCount, - pDependencies, - correlatedViewMaskCount, - pCorrelatedViewMasks ); + return std::tie( sType, pNext, identifierSize, identifier ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RenderPassCreateInfo2 const & ) const = default; -#else - bool operator==( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + std::strong_ordering operator<=>( ShaderModuleIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( attachmentCount == rhs.attachmentCount ) && - ( pAttachments == rhs.pAttachments ) && ( subpassCount == rhs.subpassCount ) && ( pSubpasses == rhs.pSubpasses ) && - ( dependencyCount == rhs.dependencyCount ) && ( pDependencies == rhs.pDependencies ) && - ( correlatedViewMaskCount == rhs.correlatedViewMaskCount ) && ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks ); -# endif + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = identifierSize <=> rhs.identifierSize; cmp != 0 ) + return cmp; + for ( size_t i = 0; i < identifierSize; ++i ) + { + if ( auto cmp = identifier[i] <=> rhs.identifier[i]; cmp != 0 ) + return cmp; + } + + return std::strong_ordering::equivalent; } +#endif - bool operator!=( RenderPassCreateInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ShaderModuleIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( identifierSize == rhs.identifierSize ) && + ( memcmp( identifier, rhs.identifier, identifierSize * sizeof( uint8_t ) ) == 0 ); + } + + bool operator!=( ShaderModuleIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::RenderPassCreateFlags 
flags = {}; - uint32_t attachmentCount = {}; - const VULKAN_HPP_NAMESPACE::AttachmentDescription2 * pAttachments = {}; - uint32_t subpassCount = {}; - const VULKAN_HPP_NAMESPACE::SubpassDescription2 * pSubpasses = {}; - uint32_t dependencyCount = {}; - const VULKAN_HPP_NAMESPACE::SubpassDependency2 * pDependencies = {}; - uint32_t correlatedViewMaskCount = {}; - const uint32_t * pCorrelatedViewMasks = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleIdentifierEXT; + void * pNext = {}; + uint32_t identifierSize = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D identifier = {}; }; template <> - struct CppType + struct CppType { - using Type = RenderPassCreateInfo2; + using Type = ShaderModuleIdentifierEXT; }; - using RenderPassCreateInfo2KHR = RenderPassCreateInfo2; - struct RenderPassCreationControlEXT + struct ShaderModuleValidationCacheCreateInfoEXT { - using NativeType = VkRenderPassCreationControlEXT; + using NativeType = VkShaderModuleValidationCacheCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreationControlEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleValidationCacheCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR RenderPassCreationControlEXT( VULKAN_HPP_NAMESPACE::Bool32 disallowMerging_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , disallowMerging( disallowMerging_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , validationCache{ validationCache_ } { } - VULKAN_HPP_CONSTEXPR RenderPassCreationControlEXT( RenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - RenderPassCreationControlEXT( VkRenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : RenderPassCreationControlEXT( *reinterpret_cast( &rhs ) ) + ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ShaderModuleValidationCacheCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - RenderPassCreationControlEXT & operator=( RenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ShaderModuleValidationCacheCreateInfoEXT & operator=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - RenderPassCreationControlEXT & operator=( VkRenderPassCreationControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ShaderModuleValidationCacheCreateInfoEXT & operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RenderPassCreationControlEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) 
VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderPassCreationControlEXT & setDisallowMerging( VULKAN_HPP_NAMESPACE::Bool32 disallowMerging_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT & + setValidationCache( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT { - disallowMerging = disallowMerging_; + validationCache = validationCache_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkRenderPassCreationControlEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkShaderModuleValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkRenderPassCreationControlEXT &() VULKAN_HPP_NOEXCEPT + operator VkShaderModuleValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, disallowMerging ); + return std::tie( sType, pNext, validationCache ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RenderPassCreationControlEXT const & ) const = default; + auto operator<=>( ShaderModuleValidationCacheCreateInfoEXT const & ) const = default; #else - bool operator==( RenderPassCreationControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( disallowMerging == rhs.disallowMerging ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( validationCache == rhs.validationCache ); # endif } - bool operator!=( RenderPassCreationControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreationControlEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 disallowMerging = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache = {}; }; template <> - struct CppType + struct CppType { - using Type = RenderPassCreationControlEXT; + using Type = ShaderModuleValidationCacheCreateInfoEXT; }; - struct RenderPassCreationFeedbackInfoEXT + struct ShaderResourceUsageAMD { - using NativeType = VkRenderPassCreationFeedbackInfoEXT; + using NativeType = VkShaderResourceUsageAMD; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackInfoEXT( uint32_t postMergeSubpassCount_ = {} ) VULKAN_HPP_NOEXCEPT - : postMergeSubpassCount( postMergeSubpassCount_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( uint32_t numUsedVgprs_ = {}, + uint32_t numUsedSgprs_ = {}, + uint32_t ldsSizePerLocalWorkGroup_ = {}, + size_t ldsUsageSizeInBytes_ = {}, + size_t scratchMemUsageInBytes_ = {} ) VULKAN_HPP_NOEXCEPT + : numUsedVgprs{ numUsedVgprs_ } + , numUsedSgprs{ numUsedSgprs_ } 
+ , ldsSizePerLocalWorkGroup{ ldsSizePerLocalWorkGroup_ } + , ldsUsageSizeInBytes{ ldsUsageSizeInBytes_ } + , scratchMemUsageInBytes{ scratchMemUsageInBytes_ } { } - VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackInfoEXT( RenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; - RenderPassCreationFeedbackInfoEXT( VkRenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : RenderPassCreationFeedbackInfoEXT( *reinterpret_cast( &rhs ) ) + ShaderResourceUsageAMD( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : ShaderResourceUsageAMD( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - RenderPassCreationFeedbackInfoEXT & operator=( RenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ShaderResourceUsageAMD & operator=( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - RenderPassCreationFeedbackInfoEXT & operator=( VkRenderPassCreationFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ShaderResourceUsageAMD & operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkRenderPassCreationFeedbackInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkShaderResourceUsageAMD const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkRenderPassCreationFeedbackInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkShaderResourceUsageAMD &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( postMergeSubpassCount ); + return std::tie( numUsedVgprs, numUsedSgprs, ldsSizePerLocalWorkGroup, ldsUsageSizeInBytes, scratchMemUsageInBytes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RenderPassCreationFeedbackInfoEXT const & ) const = default; + auto operator<=>( ShaderResourceUsageAMD const & ) const = default; #else - bool operator==( RenderPassCreationFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( postMergeSubpassCount == rhs.postMergeSubpassCount ); + return ( numUsedVgprs == rhs.numUsedVgprs ) && ( numUsedSgprs == rhs.numUsedSgprs ) && ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup ) && + ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes ) && ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes ); # endif } - bool operator!=( RenderPassCreationFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t postMergeSubpassCount = {}; + uint32_t numUsedVgprs = {}; + uint32_t numUsedSgprs = {}; + uint32_t ldsSizePerLocalWorkGroup = {}; + size_t ldsUsageSizeInBytes = {}; + size_t scratchMemUsageInBytes = {}; }; - struct RenderPassCreationFeedbackCreateInfoEXT + struct ShaderStatisticsInfoAMD { - using NativeType = VkRenderPassCreationFeedbackCreateInfoEXT; - - static const bool 
allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreationFeedbackCreateInfoEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pRenderPassFeedback( pRenderPassFeedback_ ) - { - } - - VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackCreateInfoEXT( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + using NativeType = VkShaderStatisticsInfoAMD; - RenderPassCreationFeedbackCreateInfoEXT( VkRenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : RenderPassCreationFeedbackCreateInfoEXT( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {}, + VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {}, + uint32_t numPhysicalVgprs_ = {}, + uint32_t numPhysicalSgprs_ = {}, + uint32_t numAvailableVgprs_ = {}, + uint32_t numAvailableSgprs_ = {}, + std::array const & computeWorkGroupSize_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderStageMask{ shaderStageMask_ } + , resourceUsage{ resourceUsage_ } + , numPhysicalVgprs{ numPhysicalVgprs_ } + , numPhysicalSgprs{ numPhysicalSgprs_ } + , numAvailableVgprs{ numAvailableVgprs_ } + , numAvailableSgprs{ numAvailableSgprs_ } + , computeWorkGroupSize{ computeWorkGroupSize_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - RenderPassCreationFeedbackCreateInfoEXT & operator=( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; - RenderPassCreationFeedbackCreateInfoEXT & operator=( VkRenderPassCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ShaderStatisticsInfoAMD( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : ShaderStatisticsInfoAMD( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RenderPassCreationFeedbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + ShaderStatisticsInfoAMD & operator=( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 RenderPassCreationFeedbackCreateInfoEXT & - setPRenderPassFeedback( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback_ ) VULKAN_HPP_NOEXCEPT + ShaderStatisticsInfoAMD & operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - pRenderPassFeedback = pRenderPassFeedback_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkRenderPassCreationFeedbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkShaderStatisticsInfoAMD const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkRenderPassCreationFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkShaderStatisticsInfoAMD &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( 
VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pRenderPassFeedback ); + return std::tie( shaderStageMask, resourceUsage, numPhysicalVgprs, numPhysicalSgprs, numAvailableVgprs, numAvailableSgprs, computeWorkGroupSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RenderPassCreationFeedbackCreateInfoEXT const & ) const = default; + auto operator<=>( ShaderStatisticsInfoAMD const & ) const = default; #else - bool operator==( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pRenderPassFeedback == rhs.pRenderPassFeedback ); + return ( shaderStageMask == rhs.shaderStageMask ) && ( resourceUsage == rhs.resourceUsage ) && ( numPhysicalVgprs == rhs.numPhysicalVgprs ) && + ( numPhysicalSgprs == rhs.numPhysicalSgprs ) && ( numAvailableVgprs == rhs.numAvailableVgprs ) && ( numAvailableSgprs == rhs.numAvailableSgprs ) && + ( computeWorkGroupSize == rhs.computeWorkGroupSize ); # endif } - bool operator!=( RenderPassCreationFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreationFeedbackCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback = {}; - }; - - template <> - struct CppType - { - using Type = RenderPassCreationFeedbackCreateInfoEXT; + VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask = {}; + VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage = {}; + uint32_t numPhysicalVgprs = {}; + uint32_t numPhysicalSgprs = {}; + uint32_t numAvailableVgprs = {}; + uint32_t numAvailableSgprs = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D computeWorkGroupSize = {}; }; - struct RenderPassFragmentDensityMapCreateInfoEXT + struct SharedPresentSurfaceCapabilitiesKHR { - using NativeType = VkRenderPassFragmentDensityMapCreateInfoEXT; + using NativeType = VkSharedPresentSurfaceCapabilitiesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fragmentDensityMapAttachment( fragmentDensityMapAttachment_ ) - { - } - - VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSharedPresentSurfaceCapabilitiesKHR; - RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : RenderPassFragmentDensityMapCreateInfoEXT( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
SharedPresentSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , sharedPresentSupportedUsageFlags{ sharedPresentSupportedUsageFlags_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - RenderPassFragmentDensityMapCreateInfoEXT & operator=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - RenderPassFragmentDensityMapCreateInfoEXT & operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SharedPresentSurfaceCapabilitiesKHR( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SharedPresentSurfaceCapabilitiesKHR( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + SharedPresentSurfaceCapabilitiesKHR & operator=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT & - setFragmentDensityMapAttachment( VULKAN_HPP_NAMESPACE::AttachmentReference const & fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT + SharedPresentSurfaceCapabilitiesKHR & operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - fragmentDensityMapAttachment = fragmentDensityMapAttachment_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkRenderPassFragmentDensityMapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkSharedPresentSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkRenderPassFragmentDensityMapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkSharedPresentSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, fragmentDensityMapAttachment ); + return std::tie( sType, pNext, sharedPresentSupportedUsageFlags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RenderPassFragmentDensityMapCreateInfoEXT const & ) const = default; + auto operator<=>( SharedPresentSurfaceCapabilitiesKHR const & ) const = default; #else - bool operator==( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMapAttachment == rhs.fragmentDensityMapAttachment ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags ); # endif } - bool operator!=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( 
SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags = {}; }; template <> - struct CppType + struct CppType { - using Type = RenderPassFragmentDensityMapCreateInfoEXT; + using Type = SharedPresentSurfaceCapabilitiesKHR; }; - struct RenderPassInputAttachmentAspectCreateInfo + struct SparseImageFormatProperties { - using NativeType = VkRenderPassInputAttachmentAspectCreateInfo; + using NativeType = VkSparseImageFormatProperties; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassInputAttachmentAspectCreateInfo; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageFormatProperties( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {}, + VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask{ aspectMask_ } + , imageGranularity{ imageGranularity_ } + , flags{ flags_ } + { + } -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( uint32_t aspectReferenceCount_ = {}, - const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , aspectReferenceCount( aspectReferenceCount_ ) - , pAspectReferences( pAspectReferences_ ) + VULKAN_HPP_CONSTEXPR SparseImageFormatProperties( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageFormatProperties( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageFormatProperties( *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SparseImageFormatProperties & operator=( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : RenderPassInputAttachmentAspectCreateInfo( *reinterpret_cast( &rhs ) ) + SparseImageFormatProperties & operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderPassInputAttachmentAspectCreateInfo( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & aspectReferences_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), aspectReferenceCount( static_cast( aspectReferences_.size() ) ), pAspectReferences( aspectReferences_.data() ) + operator VkSparseImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT { + return *reinterpret_cast( this ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - RenderPassInputAttachmentAspectCreateInfo & operator=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; + operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } - RenderPassInputAttachmentAspectCreateInfo & operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); - return *this; + return std::tie( aspectMask, imageGranularity, flags ); } +#endif -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageFormatProperties const & ) const = default; +#else + bool operator==( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( aspectMask == rhs.aspectMask ) && ( imageGranularity == rhs.imageGranularity ) && ( flags == rhs.flags ); +# endif } - VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & setAspectReferenceCount( uint32_t aspectReferenceCount_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - aspectReferenceCount = aspectReferenceCount_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & - setPAspectReferences( const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ ) VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageGranularity = {}; + VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags = {}; + }; + + struct SparseImageFormatProperties2 + { + using NativeType = VkSparseImageFormatProperties2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageFormatProperties2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , properties{ properties_ } { - pAspectReferences = pAspectReferences_; - return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderPassInputAttachmentAspectCreateInfo & setAspectReferences( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & aspectReferences_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageFormatProperties2( *reinterpret_cast( &rhs ) ) { - aspectReferenceCount = static_cast( aspectReferences_.size() ); - pAspectReferences = aspectReferences_.data(); + } + + SparseImageFormatProperties2 & operator=( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SparseImageFormatProperties2 & operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif 
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkRenderPassInputAttachmentAspectCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkSparseImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkRenderPassInputAttachmentAspectCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, aspectReferenceCount, pAspectReferences ); + return std::tie( sType, pNext, properties ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RenderPassInputAttachmentAspectCreateInfo const & ) const = default; + auto operator<=>( SparseImageFormatProperties2 const & ) const = default; #else - bool operator==( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aspectReferenceCount == rhs.aspectReferenceCount ) && - ( pAspectReferences == rhs.pAspectReferences ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties ); # endif } - bool operator!=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo; - const void * pNext = {}; - uint32_t aspectReferenceCount = {}; - const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageFormatProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties = {}; }; template <> - struct CppType + struct CppType { - using Type = RenderPassInputAttachmentAspectCreateInfo; + using Type = SparseImageFormatProperties2; }; - using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo; - struct RenderPassMultiviewCreateInfo - { - using NativeType = VkRenderPassMultiviewCreateInfo; + using SparseImageFormatProperties2KHR = SparseImageFormatProperties2; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassMultiviewCreateInfo; + struct SparseImageMemoryRequirements + { + using NativeType = VkSparseImageMemoryRequirements; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( uint32_t subpassCount_ = {}, - const uint32_t * pViewMasks_ = {}, - uint32_t dependencyCount_ = {}, - const int32_t * pViewOffsets_ = {}, - uint32_t correlationMaskCount_ = {}, - const uint32_t * pCorrelationMasks_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , subpassCount( subpassCount_ ) - , pViewMasks( pViewMasks_ ) - , dependencyCount( dependencyCount_ ) - , pViewOffsets( pViewOffsets_ ) - , correlationMaskCount( correlationMaskCount_ ) - , pCorrelationMasks( pCorrelationMasks_ ) +#if 
!defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {}, + uint32_t imageMipTailFirstLod_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {} ) VULKAN_HPP_NOEXCEPT + : formatProperties{ formatProperties_ } + , imageMipTailFirstLod{ imageMipTailFirstLod_ } + , imageMipTailSize{ imageMipTailSize_ } + , imageMipTailOffset{ imageMipTailOffset_ } + , imageMipTailStride{ imageMipTailStride_ } { } - VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : RenderPassMultiviewCreateInfo( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderPassMultiviewCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewMasks_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewOffsets_ = {}, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlationMasks_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , subpassCount( static_cast( viewMasks_.size() ) ) - , pViewMasks( viewMasks_.data() ) - , dependencyCount( static_cast( viewOffsets_.size() ) ) - , pViewOffsets( viewOffsets_.data() ) - , correlationMaskCount( static_cast( correlationMasks_.size() ) ) - , pCorrelationMasks( correlationMasks_.data() ) + SparseImageMemoryRequirements( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageMemoryRequirements( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - RenderPassMultiviewCreateInfo & operator=( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SparseImageMemoryRequirements & operator=( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - RenderPassMultiviewCreateInfo & operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SparseImageMemoryRequirements & operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + operator VkSparseImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT + operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT { - subpassCount = subpassCount_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPViewMasks( const uint32_t * pViewMasks_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - pViewMasks = pViewMasks_; - return *this; + return std::tie( 
formatProperties, imageMipTailFirstLod, imageMipTailSize, imageMipTailOffset, imageMipTailStride ); } +#endif -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderPassMultiviewCreateInfo & setViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewMasks_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SparseImageMemoryRequirements const & ) const = default; +#else + bool operator==( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT { - subpassCount = static_cast( viewMasks_.size() ); - pViewMasks = viewMasks_.data(); - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( formatProperties == rhs.formatProperties ) && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod ) && + ( imageMipTailSize == rhs.imageMipTailSize ) && ( imageMipTailOffset == rhs.imageMipTailOffset ) && + ( imageMipTailStride == rhs.imageMipTailStride ); +# endif } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT { - dependencyCount = dependencyCount_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPViewOffsets( const int32_t * pViewOffsets_ ) VULKAN_HPP_NOEXCEPT - { - pViewOffsets = pViewOffsets_; - return *this; - } + public: + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties = {}; + uint32_t imageMipTailFirstLod = {}; + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride = {}; + }; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderPassMultiviewCreateInfo & setViewOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & viewOffsets_ ) VULKAN_HPP_NOEXCEPT - { - dependencyCount = static_cast( viewOffsets_.size() ); - pViewOffsets = viewOffsets_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + struct SparseImageMemoryRequirements2 + { + using NativeType = VkSparseImageMemoryRequirements2; - VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setCorrelationMaskCount( uint32_t correlationMaskCount_ ) VULKAN_HPP_NOEXCEPT + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageMemoryRequirements2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , memoryRequirements{ memoryRequirements_ } { - correlationMaskCount = correlationMaskCount_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & setPCorrelationMasks( const uint32_t * pCorrelationMasks_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT + : SparseImageMemoryRequirements2( *reinterpret_cast( &rhs ) ) { - pCorrelationMasks = pCorrelationMasks_; - return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - 
RenderPassMultiviewCreateInfo & - setCorrelationMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & correlationMasks_ ) VULKAN_HPP_NOEXCEPT + SparseImageMemoryRequirements2 & operator=( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SparseImageMemoryRequirements2 & operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT { - correlationMaskCount = static_cast( correlationMasks_.size() ); - pCorrelationMasks = correlationMasks_.data(); + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkRenderPassMultiviewCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkSparseImageMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkRenderPassMultiviewCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, subpassCount, pViewMasks, dependencyCount, pViewOffsets, correlationMaskCount, pCorrelationMasks ); + return std::tie( sType, pNext, memoryRequirements ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RenderPassMultiviewCreateInfo const & ) const = default; + auto operator<=>( SparseImageMemoryRequirements2 const & ) const = default; #else - bool operator==( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subpassCount == rhs.subpassCount ) && ( pViewMasks == rhs.pViewMasks ) && - ( dependencyCount == rhs.dependencyCount ) && ( pViewOffsets == rhs.pViewOffsets ) && ( correlationMaskCount == rhs.correlationMaskCount ) && - ( pCorrelationMasks == rhs.pCorrelationMasks ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements ); # endif } - bool operator!=( RenderPassMultiviewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo; - const void * pNext = {}; - uint32_t subpassCount = {}; - const uint32_t * pViewMasks = {}; - uint32_t dependencyCount = {}; - const int32_t * pViewOffsets = {}; - uint32_t correlationMaskCount = {}; - const uint32_t * pCorrelationMasks = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements = {}; }; template <> - struct CppType + struct CppType { - using Type = RenderPassMultiviewCreateInfo; + using Type = SparseImageMemoryRequirements2; }; - using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo; - struct SubpassSampleLocationsEXT + using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2; + +#if defined( VK_USE_PLATFORM_GGP ) + struct 
StreamDescriptorSurfaceCreateInfoGGP { - using NativeType = VkSubpassSampleLocationsEXT; + using NativeType = VkStreamDescriptorSurfaceCreateInfoGGP; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( uint32_t subpassIndex_ = {}, - VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT - : subpassIndex( subpassIndex_ ) - , sampleLocationsInfo( sampleLocationsInfo_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP; + +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {}, + GgpStreamDescriptor streamDescriptor_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , streamDescriptor{ streamDescriptor_ } { } - VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : SubpassSampleLocationsEXT( *reinterpret_cast( &rhs ) ) + StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT + : StreamDescriptorSurfaceCreateInfoGGP( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SubpassSampleLocationsEXT & operator=( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + StreamDescriptorSurfaceCreateInfoGGP & operator=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SubpassSampleLocationsEXT & operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + StreamDescriptorSurfaceCreateInfoGGP & operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT & setSubpassIndex( uint32_t subpassIndex_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - subpassIndex = subpassIndex_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT & - setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & + setFlags( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ ) VULKAN_HPP_NOEXCEPT { - sampleLocationsInfo = sampleLocationsInfo_; + flags = flags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkSubpassSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & setStreamDescriptor( GgpStreamDescriptor streamDescriptor_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + streamDescriptor = streamDescriptor_; + return *this; } +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator 
VkSubpassSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT + operator VkStreamDescriptorSurfaceCreateInfoGGP const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkStreamDescriptorSurfaceCreateInfoGGP &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( subpassIndex, sampleLocationsInfo ); + return std::tie( sType, pNext, flags, streamDescriptor ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SubpassSampleLocationsEXT const & ) const = default; -#else - bool operator==( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( subpassIndex == rhs.subpassIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } # endif + + bool operator==( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ) == 0 ); } - bool operator!=( SubpassSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - uint32_t subpassIndex = {}; - VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags = {}; + GgpStreamDescriptor streamDescriptor = {}; }; - struct RenderPassSampleLocationsBeginInfoEXT + template <> + struct CppType { - using NativeType = VkRenderPassSampleLocationsBeginInfoEXT; + using Type = StreamDescriptorSurfaceCreateInfoGGP; + }; +#endif /*VK_USE_PLATFORM_GGP*/ + + struct SubmitInfo + { + using NativeType = VkSubmitInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassSampleLocationsBeginInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - RenderPassSampleLocationsBeginInfoEXT( uint32_t attachmentInitialSampleLocationsCount_ = {}, - const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ = {}, - uint32_t postSubpassSampleLocationsCount_ = {}, - const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * 
pPostSubpassSampleLocations_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ ) - , pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ ) - , postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ ) - , pPostSubpassSampleLocations( pPostSubpassSampleLocations_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubmitInfo( uint32_t waitSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ = {}, + uint32_t commandBufferCount_ = {}, + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ = {}, + uint32_t signalSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , waitSemaphoreCount{ waitSemaphoreCount_ } + , pWaitSemaphores{ pWaitSemaphores_ } + , pWaitDstStageMask{ pWaitDstStageMask_ } + , commandBufferCount{ commandBufferCount_ } + , pCommandBuffers{ pCommandBuffers_ } + , signalSemaphoreCount{ signalSemaphoreCount_ } + , pSignalSemaphores{ pSignalSemaphores_ } { } - VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SubmitInfo( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : RenderPassSampleLocationsBeginInfoEXT( *reinterpret_cast( &rhs ) ) - { - } + SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SubmitInfo( *reinterpret_cast( &rhs ) ) {} # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderPassSampleLocationsBeginInfoEXT( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachmentInitialSampleLocations_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & postSubpassSampleLocations_ = {}, - const void * pNext_ = nullptr ) + SubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitDstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBuffers_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphores_ = {}, + const void * pNext_ = nullptr ) : pNext( pNext_ ) - , attachmentInitialSampleLocationsCount( static_cast( attachmentInitialSampleLocations_.size() ) ) - , pAttachmentInitialSampleLocations( attachmentInitialSampleLocations_.data() ) - , postSubpassSampleLocationsCount( static_cast( postSubpassSampleLocations_.size() ) ) - , pPostSubpassSampleLocations( postSubpassSampleLocations_.data() ) + , waitSemaphoreCount( static_cast( waitSemaphores_.size() ) ) + , pWaitSemaphores( waitSemaphores_.data() ) + , pWaitDstStageMask( waitDstStageMask_.data() ) + , commandBufferCount( static_cast( commandBuffers_.size() ) ) + , pCommandBuffers( commandBuffers_.data() ) + , signalSemaphoreCount( static_cast( signalSemaphores_.size() ) ) + , pSignalSemaphores( signalSemaphores_.data() ) { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( waitSemaphores_.size() == waitDstStageMask_.size() ); +# else + if ( waitSemaphores_.size() != waitDstStageMask_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubmitInfo::SubmitInfo: waitSemaphores_.size() != 
waitDstStageMask_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - RenderPassSampleLocationsBeginInfoEXT & operator=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SubmitInfo & operator=( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - RenderPassSampleLocationsBeginInfoEXT & operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SubmitInfo & operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & - setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT { - attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_; + waitSemaphoreCount = waitSemaphoreCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & - setPAttachmentInitialSampleLocations( const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT { - pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_; + pWaitSemaphores = pWaitSemaphores_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocations( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & attachmentInitialSampleLocations_ ) + SubmitInfo & + setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = static_cast( waitSemaphores_.size() ); + pWaitSemaphores = waitSemaphores_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + pWaitDstStageMask = pWaitDstStageMask_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo & setWaitDstStageMask( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitDstStageMask_ ) VULKAN_HPP_NOEXCEPT { - attachmentInitialSampleLocationsCount = static_cast( attachmentInitialSampleLocations_.size() ); - pAttachmentInitialSampleLocations = attachmentInitialSampleLocations_.data(); + waitSemaphoreCount = static_cast( waitDstStageMask_.size() ); + pWaitDstStageMask = waitDstStageMask_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & - setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) 
VULKAN_HPP_NOEXCEPT { - postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_; + commandBufferCount = commandBufferCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & - setPPostSubpassSampleLocations( const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT { - pPostSubpassSampleLocations = pPostSubpassSampleLocations_; + pCommandBuffers = pCommandBuffers_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocations( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & postSubpassSampleLocations_ ) - VULKAN_HPP_NOEXCEPT + SubmitInfo & + setCommandBuffers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBuffers_ ) VULKAN_HPP_NOEXCEPT { - postSubpassSampleLocationsCount = static_cast( postSubpassSampleLocations_.size() ); - pPostSubpassSampleLocations = postSubpassSampleLocations_.data(); + commandBufferCount = static_cast( commandBuffers_.size() ); + pCommandBuffers = commandBuffers_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkRenderPassSampleLocationsBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + signalSemaphoreCount = signalSemaphoreCount_; + return *this; } - operator VkRenderPassSampleLocationsBeginInfoEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pSignalSemaphores = pSignalSemaphores_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo & + setSignalSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = static_cast( signalSemaphores_.size() ); + pSignalSemaphores = signalSemaphores_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -81619,435 +123103,447 @@ namespace VULKAN_HPP_NAMESPACE std::tuple + const VULKAN_HPP_NAMESPACE::CommandBuffer * const &, + uint32_t const &, + const VULKAN_HPP_NAMESPACE::Semaphore * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( - sType, pNext, attachmentInitialSampleLocationsCount, pAttachmentInitialSampleLocations, postSubpassSampleLocationsCount, pPostSubpassSampleLocations ); + sType, pNext, waitSemaphoreCount, pWaitSemaphores, pWaitDstStageMask, commandBufferCount, pCommandBuffers, signalSemaphoreCount, pSignalSemaphores ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RenderPassSampleLocationsBeginInfoEXT const & ) const = default; + auto operator<=>( SubmitInfo const & ) const = default; #else - bool operator==( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if 
defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount ) && - ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations ) && - ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount ) && ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && + ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( pWaitDstStageMask == rhs.pWaitDstStageMask ) && ( commandBufferCount == rhs.commandBufferCount ) && + ( pCommandBuffers == rhs.pCommandBuffers ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && + ( pSignalSemaphores == rhs.pSignalSemaphores ); # endif } - bool operator!=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT; - const void * pNext = {}; - uint32_t attachmentInitialSampleLocationsCount = {}; - const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT * pAttachmentInitialSampleLocations = {}; - uint32_t postSubpassSampleLocationsCount = {}; - const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo; + const void * pNext = {}; + uint32_t waitSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {}; + const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask = {}; + uint32_t commandBufferCount = {}; + const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers = {}; + uint32_t signalSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores = {}; }; template <> - struct CppType + struct CppType { - using Type = RenderPassSampleLocationsBeginInfoEXT; + using Type = SubmitInfo; }; - struct RenderPassSubpassFeedbackInfoEXT + struct SubmitInfo2 { - using NativeType = VkRenderPassSubpassFeedbackInfoEXT; + using NativeType = VkSubmitInfo2; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 - RenderPassSubpassFeedbackInfoEXT( VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT subpassMergeStatus_ = VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT::eMerged, - std::array const & description_ = {}, - uint32_t postMergeIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : subpassMergeStatus( subpassMergeStatus_ ) - , description( description_ ) - , postMergeIndex( postMergeIndex_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo2; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubmitInfo2( VULKAN_HPP_NAMESPACE::SubmitFlags flags_ = {}, + uint32_t waitSemaphoreInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos_ = {}, + uint32_t commandBufferInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos_ = {}, + uint32_t signalSemaphoreInfoCount_ = {}, + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , 
waitSemaphoreInfoCount{ waitSemaphoreInfoCount_ } + , pWaitSemaphoreInfos{ pWaitSemaphoreInfos_ } + , commandBufferInfoCount{ commandBufferInfoCount_ } + , pCommandBufferInfos{ pCommandBufferInfos_ } + , signalSemaphoreInfoCount{ signalSemaphoreInfoCount_ } + , pSignalSemaphoreInfos{ pSignalSemaphoreInfos_ } { } - VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackInfoEXT( RenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SubmitInfo2( SubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - RenderPassSubpassFeedbackInfoEXT( VkRenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : RenderPassSubpassFeedbackInfoEXT( *reinterpret_cast( &rhs ) ) + SubmitInfo2( VkSubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubmitInfo2( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2( VULKAN_HPP_NAMESPACE::SubmitFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreInfos_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBufferInfos_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreInfos_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , waitSemaphoreInfoCount( static_cast( waitSemaphoreInfos_.size() ) ) + , pWaitSemaphoreInfos( waitSemaphoreInfos_.data() ) + , commandBufferInfoCount( static_cast( commandBufferInfos_.size() ) ) + , pCommandBufferInfos( commandBufferInfos_.data() ) + , signalSemaphoreInfoCount( static_cast( signalSemaphoreInfos_.size() ) ) + , pSignalSemaphoreInfos( signalSemaphoreInfos_.data() ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - RenderPassSubpassFeedbackInfoEXT & operator=( RenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SubmitInfo2 & operator=( SubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - RenderPassSubpassFeedbackInfoEXT & operator=( VkRenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SubmitInfo2 & operator=( VkSubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkRenderPassSubpassFeedbackInfoEXT const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkRenderPassSubpassFeedbackInfoEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setFlags( VULKAN_HPP_NAMESPACE::SubmitFlags flags_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + flags = flags_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std:: - tuple const &, uint32_t const &> -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setWaitSemaphoreInfoCount( uint32_t waitSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( subpassMergeStatus, description, postMergeIndex ); + waitSemaphoreInfoCount = waitSemaphoreInfoCount_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RenderPassSubpassFeedbackInfoEXT const & ) const = default; -#else - bool operator==( RenderPassSubpassFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + 
VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPWaitSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( subpassMergeStatus == rhs.subpassMergeStatus ) && ( description == rhs.description ) && ( postMergeIndex == rhs.postMergeIndex ); -# endif + pWaitSemaphoreInfos = pWaitSemaphoreInfos_; + return *this; } - bool operator!=( RenderPassSubpassFeedbackInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2 & setWaitSemaphoreInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + waitSemaphoreInfoCount = static_cast( waitSemaphoreInfos_.size() ); + pWaitSemaphoreInfos = waitSemaphoreInfos_.data(); + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT subpassMergeStatus = VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT::eMerged; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D description = {}; - uint32_t postMergeIndex = {}; - }; - - struct RenderPassSubpassFeedbackCreateInfoEXT - { - using NativeType = VkRenderPassSubpassFeedbackCreateInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassSubpassFeedbackCreateInfoEXT; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pSubpassFeedback( pSubpassFeedback_ ) + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setCommandBufferInfoCount( uint32_t commandBufferInfoCount_ ) VULKAN_HPP_NOEXCEPT { + commandBufferInfoCount = commandBufferInfoCount_; + return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - RenderPassSubpassFeedbackCreateInfoEXT( VkRenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : RenderPassSubpassFeedbackCreateInfoEXT( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & + setPCommandBufferInfos( const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos_ ) VULKAN_HPP_NOEXCEPT { + pCommandBufferInfos = pCommandBufferInfos_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - RenderPassSubpassFeedbackCreateInfoEXT & operator=( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2 & setCommandBufferInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBufferInfos_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferInfoCount = static_cast( commandBufferInfos_.size() ); + pCommandBufferInfos = commandBufferInfos_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - RenderPassSubpassFeedbackCreateInfoEXT & operator=( VkRenderPassSubpassFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setSignalSemaphoreInfoCount( uint32_t signalSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + signalSemaphoreInfoCount = signalSemaphoreInfoCount_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - 
VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & + setPSignalSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + pSignalSemaphoreInfos = pSignalSemaphoreInfos_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT & - setPSubpassFeedback( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubmitInfo2 & setSignalSemaphoreInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT { - pSubpassFeedback = pSubpassFeedback_; + signalSemaphoreInfoCount = static_cast( signalSemaphoreInfos_.size() ); + pSignalSemaphoreInfos = signalSemaphoreInfos_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkRenderPassSubpassFeedbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkSubmitInfo2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkRenderPassSubpassFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkSubmitInfo2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pSubpassFeedback ); + return std::tie( sType, + pNext, + flags, + waitSemaphoreInfoCount, + pWaitSemaphoreInfos, + commandBufferInfoCount, + pCommandBufferInfos, + signalSemaphoreInfoCount, + pSignalSemaphoreInfos ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RenderPassSubpassFeedbackCreateInfoEXT const & ) const = default; + auto operator<=>( SubmitInfo2 const & ) const = default; #else - bool operator==( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubmitInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pSubpassFeedback == rhs.pSubpassFeedback ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( waitSemaphoreInfoCount == rhs.waitSemaphoreInfoCount ) && + ( pWaitSemaphoreInfos == rhs.pWaitSemaphoreInfos ) && ( commandBufferInfoCount == rhs.commandBufferInfoCount ) && + ( pCommandBufferInfos == rhs.pCommandBufferInfos ) && ( signalSemaphoreInfoCount == rhs.signalSemaphoreInfoCount ) && + ( pSignalSemaphoreInfos == rhs.pSignalSemaphoreInfos ); # endif } - bool operator!=( RenderPassSubpassFeedbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubmitInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSubpassFeedbackCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo2; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SubmitFlags flags = {}; + uint32_t waitSemaphoreInfoCount = {}; + const 
VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos = {}; + uint32_t commandBufferInfoCount = {}; + const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos = {}; + uint32_t signalSemaphoreInfoCount = {}; + const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos = {}; }; template <> - struct CppType + struct CppType { - using Type = RenderPassSubpassFeedbackCreateInfoEXT; + using Type = SubmitInfo2; }; - struct RenderPassTransformBeginInfoQCOM + using SubmitInfo2KHR = SubmitInfo2; + + struct SubpassBeginInfo { - using NativeType = VkRenderPassTransformBeginInfoQCOM; + using NativeType = VkSubpassBeginInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassTransformBeginInfoQCOM; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassBeginInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , transform( transform_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassBeginInfo( VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , contents{ contents_ } { } - VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SubpassBeginInfo( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - RenderPassTransformBeginInfoQCOM( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT - : RenderPassTransformBeginInfoQCOM( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassBeginInfo( *reinterpret_cast( &rhs ) ) {} - RenderPassTransformBeginInfoQCOM & operator=( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SubpassBeginInfo & operator=( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - RenderPassTransformBeginInfoQCOM & operator=( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + SubpassBeginInfo & operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT { - transform = transform_; + contents = contents_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator 
VkRenderPassTransformBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT + operator VkSubpassBeginInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkRenderPassTransformBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT + operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, transform ); + return std::tie( sType, pNext, contents ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RenderPassTransformBeginInfoQCOM const & ) const = default; + auto operator<=>( SubpassBeginInfo const & ) const = default; #else - bool operator==( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( contents == rhs.contents ); # endif } - bool operator!=( RenderPassTransformBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassTransformBeginInfoQCOM; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline; }; template <> - struct CppType + struct CppType { - using Type = RenderPassTransformBeginInfoQCOM; + using Type = SubpassBeginInfo; }; - struct RenderingAttachmentInfo + using SubpassBeginInfoKHR = SubpassBeginInfo; + + struct SubpassDescriptionDepthStencilResolve { - using NativeType = VkRenderingAttachmentInfo; + using NativeType = VkSubpassDescriptionDepthStencilResolve; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingAttachmentInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescriptionDepthStencilResolve; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, - VULKAN_HPP_NAMESPACE::ImageView resolveImageView_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, - VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, - VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageView( imageView_ ) - , imageLayout( imageLayout_ ) - , 
resolveMode( resolveMode_ ) - , resolveImageView( resolveImageView_ ) - , resolveImageLayout( resolveImageLayout_ ) - , loadOp( loadOp_ ) - , storeOp( storeOp_ ) - , clearValue( clearValue_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SubpassDescriptionDepthStencilResolve( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , depthResolveMode{ depthResolveMode_ } + , stencilResolveMode{ stencilResolveMode_ } + , pDepthStencilResolveAttachment{ pDepthStencilResolveAttachment_ } { } - VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo( RenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; - RenderingAttachmentInfo( VkRenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : RenderingAttachmentInfo( *reinterpret_cast( &rhs ) ) + SubpassDescriptionDepthStencilResolve( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassDescriptionDepthStencilResolve( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - RenderingAttachmentInfo & operator=( RenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SubpassDescriptionDepthStencilResolve & operator=( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - RenderingAttachmentInfo & operator=( VkRenderingAttachmentInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SubpassDescriptionDepthStencilResolve & operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT - { - imageView = imageView_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT - { - imageLayout = imageLayout_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode_ ) VULKAN_HPP_NOEXCEPT - { - resolveMode = resolveMode_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveImageView( VULKAN_HPP_NAMESPACE::ImageView resolveImageView_ ) VULKAN_HPP_NOEXCEPT - { - resolveImageView = resolveImageView_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setResolveImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout_ ) VULKAN_HPP_NOEXCEPT - { - resolveImageLayout = resolveImageLayout_; - return *this; - } - - 
VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & + setDepthResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ ) VULKAN_HPP_NOEXCEPT { - loadOp = loadOp_; + depthResolveMode = depthResolveMode_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & + setStencilResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT { - storeOp = storeOp_; + stencilResolveMode = stencilResolveMode_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentInfo & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & + setPDepthStencilResolveAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT { - clearValue = clearValue_; + pDepthStencilResolveAttachment = pDepthStencilResolveAttachment_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkRenderingAttachmentInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkSubpassDescriptionDepthStencilResolve const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkRenderingAttachmentInfo &() VULKAN_HPP_NOEXCEPT + operator VkSubpassDescriptionDepthStencilResolve &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -82056,1857 +123552,1747 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits const &, + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, imageView, imageLayout, resolveMode, resolveImageView, resolveImageLayout, loadOp, storeOp, clearValue ); + return std::tie( sType, pNext, depthResolveMode, stencilResolveMode, pDepthStencilResolveAttachment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassDescriptionDepthStencilResolve const & ) const = default; +#else + bool operator==( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthResolveMode == rhs.depthResolveMode ) && + ( stencilResolveMode == rhs.stencilResolveMode ) && ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment ); +# endif + } + + bool operator!=( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingAttachmentInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ImageView imageView = {}; - VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - VULKAN_HPP_NAMESPACE::ResolveModeFlagBits resolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone; - VULKAN_HPP_NAMESPACE::ImageView resolveImageView = {}; - VULKAN_HPP_NAMESPACE::ImageLayout resolveImageLayout = 
VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; - VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; - VULKAN_HPP_NAMESPACE::ClearValue clearValue = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolve; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone; + VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone; + const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment = {}; }; template <> - struct CppType + struct CppType { - using Type = RenderingAttachmentInfo; + using Type = SubpassDescriptionDepthStencilResolve; }; - using RenderingAttachmentInfoKHR = RenderingAttachmentInfo; - struct RenderingFragmentDensityMapAttachmentInfoEXT + using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve; + + struct SubpassEndInfo { - using NativeType = VkRenderingFragmentDensityMapAttachmentInfoEXT; + using NativeType = VkSubpassEndInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - RenderingFragmentDensityMapAttachmentInfoEXT( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageView( imageView_ ) - , imageLayout( imageLayout_ ) - { - } +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassEndInfo( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } {} - VULKAN_HPP_CONSTEXPR RenderingFragmentDensityMapAttachmentInfoEXT( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SubpassEndInfo( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - RenderingFragmentDensityMapAttachmentInfoEXT( VkRenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : RenderingFragmentDensityMapAttachmentInfoEXT( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassEndInfo( *reinterpret_cast( &rhs ) ) {} - RenderingFragmentDensityMapAttachmentInfoEXT & operator=( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - RenderingFragmentDensityMapAttachmentInfoEXT & operator=( VkRenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SubpassEndInfo & operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( 
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SubpassEndInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
+#endif /*VULKAN_HPP_NO_SETTERS*/

-    VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
-    {
-      imageView = imageView_;
-      return *this;
-    }
-
-    VULKAN_HPP_CONSTEXPR_14 RenderingFragmentDensityMapAttachmentInfoEXT & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
-    {
-      imageLayout = imageLayout_;
-      return *this;
-    }
-#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
-
-    operator VkRenderingFragmentDensityMapAttachmentInfoEXT const &() const VULKAN_HPP_NOEXCEPT
+    operator VkSubpassEndInfo const &() const VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<const VkRenderingFragmentDensityMapAttachmentInfoEXT *>( this );
+      return *reinterpret_cast<const VkSubpassEndInfo *>( this );
    }

-    operator VkRenderingFragmentDensityMapAttachmentInfoEXT &() VULKAN_HPP_NOEXCEPT
+    operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<VkRenderingFragmentDensityMapAttachmentInfoEXT *>( this );
+      return *reinterpret_cast<VkSubpassEndInfo *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
-    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::ImageView const &, VULKAN_HPP_NAMESPACE::ImageLayout const &>
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
-      return std::tie( sType, pNext, imageView, imageLayout );
+      return std::tie( sType, pNext );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
-    auto operator<=>( RenderingFragmentDensityMapAttachmentInfoEXT const & ) const = default;
+    auto operator<=>( SubpassEndInfo const & ) const = default;
#else
-    bool operator==( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext );
# endif
    }

-    bool operator!=( RenderingFragmentDensityMapAttachmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
-    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT;
-    const void *                        pNext = {};
-    VULKAN_HPP_NAMESPACE::ImageView     imageView = {};
-    VULKAN_HPP_NAMESPACE::ImageLayout   imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo;
+    const void *                        pNext = {};
  };

  template <>
-  struct CppType<StructureType, StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT>
+  struct CppType<StructureType, StructureType::eSubpassEndInfo>
  {
-    using Type = RenderingFragmentDensityMapAttachmentInfoEXT;
+    using Type = SubpassEndInfo;
  };

-  struct RenderingFragmentShadingRateAttachmentInfoKHR
+  using SubpassEndInfoKHR = SubpassEndInfo;
+
+  struct SubpassFragmentDensityMapOffsetEndInfoQCOM
  {
-    using NativeType = VkRenderingFragmentShadingRateAttachmentInfoKHR;
+    using NativeType = VkSubpassFragmentDensityMapOffsetEndInfoQCOM;

    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM;

-#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR
-    RenderingFragmentShadingRateAttachmentInfoKHR( 
VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageView( imageView_ ) - , imageLayout( imageLayout_ ) - , shadingRateAttachmentTexelSize( shadingRateAttachmentTexelSize_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassFragmentDensityMapOffsetEndInfoQCOM( uint32_t fragmentDensityOffsetCount_ = {}, + const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fragmentDensityOffsetCount{ fragmentDensityOffsetCount_ } + , pFragmentDensityOffsets{ pFragmentDensityOffsets_ } { } - VULKAN_HPP_CONSTEXPR - RenderingFragmentShadingRateAttachmentInfoKHR( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SubpassFragmentDensityMapOffsetEndInfoQCOM( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; - RenderingFragmentShadingRateAttachmentInfoKHR( VkRenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : RenderingFragmentShadingRateAttachmentInfoKHR( *reinterpret_cast( &rhs ) ) + SubpassFragmentDensityMapOffsetEndInfoQCOM( VkSubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassFragmentDensityMapOffsetEndInfoQCOM( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - RenderingFragmentShadingRateAttachmentInfoKHR & operator=( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassFragmentDensityMapOffsetEndInfoQCOM( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & fragmentDensityOffsets_, const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , fragmentDensityOffsetCount( static_cast( fragmentDensityOffsets_.size() ) ) + , pFragmentDensityOffsets( fragmentDensityOffsets_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - RenderingFragmentShadingRateAttachmentInfoKHR & operator=( VkRenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + SubpassFragmentDensityMapOffsetEndInfoQCOM & operator=( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SubpassFragmentDensityMapOffsetEndInfoQCOM & operator=( VkSubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM & + setFragmentDensityOffsetCount( uint32_t fragmentDensityOffsetCount_ ) VULKAN_HPP_NOEXCEPT { - imageView = 
imageView_; + fragmentDensityOffsetCount = fragmentDensityOffsetCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM & + setPFragmentDensityOffsets( const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets_ ) VULKAN_HPP_NOEXCEPT { - imageLayout = imageLayout_; + pFragmentDensityOffsets = pFragmentDensityOffsets_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderingFragmentShadingRateAttachmentInfoKHR & - setShadingRateAttachmentTexelSize( VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SubpassFragmentDensityMapOffsetEndInfoQCOM & setFragmentDensityOffsets( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & fragmentDensityOffsets_ ) VULKAN_HPP_NOEXCEPT { - shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_; + fragmentDensityOffsetCount = static_cast( fragmentDensityOffsets_.size() ); + pFragmentDensityOffsets = fragmentDensityOffsets_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkRenderingFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkSubpassFragmentDensityMapOffsetEndInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkRenderingFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkSubpassFragmentDensityMapOffsetEndInfoQCOM &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, imageView, imageLayout, shadingRateAttachmentTexelSize ); + return std::tie( sType, pNext, fragmentDensityOffsetCount, pFragmentDensityOffsets ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RenderingFragmentShadingRateAttachmentInfoKHR const & ) const = default; + auto operator<=>( SubpassFragmentDensityMapOffsetEndInfoQCOM const & ) const = default; #else - bool operator==( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout ) && - ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityOffsetCount == rhs.fragmentDensityOffsetCount ) && + ( pFragmentDensityOffsets == rhs.pFragmentDensityOffsets ); # endif } - bool operator!=( RenderingFragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ImageView imageView = {}; - 
VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM; + const void * pNext = {}; + uint32_t fragmentDensityOffsetCount = {}; + const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets = {}; }; template <> - struct CppType + struct CppType { - using Type = RenderingFragmentShadingRateAttachmentInfoKHR; + using Type = SubpassFragmentDensityMapOffsetEndInfoQCOM; }; - struct RenderingInfo + struct SubpassResolvePerformanceQueryEXT { - using NativeType = VkRenderingInfo; + using NativeType = VkSubpassResolvePerformanceQueryEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingInfo; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 RenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, - uint32_t layerCount_ = {}, - uint32_t viewMask_ = {}, - uint32_t colorAttachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments_ = {}, - const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ = {}, - const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , renderArea( renderArea_ ) - , layerCount( layerCount_ ) - , viewMask( viewMask_ ) - , colorAttachmentCount( colorAttachmentCount_ ) - , pColorAttachments( pColorAttachments_ ) - , pDepthAttachment( pDepthAttachment_ ) - , pStencilAttachment( pStencilAttachment_ ) - { - } - - VULKAN_HPP_CONSTEXPR_14 RenderingInfo( RenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - RenderingInfo( VkRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT : RenderingInfo( *reinterpret_cast( &rhs ) ) {} - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderingInfo( VULKAN_HPP_NAMESPACE::RenderingFlags flags_, - VULKAN_HPP_NAMESPACE::Rect2D renderArea_, - uint32_t layerCount_, - uint32_t viewMask_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_, - const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ = {}, - const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , renderArea( renderArea_ ) - , layerCount( layerCount_ ) - , viewMask( viewMask_ ) - , colorAttachmentCount( static_cast( colorAttachments_.size() ) ) - , pColorAttachments( colorAttachments_.data() ) - , pDepthAttachment( pDepthAttachment_ ) - , pStencilAttachment( pStencilAttachment_ ) - { - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - RenderingInfo & operator=( RenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - RenderingInfo & operator=( VkRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderingFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 RenderingInfo & 
setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT - { - renderArea = renderArea_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT - { - layerCount = layerCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT - { - viewMask = viewMask_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT - { - colorAttachmentCount = colorAttachmentCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPColorAttachments( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments_ ) VULKAN_HPP_NOEXCEPT - { - pColorAttachments = pColorAttachments_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderingInfo & setColorAttachments( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassResolvePerformanceQueryEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassResolvePerformanceQueryEXT( VULKAN_HPP_NAMESPACE::Bool32 optimal_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , optimal{ optimal_ } { - colorAttachmentCount = static_cast( colorAttachments_.size() ); - pColorAttachments = colorAttachments_.data(); - return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 RenderingInfo & setPDepthAttachment( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SubpassResolvePerformanceQueryEXT( SubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubpassResolvePerformanceQueryEXT( VkSubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassResolvePerformanceQueryEXT( *reinterpret_cast( &rhs ) ) { - pDepthAttachment = pDepthAttachment_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderingInfo & - setPStencilAttachment( const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ ) VULKAN_HPP_NOEXCEPT + SubpassResolvePerformanceQueryEXT & operator=( SubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SubpassResolvePerformanceQueryEXT & operator=( VkSubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - pStencilAttachment = pStencilAttachment_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkRenderingInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkSubpassResolvePerformanceQueryEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkRenderingInfo &() VULKAN_HPP_NOEXCEPT + operator VkSubpassResolvePerformanceQueryEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, renderArea, layerCount, viewMask, colorAttachmentCount, pColorAttachments, pDepthAttachment, pStencilAttachment ); + return std::tie( sType, pNext, optimal ); } #endif #if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RenderingInfo const & ) const = default; + auto operator<=>( SubpassResolvePerformanceQueryEXT const & ) const = default; #else - bool operator==( RenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubpassResolvePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( renderArea == rhs.renderArea ) && - ( layerCount == rhs.layerCount ) && ( viewMask == rhs.viewMask ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && - ( pColorAttachments == rhs.pColorAttachments ) && ( pDepthAttachment == rhs.pDepthAttachment ) && ( pStencilAttachment == rhs.pStencilAttachment ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( optimal == rhs.optimal ); # endif } - bool operator!=( RenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubpassResolvePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::RenderingFlags flags = {}; - VULKAN_HPP_NAMESPACE::Rect2D renderArea = {}; - uint32_t layerCount = {}; - uint32_t viewMask = {}; - uint32_t colorAttachmentCount = {}; - const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pColorAttachments = {}; - const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment = {}; - const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassResolvePerformanceQueryEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 optimal = {}; }; template <> - struct CppType + struct CppType { - using Type = RenderingInfo; + using Type = SubpassResolvePerformanceQueryEXT; }; - using RenderingInfoKHR = RenderingInfo; - struct ResolveImageInfo2 + struct SubpassShadingPipelineCreateInfoHUAWEI { - using NativeType = VkResolveImageInfo2; + using NativeType = VkSubpassShadingPipelineCreateInfoHUAWEI; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eResolveImageInfo2; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassShadingPipelineCreateInfoHUAWEI; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ResolveImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - uint32_t regionCount_ = {}, - const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcImage( srcImage_ ) - , srcImageLayout( srcImageLayout_ ) - , dstImage( dstImage_ ) - , dstImageLayout( dstImageLayout_ ) - , regionCount( regionCount_ ) - , pRegions( pRegions_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubpassShadingPipelineCreateInfoHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, + uint32_t subpass_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , renderPass{ renderPass_ } + , 
subpass{ subpass_ } { } - VULKAN_HPP_CONSTEXPR ResolveImageInfo2( ResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ResolveImageInfo2( VkResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : ResolveImageInfo2( *reinterpret_cast( &rhs ) ) {} + VULKAN_HPP_CONSTEXPR SubpassShadingPipelineCreateInfoHUAWEI( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ResolveImageInfo2( VULKAN_HPP_NAMESPACE::Image srcImage_, - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, - VULKAN_HPP_NAMESPACE::Image dstImage_, - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , srcImage( srcImage_ ) - , srcImageLayout( srcImageLayout_ ) - , dstImage( dstImage_ ) - , dstImageLayout( dstImageLayout_ ) - , regionCount( static_cast( regions_.size() ) ) - , pRegions( regions_.data() ) + SubpassShadingPipelineCreateInfoHUAWEI( VkSubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : SubpassShadingPipelineCreateInfoHUAWEI( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ResolveImageInfo2 & operator=( ResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SubpassShadingPipelineCreateInfoHUAWEI & operator=( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ResolveImageInfo2 & operator=( VkResolveImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + SubpassShadingPipelineCreateInfoHUAWEI & operator=( VkSubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SubpassShadingPipelineCreateInfoHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassShadingPipelineCreateInfoHUAWEI & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT { - srcImage = srcImage_; + renderPass = renderPass_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SubpassShadingPipelineCreateInfoHUAWEI & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT { - srcImageLayout = srcImageLayout_; + subpass = subpass_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + operator VkSubpassShadingPipelineCreateInfoHUAWEI const &() const VULKAN_HPP_NOEXCEPT { - dstImage = dstImage_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + operator VkSubpassShadingPipelineCreateInfoHUAWEI &() VULKAN_HPP_NOEXCEPT { - dstImageLayout = dstImageLayout_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 
ResolveImageInfo2 & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - regionCount = regionCount_; - return *this; + return std::tie( sType, pNext, renderPass, subpass ); } +#endif - VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2 & setPRegions( const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubpassShadingPipelineCreateInfoHUAWEI const & ) const = default; +#else + bool operator==( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT { - pRegions = pRegions_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && ( subpass == rhs.subpass ); +# endif } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ResolveImageInfo2 & - setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT { - regionCount = static_cast( regions_.size() ); - pRegions = regions_.data(); + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassShadingPipelineCreateInfoHUAWEI; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; + uint32_t subpass = {}; + }; + + template <> + struct CppType + { + using Type = SubpassShadingPipelineCreateInfoHUAWEI; + }; + + struct SubresourceHostMemcpySize + { + using NativeType = VkSubresourceHostMemcpySize; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubresourceHostMemcpySize; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubresourceHostMemcpySize( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , size{ size_ } + { + } + + VULKAN_HPP_CONSTEXPR SubresourceHostMemcpySize( SubresourceHostMemcpySize const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubresourceHostMemcpySize( VkSubresourceHostMemcpySize const & rhs ) VULKAN_HPP_NOEXCEPT + : SubresourceHostMemcpySize( *reinterpret_cast( &rhs ) ) + { + } + + SubresourceHostMemcpySize & operator=( SubresourceHostMemcpySize const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SubresourceHostMemcpySize & operator=( VkSubresourceHostMemcpySize const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkResolveImageInfo2 const &() const VULKAN_HPP_NOEXCEPT + operator VkSubresourceHostMemcpySize const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkResolveImageInfo2 &() VULKAN_HPP_NOEXCEPT + operator VkSubresourceHostMemcpySize &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, 
dstImageLayout, regionCount, pRegions ); + return std::tie( sType, pNext, size ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ResolveImageInfo2 const & ) const = default; + auto operator<=>( SubresourceHostMemcpySize const & ) const = default; #else - bool operator==( ResolveImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubresourceHostMemcpySize const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) && - ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( size == rhs.size ); # endif } - bool operator!=( ResolveImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubresourceHostMemcpySize const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eResolveImageInfo2; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Image srcImage = {}; - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - VULKAN_HPP_NAMESPACE::Image dstImage = {}; - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - uint32_t regionCount = {}; - const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubresourceHostMemcpySize; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; }; template <> - struct CppType + struct CppType { - using Type = ResolveImageInfo2; + using Type = SubresourceHostMemcpySize; }; - using ResolveImageInfo2KHR = ResolveImageInfo2; - struct SamplerBorderColorComponentMappingCreateInfoEXT + using SubresourceHostMemcpySizeEXT = SubresourceHostMemcpySize; + + struct SubresourceLayout2 { - using NativeType = VkSamplerBorderColorComponentMappingCreateInfoEXT; + using NativeType = VkSubresourceLayout2; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SamplerBorderColorComponentMappingCreateInfoEXT( VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 srgb_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , components( components_ ) - , srgb( srgb_ ) - { - } + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubresourceLayout2; - VULKAN_HPP_CONSTEXPR - SamplerBorderColorComponentMappingCreateInfoEXT( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - SamplerBorderColorComponentMappingCreateInfoEXT( VkSamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : SamplerBorderColorComponentMappingCreateInfoEXT( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubresourceLayout2( VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , subresourceLayout{ subresourceLayout_ } { } 
-#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - SamplerBorderColorComponentMappingCreateInfoEXT & operator=( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SamplerBorderColorComponentMappingCreateInfoEXT & operator=( VkSamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + VULKAN_HPP_CONSTEXPR SubresourceLayout2( SubresourceLayout2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + SubresourceLayout2( VkSubresourceLayout2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubresourceLayout2( *reinterpret_cast( &rhs ) ) {} - VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & - setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT - { - components = components_; - return *this; - } + SubresourceLayout2 & operator=( SubresourceLayout2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 SamplerBorderColorComponentMappingCreateInfoEXT & setSrgb( VULKAN_HPP_NAMESPACE::Bool32 srgb_ ) VULKAN_HPP_NOEXCEPT + SubresourceLayout2 & operator=( VkSubresourceLayout2 const & rhs ) VULKAN_HPP_NOEXCEPT { - srgb = srgb_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkSamplerBorderColorComponentMappingCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkSubresourceLayout2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSamplerBorderColorComponentMappingCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkSubresourceLayout2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, components, srgb ); + return std::tie( sType, pNext, subresourceLayout ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SamplerBorderColorComponentMappingCreateInfoEXT const & ) const = default; + auto operator<=>( SubresourceLayout2 const & ) const = default; #else - bool operator==( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubresourceLayout2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( components == rhs.components ) && ( srgb == rhs.srgb ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subresourceLayout == rhs.subresourceLayout ); # endif } - bool operator!=( SamplerBorderColorComponentMappingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubresourceLayout2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; - VULKAN_HPP_NAMESPACE::Bool32 srgb = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eSubresourceLayout2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout = {}; }; template <> - struct CppType + struct CppType { - using Type = SamplerBorderColorComponentMappingCreateInfoEXT; + using Type = SubresourceLayout2; }; - struct SamplerCreateInfo + using SubresourceLayout2EXT = SubresourceLayout2; + using SubresourceLayout2KHR = SubresourceLayout2; + + struct SurfaceCapabilities2EXT { - using NativeType = VkSamplerCreateInfo; + using NativeType = VkSurfaceCapabilities2EXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2EXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SamplerCreateInfo( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, - VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, - VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest, - VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, - VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, - VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, - float mipLodBias_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {}, - float maxAnisotropy_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {}, - VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, - float minLod_ = {}, - float maxLod_ = {}, - VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack, - VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , magFilter( magFilter_ ) - , minFilter( minFilter_ ) - , mipmapMode( mipmapMode_ ) - , addressModeU( addressModeU_ ) - , addressModeV( addressModeV_ ) - , addressModeW( addressModeW_ ) - , mipLodBias( mipLodBias_ ) - , anisotropyEnable( anisotropyEnable_ ) - , maxAnisotropy( maxAnisotropy_ ) - , compareEnable( compareEnable_ ) - , compareOp( compareOp_ ) - , minLod( minLod_ ) - , maxLod( maxLod_ ) - , borderColor( borderColor_ ) - , unnormalizedCoordinates( unnormalizedCoordinates_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( + uint32_t minImageCount_ = {}, + uint32_t maxImageCount_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, + uint32_t maxImageArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minImageCount{ minImageCount_ } + , 
maxImageCount{ maxImageCount_ } + , currentExtent{ currentExtent_ } + , minImageExtent{ minImageExtent_ } + , maxImageExtent{ maxImageExtent_ } + , maxImageArrayLayers{ maxImageArrayLayers_ } + , supportedTransforms{ supportedTransforms_ } + , currentTransform{ currentTransform_ } + , supportedCompositeAlpha{ supportedCompositeAlpha_ } + , supportedUsageFlags{ supportedUsageFlags_ } + , supportedSurfaceCounters{ supportedSurfaceCounters_ } { } - VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SamplerCreateInfo( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceCapabilities2EXT( *reinterpret_cast( &rhs ) ) + { + } - SamplerCreateInfo & operator=( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SurfaceCapabilities2EXT & operator=( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SamplerCreateInfo & operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SurfaceCapabilities2EXT & operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + operator VkSurfaceCapabilities2EXT const &() const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT { - flags = flags_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - magFilter = magFilter_; - return *this; + return std::tie( sType, + pNext, + minImageCount, + maxImageCount, + currentExtent, + minImageExtent, + maxImageExtent, + maxImageArrayLayers, + supportedTransforms, + currentTransform, + supportedCompositeAlpha, + supportedUsageFlags, + supportedSurfaceCounters ); } +#endif - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilities2EXT const & ) const = default; +#else + bool operator==( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - minFilter = minFilter_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minImageCount == rhs.minImageCount ) && ( maxImageCount == rhs.maxImageCount ) && + ( currentExtent == rhs.currentExtent ) && ( minImageExtent == rhs.minImageExtent ) && ( maxImageExtent == rhs.maxImageExtent ) && + ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && ( supportedTransforms == rhs.supportedTransforms ) && + ( currentTransform == 
rhs.currentTransform ) && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) && + ( supportedUsageFlags == rhs.supportedUsageFlags ) && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters ); +# endif } - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - mipmapMode = mipmapMode_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT; + void * pNext = {}; + uint32_t minImageCount = {}; + uint32_t maxImageCount = {}; + VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; + uint32_t maxImageArrayLayers = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {}; + }; + + template <> + struct CppType + { + using Type = SurfaceCapabilities2EXT; + }; + + struct SurfaceCapabilitiesKHR + { + using NativeType = VkSurfaceCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( + uint32_t minImageCount_ = {}, + uint32_t maxImageCount_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, + uint32_t maxImageArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT + : minImageCount{ minImageCount_ } + , maxImageCount{ maxImageCount_ } + , currentExtent{ currentExtent_ } + , minImageExtent{ minImageExtent_ } + , maxImageExtent{ maxImageExtent_ } + , maxImageArrayLayers{ maxImageArrayLayers_ } + , supportedTransforms{ supportedTransforms_ } + , currentTransform{ currentTransform_ } + , supportedCompositeAlpha{ supportedCompositeAlpha_ } + , supportedUsageFlags{ supportedUsageFlags_ } { - addressModeU = addressModeU_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeV( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilitiesKHR( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceCapabilitiesKHR( *reinterpret_cast( &rhs ) ) { - addressModeV = addressModeV_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeW( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT + SurfaceCapabilitiesKHR & operator=( SurfaceCapabilitiesKHR 
const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SurfaceCapabilitiesKHR & operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - addressModeW = addressModeW_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) VULKAN_HPP_NOEXCEPT + operator VkSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - mipLodBias = mipLodBias_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT + operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - anisotropyEnable = anisotropyEnable_; - return *this; + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( minImageCount, + maxImageCount, + currentExtent, + minImageExtent, + maxImageExtent, + maxImageArrayLayers, + supportedTransforms, + currentTransform, + supportedCompositeAlpha, + supportedUsageFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilitiesKHR const & ) const = default; +#else + bool operator==( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( minImageCount == rhs.minImageCount ) && ( maxImageCount == rhs.maxImageCount ) && ( currentExtent == rhs.currentExtent ) && + ( minImageExtent == rhs.minImageExtent ) && ( maxImageExtent == rhs.maxImageExtent ) && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && + ( supportedTransforms == rhs.supportedTransforms ) && ( currentTransform == rhs.currentTransform ) && + ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) && ( supportedUsageFlags == rhs.supportedUsageFlags ); +# endif } - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - maxAnisotropy = maxAnisotropy_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT - { - compareEnable = compareEnable_; - return *this; - } + public: + uint32_t minImageCount = {}; + uint32_t maxImageCount = {}; + VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; + uint32_t maxImageArrayLayers = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; + }; - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT - { - compareOp = compareOp_; - return *this; - } + struct SurfaceCapabilities2KHR + { + using NativeType = VkSurfaceCapabilities2KHR; - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT - { - minLod = minLod_; - return *this; - } + static const bool allowDuplicate = 
false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2KHR; - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxLod( float maxLod_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , surfaceCapabilities{ surfaceCapabilities_ } { - maxLod = maxLod_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceCapabilities2KHR( *reinterpret_cast( &rhs ) ) { - borderColor = borderColor_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT + SurfaceCapabilities2KHR & operator=( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SurfaceCapabilities2KHR & operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { - unnormalizedCoordinates = unnormalizedCoordinates_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkSamplerCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkSurfaceCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - flags, - magFilter, - minFilter, - mipmapMode, - addressModeU, - addressModeV, - addressModeW, - mipLodBias, - anisotropyEnable, - maxAnisotropy, - compareEnable, - compareOp, - minLod, - maxLod, - borderColor, - unnormalizedCoordinates ); + return std::tie( sType, pNext, surfaceCapabilities ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SamplerCreateInfo const & ) const = default; + auto operator<=>( SurfaceCapabilities2KHR const & ) const = default; #else - bool operator==( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( magFilter == rhs.magFilter ) && ( minFilter == rhs.minFilter ) && - ( mipmapMode == rhs.mipmapMode ) && ( addressModeU == rhs.addressModeU ) && ( addressModeV == rhs.addressModeV ) && - ( addressModeW == rhs.addressModeW ) && ( mipLodBias == rhs.mipLodBias ) && ( anisotropyEnable == rhs.anisotropyEnable ) && - ( maxAnisotropy == rhs.maxAnisotropy ) && ( compareEnable == rhs.compareEnable ) && ( compareOp == rhs.compareOp ) && ( minLod == rhs.minLod ) && - ( maxLod == rhs.maxLod ) && ( borderColor == rhs.borderColor ) && ( 
unnormalizedCoordinates == rhs.unnormalizedCoordinates ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCapabilities == rhs.surfaceCapabilities ); # endif } - bool operator!=( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {}; - VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; - VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; - VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest; - VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; - VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; - VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat; - float mipLodBias = {}; - VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {}; - float maxAnisotropy = {}; - VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {}; - VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever; - float minLod = {}; - float maxLod = {}; - VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack; - VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {}; }; template <> - struct CppType + struct CppType { - using Type = SamplerCreateInfo; + using Type = SurfaceCapabilities2KHR; }; - struct SamplerCustomBorderColorCreateInfoEXT +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct SurfaceCapabilitiesFullScreenExclusiveEXT { - using NativeType = VkSamplerCustomBorderColorCreateInfoEXT; + using NativeType = VkSurfaceCapabilitiesFullScreenExclusiveEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCustomBorderColorCreateInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT( VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor_ = {}, - VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , customBorderColor( customBorderColor_ ) - , format( format_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fullScreenExclusiveSupported{ fullScreenExclusiveSupported_ } { } - VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - 
SamplerCustomBorderColorCreateInfoEXT( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : SamplerCustomBorderColorCreateInfoEXT( *reinterpret_cast( &rhs ) ) + SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceCapabilitiesFullScreenExclusiveEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SamplerCustomBorderColorCreateInfoEXT & operator=( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SamplerCustomBorderColorCreateInfoEXT & operator=( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const &() const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & - setCustomBorderColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & customBorderColor_ ) VULKAN_HPP_NOEXCEPT + operator VkSurfaceCapabilitiesFullScreenExclusiveEXT &() VULKAN_HPP_NOEXCEPT { - customBorderColor = customBorderColor_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - format = format_; - return *this; + return std::tie( sType, pNext, fullScreenExclusiveSupported ); } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif - operator VkSamplerCustomBorderColorCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceCapabilitiesFullScreenExclusiveEXT const & ) const = default; +# else + bool operator==( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fullScreenExclusiveSupported == rhs.fullScreenExclusiveSupported ); +# endif } - operator VkSamplerCustomBorderColorCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + bool operator!=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return !operator==( rhs ); } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple # endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, customBorderColor, format ); - } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCustomBorderColorCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor = {}; - VULKAN_HPP_NAMESPACE::Format format = 
VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported = {}; }; template <> - struct CppType + struct CppType { - using Type = SamplerCustomBorderColorCreateInfoEXT; + using Type = SurfaceCapabilitiesFullScreenExclusiveEXT; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct SamplerReductionModeCreateInfo + struct SurfaceCapabilitiesPresentBarrierNV { - using NativeType = VkSamplerReductionModeCreateInfo; + using NativeType = VkSurfaceCapabilitiesPresentBarrierNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerReductionModeCreateInfo; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - SamplerReductionModeCreateInfo( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , reductionMode( reductionMode_ ) - { - } - - VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilitiesPresentBarrierNV; - SamplerReductionModeCreateInfo( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : SamplerReductionModeCreateInfo( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentBarrierNV( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierSupported_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentBarrierSupported{ presentBarrierSupported_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SamplerReductionModeCreateInfo & operator=( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentBarrierNV( SurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SamplerReductionModeCreateInfo & operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SurfaceCapabilitiesPresentBarrierNV( VkSurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceCapabilitiesPresentBarrierNV( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + SurfaceCapabilitiesPresentBarrierNV & operator=( SurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & setReductionMode( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ ) VULKAN_HPP_NOEXCEPT + SurfaceCapabilitiesPresentBarrierNV & operator=( VkSurfaceCapabilitiesPresentBarrierNV const & rhs ) VULKAN_HPP_NOEXCEPT { - reductionMode = reductionMode_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkSamplerReductionModeCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkSurfaceCapabilitiesPresentBarrierNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + 
return *reinterpret_cast( this ); } - operator VkSamplerReductionModeCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkSurfaceCapabilitiesPresentBarrierNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, reductionMode ); + return std::tie( sType, pNext, presentBarrierSupported ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SamplerReductionModeCreateInfo const & ) const = default; + auto operator<=>( SurfaceCapabilitiesPresentBarrierNV const & ) const = default; #else - bool operator==( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SurfaceCapabilitiesPresentBarrierNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( reductionMode == rhs.reductionMode ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentBarrierSupported == rhs.presentBarrierSupported ); # endif } - bool operator!=( SamplerReductionModeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SurfaceCapabilitiesPresentBarrierNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesPresentBarrierNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 presentBarrierSupported = {}; }; template <> - struct CppType + struct CppType { - using Type = SamplerReductionModeCreateInfo; + using Type = SurfaceCapabilitiesPresentBarrierNV; }; - using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo; - struct SamplerYcbcrConversionCreateInfo + struct SurfaceFormatKHR { - using NativeType = VkSamplerYcbcrConversionCreateInfo; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionCreateInfo; + using NativeType = VkSurfaceFormatKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( - VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, - VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, - VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, - VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, - VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, - VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, - VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , format( format_ ) - , ycbcrModel( ycbcrModel_ ) - , ycbcrRange( ycbcrRange_ ) - , components( components_ ) - , xChromaOffset( xChromaOffset_ ) - , 
yChromaOffset( yChromaOffset_ ) - , chromaFilter( chromaFilter_ ) - , forceExplicitReconstruction( forceExplicitReconstruction_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + SurfaceFormatKHR( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear ) VULKAN_HPP_NOEXCEPT + : format{ format_ } + , colorSpace{ colorSpace_ } { } - VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : SamplerYcbcrConversionCreateInfo( *reinterpret_cast( &rhs ) ) - { - } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT : SurfaceFormatKHR( *reinterpret_cast( &rhs ) ) {} - SamplerYcbcrConversionCreateInfo & operator=( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SurfaceFormatKHR & operator=( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SamplerYcbcrConversionCreateInfo & operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SurfaceFormatKHR & operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + operator VkSurfaceFormatKHR const &() const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT { - format = format_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & - setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - ycbcrModel = ycbcrModel_; - return *this; + return std::tie( format, colorSpace ); } +#endif - VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceFormatKHR const & ) const = default; +#else + bool operator==( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - ycbcrRange = ycbcrRange_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( format == rhs.format ) && ( colorSpace == rhs.colorSpace ); +# endif } - VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - components = components_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 
SamplerYcbcrConversionCreateInfo & setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT - { - xChromaOffset = xChromaOffset_; - return *this; - } + public: + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear; + }; - VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT - { - yChromaOffset = yChromaOffset_; - return *this; - } + struct SurfaceFormat2KHR + { + using NativeType = VkSurfaceFormat2KHR; - VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFormat2KHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , surfaceFormat{ surfaceFormat_ } { - chromaFilter = chromaFilter_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & - setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT : SurfaceFormat2KHR( *reinterpret_cast( &rhs ) ) {} + + SurfaceFormat2KHR & operator=( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SurfaceFormat2KHR & operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { - forceExplicitReconstruction = forceExplicitReconstruction_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkSamplerYcbcrConversionCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkSurfaceFormat2KHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, format, ycbcrModel, ycbcrRange, components, xChromaOffset, yChromaOffset, chromaFilter, forceExplicitReconstruction ); + return std::tie( sType, pNext, surfaceFormat ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SamplerYcbcrConversionCreateInfo const & ) const = default; + auto operator<=>( SurfaceFormat2KHR const & ) const = default; #else - bool operator==( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( ycbcrModel == rhs.ycbcrModel ) && - ( ycbcrRange == rhs.ycbcrRange ) && ( components == rhs.components ) && 
( xChromaOffset == rhs.xChromaOffset ) && - ( yChromaOffset == rhs.yChromaOffset ) && ( chromaFilter == rhs.chromaFilter ) && - ( forceExplicitReconstruction == rhs.forceExplicitReconstruction ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceFormat == rhs.surfaceFormat ); # endif } - bool operator!=( SamplerYcbcrConversionCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; - VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; - VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; - VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; - VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; - VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest; - VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFormat2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {}; }; template <> - struct CppType + struct CppType { - using Type = SamplerYcbcrConversionCreateInfo; + using Type = SurfaceFormat2KHR; }; - using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo; - struct SamplerYcbcrConversionImageFormatProperties +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct SurfaceFullScreenExclusiveInfoEXT { - using NativeType = VkSamplerYcbcrConversionImageFormatProperties; + using NativeType = VkSurfaceFullScreenExclusiveInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionImageFormatProperties; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( uint32_t combinedImageSamplerDescriptorCount_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , combinedImageSamplerDescriptorCount( combinedImageSamplerDescriptorCount_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( + VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , fullScreenExclusive{ fullScreenExclusive_ } { } - VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : SamplerYcbcrConversionImageFormatProperties( *reinterpret_cast( &rhs ) ) + 
SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceFullScreenExclusiveInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SamplerYcbcrConversionImageFormatProperties & operator=( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SurfaceFullScreenExclusiveInfoEXT & operator=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SamplerYcbcrConversionImageFormatProperties & operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + SurfaceFullScreenExclusiveInfoEXT & operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkSamplerYcbcrConversionImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkSamplerYcbcrConversionImageFormatProperties &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT & + setFullScreenExclusive( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + fullScreenExclusive = fullScreenExclusive_; + return *this; } +# endif /*VULKAN_HPP_NO_SETTERS*/ -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkSurfaceFullScreenExclusiveInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSurfaceFullScreenExclusiveInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, combinedImageSamplerDescriptorCount ); + return std::tie( sType, pNext, fullScreenExclusive ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SamplerYcbcrConversionImageFormatProperties const & ) const = default; -#else - bool operator==( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceFullScreenExclusiveInfoEXT const & ) const = default; +# else + bool operator==( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fullScreenExclusive == rhs.fullScreenExclusive ); +# endif } - bool operator!=( SamplerYcbcrConversionImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties; - void * 
pNext = {}; - uint32_t combinedImageSamplerDescriptorCount = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault; }; template <> - struct CppType + struct CppType { - using Type = SamplerYcbcrConversionImageFormatProperties; + using Type = SurfaceFullScreenExclusiveInfoEXT; }; - using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct SamplerYcbcrConversionInfo +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + struct SurfaceFullScreenExclusiveWin32InfoEXT { - using NativeType = VkSamplerYcbcrConversionInfo; + using NativeType = VkSurfaceFullScreenExclusiveWin32InfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , conversion( conversion_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( HMONITOR hmonitor_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , hmonitor{ hmonitor_ } { } - VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : SamplerYcbcrConversionInfo( *reinterpret_cast( &rhs ) ) + SurfaceFullScreenExclusiveWin32InfoEXT( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceFullScreenExclusiveWin32InfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SamplerYcbcrConversionInfo & operator=( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SurfaceFullScreenExclusiveWin32InfoEXT & operator=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SamplerYcbcrConversionInfo & operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SurfaceFullScreenExclusiveWin32InfoEXT & operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & setConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
SurfaceFullScreenExclusiveWin32InfoEXT & setHmonitor( HMONITOR hmonitor_ ) VULKAN_HPP_NOEXCEPT { - conversion = conversion_; + hmonitor = hmonitor_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSamplerYcbcrConversionInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkSurfaceFullScreenExclusiveWin32InfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSamplerYcbcrConversionInfo &() VULKAN_HPP_NOEXCEPT + operator VkSurfaceFullScreenExclusiveWin32InfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, conversion ); + return std::tie( sType, pNext, hmonitor ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SamplerYcbcrConversionInfo const & ) const = default; -#else - bool operator==( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceFullScreenExclusiveWin32InfoEXT const & ) const = default; +# else + bool operator==( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conversion == rhs.conversion ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hmonitor == rhs.hmonitor ); +# endif } - bool operator!=( SamplerYcbcrConversionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT; + const void * pNext = {}; + HMONITOR hmonitor = {}; }; template <> - struct CppType + struct CppType { - using Type = SamplerYcbcrConversionInfo; + using Type = SurfaceFullScreenExclusiveWin32InfoEXT; }; - using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#if defined( VK_USE_PLATFORM_SCREEN_QNX ) - struct ScreenSurfaceCreateInfoQNX + struct SurfacePresentModeCompatibilityEXT { - using NativeType = VkScreenSurfaceCreateInfoQNX; + using NativeType = VkSurfacePresentModeCompatibilityEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eScreenSurfaceCreateInfoQNX; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfacePresentModeCompatibilityEXT; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ = {}, - struct _screen_context * context_ = {}, - struct _screen_window * window_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - 
, context( context_ ) - , window( window_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfacePresentModeCompatibilityEXT( uint32_t presentModeCount_ = {}, + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentModeCount{ presentModeCount_ } + , pPresentModes{ pPresentModes_ } { } - VULKAN_HPP_CONSTEXPR ScreenSurfaceCreateInfoQNX( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SurfacePresentModeCompatibilityEXT( SurfacePresentModeCompatibilityEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ScreenSurfaceCreateInfoQNX( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT - : ScreenSurfaceCreateInfoQNX( *reinterpret_cast( &rhs ) ) + SurfacePresentModeCompatibilityEXT( VkSurfacePresentModeCompatibilityEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfacePresentModeCompatibilityEXT( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ScreenSurfaceCreateInfoQNX & operator=( ScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SurfacePresentModeCompatibilityEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentModes_, + void * pNext_ = nullptr ) + : pNext( pNext_ ), presentModeCount( static_cast( presentModes_.size() ) ), pPresentModes( presentModes_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - ScreenSurfaceCreateInfoQNX & operator=( VkScreenSurfaceCreateInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT + SurfacePresentModeCompatibilityEXT & operator=( SurfacePresentModeCompatibilityEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SurfacePresentModeCompatibilityEXT & operator=( VkSurfacePresentModeCompatibilityEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeCompatibilityEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setFlags( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeCompatibilityEXT & setPresentModeCount( uint32_t presentModeCount_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + presentModeCount = presentModeCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setContext( struct _screen_context * context_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeCompatibilityEXT & setPPresentModes( VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT { - context = context_; + pPresentModes = pPresentModes_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ScreenSurfaceCreateInfoQNX & setWindow( struct _screen_window * window_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SurfacePresentModeCompatibilityEXT & + setPresentModes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentModes_ ) VULKAN_HPP_NOEXCEPT { - window = window_; + presentModeCount = static_cast( presentModes_.size() ); + pPresentModes = 
presentModes_.data(); return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkScreenSurfaceCreateInfoQNX const &() const VULKAN_HPP_NOEXCEPT + operator VkSurfacePresentModeCompatibilityEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkScreenSurfaceCreateInfoQNX &() VULKAN_HPP_NOEXCEPT + operator VkSurfacePresentModeCompatibilityEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, context, window ); + return std::tie( sType, pNext, presentModeCount, pPresentModes ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ScreenSurfaceCreateInfoQNX const & ) const = default; -# else - bool operator==( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfacePresentModeCompatibilityEXT const & ) const = default; +#else + bool operator==( SurfacePresentModeCompatibilityEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( context == rhs.context ) && ( window == rhs.window ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentModeCount == rhs.presentModeCount ) && ( pPresentModes == rhs.pPresentModes ); +# endif } - bool operator!=( ScreenSurfaceCreateInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SurfacePresentModeCompatibilityEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eScreenSurfaceCreateInfoQNX; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX flags = {}; - struct _screen_context * context = {}; - struct _screen_window * window = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfacePresentModeCompatibilityEXT; + void * pNext = {}; + uint32_t presentModeCount = {}; + VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes = {}; }; template <> - struct CppType + struct CppType { - using Type = ScreenSurfaceCreateInfoQNX; + using Type = SurfacePresentModeCompatibilityEXT; }; -#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - struct SemaphoreCreateInfo + struct SurfacePresentModeEXT { - using NativeType = VkSemaphoreCreateInfo; + using NativeType = VkSurfacePresentModeEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfacePresentModeEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
VULKAN_HPP_CONSTEXPR SurfacePresentModeEXT( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate,
+                                                void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , presentMode{ presentMode_ }
    {
    }

-    VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR SurfacePresentModeEXT( SurfacePresentModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

-    SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreCreateInfo( *reinterpret_cast<SemaphoreCreateInfo const *>( &rhs ) )
+    SurfacePresentModeEXT( VkSurfacePresentModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : SurfacePresentModeEXT( *reinterpret_cast<SurfacePresentModeEXT const *>( &rhs ) )
    {
    }
-#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

-    SemaphoreCreateInfo & operator=( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    SurfacePresentModeEXT & operator=( SurfacePresentModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

-    SemaphoreCreateInfo & operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    SurfacePresentModeEXT & operator=( VkSurfacePresentModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfacePresentModeEXT const *>( &rhs );
      return *this;
    }

-#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
-    VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

-    VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 SurfacePresentModeEXT & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT
    {
-      flags = flags_;
+      presentMode = presentMode_;
      return *this;
    }
-#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+#endif /*VULKAN_HPP_NO_SETTERS*/

-    operator VkSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT
+    operator VkSurfacePresentModeEXT const &() const VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<const VkSemaphoreCreateInfo *>( this );
+      return *reinterpret_cast<const VkSurfacePresentModeEXT *>( this );
    }

-    operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
+    operator VkSurfacePresentModeEXT &() VULKAN_HPP_NOEXCEPT
    {
-      return *reinterpret_cast<VkSemaphoreCreateInfo *>( this );
+      return *reinterpret_cast<VkSurfacePresentModeEXT *>( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
# if 14 <= VULKAN_HPP_CPP_VERSION
    auto
# else
-    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags const &>
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::PresentModeKHR const &>
# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
-      return std::tie( sType, pNext, flags );
+      return std::tie( sType, pNext, presentMode );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
-    auto operator<=>( SemaphoreCreateInfo const & ) const = default;
+    auto operator<=>( SurfacePresentModeEXT const & ) const = default;
#else
-    bool operator==( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator==( SurfacePresentModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
-      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags );
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentMode == rhs.presentMode );
# endif
    }

-    bool operator!=( SemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+    bool operator!=( SurfacePresentModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return
!operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfacePresentModeEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate; }; template <> - struct CppType + struct CppType { - using Type = SemaphoreCreateInfo; + using Type = SurfacePresentModeEXT; }; - struct SemaphoreGetFdInfoKHR + struct SurfacePresentScalingCapabilitiesEXT { - using NativeType = VkSemaphoreGetFdInfoKHR; + using NativeType = VkSurfacePresentScalingCapabilitiesEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetFdInfoKHR; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( - VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , semaphore( semaphore_ ) - , handleType( handleType_ ) - { - } + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfacePresentScalingCapabilitiesEXT; - VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : SemaphoreGetFdInfoKHR( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfacePresentScalingCapabilitiesEXT( VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT supportedPresentScaling_ = {}, + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityX_ = {}, + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityY_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minScaledImageExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxScaledImageExtent_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , supportedPresentScaling{ supportedPresentScaling_ } + , supportedPresentGravityX{ supportedPresentGravityX_ } + , supportedPresentGravityY{ supportedPresentGravityY_ } + , minScaledImageExtent{ minScaledImageExtent_ } + , maxScaledImageExtent{ maxScaledImageExtent_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SemaphoreGetFdInfoKHR & operator=( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - SemaphoreGetFdInfoKHR & operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + VULKAN_HPP_CONSTEXPR SurfacePresentScalingCapabilitiesEXT( SurfacePresentScalingCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + SurfacePresentScalingCapabilitiesEXT( VkSurfacePresentScalingCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfacePresentScalingCapabilitiesEXT( *reinterpret_cast( &rhs ) ) { - pNext = pNext_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT - { - semaphore = semaphore_; - return *this; - } + 
SurfacePresentScalingCapabilitiesEXT & operator=( SurfacePresentScalingCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + SurfacePresentScalingCapabilitiesEXT & operator=( VkSurfacePresentScalingCapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - handleType = handleType_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkSemaphoreGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkSurfacePresentScalingCapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkSurfacePresentScalingCapabilitiesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -83914,454 +125300,458 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + void * const &, + VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT const &, + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT const &, + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT const &, + VULKAN_HPP_NAMESPACE::Extent2D const &, + VULKAN_HPP_NAMESPACE::Extent2D const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, semaphore, handleType ); + return std::tie( sType, pNext, supportedPresentScaling, supportedPresentGravityX, supportedPresentGravityY, minScaledImageExtent, maxScaledImageExtent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SemaphoreGetFdInfoKHR const & ) const = default; + auto operator<=>( SurfacePresentScalingCapabilitiesEXT const & ) const = default; #else - bool operator==( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SurfacePresentScalingCapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( handleType == rhs.handleType ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedPresentScaling == rhs.supportedPresentScaling ) && + ( supportedPresentGravityX == rhs.supportedPresentGravityX ) && ( supportedPresentGravityY == rhs.supportedPresentGravityY ) && + ( minScaledImageExtent == rhs.minScaledImageExtent ) && ( maxScaledImageExtent == rhs.maxScaledImageExtent ); # endif } - bool operator!=( SemaphoreGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SurfacePresentScalingCapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfacePresentScalingCapabilitiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT supportedPresentScaling = {}; + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityX = {}; + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityY = {}; + 
VULKAN_HPP_NAMESPACE::Extent2D minScaledImageExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxScaledImageExtent = {}; }; template <> - struct CppType + struct CppType { - using Type = SemaphoreGetFdInfoKHR; + using Type = SurfacePresentScalingCapabilitiesEXT; }; -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - struct SemaphoreGetWin32HandleInfoKHR + struct SurfaceProtectedCapabilitiesKHR { - using NativeType = VkSemaphoreGetWin32HandleInfoKHR; + using NativeType = VkSurfaceProtectedCapabilitiesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetWin32HandleInfoKHR; - -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( - VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , semaphore( semaphore_ ) - , handleType( handleType_ ) - { - } - - VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceProtectedCapabilitiesKHR; - SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : SemaphoreGetWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , supportsProtected{ supportsProtected_ } { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - SemaphoreGetWin32HandleInfoKHR & operator=( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SemaphoreGetWin32HandleInfoKHR & operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + SurfaceProtectedCapabilitiesKHR( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SurfaceProtectedCapabilitiesKHR( *reinterpret_cast( &rhs ) ) { - pNext = pNext_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT - { - semaphore = semaphore_; - return *this; - } + SurfaceProtectedCapabilitiesKHR & operator=( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & - setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + SurfaceProtectedCapabilitiesKHR & operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - handleType = handleType_; + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkSemaphoreGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator 
VkSurfaceProtectedCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkSurfaceProtectedCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, semaphore, handleType ); + return std::tie( sType, pNext, supportsProtected ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SemaphoreGetWin32HandleInfoKHR const & ) const = default; -# else - bool operator==( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SurfaceProtectedCapabilitiesKHR const & ) const = default; +#else + bool operator==( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( handleType == rhs.handleType ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportsProtected == rhs.supportsProtected ); +# endif } - bool operator!=( SemaphoreGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceProtectedCapabilitiesKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 supportsProtected = {}; }; template <> - struct CppType + struct CppType { - using Type = SemaphoreGetWin32HandleInfoKHR; + using Type = SurfaceProtectedCapabilitiesKHR; }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#if defined( VK_USE_PLATFORM_FUCHSIA ) - struct SemaphoreGetZirconHandleInfoFUCHSIA + struct SwapchainCounterCreateInfoEXT { - using NativeType = VkSemaphoreGetZirconHandleInfoFUCHSIA; + using NativeType = VkSwapchainCounterCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCounterCreateInfoEXT; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA( - VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , semaphore( semaphore_ ) - , handleType( handleType_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , surfaceCounters{ surfaceCounters_ } { } - VULKAN_HPP_CONSTEXPR SemaphoreGetZirconHandleInfoFUCHSIA( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SemaphoreGetZirconHandleInfoFUCHSIA( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT - : SemaphoreGetZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) + SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainCounterCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SemaphoreGetZirconHandleInfoFUCHSIA & operator=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SwapchainCounterCreateInfoEXT & operator=( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SemaphoreGetZirconHandleInfoFUCHSIA & operator=( VkSemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT + SwapchainCounterCreateInfoEXT & operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT - { - semaphore = semaphore_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SemaphoreGetZirconHandleInfoFUCHSIA & - setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT & + setSurfaceCounters( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT { - handleType = handleType_; + surfaceCounters = surfaceCounters_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSemaphoreGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT + operator VkSwapchainCounterCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSemaphoreGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT + operator VkSwapchainCounterCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, semaphore, handleType ); + return std::tie( sType, pNext, surfaceCounters ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( 
SemaphoreGetZirconHandleInfoFUCHSIA const & ) const = default; -# else - bool operator==( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SwapchainCounterCreateInfoEXT const & ) const = default; +#else + bool operator==( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( handleType == rhs.handleType ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCounters == rhs.surfaceCounters ); +# endif } - bool operator!=( SemaphoreGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetZirconHandleInfoFUCHSIA; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters = {}; }; template <> - struct CppType + struct CppType { - using Type = SemaphoreGetZirconHandleInfoFUCHSIA; + using Type = SwapchainCounterCreateInfoEXT; }; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - struct SemaphoreSignalInfo + struct SwapchainCreateInfoKHR { - using NativeType = VkSemaphoreSignalInfo; + using NativeType = VkSwapchainCreateInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSignalInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCreateInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR - SemaphoreSignalInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, uint64_t value_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , semaphore( semaphore_ ) - , value( value_ ) + SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, + uint32_t minImageCount_ = {}, + VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear, + VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}, + uint32_t imageArrayLayers_ = {}, + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t * pQueueFamilyIndices_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, + VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = 
VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, + VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, + VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , surface{ surface_ } + , minImageCount{ minImageCount_ } + , imageFormat{ imageFormat_ } + , imageColorSpace{ imageColorSpace_ } + , imageExtent{ imageExtent_ } + , imageArrayLayers{ imageArrayLayers_ } + , imageUsage{ imageUsage_ } + , imageSharingMode{ imageSharingMode_ } + , queueFamilyIndexCount{ queueFamilyIndexCount_ } + , pQueueFamilyIndices{ pQueueFamilyIndices_ } + , preTransform{ preTransform_ } + , compositeAlpha{ compositeAlpha_ } + , presentMode{ presentMode_ } + , clipped{ clipped_ } + , oldSwapchain{ oldSwapchain_ } { } - VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SemaphoreSignalInfo( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreSignalInfo( *reinterpret_cast( &rhs ) ) + SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::SurfaceKHR surface_, + uint32_t minImageCount_, + VULKAN_HPP_NAMESPACE::Format imageFormat_, + VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_, + VULKAN_HPP_NAMESPACE::Extent2D imageExtent_, + uint32_t imageArrayLayers_, + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_, + VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, + VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, + VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, + VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , surface( surface_ ) + , minImageCount( minImageCount_ ) + , imageFormat( imageFormat_ ) + , imageColorSpace( imageColorSpace_ ) + , imageExtent( imageExtent_ ) + , imageArrayLayers( imageArrayLayers_ ) + , imageUsage( imageUsage_ ) + , imageSharingMode( imageSharingMode_ ) + , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) + , pQueueFamilyIndices( queueFamilyIndices_.data() ) + , preTransform( preTransform_ ) + , compositeAlpha( compositeAlpha_ ) + , presentMode( presentMode_ ) + , clipped( clipped_ ) + , oldSwapchain( oldSwapchain_ ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - SemaphoreSignalInfo & operator=( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SwapchainCreateInfoKHR & operator=( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SemaphoreSignalInfo & operator=( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SwapchainCreateInfoKHR & operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = 
*reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { - semaphore = semaphore_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT { - value = value_; + surface = surface_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkSemaphoreSignalInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setMinImageCount( uint32_t minImageCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + minImageCount = minImageCount_; + return *this; } - operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + imageFormat = imageFormat_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, semaphore, value ); + imageColorSpace = imageColorSpace_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SemaphoreSignalInfo const & ) const = default; -#else - bool operator==( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( value == rhs.value ); -# endif + imageExtent = imageExtent_; + return *this; } - bool operator!=( SemaphoreSignalInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageArrayLayers( uint32_t imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + imageArrayLayers = imageArrayLayers_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSignalInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - uint64_t value = {}; - }; - - template <> - struct CppType - { - using Type = SemaphoreSignalInfo; - }; - using SemaphoreSignalInfoKHR = SemaphoreSignalInfo; - struct SemaphoreSubmitInfo - { - using NativeType = VkSemaphoreSubmitInfo; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSubmitInfo; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - 
VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, - uint64_t value_ = {}, - VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ = {}, - uint32_t deviceIndex_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , semaphore( semaphore_ ) - , value( value_ ) - , stageMask( stageMask_ ) - , deviceIndex( deviceIndex_ ) + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT { + imageUsage = imageUsage_; + return *this; } - VULKAN_HPP_CONSTEXPR SemaphoreSubmitInfo( SemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT + { + imageSharingMode = imageSharingMode_; + return *this; + } - SemaphoreSubmitInfo( VkSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreSubmitInfo( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SemaphoreSubmitInfo & operator=( SemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } - SemaphoreSubmitInfo & operator=( VkSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainCreateInfoKHR & + setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); + pQueueFamilyIndices = queueFamilyIndices_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + preTransform = preTransform_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT { - semaphore = semaphore_; + compositeAlpha = compositeAlpha_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT { - value = value_; + presentMode = presentMode_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT { - stageMask = stageMask_; + clipped = clipped_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SemaphoreSubmitInfo & setDeviceIndex( uint32_t deviceIndex_ ) VULKAN_HPP_NOEXCEPT + 
VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setOldSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT { - deviceIndex = deviceIndex_; + oldSwapchain = oldSwapchain_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT + operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -84370,1702 +125760,2098 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR const &, + VULKAN_HPP_NAMESPACE::SurfaceKHR const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::Format const &, + VULKAN_HPP_NAMESPACE::ColorSpaceKHR const &, + VULKAN_HPP_NAMESPACE::Extent2D const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ImageUsageFlags const &, + VULKAN_HPP_NAMESPACE::SharingMode const &, + uint32_t const &, + const uint32_t * const &, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR const &, + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR const &, + VULKAN_HPP_NAMESPACE::PresentModeKHR const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::SwapchainKHR const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, semaphore, value, stageMask, deviceIndex ); + return std::tie( sType, + pNext, + flags, + surface, + minImageCount, + imageFormat, + imageColorSpace, + imageExtent, + imageArrayLayers, + imageUsage, + imageSharingMode, + queueFamilyIndexCount, + pQueueFamilyIndices, + preTransform, + compositeAlpha, + presentMode, + clipped, + oldSwapchain ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SemaphoreSubmitInfo const & ) const = default; + auto operator<=>( SwapchainCreateInfoKHR const & ) const = default; #else - bool operator==( SemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( value == rhs.value ) && ( stageMask == rhs.stageMask ) && - ( deviceIndex == rhs.deviceIndex ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( surface == rhs.surface ) && + ( minImageCount == rhs.minImageCount ) && ( imageFormat == rhs.imageFormat ) && ( imageColorSpace == rhs.imageColorSpace ) && + ( imageExtent == rhs.imageExtent ) && ( imageArrayLayers == rhs.imageArrayLayers ) && ( imageUsage == rhs.imageUsage ) && + ( imageSharingMode == rhs.imageSharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && + ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) && ( preTransform == rhs.preTransform ) && ( compositeAlpha == rhs.compositeAlpha ) && + ( presentMode == rhs.presentMode ) && ( clipped == rhs.clipped ) && ( oldSwapchain == rhs.oldSwapchain ); # endif } - bool operator!=( SemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eSemaphoreSubmitInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; - uint64_t value = {}; - VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask = {}; - uint32_t deviceIndex = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; + uint32_t minImageCount = {}; + VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear; + VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; + uint32_t imageArrayLayers = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {}; + VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; + uint32_t queueFamilyIndexCount = {}; + const uint32_t * pQueueFamilyIndices = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque; + VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate; + VULKAN_HPP_NAMESPACE::Bool32 clipped = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {}; }; template <> - struct CppType + struct CppType { - using Type = SemaphoreSubmitInfo; + using Type = SwapchainCreateInfoKHR; }; - using SemaphoreSubmitInfoKHR = SemaphoreSubmitInfo; - struct SemaphoreTypeCreateInfo + struct SwapchainDisplayNativeHdrCreateInfoAMD { - using NativeType = VkSemaphoreTypeCreateInfo; + using NativeType = VkSwapchainDisplayNativeHdrCreateInfoAMD; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreTypeCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary, - uint64_t initialValue_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , semaphoreType( semaphoreType_ ) - , initialValue( initialValue_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , localDimmingEnable{ localDimmingEnable_ } { } - VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SemaphoreTypeCreateInfo( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : SemaphoreTypeCreateInfo( *reinterpret_cast( &rhs ) ) + SwapchainDisplayNativeHdrCreateInfoAMD( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainDisplayNativeHdrCreateInfoAMD( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SemaphoreTypeCreateInfo & operator=( SemaphoreTypeCreateInfo const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; + SwapchainDisplayNativeHdrCreateInfoAMD & operator=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SemaphoreTypeCreateInfo & operator=( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SwapchainDisplayNativeHdrCreateInfoAMD & operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setSemaphoreType( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ ) VULKAN_HPP_NOEXCEPT - { - semaphoreType = semaphoreType_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & setInitialValue( uint64_t initialValue_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD & + setLocalDimmingEnable( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ ) VULKAN_HPP_NOEXCEPT { - initialValue = initialValue_; + localDimmingEnable = localDimmingEnable_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSemaphoreTypeCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkSwapchainDisplayNativeHdrCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSemaphoreTypeCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkSwapchainDisplayNativeHdrCreateInfoAMD &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, semaphoreType, initialValue ); + return std::tie( sType, pNext, localDimmingEnable ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SemaphoreTypeCreateInfo const & ) const = default; + auto operator<=>( SwapchainDisplayNativeHdrCreateInfoAMD const & ) const = default; #else - bool operator==( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphoreType == rhs.semaphoreType ) && ( initialValue == rhs.initialValue ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( localDimmingEnable == rhs.localDimmingEnable ); # endif } - bool operator!=( SemaphoreTypeCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreTypeCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary; - uint64_t initialValue = {}; + VULKAN_HPP_NAMESPACE::StructureType 
sType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable = {}; }; template <> - struct CppType + struct CppType { - using Type = SemaphoreTypeCreateInfo; + using Type = SwapchainDisplayNativeHdrCreateInfoAMD; }; - using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo; - struct SemaphoreWaitInfo + struct SwapchainLatencyCreateInfoNV { - using NativeType = VkSemaphoreWaitInfo; + using NativeType = VkSwapchainLatencyCreateInfoNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreWaitInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainLatencyCreateInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ = {}, - uint32_t semaphoreCount_ = {}, - const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ = {}, - const uint64_t * pValues_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , semaphoreCount( semaphoreCount_ ) - , pSemaphores( pSemaphores_ ) - , pValues( pValues_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainLatencyCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 latencyModeEnable_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , latencyModeEnable{ latencyModeEnable_ } { } - VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - SemaphoreWaitInfo( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SemaphoreWaitInfo( *reinterpret_cast( &rhs ) ) {} + VULKAN_HPP_CONSTEXPR SwapchainLatencyCreateInfoNV( SwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & semaphores_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , semaphoreCount( static_cast( semaphores_.size() ) ) - , pSemaphores( semaphores_.data() ) - , pValues( values_.data() ) + SwapchainLatencyCreateInfoNV( VkSwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainLatencyCreateInfoNV( *reinterpret_cast( &rhs ) ) { -# ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( semaphores_.size() == values_.size() ); -# else - if ( semaphores_.size() != values_.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SemaphoreWaitInfo::SemaphoreWaitInfo: semaphores_.size() != values_.size()" ); - } -# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SemaphoreWaitInfo & operator=( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SwapchainLatencyCreateInfoNV & operator=( SwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SemaphoreWaitInfo & operator=( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SwapchainLatencyCreateInfoNV & operator=( VkSwapchainLatencyCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - 
VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainLatencyCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setSemaphoreCount( uint32_t semaphoreCount_ ) VULKAN_HPP_NOEXCEPT - { - semaphoreCount = semaphoreCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ ) VULKAN_HPP_NOEXCEPT - { - pSemaphores = pSemaphores_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SemaphoreWaitInfo & - setSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & semaphores_ ) VULKAN_HPP_NOEXCEPT - { - semaphoreCount = static_cast( semaphores_.size() ); - pSemaphores = semaphores_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & setPValues( const uint64_t * pValues_ ) VULKAN_HPP_NOEXCEPT - { - pValues = pValues_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SemaphoreWaitInfo & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SwapchainLatencyCreateInfoNV & setLatencyModeEnable( VULKAN_HPP_NAMESPACE::Bool32 latencyModeEnable_ ) VULKAN_HPP_NOEXCEPT { - semaphoreCount = static_cast( values_.size() ); - pValues = values_.data(); + latencyModeEnable = latencyModeEnable_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSemaphoreWaitInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkSwapchainLatencyCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT + operator VkSwapchainLatencyCreateInfoNV &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, semaphoreCount, pSemaphores, pValues ); + return std::tie( sType, pNext, latencyModeEnable ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SemaphoreWaitInfo const & ) const = default; + auto operator<=>( SwapchainLatencyCreateInfoNV const & ) const = default; #else - bool operator==( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SwapchainLatencyCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( semaphoreCount == rhs.semaphoreCount ) && - ( pSemaphores == rhs.pSemaphores ) && ( pValues == rhs.pValues ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( latencyModeEnable == rhs.latencyModeEnable ); # endif } - bool operator!=( SemaphoreWaitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SwapchainLatencyCreateInfoNV const & rhs ) 
const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreWaitInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags = {}; - uint32_t semaphoreCount = {}; - const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores = {}; - const uint64_t * pValues = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainLatencyCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 latencyModeEnable = {}; }; template <> - struct CppType + struct CppType { - using Type = SemaphoreWaitInfo; + using Type = SwapchainLatencyCreateInfoNV; }; - using SemaphoreWaitInfoKHR = SemaphoreWaitInfo; - struct SetStateFlagsIndirectCommandNV + struct SwapchainPresentBarrierCreateInfoNV { - using NativeType = VkSetStateFlagsIndirectCommandNV; + using NativeType = VkSwapchainPresentBarrierCreateInfoNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( uint32_t data_ = {} ) VULKAN_HPP_NOEXCEPT : data( data_ ) {} + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentBarrierCreateInfoNV; - VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainPresentBarrierCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierEnable_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentBarrierEnable{ presentBarrierEnable_ } + { + } - SetStateFlagsIndirectCommandNV( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT - : SetStateFlagsIndirectCommandNV( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR SwapchainPresentBarrierCreateInfoNV( SwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainPresentBarrierCreateInfoNV( VkSwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainPresentBarrierCreateInfoNV( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SetStateFlagsIndirectCommandNV & operator=( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SwapchainPresentBarrierCreateInfoNV & operator=( SwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SetStateFlagsIndirectCommandNV & operator=( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + SwapchainPresentBarrierCreateInfoNV & operator=( VkSwapchainPresentBarrierCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SetStateFlagsIndirectCommandNV & setData( uint32_t data_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentBarrierCreateInfoNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - data = data_; + pNext = pNext_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkSetStateFlagsIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentBarrierCreateInfoNV & + setPresentBarrierEnable( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierEnable_ ) 
VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + presentBarrierEnable = presentBarrierEnable_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSetStateFlagsIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + operator VkSwapchainPresentBarrierCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkSwapchainPresentBarrierCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( data ); + return std::tie( sType, pNext, presentBarrierEnable ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SetStateFlagsIndirectCommandNV const & ) const = default; + auto operator<=>( SwapchainPresentBarrierCreateInfoNV const & ) const = default; #else - bool operator==( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SwapchainPresentBarrierCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( data == rhs.data ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentBarrierEnable == rhs.presentBarrierEnable ); # endif } - bool operator!=( SetStateFlagsIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SwapchainPresentBarrierCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t data = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentBarrierCreateInfoNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 presentBarrierEnable = {}; }; - struct ShaderModuleCreateInfo + template <> + struct CppType { - using NativeType = VkShaderModuleCreateInfo; + using Type = SwapchainPresentBarrierCreateInfoNV; + }; + + struct SwapchainPresentFenceInfoEXT + { + using NativeType = VkSwapchainPresentFenceInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleCreateInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentFenceInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {}, - size_t codeSize_ = {}, - const uint32_t * pCode_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , codeSize( codeSize_ ) - , pCode( pCode_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainPresentFenceInfoEXT( uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::Fence * pFences_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , swapchainCount{ swapchainCount_ } + , pFences{ pFences_ } { } - VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SwapchainPresentFenceInfoEXT( SwapchainPresentFenceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : ShaderModuleCreateInfo( *reinterpret_cast( &rhs ) ) + SwapchainPresentFenceInfoEXT( VkSwapchainPresentFenceInfoEXT const & rhs ) 
VULKAN_HPP_NOEXCEPT + : SwapchainPresentFenceInfoEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & code_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), flags( flags_ ), codeSize( code_.size() * 4 ), pCode( code_.data() ) + SwapchainPresentFenceInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & fences_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), swapchainCount( static_cast( fences_.size() ) ), pFences( fences_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ShaderModuleCreateInfo & operator=( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SwapchainPresentFenceInfoEXT & operator=( SwapchainPresentFenceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ShaderModuleCreateInfo & operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + SwapchainPresentFenceInfoEXT & operator=( VkSwapchainPresentFenceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentFenceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentFenceInfoEXT & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT { - codeSize = codeSize_; + swapchainCount = swapchainCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & setPCode( const uint32_t * pCode_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentFenceInfoEXT & setPFences( const VULKAN_HPP_NAMESPACE::Fence * pFences_ ) VULKAN_HPP_NOEXCEPT { - pCode = pCode_; + pFences = pFences_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ShaderModuleCreateInfo & setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & code_ ) VULKAN_HPP_NOEXCEPT + SwapchainPresentFenceInfoEXT & + setFences( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & fences_ ) VULKAN_HPP_NOEXCEPT { - codeSize = code_.size() * 4; - pCode = code_.data(); + swapchainCount = static_cast( fences_.size() ); + pFences = fences_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkShaderModuleCreateInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkSwapchainPresentFenceInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT + operator VkSwapchainPresentFenceInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # 
endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, codeSize, pCode ); + return std::tie( sType, pNext, swapchainCount, pFences ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ShaderModuleCreateInfo const & ) const = default; + auto operator<=>( SwapchainPresentFenceInfoEXT const & ) const = default; #else - bool operator==( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SwapchainPresentFenceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( codeSize == rhs.codeSize ) && ( pCode == rhs.pCode ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pFences == rhs.pFences ); # endif } - bool operator!=( ShaderModuleCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SwapchainPresentFenceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags = {}; - size_t codeSize = {}; - const uint32_t * pCode = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentFenceInfoEXT; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::Fence * pFences = {}; }; template <> - struct CppType + struct CppType { - using Type = ShaderModuleCreateInfo; + using Type = SwapchainPresentFenceInfoEXT; }; - struct ShaderModuleIdentifierEXT + struct SwapchainPresentModeInfoEXT { - using NativeType = VkShaderModuleIdentifierEXT; + using NativeType = VkSwapchainPresentModeInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleIdentifierEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentModeInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 ShaderModuleIdentifierEXT( uint32_t identifierSize_ = {}, - std::array const & identifier_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , identifierSize( identifierSize_ ) - , identifier( identifier_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainPresentModeInfoEXT( uint32_t swapchainCount_ = {}, + const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , swapchainCount{ swapchainCount_ } + , pPresentModes{ pPresentModes_ } + { + } + + VULKAN_HPP_CONSTEXPR SwapchainPresentModeInfoEXT( SwapchainPresentModeInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SwapchainPresentModeInfoEXT( VkSwapchainPresentModeInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainPresentModeInfoEXT( *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR_14 ShaderModuleIdentifierEXT( ShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ShaderModuleIdentifierEXT( VkShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ShaderModuleIdentifierEXT( *reinterpret_cast( &rhs ) ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainPresentModeInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries 
const & presentModes_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), swapchainCount( static_cast( presentModes_.size() ) ), pPresentModes( presentModes_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + SwapchainPresentModeInfoEXT & operator=( SwapchainPresentModeInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SwapchainPresentModeInfoEXT & operator=( VkSwapchainPresentModeInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModeInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModeInfoEXT & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT { + swapchainCount = swapchainCount_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ShaderModuleIdentifierEXT & operator=( ShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModeInfoEXT & setPPresentModes( const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT + { + pPresentModes = pPresentModes_; + return *this; + } - ShaderModuleIdentifierEXT & operator=( VkShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainPresentModeInfoEXT & + setPresentModes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentModes_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + swapchainCount = static_cast( presentModes_.size() ); + pPresentModes = presentModes_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkShaderModuleIdentifierEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkSwapchainPresentModeInfoEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkShaderModuleIdentifierEXT &() VULKAN_HPP_NOEXCEPT + operator VkSwapchainPresentModeInfoEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple const &> + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, identifierSize, identifier ); + return std::tie( sType, pNext, swapchainCount, pPresentModes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ShaderModuleIdentifierEXT const & ) const = default; + auto operator<=>( SwapchainPresentModeInfoEXT const & ) const = default; #else - bool operator==( ShaderModuleIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SwapchainPresentModeInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( identifierSize == rhs.identifierSize ) && ( identifier == rhs.identifier ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pPresentModes == rhs.pPresentModes ); # endif } - bool operator!=( ShaderModuleIdentifierEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SwapchainPresentModeInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( 
rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleIdentifierEXT; - void * pNext = {}; - uint32_t identifierSize = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D identifier = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentModeInfoEXT; + const void * pNext = {}; + uint32_t swapchainCount = {}; + const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes = {}; }; template <> - struct CppType + struct CppType { - using Type = ShaderModuleIdentifierEXT; + using Type = SwapchainPresentModeInfoEXT; }; - struct ShaderModuleValidationCacheCreateInfoEXT + struct SwapchainPresentModesCreateInfoEXT { - using NativeType = VkShaderModuleValidationCacheCreateInfoEXT; + using NativeType = VkSwapchainPresentModesCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleValidationCacheCreateInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentModesCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , validationCache( validationCache_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainPresentModesCreateInfoEXT( uint32_t presentModeCount_ = {}, + const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentModeCount{ presentModeCount_ } + , pPresentModes{ pPresentModes_ } { } - VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SwapchainPresentModesCreateInfoEXT( SwapchainPresentModesCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ShaderModuleValidationCacheCreateInfoEXT( *reinterpret_cast( &rhs ) ) + SwapchainPresentModesCreateInfoEXT( VkSwapchainPresentModesCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainPresentModesCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ShaderModuleValidationCacheCreateInfoEXT & operator=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainPresentModesCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentModes_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), presentModeCount( static_cast( presentModes_.size() ) ), pPresentModes( presentModes_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - ShaderModuleValidationCacheCreateInfoEXT & operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SwapchainPresentModesCreateInfoEXT & operator=( SwapchainPresentModesCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + SwapchainPresentModesCreateInfoEXT & operator=( VkSwapchainPresentModesCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModesCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT & - setValidationCache( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModesCreateInfoEXT & setPresentModeCount( uint32_t presentModeCount_ ) VULKAN_HPP_NOEXCEPT { - validationCache = validationCache_; + presentModeCount = presentModeCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkShaderModuleValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentModesCreateInfoEXT & + setPPresentModes( const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pPresentModes = pPresentModes_; + return *this; } - operator VkShaderModuleValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + SwapchainPresentModesCreateInfoEXT & + setPresentModes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & presentModes_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + presentModeCount = static_cast( presentModes_.size() ); + pPresentModes = presentModes_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSwapchainPresentModesCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainPresentModesCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, validationCache ); + return std::tie( sType, pNext, presentModeCount, pPresentModes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ShaderModuleValidationCacheCreateInfoEXT const & ) const = default; + auto operator<=>( SwapchainPresentModesCreateInfoEXT const & ) const = default; #else - bool operator==( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SwapchainPresentModesCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( validationCache == rhs.validationCache ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentModeCount == rhs.presentModeCount ) && ( pPresentModes == rhs.pPresentModes ); # endif } - bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SwapchainPresentModesCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentModesCreateInfoEXT; + const void * pNext = {}; + 
uint32_t presentModeCount = {}; + const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes = {}; }; template <> - struct CppType + struct CppType { - using Type = ShaderModuleValidationCacheCreateInfoEXT; + using Type = SwapchainPresentModesCreateInfoEXT; }; - struct ShaderResourceUsageAMD + struct SwapchainPresentScalingCreateInfoEXT { - using NativeType = VkShaderResourceUsageAMD; + using NativeType = VkSwapchainPresentScalingCreateInfoEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( uint32_t numUsedVgprs_ = {}, - uint32_t numUsedSgprs_ = {}, - uint32_t ldsSizePerLocalWorkGroup_ = {}, - size_t ldsUsageSizeInBytes_ = {}, - size_t scratchMemUsageInBytes_ = {} ) VULKAN_HPP_NOEXCEPT - : numUsedVgprs( numUsedVgprs_ ) - , numUsedSgprs( numUsedSgprs_ ) - , ldsSizePerLocalWorkGroup( ldsSizePerLocalWorkGroup_ ) - , ldsUsageSizeInBytes( ldsUsageSizeInBytes_ ) - , scratchMemUsageInBytes( scratchMemUsageInBytes_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainPresentScalingCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SwapchainPresentScalingCreateInfoEXT( VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT scalingBehavior_ = {}, + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityX_ = {}, + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityY_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , scalingBehavior{ scalingBehavior_ } + , presentGravityX{ presentGravityX_ } + , presentGravityY{ presentGravityY_ } { } - VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SwapchainPresentScalingCreateInfoEXT( SwapchainPresentScalingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ShaderResourceUsageAMD( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT - : ShaderResourceUsageAMD( *reinterpret_cast( &rhs ) ) + SwapchainPresentScalingCreateInfoEXT( VkSwapchainPresentScalingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SwapchainPresentScalingCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ShaderResourceUsageAMD & operator=( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SwapchainPresentScalingCreateInfoEXT & operator=( SwapchainPresentScalingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ShaderResourceUsageAMD & operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT + SwapchainPresentScalingCreateInfoEXT & operator=( VkSwapchainPresentScalingCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkShaderResourceUsageAMD const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkShaderResourceUsageAMD &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoEXT & + setScalingBehavior( VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT scalingBehavior_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + 
scalingBehavior = scalingBehavior_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoEXT & + setPresentGravityX( VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityX_ ) VULKAN_HPP_NOEXCEPT + { + presentGravityX = presentGravityX_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 SwapchainPresentScalingCreateInfoEXT & + setPresentGravityY( VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityY_ ) VULKAN_HPP_NOEXCEPT + { + presentGravityY = presentGravityY_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkSwapchainPresentScalingCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSwapchainPresentScalingCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( numUsedVgprs, numUsedSgprs, ldsSizePerLocalWorkGroup, ldsUsageSizeInBytes, scratchMemUsageInBytes ); + return std::tie( sType, pNext, scalingBehavior, presentGravityX, presentGravityY ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ShaderResourceUsageAMD const & ) const = default; + auto operator<=>( SwapchainPresentScalingCreateInfoEXT const & ) const = default; #else - bool operator==( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SwapchainPresentScalingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( numUsedVgprs == rhs.numUsedVgprs ) && ( numUsedSgprs == rhs.numUsedSgprs ) && ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup ) && - ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes ) && ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( scalingBehavior == rhs.scalingBehavior ) && ( presentGravityX == rhs.presentGravityX ) && + ( presentGravityY == rhs.presentGravityY ); # endif } - bool operator!=( ShaderResourceUsageAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SwapchainPresentScalingCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t numUsedVgprs = {}; - uint32_t numUsedSgprs = {}; - uint32_t ldsSizePerLocalWorkGroup = {}; - size_t ldsUsageSizeInBytes = {}; - size_t scratchMemUsageInBytes = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainPresentScalingCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT scalingBehavior = {}; + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityX = {}; + VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityY = {}; }; - struct ShaderStatisticsInfoAMD + template <> + struct CppType { - using NativeType = VkShaderStatisticsInfoAMD; + using Type = SwapchainPresentScalingCreateInfoEXT; + }; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {}, - VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {}, - uint32_t numPhysicalVgprs_ = {}, - uint32_t numPhysicalSgprs_ = {}, - uint32_t numAvailableVgprs_ = {}, - uint32_t numAvailableSgprs_ = {}, - std::array const & computeWorkGroupSize_ = {} ) VULKAN_HPP_NOEXCEPT - : shaderStageMask( shaderStageMask_ ) - , resourceUsage( 
resourceUsage_ ) - , numPhysicalVgprs( numPhysicalVgprs_ ) - , numPhysicalSgprs( numPhysicalSgprs_ ) - , numAvailableVgprs( numAvailableVgprs_ ) - , numAvailableSgprs( numAvailableSgprs_ ) - , computeWorkGroupSize( computeWorkGroupSize_ ) + struct TextureLODGatherFormatPropertiesAMD + { + using NativeType = VkTextureLODGatherFormatPropertiesAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTextureLodGatherFormatPropertiesAMD; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , supportsTextureGatherLODBiasAMD{ supportsTextureGatherLODBiasAMD_ } { } - VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ShaderStatisticsInfoAMD( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT - : ShaderStatisticsInfoAMD( *reinterpret_cast( &rhs ) ) + TextureLODGatherFormatPropertiesAMD( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : TextureLODGatherFormatPropertiesAMD( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ShaderStatisticsInfoAMD & operator=( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + TextureLODGatherFormatPropertiesAMD & operator=( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ShaderStatisticsInfoAMD & operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + TextureLODGatherFormatPropertiesAMD & operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkShaderStatisticsInfoAMD const &() const VULKAN_HPP_NOEXCEPT + operator VkTextureLODGatherFormatPropertiesAMD const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkShaderStatisticsInfoAMD &() VULKAN_HPP_NOEXCEPT + operator VkTextureLODGatherFormatPropertiesAMD &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple const &> + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( shaderStageMask, resourceUsage, numPhysicalVgprs, numPhysicalSgprs, numAvailableVgprs, numAvailableSgprs, computeWorkGroupSize ); + return std::tie( sType, pNext, supportsTextureGatherLODBiasAMD ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ShaderStatisticsInfoAMD const & ) const = default; + auto operator<=>( TextureLODGatherFormatPropertiesAMD const & ) const = default; #else - bool operator==( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( shaderStageMask == rhs.shaderStageMask ) && ( resourceUsage == rhs.resourceUsage ) && ( numPhysicalVgprs == 
rhs.numPhysicalVgprs ) && - ( numPhysicalSgprs == rhs.numPhysicalSgprs ) && ( numAvailableVgprs == rhs.numAvailableVgprs ) && ( numAvailableSgprs == rhs.numAvailableSgprs ) && - ( computeWorkGroupSize == rhs.computeWorkGroupSize ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD ); # endif } - bool operator!=( ShaderStatisticsInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask = {}; - VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage = {}; - uint32_t numPhysicalVgprs = {}; - uint32_t numPhysicalSgprs = {}; - uint32_t numAvailableVgprs = {}; - uint32_t numAvailableSgprs = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D computeWorkGroupSize = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD = {}; }; - struct SharedPresentSurfaceCapabilitiesKHR + template <> + struct CppType { - using NativeType = VkSharedPresentSurfaceCapabilitiesKHR; + using Type = TextureLODGatherFormatPropertiesAMD; + }; + + struct TilePropertiesQCOM + { + using NativeType = VkTilePropertiesQCOM; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSharedPresentSurfaceCapabilitiesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTilePropertiesQCOM; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , sharedPresentSupportedUsageFlags( sharedPresentSupportedUsageFlags_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Extent3D tileSize_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D apronSize_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D origin_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , tileSize{ tileSize_ } + , apronSize{ apronSize_ } + , origin{ origin_ } { } - VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR TilePropertiesQCOM( TilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SharedPresentSurfaceCapabilitiesKHR( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : SharedPresentSurfaceCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + TilePropertiesQCOM( VkTilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : TilePropertiesQCOM( *reinterpret_cast( &rhs ) ) {} + + TilePropertiesQCOM & operator=( TilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + TilePropertiesQCOM & operator=( VkTilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SharedPresentSurfaceCapabilitiesKHR & operator=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + 
VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } - SharedPresentSurfaceCapabilitiesKHR & operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setTileSize( VULKAN_HPP_NAMESPACE::Extent3D const & tileSize_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + tileSize = tileSize_; return *this; } - operator VkSharedPresentSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setApronSize( VULKAN_HPP_NAMESPACE::Extent2D const & apronSize_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + apronSize = apronSize_; + return *this; } - operator VkSharedPresentSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setOrigin( VULKAN_HPP_NAMESPACE::Offset2D const & origin_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + origin = origin_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkTilePropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTilePropertiesQCOM &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, sharedPresentSupportedUsageFlags ); + return std::tie( sType, pNext, tileSize, apronSize, origin ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SharedPresentSurfaceCapabilitiesKHR const & ) const = default; + auto operator<=>( TilePropertiesQCOM const & ) const = default; #else - bool operator==( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( TilePropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tileSize == rhs.tileSize ) && ( apronSize == rhs.apronSize ) && ( origin == rhs.origin ); # endif } - bool operator!=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( TilePropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTilePropertiesQCOM; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent3D tileSize = {}; + VULKAN_HPP_NAMESPACE::Extent2D apronSize = {}; + VULKAN_HPP_NAMESPACE::Offset2D origin = {}; }; template <> - struct CppType + struct CppType { - using Type = SharedPresentSurfaceCapabilitiesKHR; + using Type = TilePropertiesQCOM; }; - struct SparseImageFormatProperties + struct TimelineSemaphoreSubmitInfo { - using NativeType = VkSparseImageFormatProperties; + using NativeType = VkTimelineSemaphoreSubmitInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SparseImageFormatProperties( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ 
= {}, - VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT - : aspectMask( aspectMask_ ) - , imageGranularity( imageGranularity_ ) - , flags( flags_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTimelineSemaphoreSubmitInfo; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( uint32_t waitSemaphoreValueCount_ = {}, + const uint64_t * pWaitSemaphoreValues_ = {}, + uint32_t signalSemaphoreValueCount_ = {}, + const uint64_t * pSignalSemaphoreValues_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , waitSemaphoreValueCount{ waitSemaphoreValueCount_ } + , pWaitSemaphoreValues{ pWaitSemaphoreValues_ } + , signalSemaphoreValueCount{ signalSemaphoreValueCount_ } + , pSignalSemaphoreValues{ pSignalSemaphoreValues_ } { } - VULKAN_HPP_CONSTEXPR SparseImageFormatProperties( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SparseImageFormatProperties( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : SparseImageFormatProperties( *reinterpret_cast( &rhs ) ) + TimelineSemaphoreSubmitInfo( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : TimelineSemaphoreSubmitInfo( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SparseImageFormatProperties & operator=( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + TimelineSemaphoreSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , waitSemaphoreValueCount( static_cast( waitSemaphoreValues_.size() ) ) + , pWaitSemaphoreValues( waitSemaphoreValues_.data() ) + , signalSemaphoreValueCount( static_cast( signalSemaphoreValues_.size() ) ) + , pSignalSemaphoreValues( signalSemaphoreValues_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - SparseImageFormatProperties & operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + TimelineSemaphoreSubmitInfo & operator=( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + TimelineSemaphoreSubmitInfo & operator=( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkSparseImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setWaitSemaphoreValueCount( uint32_t waitSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + waitSemaphoreValueCount = waitSemaphoreValueCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) 
VULKAN_HPP_NOEXCEPT + { + pWaitSemaphoreValues = pWaitSemaphoreValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + TimelineSemaphoreSubmitInfo & + setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreValueCount = static_cast( waitSemaphoreValues_.size() ); + pWaitSemaphoreValues = waitSemaphoreValues_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreValueCount = signalSemaphoreValueCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphoreValues = pSignalSemaphoreValues_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + TimelineSemaphoreSubmitInfo & + setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreValueCount = static_cast( signalSemaphoreValues_.size() ); + pSignalSemaphoreValues = signalSemaphoreValues_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkTimelineSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkTimelineSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( aspectMask, imageGranularity, flags ); + return std::tie( sType, pNext, waitSemaphoreValueCount, pWaitSemaphoreValues, signalSemaphoreValueCount, pSignalSemaphoreValues ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SparseImageFormatProperties const & ) const = default; + auto operator<=>( TimelineSemaphoreSubmitInfo const & ) const = default; #else - bool operator==( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( aspectMask == rhs.aspectMask ) && ( imageGranularity == rhs.imageGranularity ) && ( flags == rhs.flags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreValueCount == rhs.waitSemaphoreValueCount ) && + ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) && ( signalSemaphoreValueCount == rhs.signalSemaphoreValueCount ) && + ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues ); # endif } - bool operator!=( SparseImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; - VULKAN_HPP_NAMESPACE::Extent3D imageGranularity = {}; - VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTimelineSemaphoreSubmitInfo; + const void * pNext = {}; + uint32_t waitSemaphoreValueCount = {}; + const uint64_t * pWaitSemaphoreValues = {}; + uint32_t signalSemaphoreValueCount = {}; + const uint64_t * 
pSignalSemaphoreValues = {}; }; - struct SparseImageFormatProperties2 + template <> + struct CppType { - using NativeType = VkSparseImageFormatProperties2; + using Type = TimelineSemaphoreSubmitInfo; + }; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageFormatProperties2; + using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , properties( properties_ ) + struct TraceRaysIndirectCommand2KHR + { + using NativeType = VkTraceRaysIndirectCommand2KHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommand2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress raygenShaderRecordAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderRecordSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress missShaderBindingTableAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableStride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress hitShaderBindingTableAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableStride_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress callableShaderBindingTableAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableStride_ = {}, + uint32_t width_ = {}, + uint32_t height_ = {}, + uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT + : raygenShaderRecordAddress{ raygenShaderRecordAddress_ } + , raygenShaderRecordSize{ raygenShaderRecordSize_ } + , missShaderBindingTableAddress{ missShaderBindingTableAddress_ } + , missShaderBindingTableSize{ missShaderBindingTableSize_ } + , missShaderBindingTableStride{ missShaderBindingTableStride_ } + , hitShaderBindingTableAddress{ hitShaderBindingTableAddress_ } + , hitShaderBindingTableSize{ hitShaderBindingTableSize_ } + , hitShaderBindingTableStride{ hitShaderBindingTableStride_ } + , callableShaderBindingTableAddress{ callableShaderBindingTableAddress_ } + , callableShaderBindingTableSize{ callableShaderBindingTableSize_ } + , callableShaderBindingTableStride{ callableShaderBindingTableStride_ } + , width{ width_ } + , height{ height_ } + , depth{ depth_ } { } - VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommand2KHR( TraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - : SparseImageFormatProperties2( *reinterpret_cast( &rhs ) ) + TraceRaysIndirectCommand2KHR( VkTraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : TraceRaysIndirectCommand2KHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SparseImageFormatProperties2 & operator=( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + TraceRaysIndirectCommand2KHR & operator=( TraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SparseImageFormatProperties2 & 
operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + TraceRaysIndirectCommand2KHR & operator=( VkTraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkSparseImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setRaygenShaderRecordAddress( VULKAN_HPP_NAMESPACE::DeviceAddress raygenShaderRecordAddress_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + raygenShaderRecordAddress = raygenShaderRecordAddress_; + return *this; } - operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setRaygenShaderRecordSize( VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderRecordSize_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + raygenShaderRecordSize = raygenShaderRecordSize_; + return *this; } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setMissShaderBindingTableAddress( VULKAN_HPP_NAMESPACE::DeviceAddress missShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, properties ); + missShaderBindingTableAddress = missShaderBindingTableAddress_; + return *this; } -#endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SparseImageFormatProperties2 const & ) const = default; -#else - bool operator==( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setMissShaderBindingTableSize( VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties ); -# endif + missShaderBindingTableSize = missShaderBindingTableSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setMissShaderBindingTableStride( VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT + { + missShaderBindingTableStride = missShaderBindingTableStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setHitShaderBindingTableAddress( VULKAN_HPP_NAMESPACE::DeviceAddress hitShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT + { + hitShaderBindingTableAddress = hitShaderBindingTableAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setHitShaderBindingTableSize( VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT + { + hitShaderBindingTableSize = hitShaderBindingTableSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setHitShaderBindingTableStride( VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT + { + hitShaderBindingTableStride = hitShaderBindingTableStride_; + return *this; } - bool operator!=( SparseImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setCallableShaderBindingTableAddress( VULKAN_HPP_NAMESPACE::DeviceAddress callableShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT { - return 
!operator==( rhs ); + callableShaderBindingTableAddress = callableShaderBindingTableAddress_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageFormatProperties2; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties = {}; - }; - - template <> - struct CppType - { - using Type = SparseImageFormatProperties2; - }; - using SparseImageFormatProperties2KHR = SparseImageFormatProperties2; - - struct SparseImageMemoryRequirements - { - using NativeType = VkSparseImageMemoryRequirements; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {}, - uint32_t imageMipTailFirstLod_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {} ) VULKAN_HPP_NOEXCEPT - : formatProperties( formatProperties_ ) - , imageMipTailFirstLod( imageMipTailFirstLod_ ) - , imageMipTailSize( imageMipTailSize_ ) - , imageMipTailOffset( imageMipTailOffset_ ) - , imageMipTailStride( imageMipTailStride_ ) + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setCallableShaderBindingTableSize( VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT { + callableShaderBindingTableSize = callableShaderBindingTableSize_; + return *this; } - VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & + setCallableShaderBindingTableStride( VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT + { + callableShaderBindingTableStride = callableShaderBindingTableStride_; + return *this; + } - SparseImageMemoryRequirements( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT - : SparseImageMemoryRequirements( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT { + width = width_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SparseImageMemoryRequirements & operator=( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } - SparseImageMemoryRequirements & operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + depth = depth_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSparseImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT + operator VkTraceRaysIndirectCommand2KHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT + operator VkTraceRaysIndirectCommand2KHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + VULKAN_HPP_NAMESPACE::DeviceAddress const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + 
VULKAN_HPP_NAMESPACE::DeviceAddress const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + VULKAN_HPP_NAMESPACE::DeviceSize const &, + uint32_t const &, + uint32_t const &, + uint32_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( formatProperties, imageMipTailFirstLod, imageMipTailSize, imageMipTailOffset, imageMipTailStride ); + return std::tie( raygenShaderRecordAddress, + raygenShaderRecordSize, + missShaderBindingTableAddress, + missShaderBindingTableSize, + missShaderBindingTableStride, + hitShaderBindingTableAddress, + hitShaderBindingTableSize, + hitShaderBindingTableStride, + callableShaderBindingTableAddress, + callableShaderBindingTableSize, + callableShaderBindingTableStride, + width, + height, + depth ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SparseImageMemoryRequirements const & ) const = default; + auto operator<=>( TraceRaysIndirectCommand2KHR const & ) const = default; #else - bool operator==( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( TraceRaysIndirectCommand2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( formatProperties == rhs.formatProperties ) && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod ) && - ( imageMipTailSize == rhs.imageMipTailSize ) && ( imageMipTailOffset == rhs.imageMipTailOffset ) && - ( imageMipTailStride == rhs.imageMipTailStride ); + return ( raygenShaderRecordAddress == rhs.raygenShaderRecordAddress ) && ( raygenShaderRecordSize == rhs.raygenShaderRecordSize ) && + ( missShaderBindingTableAddress == rhs.missShaderBindingTableAddress ) && ( missShaderBindingTableSize == rhs.missShaderBindingTableSize ) && + ( missShaderBindingTableStride == rhs.missShaderBindingTableStride ) && ( hitShaderBindingTableAddress == rhs.hitShaderBindingTableAddress ) && + ( hitShaderBindingTableSize == rhs.hitShaderBindingTableSize ) && ( hitShaderBindingTableStride == rhs.hitShaderBindingTableStride ) && + ( callableShaderBindingTableAddress == rhs.callableShaderBindingTableAddress ) && + ( callableShaderBindingTableSize == rhs.callableShaderBindingTableSize ) && + ( callableShaderBindingTableStride == rhs.callableShaderBindingTableStride ) && ( width == rhs.width ) && ( height == rhs.height ) && + ( depth == rhs.depth ); # endif } - bool operator!=( SparseImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( TraceRaysIndirectCommand2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties = {}; - uint32_t imageMipTailFirstLod = {}; - VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize = {}; - VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress raygenShaderRecordAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderRecordSize = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress missShaderBindingTableAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableStride = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress hitShaderBindingTableAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableStride = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress callableShaderBindingTableAddress = {}; + 
VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableStride = {}; + uint32_t width = {}; + uint32_t height = {}; + uint32_t depth = {}; }; - struct SparseImageMemoryRequirements2 + struct TraceRaysIndirectCommandKHR { - using NativeType = VkSparseImageMemoryRequirements2; + using NativeType = VkTraceRaysIndirectCommandKHR; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageMemoryRequirements2; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT + : width{ width_ } + , height{ height_ } + , depth{ depth_ } + { + } -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memoryRequirements( memoryRequirements_ ) + VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + TraceRaysIndirectCommandKHR( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : TraceRaysIndirectCommandKHR( *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + explicit TraceRaysIndirectCommandKHR( Extent2D const & extent2D, uint32_t depth_ = {} ) + : width( extent2D.width ), height( extent2D.height ), depth( depth_ ) + { + } - SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT - : SparseImageMemoryRequirements2( *reinterpret_cast( &rhs ) ) + TraceRaysIndirectCommandKHR & operator=( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + TraceRaysIndirectCommandKHR & operator=( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SparseImageMemoryRequirements2 & operator=( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } - SparseImageMemoryRequirements2 & operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + height = height_; return *this; } - operator VkSparseImageMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + depth = depth_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT + operator VkTraceRaysIndirectCommandKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkTraceRaysIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } 
#if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, memoryRequirements ); + return std::tie( width, height, depth ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SparseImageMemoryRequirements2 const & ) const = default; + auto operator<=>( TraceRaysIndirectCommandKHR const & ) const = default; #else - bool operator==( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements ); + return ( width == rhs.width ) && ( height == rhs.height ) && ( depth == rhs.depth ); # endif } - bool operator!=( SparseImageMemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements = {}; - }; - - template <> - struct CppType - { - using Type = SparseImageMemoryRequirements2; + uint32_t width = {}; + uint32_t height = {}; + uint32_t depth = {}; }; - using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2; -#if defined( VK_USE_PLATFORM_GGP ) - struct StreamDescriptorSurfaceCreateInfoGGP + struct ValidationCacheCreateInfoEXT { - using NativeType = VkStreamDescriptorSurfaceCreateInfoGGP; + using NativeType = VkValidationCacheCreateInfoEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationCacheCreateInfoEXT; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {}, - GgpStreamDescriptor streamDescriptor_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , streamDescriptor( streamDescriptor_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {}, + size_t initialDataSize_ = {}, + const void * pInitialData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , initialDataSize{ initialDataSize_ } + , pInitialData{ pInitialData_ } { } - VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT - : StreamDescriptorSurfaceCreateInfoGGP( *reinterpret_cast( &rhs ) ) + ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ValidationCacheCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } -# 
endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - StreamDescriptorSurfaceCreateInfoGGP & operator=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), flags( flags_ ), initialDataSize( initialData_.size() * sizeof( T ) ), pInitialData( initialData_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - StreamDescriptorSurfaceCreateInfoGGP & operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT + ValidationCacheCreateInfoEXT & operator=( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ValidationCacheCreateInfoEXT & operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & - setFlags( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & setStreamDescriptor( GgpStreamDescriptor streamDescriptor_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT { - streamDescriptor = streamDescriptor_; + initialDataSize = initialDataSize_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkStreamDescriptorSurfaceCreateInfoGGP const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pInitialData = pInitialData_; + return *this; } - operator VkStreamDescriptorSurfaceCreateInfoGGP &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + ValidationCacheCreateInfoEXT & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + initialDataSize = initialData_.size() * sizeof( T ); + pInitialData = initialData_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT const &, + size_t const &, 
+ const void * const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, streamDescriptor ); + return std::tie( sType, pNext, flags, initialDataSize, pInitialData ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - std::strong_ordering operator<=>( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ValidationCacheCreateInfoEXT const & ) const = default; +#else + bool operator==( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) - return cmp; - if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) - return cmp; - if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) - return cmp; - if ( auto cmp = memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ); cmp != 0 ) - return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; - - return std::strong_ordering::equivalent; - } +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( initialDataSize == rhs.initialDataSize ) && + ( pInitialData == rhs.pInitialData ); # endif - - bool operator==( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && - ( memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ) == 0 ); } - bool operator!=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags = {}; - GgpStreamDescriptor streamDescriptor = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationCacheCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags = {}; + size_t initialDataSize = {}; + const void * pInitialData = {}; }; template <> - struct CppType + struct CppType { - using Type = StreamDescriptorSurfaceCreateInfoGGP; + using Type = ValidationCacheCreateInfoEXT; }; -#endif /*VK_USE_PLATFORM_GGP*/ - struct StridedDeviceAddressRegionKHR + struct ValidationFeaturesEXT { - using NativeType = VkStridedDeviceAddressRegionKHR; + using NativeType = VkValidationFeaturesEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT - : deviceAddress( deviceAddress_ ) - , stride( stride_ ) - , size( size_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( uint32_t enabledValidationFeatureCount_ = {}, + const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ = {}, + uint32_t disabledValidationFeatureCount_ = {}, + const 
VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , enabledValidationFeatureCount{ enabledValidationFeatureCount_ } + , pEnabledValidationFeatures{ pEnabledValidationFeatures_ } + , disabledValidationFeatureCount{ disabledValidationFeatureCount_ } + , pDisabledValidationFeatures{ pDisabledValidationFeatures_ } { } - VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - StridedDeviceAddressRegionKHR( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : StridedDeviceAddressRegionKHR( *reinterpret_cast( &rhs ) ) + ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ValidationFeaturesEXT( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - StridedDeviceAddressRegionKHR & operator=( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFeaturesEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & enabledValidationFeatures_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & disabledValidationFeatures_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , enabledValidationFeatureCount( static_cast( enabledValidationFeatures_.size() ) ) + , pEnabledValidationFeatures( enabledValidationFeatures_.data() ) + , disabledValidationFeatureCount( static_cast( disabledValidationFeatures_.size() ) ) + , pDisabledValidationFeatures( disabledValidationFeatures_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - StridedDeviceAddressRegionKHR & operator=( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + ValidationFeaturesEXT & operator=( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + ValidationFeaturesEXT & operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - deviceAddress = deviceAddress_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setEnabledValidationFeatureCount( uint32_t enabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT { - stride = stride_; + enabledValidationFeatureCount = enabledValidationFeatureCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & + setPEnabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT { - size = size_; + pEnabledValidationFeatures = pEnabledValidationFeatures_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator 
VkStridedDeviceAddressRegionKHR const &() const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFeaturesEXT & setEnabledValidationFeatures( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & enabledValidationFeatures_ ) + VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + enabledValidationFeatureCount = static_cast( enabledValidationFeatures_.size() ); + pEnabledValidationFeatures = enabledValidationFeatures_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - operator VkStridedDeviceAddressRegionKHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + disabledValidationFeatureCount = disabledValidationFeatureCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & + setPDisabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT + { + pDisabledValidationFeatures = pDisabledValidationFeatures_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFeaturesEXT & setDisabledValidationFeatures( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & disabledValidationFeatures_ ) + VULKAN_HPP_NOEXCEPT + { + disabledValidationFeatureCount = static_cast( disabledValidationFeatures_.size() ); + pDisabledValidationFeatures = disabledValidationFeatures_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkValidationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkValidationFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( deviceAddress, stride, size ); + return std::tie( sType, pNext, enabledValidationFeatureCount, pEnabledValidationFeatures, disabledValidationFeatureCount, pDisabledValidationFeatures ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( StridedDeviceAddressRegionKHR const & ) const = default; + auto operator<=>( ValidationFeaturesEXT const & ) const = default; #else - bool operator==( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( deviceAddress == rhs.deviceAddress ) && ( stride == rhs.stride ) && ( size == rhs.size ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( enabledValidationFeatureCount == rhs.enabledValidationFeatureCount ) && + ( pEnabledValidationFeatures == rhs.pEnabledValidationFeatures ) && ( disabledValidationFeatureCount == rhs.disabledValidationFeatureCount ) && + ( pDisabledValidationFeatures == rhs.pDisabledValidationFeatures ); # endif } - bool operator!=( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; - VULKAN_HPP_NAMESPACE::DeviceSize stride = {}; - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::StructureType 
sType = StructureType::eValidationFeaturesEXT; + const void * pNext = {}; + uint32_t enabledValidationFeatureCount = {}; + const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures = {}; + uint32_t disabledValidationFeatureCount = {}; + const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures = {}; }; - struct SubmitInfo + template <> + struct CppType { - using NativeType = VkSubmitInfo; + using Type = ValidationFeaturesEXT; + }; + + struct ValidationFlagsEXT + { + using NativeType = VkValidationFlagsEXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFlagsEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SubmitInfo( uint32_t waitSemaphoreCount_ = {}, - const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ = {}, - const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ = {}, - uint32_t commandBufferCount_ = {}, - const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ = {}, - uint32_t signalSemaphoreCount_ = {}, - const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , waitSemaphoreCount( waitSemaphoreCount_ ) - , pWaitSemaphores( pWaitSemaphores_ ) - , pWaitDstStageMask( pWaitDstStageMask_ ) - , commandBufferCount( commandBufferCount_ ) - , pCommandBuffers( pCommandBuffers_ ) - , signalSemaphoreCount( signalSemaphoreCount_ ) - , pSignalSemaphores( pSignalSemaphores_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = {}, + const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , disabledValidationCheckCount{ disabledValidationCheckCount_ } + , pDisabledValidationChecks{ pDisabledValidationChecks_ } { } - VULKAN_HPP_CONSTEXPR SubmitInfo( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SubmitInfo( *reinterpret_cast( &rhs ) ) {} + ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ValidationFlagsEXT( *reinterpret_cast( &rhs ) ) {} -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitDstStageMask_ = {}, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBuffers_ = {}, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphores_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , waitSemaphoreCount( static_cast( waitSemaphores_.size() ) ) - , pWaitSemaphores( waitSemaphores_.data() ) - , pWaitDstStageMask( waitDstStageMask_.data() ) - , commandBufferCount( static_cast( commandBuffers_.size() ) ) - , pCommandBuffers( commandBuffers_.data() ) - , signalSemaphoreCount( static_cast( signalSemaphores_.size() ) ) - , pSignalSemaphores( signalSemaphores_.data() ) - { -# ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( waitSemaphores_.size() == waitDstStageMask_.size() ); -# else - if ( waitSemaphores_.size() != 
waitDstStageMask_.size() ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubmitInfo::SubmitInfo: waitSemaphores_.size() != waitDstStageMask_.size()" ); - } -# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ValidationFlagsEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & disabledValidationChecks_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , disabledValidationCheckCount( static_cast( disabledValidationChecks_.size() ) ) + , pDisabledValidationChecks( disabledValidationChecks_.data() ) + { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SubmitInfo & operator=( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ValidationFlagsEXT & operator=( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SubmitInfo & operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ValidationFlagsEXT & operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ ) VULKAN_HPP_NOEXCEPT { - waitSemaphoreCount = waitSemaphoreCount_; + disabledValidationCheckCount = disabledValidationCheckCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & + setPDisabledValidationChecks( const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT { - pWaitSemaphores = pWaitSemaphores_; + pDisabledValidationChecks = pDisabledValidationChecks_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SubmitInfo & - setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT + ValidationFlagsEXT & setDisabledValidationChecks( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & disabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT { - waitSemaphoreCount = static_cast( waitSemaphores_.size() ); - pWaitSemaphores = waitSemaphores_.data(); + disabledValidationCheckCount = static_cast( disabledValidationChecks_.size() ); + pDisabledValidationChecks = disabledValidationChecks_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT + operator VkValidationFlagsEXT const &() const VULKAN_HPP_NOEXCEPT { - pWaitDstStageMask = pWaitDstStageMask_; - return *this; + return *reinterpret_cast( this ); } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SubmitInfo & setWaitDstStageMask( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitDstStageMask_ ) - VULKAN_HPP_NOEXCEPT + operator VkValidationFlagsEXT &() VULKAN_HPP_NOEXCEPT { - waitSemaphoreCount 
= static_cast( waitDstStageMask_.size() ); - pWaitDstStageMask = waitDstStageMask_.data(); - return *this; + return *reinterpret_cast( this ); } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - commandBufferCount = commandBufferCount_; + return std::tie( sType, pNext, disabledValidationCheckCount, pDisabledValidationChecks ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ValidationFlagsEXT const & ) const = default; +#else + bool operator==( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount ) && + ( pDisabledValidationChecks == rhs.pDisabledValidationChecks ); +# endif + } + + bool operator!=( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFlagsEXT; + const void * pNext = {}; + uint32_t disabledValidationCheckCount = {}; + const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks = {}; + }; + + template <> + struct CppType + { + using Type = ValidationFlagsEXT; + }; + + struct VertexInputAttributeDescription2EXT + { + using NativeType = VkVertexInputAttributeDescription2EXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVertexInputAttributeDescription2EXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( uint32_t location_ = {}, + uint32_t binding_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint32_t offset_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , location{ location_ } + , binding{ binding_ } + , format{ format_ } + , offset{ offset_ } + { + } + + VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VertexInputAttributeDescription2EXT( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputAttributeDescription2EXT( *reinterpret_cast( &rhs ) ) + { + } + + VertexInputAttributeDescription2EXT & operator=( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VertexInputAttributeDescription2EXT & operator=( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pCommandBuffers = pCommandBuffers_; + pNext = pNext_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SubmitInfo & - setCommandBuffers( 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBuffers_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT { - commandBufferCount = static_cast( commandBuffers_.size() ); - pCommandBuffers = commandBuffers_.data(); + location = location_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT { - signalSemaphoreCount = signalSemaphoreCount_; + binding = binding_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT { - pSignalSemaphores = pSignalSemaphores_; + format = format_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SubmitInfo & - setSignalSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT { - signalSemaphoreCount = static_cast( signalSemaphores_.size() ); - pSignalSemaphores = signalSemaphores_.data(); + offset = offset_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkVertexInputAttributeDescription2EXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT + operator VkVertexInputAttributeDescription2EXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -86073,209 +127859,129 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + VULKAN_HPP_NAMESPACE::Format const &, + uint32_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( - sType, pNext, waitSemaphoreCount, pWaitSemaphores, pWaitDstStageMask, commandBufferCount, pCommandBuffers, signalSemaphoreCount, pSignalSemaphores ); + return std::tie( sType, pNext, location, binding, format, offset ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SubmitInfo const & ) const = default; + auto operator<=>( VertexInputAttributeDescription2EXT const & ) const = default; #else - bool operator==( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && - ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( pWaitDstStageMask == rhs.pWaitDstStageMask ) && ( commandBufferCount == rhs.commandBufferCount ) && - ( pCommandBuffers == rhs.pCommandBuffers ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && - ( pSignalSemaphores == rhs.pSignalSemaphores ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( location == rhs.location ) && ( binding == rhs.binding ) && ( format == 
rhs.format ) && + ( offset == rhs.offset ); # endif } - bool operator!=( SubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo; - const void * pNext = {}; - uint32_t waitSemaphoreCount = {}; - const VULKAN_HPP_NAMESPACE::Semaphore * pWaitSemaphores = {}; - const VULKAN_HPP_NAMESPACE::PipelineStageFlags * pWaitDstStageMask = {}; - uint32_t commandBufferCount = {}; - const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers = {}; - uint32_t signalSemaphoreCount = {}; - const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputAttributeDescription2EXT; + void * pNext = {}; + uint32_t location = {}; + uint32_t binding = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint32_t offset = {}; }; template <> - struct CppType + struct CppType { - using Type = SubmitInfo; + using Type = VertexInputAttributeDescription2EXT; }; - struct SubmitInfo2 + struct VertexInputBindingDescription2EXT { - using NativeType = VkSubmitInfo2; + using NativeType = VkVertexInputBindingDescription2EXT; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo2; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVertexInputBindingDescription2EXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SubmitInfo2( VULKAN_HPP_NAMESPACE::SubmitFlags flags_ = {}, - uint32_t waitSemaphoreInfoCount_ = {}, - const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos_ = {}, - uint32_t commandBufferInfoCount_ = {}, - const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos_ = {}, - uint32_t signalSemaphoreInfoCount_ = {}, - const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , waitSemaphoreInfoCount( waitSemaphoreInfoCount_ ) - , pWaitSemaphoreInfos( pWaitSemaphoreInfos_ ) - , commandBufferInfoCount( commandBufferInfoCount_ ) - , pCommandBufferInfos( pCommandBufferInfos_ ) - , signalSemaphoreInfoCount( signalSemaphoreInfoCount_ ) - , pSignalSemaphoreInfos( pSignalSemaphoreInfos_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT( uint32_t binding_ = {}, + uint32_t stride_ = {}, + VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex, + uint32_t divisor_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , binding{ binding_ } + , stride{ stride_ } + , inputRate{ inputRate_ } + , divisor{ divisor_ } { } - VULKAN_HPP_CONSTEXPR SubmitInfo2( SubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - SubmitInfo2( VkSubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubmitInfo2( *reinterpret_cast( &rhs ) ) {} + VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SubmitInfo2( VULKAN_HPP_NAMESPACE::SubmitFlags flags_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreInfos_, - 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBufferInfos_ = {}, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreInfos_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , waitSemaphoreInfoCount( static_cast( waitSemaphoreInfos_.size() ) ) - , pWaitSemaphoreInfos( waitSemaphoreInfos_.data() ) - , commandBufferInfoCount( static_cast( commandBufferInfos_.size() ) ) - , pCommandBufferInfos( commandBufferInfos_.data() ) - , signalSemaphoreInfoCount( static_cast( signalSemaphoreInfos_.size() ) ) - , pSignalSemaphoreInfos( signalSemaphoreInfos_.data() ) + VertexInputBindingDescription2EXT( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputBindingDescription2EXT( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SubmitInfo2 & operator=( SubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VertexInputBindingDescription2EXT & operator=( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SubmitInfo2 & operator=( VkSubmitInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + VertexInputBindingDescription2EXT & operator=( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setFlags( VULKAN_HPP_NAMESPACE::SubmitFlags flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setWaitSemaphoreInfoCount( uint32_t waitSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT - { - waitSemaphoreInfoCount = waitSemaphoreInfoCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setPWaitSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT - { - pWaitSemaphoreInfos = pWaitSemaphoreInfos_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SubmitInfo2 & setWaitSemaphoreInfos( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT - { - waitSemaphoreInfoCount = static_cast( waitSemaphoreInfos_.size() ); - pWaitSemaphoreInfos = waitSemaphoreInfos_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setCommandBufferInfoCount( uint32_t commandBufferInfoCount_ ) VULKAN_HPP_NOEXCEPT - { - commandBufferInfoCount = commandBufferInfoCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & - setPCommandBufferInfos( const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos_ ) VULKAN_HPP_NOEXCEPT - { - pCommandBufferInfos = pCommandBufferInfos_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SubmitInfo2 & setCommandBufferInfos( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & commandBufferInfos_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT { - commandBufferInfoCount = static_cast( 
commandBufferInfos_.size() ); - pCommandBufferInfos = commandBufferInfos_.data(); + binding = binding_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & setSignalSemaphoreInfoCount( uint32_t signalSemaphoreInfoCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT { - signalSemaphoreInfoCount = signalSemaphoreInfoCount_; + stride = stride_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubmitInfo2 & - setPSignalSemaphoreInfos( const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT { - pSignalSemaphoreInfos = pSignalSemaphoreInfos_; + inputRate = inputRate_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SubmitInfo2 & setSignalSemaphoreInfos( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreInfos_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT { - signalSemaphoreInfoCount = static_cast( signalSemaphoreInfos_.size() ); - pSignalSemaphoreInfos = signalSemaphoreInfos_.data(); + divisor = divisor_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSubmitInfo2 const &() const VULKAN_HPP_NOEXCEPT + operator VkVertexInputBindingDescription2EXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSubmitInfo2 &() VULKAN_HPP_NOEXCEPT + operator VkVertexInputBindingDescription2EXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -86283,237 +127989,236 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + VULKAN_HPP_NAMESPACE::VertexInputRate const &, + uint32_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - flags, - waitSemaphoreInfoCount, - pWaitSemaphoreInfos, - commandBufferInfoCount, - pCommandBufferInfos, - signalSemaphoreInfoCount, - pSignalSemaphoreInfos ); + return std::tie( sType, pNext, binding, stride, inputRate, divisor ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SubmitInfo2 const & ) const = default; + auto operator<=>( VertexInputBindingDescription2EXT const & ) const = default; #else - bool operator==( SubmitInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( waitSemaphoreInfoCount == rhs.waitSemaphoreInfoCount ) && - ( pWaitSemaphoreInfos == rhs.pWaitSemaphoreInfos ) && ( commandBufferInfoCount == rhs.commandBufferInfoCount ) && - ( pCommandBufferInfos == rhs.pCommandBufferInfos ) && ( signalSemaphoreInfoCount == rhs.signalSemaphoreInfoCount ) && - ( pSignalSemaphoreInfos == rhs.pSignalSemaphoreInfos ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( binding == rhs.binding ) && ( stride == rhs.stride ) && ( inputRate == rhs.inputRate ) && + ( divisor == rhs.divisor ); # endif } - bool operator!=( SubmitInfo2 const & rhs 
) const VULKAN_HPP_NOEXCEPT + bool operator!=( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo2; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::SubmitFlags flags = {}; - uint32_t waitSemaphoreInfoCount = {}; - const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pWaitSemaphoreInfos = {}; - uint32_t commandBufferInfoCount = {}; - const VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo * pCommandBufferInfos = {}; - uint32_t signalSemaphoreInfoCount = {}; - const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputBindingDescription2EXT; + void * pNext = {}; + uint32_t binding = {}; + uint32_t stride = {}; + VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex; + uint32_t divisor = {}; }; template <> - struct CppType + struct CppType { - using Type = SubmitInfo2; + using Type = VertexInputBindingDescription2EXT; }; - using SubmitInfo2KHR = SubmitInfo2; - struct SubpassBeginInfo +#if defined( VK_USE_PLATFORM_VI_NN ) + struct ViSurfaceCreateInfoNN { - using NativeType = VkSubpassBeginInfo; + using NativeType = VkViSurfaceCreateInfoNN; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassBeginInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eViSurfaceCreateInfoNN; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SubpassBeginInfo( VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , contents( contents_ ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + ViSurfaceCreateInfoNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {}, void * window_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , window{ window_ } { } - VULKAN_HPP_CONSTEXPR SubpassBeginInfo( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassBeginInfo( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT + : ViSurfaceCreateInfoNN( *reinterpret_cast( &rhs ) ) + { + } - SubpassBeginInfo & operator=( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ViSurfaceCreateInfoNN & operator=( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SubpassBeginInfo & operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + ViSurfaceCreateInfoNN & operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setPNext( const void * 
pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setFlags( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ ) VULKAN_HPP_NOEXCEPT { - contents = contents_; + flags = flags_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkSubpassBeginInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setWindow( void * window_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + window = window_; + return *this; } +# endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT + operator VkViSurfaceCreateInfoNN const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkViSurfaceCreateInfoNN &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, contents ); + return std::tie( sType, pNext, flags, window ); } -#endif +# endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SubpassBeginInfo const & ) const = default; -#else - bool operator==( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ViSurfaceCreateInfoNN const & ) const = default; +# else + bool operator==( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( contents == rhs.contents ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( window == rhs.window ); +# endif } - bool operator!=( SubpassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif +# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eViSurfaceCreateInfoNN; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags = {}; + void * window = {}; }; template <> - struct CppType + struct CppType { - using Type = SubpassBeginInfo; + using Type = ViSurfaceCreateInfoNN; }; - using SubpassBeginInfoKHR = SubpassBeginInfo; +#endif /*VK_USE_PLATFORM_VI_NN*/ - struct SubpassDescriptionDepthStencilResolve + struct VideoPictureResourceInfoKHR { - using NativeType = VkSubpassDescriptionDepthStencilResolve; + using NativeType = VkVideoPictureResourceInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescriptionDepthStencilResolve; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoPictureResourceInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - 
SubpassDescriptionDepthStencilResolve( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, - VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, - const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , depthResolveMode( depthResolveMode_ ) - , stencilResolveMode( stencilResolveMode_ ) - , pDepthStencilResolveAttachment( pDepthStencilResolveAttachment_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoPictureResourceInfoKHR( VULKAN_HPP_NAMESPACE::Offset2D codedOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D codedExtent_ = {}, + uint32_t baseArrayLayer_ = {}, + VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , codedOffset{ codedOffset_ } + , codedExtent{ codedExtent_ } + , baseArrayLayer{ baseArrayLayer_ } + , imageViewBinding{ imageViewBinding_ } { } - VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoPictureResourceInfoKHR( VideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SubpassDescriptionDepthStencilResolve( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT - : SubpassDescriptionDepthStencilResolve( *reinterpret_cast( &rhs ) ) + VideoPictureResourceInfoKHR( VkVideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoPictureResourceInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SubpassDescriptionDepthStencilResolve & operator=( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoPictureResourceInfoKHR & operator=( VideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SubpassDescriptionDepthStencilResolve & operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT + VideoPictureResourceInfoKHR & operator=( VkVideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & - setDepthResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setCodedOffset( VULKAN_HPP_NAMESPACE::Offset2D const & codedOffset_ ) VULKAN_HPP_NOEXCEPT { - depthResolveMode = depthResolveMode_; + codedOffset = codedOffset_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & - setStencilResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT { - 
stencilResolveMode = stencilResolveMode_; + codedExtent = codedExtent_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & - setPDepthStencilResolveAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT { - pDepthStencilResolveAttachment = pDepthStencilResolveAttachment_; + baseArrayLayer = baseArrayLayer_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkSubpassDescriptionDepthStencilResolve const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setImageViewBinding( VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ ) VULKAN_HPP_NOEXCEPT + { + imageViewBinding = imageViewBinding_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoPictureResourceInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSubpassDescriptionDepthStencilResolve &() VULKAN_HPP_NOEXCEPT + operator VkVideoPictureResourceInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -86522,1722 +128227,1724 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::Offset2D const &, + VULKAN_HPP_NAMESPACE::Extent2D const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ImageView const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, depthResolveMode, stencilResolveMode, pDepthStencilResolveAttachment ); + return std::tie( sType, pNext, codedOffset, codedExtent, baseArrayLayer, imageViewBinding ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SubpassDescriptionDepthStencilResolve const & ) const = default; + auto operator<=>( VideoPictureResourceInfoKHR const & ) const = default; #else - bool operator==( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoPictureResourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthResolveMode == rhs.depthResolveMode ) && - ( stencilResolveMode == rhs.stencilResolveMode ) && ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( codedOffset == rhs.codedOffset ) && ( codedExtent == rhs.codedExtent ) && + ( baseArrayLayer == rhs.baseArrayLayer ) && ( imageViewBinding == rhs.imageViewBinding ); # endif } - bool operator!=( SubpassDescriptionDepthStencilResolve const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoPictureResourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolve; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone; - VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone; - const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoPictureResourceInfoKHR; + const void * 
pNext = {}; + VULKAN_HPP_NAMESPACE::Offset2D codedOffset = {}; + VULKAN_HPP_NAMESPACE::Extent2D codedExtent = {}; + uint32_t baseArrayLayer = {}; + VULKAN_HPP_NAMESPACE::ImageView imageViewBinding = {}; }; template <> - struct CppType + struct CppType { - using Type = SubpassDescriptionDepthStencilResolve; + using Type = VideoPictureResourceInfoKHR; }; - using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve; - struct SubpassEndInfo + struct VideoReferenceSlotInfoKHR { - using NativeType = VkSubpassEndInfo; + using NativeType = VkVideoReferenceSlotInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoReferenceSlotInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SubpassEndInfo( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext( pNext_ ) {} +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoReferenceSlotInfoKHR( int32_t slotIndex_ = {}, + const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , slotIndex{ slotIndex_ } + , pPictureResource{ pPictureResource_ } + { + } - VULKAN_HPP_CONSTEXPR SubpassEndInfo( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoReferenceSlotInfoKHR( VideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SubpassEndInfo( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + VideoReferenceSlotInfoKHR( VkVideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoReferenceSlotInfoKHR( *reinterpret_cast( &rhs ) ) + { + } - SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoReferenceSlotInfoKHR & operator=( VideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SubpassEndInfo & operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT + VideoReferenceSlotInfoKHR & operator=( VkVideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SubpassEndInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkSubpassEndInfo const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & setSlotIndex( int32_t slotIndex_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + slotIndex = slotIndex_; + return *this; } - operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & + setPPictureResource( const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pPictureResource = pPictureResource_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoReferenceSlotInfoKHR const &() const 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoReferenceSlotInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext ); + return std::tie( sType, pNext, slotIndex, pPictureResource ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SubpassEndInfo const & ) const = default; + auto operator<=>( VideoReferenceSlotInfoKHR const & ) const = default; #else - bool operator==( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoReferenceSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( slotIndex == rhs.slotIndex ) && ( pPictureResource == rhs.pPictureResource ); # endif } - bool operator!=( SubpassEndInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoReferenceSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo; - const void * pNext = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoReferenceSlotInfoKHR; + const void * pNext = {}; + int32_t slotIndex = {}; + const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource = {}; }; template <> - struct CppType + struct CppType { - using Type = SubpassEndInfo; + using Type = VideoReferenceSlotInfoKHR; }; - using SubpassEndInfoKHR = SubpassEndInfo; - struct SubpassFragmentDensityMapOffsetEndInfoQCOM + struct VideoBeginCodingInfoKHR { - using NativeType = VkSubpassFragmentDensityMapOffsetEndInfoQCOM; + using NativeType = VkVideoBeginCodingInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoBeginCodingInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SubpassFragmentDensityMapOffsetEndInfoQCOM( uint32_t fragmentDensityOffsetCount_ = {}, - const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fragmentDensityOffsetCount( fragmentDensityOffsetCount_ ) - , pFragmentDensityOffsets( pFragmentDensityOffsets_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {}, + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ = {}, + uint32_t referenceSlotCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , videoSession{ videoSession_ } + , videoSessionParameters{ videoSessionParameters_ } + , referenceSlotCount{ referenceSlotCount_ } + , pReferenceSlots{ pReferenceSlots_ } { } - VULKAN_HPP_CONSTEXPR SubpassFragmentDensityMapOffsetEndInfoQCOM( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SubpassFragmentDensityMapOffsetEndInfoQCOM( VkSubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT - : SubpassFragmentDensityMapOffsetEndInfoQCOM( *reinterpret_cast( &rhs ) ) + VideoBeginCodingInfoKHR( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoBeginCodingInfoKHR( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SubpassFragmentDensityMapOffsetEndInfoQCOM( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & fragmentDensityOffsets_, const void * pNext_ = nullptr ) + VideoBeginCodingInfoKHR( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_, + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceSlots_, + const void * pNext_ = nullptr ) : pNext( pNext_ ) - , fragmentDensityOffsetCount( static_cast( fragmentDensityOffsets_.size() ) ) - , pFragmentDensityOffsets( fragmentDensityOffsets_.data() ) + , flags( flags_ ) + , videoSession( videoSession_ ) + , videoSessionParameters( videoSessionParameters_ ) + , referenceSlotCount( static_cast( referenceSlots_.size() ) ) + , pReferenceSlots( referenceSlots_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SubpassFragmentDensityMapOffsetEndInfoQCOM & operator=( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoBeginCodingInfoKHR & operator=( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SubpassFragmentDensityMapOffsetEndInfoQCOM & operator=( VkSubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + VideoBeginCodingInfoKHR & operator=( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM & - setFragmentDensityOffsetCount( uint32_t fragmentDensityOffsetCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { - fragmentDensityOffsetCount = fragmentDensityOffsetCount_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SubpassFragmentDensityMapOffsetEndInfoQCOM & - setPFragmentDensityOffsets( const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT { - pFragmentDensityOffsets = pFragmentDensityOffsets_; + videoSession = videoSession_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SubpassFragmentDensityMapOffsetEndInfoQCOM & setFragmentDensityOffsets( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & fragmentDensityOffsets_ ) 
VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & + setVideoSessionParameters( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ ) VULKAN_HPP_NOEXCEPT { - fragmentDensityOffsetCount = static_cast( fragmentDensityOffsets_.size() ); - pFragmentDensityOffsets = fragmentDensityOffsets_.data(); + videoSessionParameters = videoSessionParameters_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkSubpassFragmentDensityMapOffsetEndInfoQCOM const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkSubpassFragmentDensityMapOffsetEndInfoQCOM &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, fragmentDensityOffsetCount, pFragmentDensityOffsets ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SubpassFragmentDensityMapOffsetEndInfoQCOM const & ) const = default; -#else - bool operator==( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityOffsetCount == rhs.fragmentDensityOffsetCount ) && - ( pFragmentDensityOffsets == rhs.pFragmentDensityOffsets ); -# endif - } - - bool operator!=( SubpassFragmentDensityMapOffsetEndInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM; - const void * pNext = {}; - uint32_t fragmentDensityOffsetCount = {}; - const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets = {}; - }; - - template <> - struct CppType - { - using Type = SubpassFragmentDensityMapOffsetEndInfoQCOM; - }; - - struct SubpassResolvePerformanceQueryEXT - { - using NativeType = VkSubpassResolvePerformanceQueryEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassResolvePerformanceQueryEXT; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SubpassResolvePerformanceQueryEXT( VULKAN_HPP_NAMESPACE::Bool32 optimal_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , optimal( optimal_ ) + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT { + referenceSlotCount = referenceSlotCount_; + return *this; } - VULKAN_HPP_CONSTEXPR SubpassResolvePerformanceQueryEXT( SubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - SubpassResolvePerformanceQueryEXT( VkSubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : SubpassResolvePerformanceQueryEXT( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & + setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT { + pReferenceSlots = pReferenceSlots_; + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SubpassResolvePerformanceQueryEXT & operator=( SubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - SubpassResolvePerformanceQueryEXT & operator=( 
VkSubpassResolvePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoBeginCodingInfoKHR & setReferenceSlots( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + referenceSlotCount = static_cast( referenceSlots_.size() ); + pReferenceSlots = referenceSlots_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSubpassResolvePerformanceQueryEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoBeginCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSubpassResolvePerformanceQueryEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoBeginCodingInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, optimal ); + return std::tie( sType, pNext, flags, videoSession, videoSessionParameters, referenceSlotCount, pReferenceSlots ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SubpassResolvePerformanceQueryEXT const & ) const = default; + auto operator<=>( VideoBeginCodingInfoKHR const & ) const = default; #else - bool operator==( SubpassResolvePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( optimal == rhs.optimal ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( videoSession == rhs.videoSession ) && + ( videoSessionParameters == rhs.videoSessionParameters ) && ( referenceSlotCount == rhs.referenceSlotCount ) && + ( pReferenceSlots == rhs.pReferenceSlots ); # endif } - bool operator!=( SubpassResolvePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassResolvePerformanceQueryEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 optimal = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoBeginCodingInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {}; + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters = {}; + uint32_t referenceSlotCount = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots = {}; }; template <> - struct CppType + struct CppType { - using Type = SubpassResolvePerformanceQueryEXT; + using Type = VideoBeginCodingInfoKHR; }; - struct SubpassShadingPipelineCreateInfoHUAWEI + struct VideoCapabilitiesKHR { - using NativeType = VkSubpassShadingPipelineCreateInfoHUAWEI; + using NativeType = VkVideoCapabilitiesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassShadingPipelineCreateInfoHUAWEI; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCapabilitiesKHR; -#if 
!defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SubpassShadingPipelineCreateInfoHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, - uint32_t subpass_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , renderPass( renderPass_ ) - , subpass( subpass_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 VideoCapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D pictureAccessGranularity_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minCodedExtent_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent_ = {}, + uint32_t maxDpbSlots_ = {}, + uint32_t maxActiveReferencePictures_ = {}, + VULKAN_HPP_NAMESPACE::ExtensionProperties stdHeaderVersion_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , minBitstreamBufferOffsetAlignment{ minBitstreamBufferOffsetAlignment_ } + , minBitstreamBufferSizeAlignment{ minBitstreamBufferSizeAlignment_ } + , pictureAccessGranularity{ pictureAccessGranularity_ } + , minCodedExtent{ minCodedExtent_ } + , maxCodedExtent{ maxCodedExtent_ } + , maxDpbSlots{ maxDpbSlots_ } + , maxActiveReferencePictures{ maxActiveReferencePictures_ } + , stdHeaderVersion{ stdHeaderVersion_ } { } - VULKAN_HPP_CONSTEXPR SubpassShadingPipelineCreateInfoHUAWEI( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 VideoCapabilitiesKHR( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SubpassShadingPipelineCreateInfoHUAWEI( VkSubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT - : SubpassShadingPipelineCreateInfoHUAWEI( *reinterpret_cast( &rhs ) ) + VideoCapabilitiesKHR( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoCapabilitiesKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SubpassShadingPipelineCreateInfoHUAWEI & operator=( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoCapabilitiesKHR & operator=( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SubpassShadingPipelineCreateInfoHUAWEI & operator=( VkSubpassShadingPipelineCreateInfoHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + VideoCapabilitiesKHR & operator=( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkSubpassShadingPipelineCreateInfoHUAWEI const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSubpassShadingPipelineCreateInfoHUAWEI &() VULKAN_HPP_NOEXCEPT + operator VkVideoCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, renderPass, subpass ); + return std::tie( sType, + pNext, + flags, + minBitstreamBufferOffsetAlignment, + minBitstreamBufferSizeAlignment, + pictureAccessGranularity, + minCodedExtent, + maxCodedExtent, + 
maxDpbSlots, + maxActiveReferencePictures, + stdHeaderVersion ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SubpassShadingPipelineCreateInfoHUAWEI const & ) const = default; + auto operator<=>( VideoCapabilitiesKHR const & ) const = default; #else - bool operator==( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && ( subpass == rhs.subpass ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( minBitstreamBufferOffsetAlignment == rhs.minBitstreamBufferOffsetAlignment ) && + ( minBitstreamBufferSizeAlignment == rhs.minBitstreamBufferSizeAlignment ) && ( pictureAccessGranularity == rhs.pictureAccessGranularity ) && + ( minCodedExtent == rhs.minCodedExtent ) && ( maxCodedExtent == rhs.maxCodedExtent ) && ( maxDpbSlots == rhs.maxDpbSlots ) && + ( maxActiveReferencePictures == rhs.maxActiveReferencePictures ) && ( stdHeaderVersion == rhs.stdHeaderVersion ); # endif } - bool operator!=( SubpassShadingPipelineCreateInfoHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassShadingPipelineCreateInfoHUAWEI; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::RenderPass renderPass = {}; - uint32_t subpass = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoCapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment = {}; + VULKAN_HPP_NAMESPACE::Extent2D pictureAccessGranularity = {}; + VULKAN_HPP_NAMESPACE::Extent2D minCodedExtent = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent = {}; + uint32_t maxDpbSlots = {}; + uint32_t maxActiveReferencePictures = {}; + VULKAN_HPP_NAMESPACE::ExtensionProperties stdHeaderVersion = {}; }; template <> - struct CppType + struct CppType { - using Type = SubpassShadingPipelineCreateInfoHUAWEI; + using Type = VideoCapabilitiesKHR; }; - struct SubresourceLayout2EXT + struct VideoCodingControlInfoKHR { - using NativeType = VkSubresourceLayout2EXT; + using NativeType = VkVideoCodingControlInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubresourceLayout2EXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCodingControlInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , subresourceLayout( subresourceLayout_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoCodingControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoCodingControlInfoKHR( VideoCodingControlInfoKHR const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; + + VideoCodingControlInfoKHR( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoCodingControlInfoKHR( *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR SubresourceLayout2EXT( SubresourceLayout2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoCodingControlInfoKHR & operator=( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SubresourceLayout2EXT( VkSubresourceLayout2EXT const & rhs ) VULKAN_HPP_NOEXCEPT - : SubresourceLayout2EXT( *reinterpret_cast( &rhs ) ) + VideoCodingControlInfoKHR & operator=( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SubresourceLayout2EXT & operator=( SubresourceLayout2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } - SubresourceLayout2EXT & operator=( VkSubresourceLayout2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + flags = flags_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSubresourceLayout2EXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoCodingControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSubresourceLayout2EXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoCodingControlInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, subresourceLayout ); + return std::tie( sType, pNext, flags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SubresourceLayout2EXT const & ) const = default; + auto operator<=>( VideoCodingControlInfoKHR const & ) const = default; #else - bool operator==( SubresourceLayout2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subresourceLayout == rhs.subresourceLayout ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); # endif } - bool operator!=( SubresourceLayout2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubresourceLayout2EXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoCodingControlInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags = {}; }; template <> - struct CppType + struct CppType { - using Type = SubresourceLayout2EXT; + using Type = VideoCodingControlInfoKHR; }; - struct SurfaceCapabilities2EXT + struct 
VideoDecodeAV1CapabilitiesKHR { - using NativeType = VkSurfaceCapabilities2EXT; + using NativeType = VkVideoDecodeAV1CapabilitiesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2EXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeAv1CapabilitiesKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( - uint32_t minImageCount_ = {}, - uint32_t maxImageCount_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, - uint32_t maxImageArrayLayers_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, - VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, - VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , minImageCount( minImageCount_ ) - , maxImageCount( maxImageCount_ ) - , currentExtent( currentExtent_ ) - , minImageExtent( minImageExtent_ ) - , maxImageExtent( maxImageExtent_ ) - , maxImageArrayLayers( maxImageArrayLayers_ ) - , supportedTransforms( supportedTransforms_ ) - , currentTransform( currentTransform_ ) - , supportedCompositeAlpha( supportedCompositeAlpha_ ) - , supportedUsageFlags( supportedUsageFlags_ ) - , supportedSurfaceCounters( supportedSurfaceCounters_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeAV1CapabilitiesKHR( StdVideoAV1Level maxLevel_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxLevel{ maxLevel_ } { } - VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoDecodeAV1CapabilitiesKHR( VideoDecodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT - : SurfaceCapabilities2EXT( *reinterpret_cast( &rhs ) ) + VideoDecodeAV1CapabilitiesKHR( VkVideoDecodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeAV1CapabilitiesKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SurfaceCapabilities2EXT & operator=( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoDecodeAV1CapabilitiesKHR & operator=( VideoDecodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SurfaceCapabilities2EXT & operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoDecodeAV1CapabilitiesKHR & operator=( VkVideoDecodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkSurfaceCapabilities2EXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeAV1CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeAV1CapabilitiesKHR &() 
VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - minImageCount, - maxImageCount, - currentExtent, - minImageExtent, - maxImageExtent, - maxImageArrayLayers, - supportedTransforms, - currentTransform, - supportedCompositeAlpha, - supportedUsageFlags, - supportedSurfaceCounters ); + return std::tie( sType, pNext, maxLevel ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SurfaceCapabilities2EXT const & ) const = default; -#else - bool operator==( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minImageCount == rhs.minImageCount ) && ( maxImageCount == rhs.maxImageCount ) && - ( currentExtent == rhs.currentExtent ) && ( minImageExtent == rhs.minImageExtent ) && ( maxImageExtent == rhs.maxImageExtent ) && - ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && ( supportedTransforms == rhs.supportedTransforms ) && - ( currentTransform == rhs.currentTransform ) && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) && - ( supportedUsageFlags == rhs.supportedUsageFlags ) && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters ); -# endif - } - - bool operator!=( SurfaceCapabilities2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + std::strong_ordering operator<=>( VideoDecodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); - } -#endif + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT; - void * pNext = {}; - uint32_t minImageCount = {}; - uint32_t maxImageCount = {}; - VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; - VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; - uint32_t maxImageArrayLayers = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; - VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; - VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {}; + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoDecodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ) == 0 ); + } + + bool operator!=( VideoDecodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeAv1CapabilitiesKHR; + void * pNext = {}; + StdVideoAV1Level maxLevel = {}; }; template <> - struct CppType + struct CppType { - using Type = SurfaceCapabilities2EXT; + using Type = VideoDecodeAV1CapabilitiesKHR; }; - struct SurfaceCapabilitiesKHR + struct VideoDecodeAV1DpbSlotInfoKHR { - using NativeType = VkSurfaceCapabilitiesKHR; + using NativeType = VkVideoDecodeAV1DpbSlotInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( - uint32_t minImageCount_ = {}, - uint32_t maxImageCount_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, - uint32_t maxImageArrayLayers_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, - VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, - VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT - : minImageCount( minImageCount_ ) - , maxImageCount( maxImageCount_ ) - , currentExtent( currentExtent_ ) - , minImageExtent( minImageExtent_ ) - , maxImageExtent( maxImageExtent_ ) - , maxImageArrayLayers( maxImageArrayLayers_ ) - , supportedTransforms( supportedTransforms_ ) - , currentTransform( currentTransform_ ) - , supportedCompositeAlpha( supportedCompositeAlpha_ ) - , supportedUsageFlags( supportedUsageFlags_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeAv1DpbSlotInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeAV1DpbSlotInfoKHR( const StdVideoDecodeAV1ReferenceInfo * pStdReferenceInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdReferenceInfo{ pStdReferenceInfo_ } { } - VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR 
VideoDecodeAV1DpbSlotInfoKHR( VideoDecodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SurfaceCapabilitiesKHR( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : SurfaceCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + VideoDecodeAV1DpbSlotInfoKHR( VkVideoDecodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeAV1DpbSlotInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SurfaceCapabilitiesKHR & operator=( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoDecodeAV1DpbSlotInfoKHR & operator=( VideoDecodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SurfaceCapabilitiesKHR & operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VideoDecodeAV1DpbSlotInfoKHR & operator=( VkVideoDecodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkSurfaceCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1DpbSlotInfoKHR & setPStdReferenceInfo( const StdVideoDecodeAV1ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pStdReferenceInfo = pStdReferenceInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoDecodeAV1DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeAV1DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( minImageCount, - maxImageCount, - currentExtent, - minImageExtent, - maxImageExtent, - maxImageArrayLayers, - supportedTransforms, - currentTransform, - supportedCompositeAlpha, - supportedUsageFlags ); + return std::tie( sType, pNext, pStdReferenceInfo ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SurfaceCapabilitiesKHR const & ) const = default; + auto operator<=>( VideoDecodeAV1DpbSlotInfoKHR const & ) const = default; #else - bool operator==( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoDecodeAV1DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( minImageCount == rhs.minImageCount ) && ( maxImageCount == rhs.maxImageCount ) && ( currentExtent == rhs.currentExtent ) && - ( minImageExtent == rhs.minImageExtent ) && ( maxImageExtent == rhs.maxImageExtent ) && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && - ( supportedTransforms == rhs.supportedTransforms ) && ( currentTransform == rhs.currentTransform ) && - ( supportedCompositeAlpha == rhs.supportedCompositeAlpha ) && ( supportedUsageFlags == rhs.supportedUsageFlags ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo ); # endif } - bool operator!=( SurfaceCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool 
operator!=( VideoDecodeAV1DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t minImageCount = {}; - uint32_t maxImageCount = {}; - VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {}; - VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {}; - uint32_t maxImageArrayLayers = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; - VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeAv1DpbSlotInfoKHR; + const void * pNext = {}; + const StdVideoDecodeAV1ReferenceInfo * pStdReferenceInfo = {}; }; - struct SurfaceCapabilities2KHR + template <> + struct CppType { - using NativeType = VkSurfaceCapabilities2KHR; + using Type = VideoDecodeAV1DpbSlotInfoKHR; + }; + + struct VideoDecodeAV1InlineSessionParametersInfoKHR + { + using NativeType = VkVideoDecodeAV1InlineSessionParametersInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2KHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeAv1InlineSessionParametersInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , surfaceCapabilities( surfaceCapabilities_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeAV1InlineSessionParametersInfoKHR( const StdVideoAV1SequenceHeader * pStdSequenceHeader_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdSequenceHeader{ pStdSequenceHeader_ } { } - VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoDecodeAV1InlineSessionParametersInfoKHR( VideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - : SurfaceCapabilities2KHR( *reinterpret_cast( &rhs ) ) + VideoDecodeAV1InlineSessionParametersInfoKHR( VkVideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeAV1InlineSessionParametersInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SurfaceCapabilities2KHR & operator=( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoDecodeAV1InlineSessionParametersInfoKHR & operator=( VideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SurfaceCapabilities2KHR & operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + VideoDecodeAV1InlineSessionParametersInfoKHR & operator=( VkVideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkSurfaceCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && 
!defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1InlineSessionParametersInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1InlineSessionParametersInfoKHR & + setPStdSequenceHeader( const StdVideoAV1SequenceHeader * pStdSequenceHeader_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pStdSequenceHeader = pStdSequenceHeader_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoDecodeAV1InlineSessionParametersInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeAV1InlineSessionParametersInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, surfaceCapabilities ); + return std::tie( sType, pNext, pStdSequenceHeader ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SurfaceCapabilities2KHR const & ) const = default; + auto operator<=>( VideoDecodeAV1InlineSessionParametersInfoKHR const & ) const = default; #else - bool operator==( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCapabilities == rhs.surfaceCapabilities ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdSequenceHeader == rhs.pStdSequenceHeader ); # endif } - bool operator!=( SurfaceCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoDecodeAV1InlineSessionParametersInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2KHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeAv1InlineSessionParametersInfoKHR; + const void * pNext = {}; + const StdVideoAV1SequenceHeader * pStdSequenceHeader = {}; }; template <> - struct CppType + struct CppType { - using Type = SurfaceCapabilities2KHR; + using Type = VideoDecodeAV1InlineSessionParametersInfoKHR; }; -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - struct SurfaceCapabilitiesFullScreenExclusiveEXT + struct VideoDecodeAV1PictureInfoKHR { - using NativeType = VkSurfaceCapabilitiesFullScreenExclusiveEXT; + using NativeType = VkVideoDecodeAV1PictureInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeAv1PictureInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fullScreenExclusiveSupported( fullScreenExclusiveSupported_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && 
!defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR( const StdVideoDecodeAV1PictureInfo * pStdPictureInfo_ = {}, + std::array const & referenceNameSlotIndices_ = {}, + uint32_t frameHeaderOffset_ = {}, + uint32_t tileCount_ = {}, + const uint32_t * pTileOffsets_ = {}, + const uint32_t * pTileSizes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdPictureInfo{ pStdPictureInfo_ } + , referenceNameSlotIndices{ referenceNameSlotIndices_ } + , frameHeaderOffset{ frameHeaderOffset_ } + , tileCount{ tileCount_ } + , pTileOffsets{ pTileOffsets_ } + , pTileSizes{ pTileSizes_ } { } - VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR( VideoDecodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : SurfaceCapabilitiesFullScreenExclusiveEXT( *reinterpret_cast( &rhs ) ) + VideoDecodeAV1PictureInfoKHR( VkVideoDecodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeAV1PictureInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeAV1PictureInfoKHR( const StdVideoDecodeAV1PictureInfo * pStdPictureInfo_, + std::array const & referenceNameSlotIndices_, + uint32_t frameHeaderOffset_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tileOffsets_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tileSizes_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , pStdPictureInfo( pStdPictureInfo_ ) + , referenceNameSlotIndices( referenceNameSlotIndices_ ) + , frameHeaderOffset( frameHeaderOffset_ ) + , tileCount( static_cast( tileOffsets_.size() ) ) + , pTileOffsets( tileOffsets_.data() ) + , pTileSizes( tileSizes_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( tileOffsets_.size() == tileSizes_.size() ); +# else + if ( tileOffsets_.size() != tileSizes_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::VideoDecodeAV1PictureInfoKHR::VideoDecodeAV1PictureInfoKHR: tileOffsets_.size() != tileSizes_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoDecodeAV1PictureInfoKHR & operator=( VideoDecodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeAV1PictureInfoKHR & operator=( VkVideoDecodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesFullScreenExclusiveEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesFullScreenExclusiveEXT & 
- setFullScreenExclusiveSupported( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setPStdPictureInfo( const StdVideoDecodeAV1PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT { - fullScreenExclusiveSupported = fullScreenExclusiveSupported_; + pStdPictureInfo = pStdPictureInfo_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & + setReferenceNameSlotIndices( std::array referenceNameSlotIndices_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + referenceNameSlotIndices = referenceNameSlotIndices_; + return *this; } - operator VkSurfaceCapabilitiesFullScreenExclusiveEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setFrameHeaderOffset( uint32_t frameHeaderOffset_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + frameHeaderOffset = frameHeaderOffset_; + return *this; } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setTileCount( uint32_t tileCount_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, fullScreenExclusiveSupported ); + tileCount = tileCount_; + return *this; } -# endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SurfaceCapabilitiesFullScreenExclusiveEXT const & ) const = default; -# else - bool operator==( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setPTileOffsets( const uint32_t * pTileOffsets_ ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fullScreenExclusiveSupported == rhs.fullScreenExclusiveSupported ); -# endif + pTileOffsets = pTileOffsets_; + return *this; } - bool operator!=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeAV1PictureInfoKHR & setTileOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tileOffsets_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + tileCount = static_cast( tileOffsets_.size() ); + pTileOffsets = tileOffsets_.data(); + return *this; } -# endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported = {}; - }; - - template <> - struct CppType - { - using Type = SurfaceCapabilitiesFullScreenExclusiveEXT; - }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - struct SurfaceFormatKHR - { - using NativeType = VkSurfaceFormatKHR; +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - SurfaceFormatKHR( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear ) VULKAN_HPP_NOEXCEPT - : format( format_ ) - , colorSpace( colorSpace_ ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1PictureInfoKHR & setPTileSizes( const uint32_t * pTileSizes_ ) VULKAN_HPP_NOEXCEPT { + pTileSizes = pTileSizes_; + return *this; } 
- VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT : SurfaceFormatKHR( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - SurfaceFormatKHR & operator=( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - SurfaceFormatKHR & operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeAV1PictureInfoKHR & setTileSizes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tileSizes_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + tileCount = static_cast( tileSizes_.size() ); + pTileSizes = tileSizes_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSurfaceFormatKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeAV1PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeAV1PictureInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple const &, + uint32_t const &, + uint32_t const &, + const uint32_t * const &, + const uint32_t * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( format, colorSpace ); + return std::tie( sType, pNext, pStdPictureInfo, referenceNameSlotIndices, frameHeaderOffset, tileCount, pTileOffsets, pTileSizes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SurfaceFormatKHR const & ) const = default; + auto operator<=>( VideoDecodeAV1PictureInfoKHR const & ) const = default; #else - bool operator==( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoDecodeAV1PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( format == rhs.format ) && ( colorSpace == rhs.colorSpace ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && + ( referenceNameSlotIndices == rhs.referenceNameSlotIndices ) && ( frameHeaderOffset == rhs.frameHeaderOffset ) && ( tileCount == rhs.tileCount ) && + ( pTileOffsets == rhs.pTileOffsets ) && ( pTileSizes == rhs.pTileSizes ); # endif } - bool operator!=( SurfaceFormatKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoDecodeAV1PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeAv1PictureInfoKHR; + const void * pNext = {}; + const StdVideoDecodeAV1PictureInfo * pStdPictureInfo = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D referenceNameSlotIndices = {}; + uint32_t frameHeaderOffset = {}; + uint32_t tileCount = {}; + const uint32_t * pTileOffsets = {}; + const uint32_t * pTileSizes = {}; }; - struct SurfaceFormat2KHR + template <> + struct CppType { - using NativeType = VkSurfaceFormat2KHR; + using Type = VideoDecodeAV1PictureInfoKHR; + }; + + struct 
VideoDecodeAV1ProfileInfoKHR + { + using NativeType = VkVideoDecodeAV1ProfileInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFormat2KHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeAv1ProfileInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , surfaceFormat( surfaceFormat_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeAV1ProfileInfoKHR( StdVideoAV1Profile stdProfile_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filmGrainSupport_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stdProfile{ stdProfile_ } + , filmGrainSupport{ filmGrainSupport_ } { } - VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoDecodeAV1ProfileInfoKHR( VideoDecodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT : SurfaceFormat2KHR( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + VideoDecodeAV1ProfileInfoKHR( VkVideoDecodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeAV1ProfileInfoKHR( *reinterpret_cast( &rhs ) ) + { + } - SurfaceFormat2KHR & operator=( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoDecodeAV1ProfileInfoKHR & operator=( VideoDecodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SurfaceFormat2KHR & operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + VideoDecodeAV1ProfileInfoKHR & operator=( VkVideoDecodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkSurfaceFormat2KHR const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1ProfileInfoKHR & setStdProfile( StdVideoAV1Profile stdProfile_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + stdProfile = stdProfile_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1ProfileInfoKHR & setFilmGrainSupport( VULKAN_HPP_NAMESPACE::Bool32 filmGrainSupport_ ) VULKAN_HPP_NOEXCEPT + { + filmGrainSupport = filmGrainSupport_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoDecodeAV1ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeAV1ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, surfaceFormat ); + return std::tie( sType, pNext, stdProfile, filmGrainSupport ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SurfaceFormat2KHR 
const & ) const = default; -#else - bool operator==( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + std::strong_ordering operator<=>( VideoDecodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceFormat == rhs.surfaceFormat ); -# endif + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &stdProfile, &rhs.stdProfile, sizeof( StdVideoAV1Profile ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = filmGrainSupport <=> rhs.filmGrainSupport; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; } +#endif - bool operator!=( SurfaceFormat2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoDecodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfile, &rhs.stdProfile, sizeof( StdVideoAV1Profile ) ) == 0 ) && + ( filmGrainSupport == rhs.filmGrainSupport ); + } + + bool operator!=( VideoDecodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFormat2KHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeAv1ProfileInfoKHR; + const void * pNext = {}; + StdVideoAV1Profile stdProfile = {}; + VULKAN_HPP_NAMESPACE::Bool32 filmGrainSupport = {}; }; template <> - struct CppType + struct CppType { - using Type = SurfaceFormat2KHR; + using Type = VideoDecodeAV1ProfileInfoKHR; }; -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - struct SurfaceFullScreenExclusiveInfoEXT + struct VideoDecodeAV1SessionParametersCreateInfoKHR { - using NativeType = VkSurfaceFullScreenExclusiveInfoEXT; + using NativeType = VkVideoDecodeAV1SessionParametersCreateInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeAv1SessionParametersCreateInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( - VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fullScreenExclusive( fullScreenExclusive_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeAV1SessionParametersCreateInfoKHR( const StdVideoAV1SequenceHeader * pStdSequenceHeader_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdSequenceHeader{ pStdSequenceHeader_ } { } - VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoDecodeAV1SessionParametersCreateInfoKHR( VideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : 
SurfaceFullScreenExclusiveInfoEXT( *reinterpret_cast( &rhs ) ) + VideoDecodeAV1SessionParametersCreateInfoKHR( VkVideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeAV1SessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SurfaceFullScreenExclusiveInfoEXT & operator=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoDecodeAV1SessionParametersCreateInfoKHR & operator=( VideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SurfaceFullScreenExclusiveInfoEXT & operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoDecodeAV1SessionParametersCreateInfoKHR & operator=( VkVideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT & - setFullScreenExclusive( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeAV1SessionParametersCreateInfoKHR & + setPStdSequenceHeader( const StdVideoAV1SequenceHeader * pStdSequenceHeader_ ) VULKAN_HPP_NOEXCEPT { - fullScreenExclusive = fullScreenExclusive_; + pStdSequenceHeader = pStdSequenceHeader_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSurfaceFullScreenExclusiveInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeAV1SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSurfaceFullScreenExclusiveInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeAV1SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, fullScreenExclusive ); + return std::tie( sType, pNext, pStdSequenceHeader ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SurfaceFullScreenExclusiveInfoEXT const & ) const = default; -# else - bool operator==( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeAV1SessionParametersCreateInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fullScreenExclusive == rhs.fullScreenExclusive ); -# endif +# else + return ( sType == 
rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdSequenceHeader == rhs.pStdSequenceHeader ); +# endif } - bool operator!=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoDecodeAV1SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeAv1SessionParametersCreateInfoKHR; + const void * pNext = {}; + const StdVideoAV1SequenceHeader * pStdSequenceHeader = {}; }; template <> - struct CppType + struct CppType { - using Type = SurfaceFullScreenExclusiveInfoEXT; + using Type = VideoDecodeAV1SessionParametersCreateInfoKHR; }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - struct SurfaceFullScreenExclusiveWin32InfoEXT + struct VideoDecodeCapabilitiesKHR { - using NativeType = VkSurfaceFullScreenExclusiveWin32InfoEXT; + using NativeType = VkVideoDecodeCapabilitiesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT; - -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( HMONITOR hmonitor_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , hmonitor( hmonitor_ ) - { - } - - VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeCapabilitiesKHR; - SurfaceFullScreenExclusiveWin32InfoEXT( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : SurfaceFullScreenExclusiveWin32InfoEXT( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeCapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR flags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SurfaceFullScreenExclusiveWin32InfoEXT & operator=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoDecodeCapabilitiesKHR( VideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SurfaceFullScreenExclusiveWin32InfoEXT & operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoDecodeCapabilitiesKHR( VkVideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeCapabilitiesKHR( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + VideoDecodeCapabilitiesKHR & operator=( VideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT & setHmonitor( HMONITOR hmonitor_ ) VULKAN_HPP_NOEXCEPT + 
VideoDecodeCapabilitiesKHR & operator=( VkVideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - hmonitor = hmonitor_; + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkSurfaceFullScreenExclusiveWin32InfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSurfaceFullScreenExclusiveWin32InfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, hmonitor ); + return std::tie( sType, pNext, flags ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SurfaceFullScreenExclusiveWin32InfoEXT const & ) const = default; -# else - bool operator==( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeCapabilitiesKHR const & ) const = default; +#else + bool operator==( VideoDecodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hmonitor == rhs.hmonitor ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif } - bool operator!=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoDecodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT; - const void * pNext = {}; - HMONITOR hmonitor = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeCapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR flags = {}; }; template <> - struct CppType + struct CppType { - using Type = SurfaceFullScreenExclusiveWin32InfoEXT; + using Type = VideoDecodeCapabilitiesKHR; }; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct SurfaceProtectedCapabilitiesKHR + struct VideoDecodeH264CapabilitiesKHR { - using NativeType = VkSurfaceProtectedCapabilitiesKHR; + using NativeType = VkVideoDecodeH264CapabilitiesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceProtectedCapabilitiesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264CapabilitiesKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , supportsProtected( supportsProtected_ ) - { - } - - VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - SurfaceProtectedCapabilitiesKHR( 
VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : SurfaceProtectedCapabilitiesKHR( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264CapabilitiesKHR( StdVideoH264LevelIdc maxLevelIdc_ = {}, + VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxLevelIdc{ maxLevelIdc_ } + , fieldOffsetGranularity{ fieldOffsetGranularity_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SurfaceProtectedCapabilitiesKHR & operator=( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoDecodeH264CapabilitiesKHR( VideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SurfaceProtectedCapabilitiesKHR & operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VideoDecodeH264CapabilitiesKHR( VkVideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264CapabilitiesKHR( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SurfaceProtectedCapabilitiesKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + VideoDecodeH264CapabilitiesKHR & operator=( VideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 SurfaceProtectedCapabilitiesKHR & setSupportsProtected( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ ) VULKAN_HPP_NOEXCEPT + VideoDecodeH264CapabilitiesKHR & operator=( VkVideoDecodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - supportsProtected = supportsProtected_; + *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkSurfaceProtectedCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH264CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSurfaceProtectedCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH264CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, supportsProtected ); + return std::tie( sType, pNext, maxLevelIdc, fieldOffsetGranularity ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SurfaceProtectedCapabilitiesKHR const & ) const = default; -#else - bool operator==( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + std::strong_ordering operator<=>( VideoDecodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportsProtected == rhs.supportsProtected ); -# endif + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = fieldOffsetGranularity <=> rhs.fieldOffsetGranularity; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; } +#endif - bool operator!=( SurfaceProtectedCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoDecodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ) == 0 ) && + ( fieldOffsetGranularity == rhs.fieldOffsetGranularity ); + } + + bool operator!=( VideoDecodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceProtectedCapabilitiesKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 supportsProtected = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264CapabilitiesKHR; + void * pNext = {}; + StdVideoH264LevelIdc maxLevelIdc = {}; + VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity = {}; }; template <> - struct CppType + struct CppType { - using Type = SurfaceProtectedCapabilitiesKHR; + using Type = VideoDecodeH264CapabilitiesKHR; }; - struct SwapchainCounterCreateInfoEXT + struct VideoDecodeH264DpbSlotInfoKHR { - using NativeType = VkSwapchainCounterCreateInfoEXT; + using NativeType = VkVideoDecodeH264DpbSlotInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCounterCreateInfoEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , surfaceCounters( surfaceCounters_ ) + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264DpbSlotInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoKHR( const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdReferenceInfo{ pStdReferenceInfo_ } { } - VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoKHR( VideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : SwapchainCounterCreateInfoEXT( *reinterpret_cast( &rhs ) ) + VideoDecodeH264DpbSlotInfoKHR( VkVideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264DpbSlotInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SwapchainCounterCreateInfoEXT & operator=( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoDecodeH264DpbSlotInfoKHR & operator=( VideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SwapchainCounterCreateInfoEXT & operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoDecodeH264DpbSlotInfoKHR & operator=( VkVideoDecodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + 
*this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT & - setSurfaceCounters( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoKHR & + setPStdReferenceInfo( const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT { - surfaceCounters = surfaceCounters_; + pStdReferenceInfo = pStdReferenceInfo_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSwapchainCounterCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH264DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSwapchainCounterCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH264DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, surfaceCounters ); + return std::tie( sType, pNext, pStdReferenceInfo ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SwapchainCounterCreateInfoEXT const & ) const = default; + auto operator<=>( VideoDecodeH264DpbSlotInfoKHR const & ) const = default; #else - bool operator==( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoDecodeH264DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( surfaceCounters == rhs.surfaceCounters ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo ); # endif } - bool operator!=( SwapchainCounterCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoDecodeH264DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264DpbSlotInfoKHR; + const void * pNext = {}; + const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo = {}; }; template <> - struct CppType + struct CppType { - using Type = SwapchainCounterCreateInfoEXT; + using Type = VideoDecodeH264DpbSlotInfoKHR; }; - struct SwapchainCreateInfoKHR + struct VideoDecodeH264InlineSessionParametersInfoKHR { - using NativeType = VkSwapchainCreateInfoKHR; + using NativeType = VkVideoDecodeH264InlineSessionParametersInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCreateInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eVideoDecodeH264InlineSessionParametersInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, - uint32_t minImageCount_ = {}, - VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear, - VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}, - uint32_t imageArrayLayers_ = {}, - VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, - VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, - uint32_t queueFamilyIndexCount_ = {}, - const uint32_t * pQueueFamilyIndices_ = {}, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, - VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, - VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, - VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, - VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , surface( surface_ ) - , minImageCount( minImageCount_ ) - , imageFormat( imageFormat_ ) - , imageColorSpace( imageColorSpace_ ) - , imageExtent( imageExtent_ ) - , imageArrayLayers( imageArrayLayers_ ) - , imageUsage( imageUsage_ ) - , imageSharingMode( imageSharingMode_ ) - , queueFamilyIndexCount( queueFamilyIndexCount_ ) - , pQueueFamilyIndices( pQueueFamilyIndices_ ) - , preTransform( preTransform_ ) - , compositeAlpha( compositeAlpha_ ) - , presentMode( presentMode_ ) - , clipped( clipped_ ) - , oldSwapchain( oldSwapchain_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264InlineSessionParametersInfoKHR( const StdVideoH264SequenceParameterSet * pStdSPS_ = {}, + const StdVideoH264PictureParameterSet * pStdPPS_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdSPS{ pStdSPS_ } + , pStdPPS{ pStdPPS_ } { } - VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : SwapchainCreateInfoKHR( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR + VideoDecodeH264InlineSessionParametersInfoKHR( VideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_, - VULKAN_HPP_NAMESPACE::SurfaceKHR surface_, - uint32_t minImageCount_, - VULKAN_HPP_NAMESPACE::Format imageFormat_, - VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_, - VULKAN_HPP_NAMESPACE::Extent2D imageExtent_, - uint32_t imageArrayLayers_, - VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_, - VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_, - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, - VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = 
VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, - VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, - VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, - VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , surface( surface_ ) - , minImageCount( minImageCount_ ) - , imageFormat( imageFormat_ ) - , imageColorSpace( imageColorSpace_ ) - , imageExtent( imageExtent_ ) - , imageArrayLayers( imageArrayLayers_ ) - , imageUsage( imageUsage_ ) - , imageSharingMode( imageSharingMode_ ) - , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) - , pQueueFamilyIndices( queueFamilyIndices_.data() ) - , preTransform( preTransform_ ) - , compositeAlpha( compositeAlpha_ ) - , presentMode( presentMode_ ) - , clipped( clipped_ ) - , oldSwapchain( oldSwapchain_ ) + VideoDecodeH264InlineSessionParametersInfoKHR( VkVideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264InlineSessionParametersInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SwapchainCreateInfoKHR & operator=( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoDecodeH264InlineSessionParametersInfoKHR & operator=( VideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SwapchainCreateInfoKHR & operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VideoDecodeH264InlineSessionParametersInfoKHR & operator=( VkVideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264InlineSessionParametersInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264InlineSessionParametersInfoKHR & setPStdSPS( const StdVideoH264SequenceParameterSet * pStdSPS_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + pStdSPS = pStdSPS_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264InlineSessionParametersInfoKHR & setPStdPPS( const StdVideoH264PictureParameterSet * pStdPPS_ ) VULKAN_HPP_NOEXCEPT { - surface = surface_; + pStdPPS = pStdPPS_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setMinImageCount( uint32_t minImageCount_ ) VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH264InlineSessionParametersInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - minImageCount = minImageCount_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH264InlineSessionParametersInfoKHR &() VULKAN_HPP_NOEXCEPT { - imageFormat = imageFormat_; - return *this; + return *reinterpret_cast( 
this ); } - VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - imageColorSpace = imageColorSpace_; - return *this; + return std::tie( sType, pNext, pStdSPS, pStdPPS ); } +#endif - VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264InlineSessionParametersInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - imageExtent = imageExtent_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdSPS == rhs.pStdSPS ) && ( pStdPPS == rhs.pStdPPS ); +# endif } - VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageArrayLayers( uint32_t imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( VideoDecodeH264InlineSessionParametersInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - imageArrayLayers = imageArrayLayers_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT - { - imageUsage = imageUsage_; - return *this; - } + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264InlineSessionParametersInfoKHR; + const void * pNext = {}; + const StdVideoH264SequenceParameterSet * pStdSPS = {}; + const StdVideoH264PictureParameterSet * pStdPPS = {}; + }; - VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT + template <> + struct CppType + { + using Type = VideoDecodeH264InlineSessionParametersInfoKHR; + }; + + struct VideoDecodeH264PictureInfoKHR + { + using NativeType = VkVideoDecodeH264PictureInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264PictureInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoKHR( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ = {}, + uint32_t sliceCount_ = {}, + const uint32_t * pSliceOffsets_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdPictureInfo{ pStdPictureInfo_ } + , sliceCount{ sliceCount_ } + , pSliceOffsets{ pSliceOffsets_ } { - imageSharingMode = imageSharingMode_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoKHR( VideoDecodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264PictureInfoKHR( VkVideoDecodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264PictureInfoKHR( *reinterpret_cast( &rhs ) ) { - queueFamilyIndexCount = queueFamilyIndexCount_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT +# if !defined( 
VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264PictureInfoKHR( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sliceOffsets_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), pStdPictureInfo( pStdPictureInfo_ ), sliceCount( static_cast( sliceOffsets_.size() ) ), pSliceOffsets( sliceOffsets_.data() ) { - pQueueFamilyIndices = pQueueFamilyIndices_; - return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - SwapchainCreateInfoKHR & - setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + VideoDecodeH264PictureInfoKHR & operator=( VideoDecodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeH264PictureInfoKHR & operator=( VkVideoDecodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); - pQueueFamilyIndices = queueFamilyIndices_.data(); + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - preTransform = preTransform_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setPStdPictureInfo( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT { - compositeAlpha = compositeAlpha_; + pStdPictureInfo = pStdPictureInfo_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setSliceCount( uint32_t sliceCount_ ) VULKAN_HPP_NOEXCEPT { - presentMode = presentMode_; + sliceCount = sliceCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoKHR & setPSliceOffsets( const uint32_t * pSliceOffsets_ ) VULKAN_HPP_NOEXCEPT { - clipped = clipped_; + pSliceOffsets = pSliceOffsets_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & setOldSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264PictureInfoKHR & setSliceOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sliceOffsets_ ) VULKAN_HPP_NOEXCEPT { - oldSwapchain = oldSwapchain_; + sliceCount = static_cast( sliceOffsets_.size() ); + pSliceOffsets = sliceOffsets_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH264PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + operator 
VkVideoDecodeH264PictureInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -88246,343 +129953,279 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + const uint32_t * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - flags, - surface, - minImageCount, - imageFormat, - imageColorSpace, - imageExtent, - imageArrayLayers, - imageUsage, - imageSharingMode, - queueFamilyIndexCount, - pQueueFamilyIndices, - preTransform, - compositeAlpha, - presentMode, - clipped, - oldSwapchain ); + return std::tie( sType, pNext, pStdPictureInfo, sliceCount, pSliceOffsets ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SwapchainCreateInfoKHR const & ) const = default; + auto operator<=>( VideoDecodeH264PictureInfoKHR const & ) const = default; #else - bool operator==( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoDecodeH264PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( surface == rhs.surface ) && - ( minImageCount == rhs.minImageCount ) && ( imageFormat == rhs.imageFormat ) && ( imageColorSpace == rhs.imageColorSpace ) && - ( imageExtent == rhs.imageExtent ) && ( imageArrayLayers == rhs.imageArrayLayers ) && ( imageUsage == rhs.imageUsage ) && - ( imageSharingMode == rhs.imageSharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && - ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) && ( preTransform == rhs.preTransform ) && ( compositeAlpha == rhs.compositeAlpha ) && - ( presentMode == rhs.presentMode ) && ( clipped == rhs.clipped ) && ( oldSwapchain == rhs.oldSwapchain ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && ( sliceCount == rhs.sliceCount ) && + ( pSliceOffsets == rhs.pSliceOffsets ); # endif } - bool operator!=( SwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoDecodeH264PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {}; - VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; - uint32_t minImageCount = {}; - VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear; - VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; - uint32_t imageArrayLayers = {}; - VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {}; - VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive; - uint32_t queueFamilyIndexCount = {}; - const uint32_t * pQueueFamilyIndices = {}; - VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; - VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque; - VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate; - VULKAN_HPP_NAMESPACE::Bool32 clipped = {}; - VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {}; + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264PictureInfoKHR; + const void * pNext = {}; + const StdVideoDecodeH264PictureInfo * pStdPictureInfo = {}; + uint32_t sliceCount = {}; + const uint32_t * pSliceOffsets = {}; }; template <> - struct CppType + struct CppType { - using Type = SwapchainCreateInfoKHR; + using Type = VideoDecodeH264PictureInfoKHR; }; - struct SwapchainDisplayNativeHdrCreateInfoAMD + struct VideoDecodeH264ProfileInfoKHR { - using NativeType = VkSwapchainDisplayNativeHdrCreateInfoAMD; + using NativeType = VkVideoDecodeH264ProfileInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264ProfileInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , localDimmingEnable( localDimmingEnable_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileInfoKHR( StdVideoH264ProfileIdc stdProfileIdc_ = {}, + VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR pictureLayout_ = + VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR::eProgressive, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stdProfileIdc{ stdProfileIdc_ } + , pictureLayout{ pictureLayout_ } { } - VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileInfoKHR( VideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SwapchainDisplayNativeHdrCreateInfoAMD( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT - : SwapchainDisplayNativeHdrCreateInfoAMD( *reinterpret_cast( &rhs ) ) + VideoDecodeH264ProfileInfoKHR( VkVideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264ProfileInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SwapchainDisplayNativeHdrCreateInfoAMD & operator=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoDecodeH264ProfileInfoKHR & operator=( VideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - SwapchainDisplayNativeHdrCreateInfoAMD & operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + VideoDecodeH264ProfileInfoKHR & operator=( VkVideoDecodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD & - setLocalDimmingEnable( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
VideoDecodeH264ProfileInfoKHR & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT { - localDimmingEnable = localDimmingEnable_; + stdProfileIdc = stdProfileIdc_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkSwapchainDisplayNativeHdrCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileInfoKHR & + setPictureLayout( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR pictureLayout_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pictureLayout = pictureLayout_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkSwapchainDisplayNativeHdrCreateInfoAMD &() VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH264ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeH264ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, localDimmingEnable ); + return std::tie( sType, pNext, stdProfileIdc, pictureLayout ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SwapchainDisplayNativeHdrCreateInfoAMD const & ) const = default; -#else - bool operator==( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + std::strong_ordering operator<=>( VideoDecodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( localDimmingEnable == rhs.localDimmingEnable ); -# endif + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = pictureLayout <=> rhs.pictureLayout; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoDecodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 ) && + ( pictureLayout == rhs.pictureLayout ); } - bool operator!=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoDecodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264ProfileInfoKHR; + const void * pNext = {}; + StdVideoH264ProfileIdc stdProfileIdc = {}; + VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR pictureLayout = VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR::eProgressive; }; template <> - struct CppType + struct CppType { - using Type = SwapchainDisplayNativeHdrCreateInfoAMD; + using Type = VideoDecodeH264ProfileInfoKHR; }; - struct TextureLODGatherFormatPropertiesAMD + struct VideoDecodeH264SessionParametersAddInfoKHR { - using NativeType = VkTextureLODGatherFormatPropertiesAMD; + using NativeType = VkVideoDecodeH264SessionParametersAddInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTextureLodGatherFormatPropertiesAMD; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264SessionParametersAddInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , supportsTextureGatherLODBiasAMD( supportsTextureGatherLODBiasAMD_ ) - { - } - - VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - TextureLODGatherFormatPropertiesAMD( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT - : TextureLODGatherFormatPropertiesAMD( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoKHR( uint32_t stdSPSCount_ = {}, + const StdVideoH264SequenceParameterSet * pStdSPSs_ = {}, + uint32_t stdPPSCount_ = {}, + const StdVideoH264PictureParameterSet * pStdPPSs_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stdSPSCount{ stdSPSCount_ } + , pStdSPSs{ pStdSPSs_ } + , stdPPSCount{ stdPPSCount_ } + , pStdPPSs{ pStdPPSs_ } { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - TextureLODGatherFormatPropertiesAMD & operator=( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; - TextureLODGatherFormatPropertiesAMD & operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoKHR( 
VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - operator VkTextureLODGatherFormatPropertiesAMD const &() const VULKAN_HPP_NOEXCEPT + VideoDecodeH264SessionParametersAddInfoKHR( VkVideoDecodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264SessionParametersAddInfoKHR( *reinterpret_cast( &rhs ) ) { - return *reinterpret_cast( this ); } - operator VkTextureLODGatherFormatPropertiesAMD &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264SessionParametersAddInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdSPSs_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdPPSs_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , stdSPSCount( static_cast( stdSPSs_.size() ) ) + , pStdSPSs( stdSPSs_.data() ) + , stdPPSCount( static_cast( stdPPSs_.size() ) ) + , pStdPPSs( stdPPSs_.data() ) { - return *reinterpret_cast( this ); } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, supportsTextureGatherLODBiasAMD ); - } -#endif + VideoDecodeH264SessionParametersAddInfoKHR & operator=( VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( TextureLODGatherFormatPropertiesAMD const & ) const = default; -#else - bool operator==( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + VideoDecodeH264SessionParametersAddInfoKHR & operator=( VkVideoDecodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD ); -# endif + *this = *reinterpret_cast( &rhs ); + return *this; } - bool operator!=( TextureLODGatherFormatPropertiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + pNext = pNext_; + return *this; } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD = {}; - }; - - template <> - struct CppType - { - using Type = TextureLODGatherFormatPropertiesAMD; - }; - struct TilePropertiesQCOM - { - using NativeType = VkTilePropertiesQCOM; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTilePropertiesQCOM; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR TilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Extent3D tileSize_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D apronSize_ = {}, - VULKAN_HPP_NAMESPACE::Offset2D origin_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , tileSize( tileSize_ ) - , apronSize( apronSize_ ) - , origin( origin_ ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT { + stdSPSCount = 
stdSPSCount_; + return *this; } - VULKAN_HPP_CONSTEXPR TilePropertiesQCOM( TilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - TilePropertiesQCOM( VkTilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : TilePropertiesQCOM( *reinterpret_cast( &rhs ) ) {} -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - TilePropertiesQCOM & operator=( TilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - TilePropertiesQCOM & operator=( VkTilePropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setPStdSPSs( const StdVideoH264SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + pStdSPSs = pStdSPSs_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264SessionParametersAddInfoKHR & + setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + stdSPSCount = static_cast( stdSPSs_.size() ); + pStdSPSs = stdSPSs_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setTileSize( VULKAN_HPP_NAMESPACE::Extent3D const & tileSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT { - tileSize = tileSize_; + stdPPSCount = stdPPSCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setApronSize( VULKAN_HPP_NAMESPACE::Extent2D const & apronSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoKHR & setPStdPPSs( const StdVideoH264PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT { - apronSize = apronSize_; + pStdPPSs = pStdPPSs_; return *this; } - VULKAN_HPP_CONSTEXPR_14 TilePropertiesQCOM & setOrigin( VULKAN_HPP_NAMESPACE::Offset2D const & origin_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH264SessionParametersAddInfoKHR & + setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT { - origin = origin_; + stdPPSCount = static_cast( stdPPSs_.size() ); + pStdPPSs = stdPPSs_.data(); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkTilePropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH264SessionParametersAddInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkTilePropertiesQCOM &() VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH264SessionParametersAddInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -88590,159 +130233,124 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + const void * const &, + uint32_t const &, + const StdVideoH264SequenceParameterSet * const &, + uint32_t const &, + const StdVideoH264PictureParameterSet * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, tileSize, apronSize, origin ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( TilePropertiesQCOM const & ) const = default; -#else - bool operator==( TilePropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT - 
{ -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tileSize == rhs.tileSize ) && ( apronSize == rhs.apronSize ) && ( origin == rhs.origin ); -# endif - } - - bool operator!=( TilePropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); + return std::tie( sType, pNext, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs ); } #endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTilePropertiesQCOM; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Extent3D tileSize = {}; - VULKAN_HPP_NAMESPACE::Extent2D apronSize = {}; - VULKAN_HPP_NAMESPACE::Offset2D origin = {}; - }; - - template <> - struct CppType - { - using Type = TilePropertiesQCOM; - }; - - struct TimelineSemaphoreSubmitInfo - { - using NativeType = VkTimelineSemaphoreSubmitInfo; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTimelineSemaphoreSubmitInfo; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( uint32_t waitSemaphoreValueCount_ = {}, - const uint64_t * pWaitSemaphoreValues_ = {}, - uint32_t signalSemaphoreValueCount_ = {}, - const uint64_t * pSignalSemaphoreValues_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , waitSemaphoreValueCount( waitSemaphoreValueCount_ ) - , pWaitSemaphoreValues( pWaitSemaphoreValues_ ) - , signalSemaphoreValueCount( signalSemaphoreValueCount_ ) - , pSignalSemaphoreValues( pSignalSemaphoreValues_ ) - { - } - - VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - TimelineSemaphoreSubmitInfo( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT - : TimelineSemaphoreSubmitInfo( *reinterpret_cast( &rhs ) ) - { - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - TimelineSemaphoreSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , waitSemaphoreValueCount( static_cast( waitSemaphoreValues_.size() ) ) - , pWaitSemaphoreValues( waitSemaphoreValues_.data() ) - , signalSemaphoreValueCount( static_cast( signalSemaphoreValues_.size() ) ) - , pSignalSemaphoreValues( signalSemaphoreValues_.data() ) + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeH264SessionParametersAddInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stdSPSCount == rhs.stdSPSCount ) && ( pStdSPSs == rhs.pStdSPSs ) && + ( stdPPSCount == rhs.stdPPSCount ) && ( pStdPPSs == rhs.pStdPPSs ); +# endif } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - TimelineSemaphoreSubmitInfo & operator=( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - TimelineSemaphoreSubmitInfo & operator=( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + bool operator!=( VideoDecodeH264SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); - return *this; + return 
!operator==( rhs ); } +#endif -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersAddInfoKHR; + const void * pNext = {}; + uint32_t stdSPSCount = {}; + const StdVideoH264SequenceParameterSet * pStdSPSs = {}; + uint32_t stdPPSCount = {}; + const StdVideoH264PictureParameterSet * pStdPPSs = {}; + }; + + template <> + struct CppType + { + using Type = VideoDecodeH264SessionParametersAddInfoKHR; + }; + + struct VideoDecodeH264SessionParametersCreateInfoKHR + { + using NativeType = VkVideoDecodeH264SessionParametersCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264SessionParametersCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoDecodeH264SessionParametersCreateInfoKHR( uint32_t maxStdSPSCount_ = {}, + uint32_t maxStdPPSCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxStdSPSCount{ maxStdSPSCount_ } + , maxStdPPSCount{ maxStdPPSCount_ } + , pParametersAddInfo{ pParametersAddInfo_ } { - pNext = pNext_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setWaitSemaphoreValueCount( uint32_t waitSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + VideoDecodeH264SessionParametersCreateInfoKHR( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH264SessionParametersCreateInfoKHR( VkVideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH264SessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) { - waitSemaphoreValueCount = waitSemaphoreValueCount_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + VideoDecodeH264SessionParametersCreateInfoKHR & operator=( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeH264SessionParametersCreateInfoKHR & operator=( VkVideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - pWaitSemaphoreValues = pWaitSemaphoreValues_; + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - TimelineSemaphoreSubmitInfo & - setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - waitSemaphoreValueCount = static_cast( waitSemaphoreValues_.size() ); - pWaitSemaphoreValues = waitSemaphoreValues_.data(); + pNext = pNext_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) 
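// --------------------------------------------------------------------------
// [Editor's sketch - not part of the generated diff] Using the ArrayProxy
// constructor of VideoDecodeH264SessionParametersAddInfoKHR shown above: the
// stdSPSCount/stdPPSCount fields are derived from the array sizes and only
// pointers are stored, so spsArray/ppsArray must outlive the returned
// structure. Names are illustrative.
#include <array>
#include <vulkan/vulkan.hpp>

static vk::VideoDecodeH264SessionParametersAddInfoKHR
  makeH264AddInfo( std::array<StdVideoH264SequenceParameterSet, 1> const & spsArray,
                   std::array<StdVideoH264PictureParameterSet, 1> const &  ppsArray )
{
  return vk::VideoDecodeH264SessionParametersAddInfoKHR( spsArray, ppsArray );
}
// --------------------------------------------------------------------------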
VULKAN_HPP_NOEXCEPT { - signalSemaphoreValueCount = signalSemaphoreValueCount_; + maxStdSPSCount = maxStdSPSCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT { - pSignalSemaphoreValues = pSignalSemaphoreValues_; + maxStdPPSCount = maxStdPPSCount_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - TimelineSemaphoreSubmitInfo & - setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoKHR & + setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT { - signalSemaphoreValueCount = static_cast( signalSemaphoreValues_.size() ); - pSignalSemaphoreValues = signalSemaphoreValues_.data(); + pParametersAddInfo = pParametersAddInfo_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkTimelineSemaphoreSubmitInfo const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH264SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkTimelineSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH264SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -88752,479 +130360,445 @@ namespace VULKAN_HPP_NAMESPACE std::tuple + const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, waitSemaphoreValueCount, pWaitSemaphoreValues, signalSemaphoreValueCount, pSignalSemaphoreValues ); + return std::tie( sType, pNext, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( TimelineSemaphoreSubmitInfo const & ) const = default; + auto operator<=>( VideoDecodeH264SessionParametersCreateInfoKHR const & ) const = default; #else - bool operator==( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreValueCount == rhs.waitSemaphoreValueCount ) && - ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) && ( signalSemaphoreValueCount == rhs.signalSemaphoreValueCount ) && - ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxStdSPSCount == rhs.maxStdSPSCount ) && ( maxStdPPSCount == rhs.maxStdPPSCount ) && + ( pParametersAddInfo == rhs.pParametersAddInfo ); # endif } - bool operator!=( TimelineSemaphoreSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoDecodeH264SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eTimelineSemaphoreSubmitInfo; - const void * pNext = {}; - uint32_t waitSemaphoreValueCount = {}; - const uint64_t * pWaitSemaphoreValues = {}; - uint32_t signalSemaphoreValueCount = {}; - const uint64_t * pSignalSemaphoreValues = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersCreateInfoKHR; + const void * pNext = {}; + uint32_t maxStdSPSCount = {}; + uint32_t maxStdPPSCount = {}; + const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR * pParametersAddInfo = {}; }; template <> - struct CppType + struct CppType { - using Type = TimelineSemaphoreSubmitInfo; + using Type = VideoDecodeH264SessionParametersCreateInfoKHR; }; - using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo; - struct TraceRaysIndirectCommand2KHR + struct VideoDecodeH265CapabilitiesKHR { - using NativeType = VkTraceRaysIndirectCommand2KHR; + using NativeType = VkVideoDecodeH265CapabilitiesKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommand2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress raygenShaderRecordAddress_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderRecordSize_ = {}, - VULKAN_HPP_NAMESPACE::DeviceAddress missShaderBindingTableAddress_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableSize_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableStride_ = {}, - VULKAN_HPP_NAMESPACE::DeviceAddress hitShaderBindingTableAddress_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableSize_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableStride_ = {}, - VULKAN_HPP_NAMESPACE::DeviceAddress callableShaderBindingTableAddress_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableSize_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableStride_ = {}, - uint32_t width_ = {}, - uint32_t height_ = {}, - uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT - : raygenShaderRecordAddress( raygenShaderRecordAddress_ ) - , raygenShaderRecordSize( raygenShaderRecordSize_ ) - , missShaderBindingTableAddress( missShaderBindingTableAddress_ ) - , missShaderBindingTableSize( missShaderBindingTableSize_ ) - , missShaderBindingTableStride( missShaderBindingTableStride_ ) - , hitShaderBindingTableAddress( hitShaderBindingTableAddress_ ) - , hitShaderBindingTableSize( hitShaderBindingTableSize_ ) - , hitShaderBindingTableStride( hitShaderBindingTableStride_ ) - , callableShaderBindingTableAddress( callableShaderBindingTableAddress_ ) - , callableShaderBindingTableSize( callableShaderBindingTableSize_ ) - , callableShaderBindingTableStride( callableShaderBindingTableStride_ ) - , width( width_ ) - , height( height_ ) - , depth( depth_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265CapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265CapabilitiesKHR( StdVideoH265LevelIdc maxLevelIdc_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxLevelIdc{ maxLevelIdc_ } { } - VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommand2KHR( TraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoDecodeH265CapabilitiesKHR( VideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - TraceRaysIndirectCommand2KHR( VkTraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - : 
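// --------------------------------------------------------------------------
// [Editor's sketch - not part of the generated diff] Pairing the create-info
// with an add-info: maxStdSPSCount/maxStdPPSCount bound the parameter storage
// of the session-parameters object, and pParametersAddInfo optionally seeds
// it. The capacities (4/8) are arbitrary illustrative values, and addInfo
// must stay alive until the parameters object is created.
#include <vulkan/vulkan.hpp>

static vk::VideoDecodeH264SessionParametersCreateInfoKHR
  makeH264ParametersCreateInfo( vk::VideoDecodeH264SessionParametersAddInfoKHR const & addInfo )
{
  return vk::VideoDecodeH264SessionParametersCreateInfoKHR{}
    .setMaxStdSPSCount( 4 )
    .setMaxStdPPSCount( 8 )
    .setPParametersAddInfo( &addInfo );
}
// --------------------------------------------------------------------------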
TraceRaysIndirectCommand2KHR( *reinterpret_cast( &rhs ) ) + VideoDecodeH265CapabilitiesKHR( VkVideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265CapabilitiesKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - TraceRaysIndirectCommand2KHR & operator=( TraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoDecodeH265CapabilitiesKHR & operator=( VideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - TraceRaysIndirectCommand2KHR & operator=( VkTraceRaysIndirectCommand2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + VideoDecodeH265CapabilitiesKHR & operator=( VkVideoDecodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & - setRaygenShaderRecordAddress( VULKAN_HPP_NAMESPACE::DeviceAddress raygenShaderRecordAddress_ ) VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH265CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - raygenShaderRecordAddress = raygenShaderRecordAddress_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & - setRaygenShaderRecordSize( VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderRecordSize_ ) VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH265CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - raygenShaderRecordSize = raygenShaderRecordSize_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & - setMissShaderBindingTableAddress( VULKAN_HPP_NAMESPACE::DeviceAddress missShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - missShaderBindingTableAddress = missShaderBindingTableAddress_; - return *this; + return std::tie( sType, pNext, maxLevelIdc ); } +#endif - VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & - setMissShaderBindingTableSize( VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoDecodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - missShaderBindingTableSize = missShaderBindingTableSize_; - return *this; - } + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; - VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & - setMissShaderBindingTableStride( VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT - { - missShaderBindingTableStride = missShaderBindingTableStride_; - return *this; + return std::strong_ordering::equivalent; } +#endif - VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & - setHitShaderBindingTableAddress( VULKAN_HPP_NAMESPACE::DeviceAddress hitShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT + bool operator==( VideoDecodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - hitShaderBindingTableAddress = hitShaderBindingTableAddress_; - return *this; + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ) == 0 ); } - VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & - setHitShaderBindingTableSize( VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( VideoDecodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - hitShaderBindingTableSize = hitShaderBindingTableSize_; - return *this; + return !operator==( rhs ); } - VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & - setHitShaderBindingTableStride( VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT - { - hitShaderBindingTableStride = hitShaderBindingTableStride_; - return *this; - } + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265CapabilitiesKHR; + void * pNext = {}; + StdVideoH265LevelIdc maxLevelIdc = {}; + }; - VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & - setCallableShaderBindingTableAddress( VULKAN_HPP_NAMESPACE::DeviceAddress callableShaderBindingTableAddress_ ) VULKAN_HPP_NOEXCEPT - { - callableShaderBindingTableAddress = callableShaderBindingTableAddress_; - return *this; - } + template <> + struct CppType + { + using Type = VideoDecodeH265CapabilitiesKHR; + }; - VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & - setCallableShaderBindingTableSize( VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableSize_ ) VULKAN_HPP_NOEXCEPT + struct VideoDecodeH265DpbSlotInfoKHR + { + using NativeType = VkVideoDecodeH265DpbSlotInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265DpbSlotInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoKHR( const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdReferenceInfo{ pStdReferenceInfo_ } { - callableShaderBindingTableSize = callableShaderBindingTableSize_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & - setCallableShaderBindingTableStride( VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableStride_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoKHR( VideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265DpbSlotInfoKHR( VkVideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265DpbSlotInfoKHR( *reinterpret_cast( &rhs ) ) { - callableShaderBindingTableStride = callableShaderBindingTableStride_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setWidth( uint32_t 
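// --------------------------------------------------------------------------
// [Editor's sketch - not part of the generated diff] One way to obtain
// VideoDecodeH265CapabilitiesKHR is to chain it behind VideoCapabilitiesKHR /
// VideoDecodeCapabilitiesKHR when querying a decode profile. This assumes the
// StructureChain overload vulkan.hpp generates for
// vkGetPhysicalDeviceVideoCapabilitiesKHR and the default (exception-based)
// error handling; both are assumptions, not part of the diff above.
#include <vulkan/vulkan.hpp>

static StdVideoH265LevelIdc queryMaxH265Level( vk::PhysicalDevice              physicalDevice,
                                               vk::VideoProfileInfoKHR const & profileInfo )
{
  auto chain = physicalDevice.getVideoCapabilitiesKHR<vk::VideoCapabilitiesKHR,
                                                      vk::VideoDecodeCapabilitiesKHR,
                                                      vk::VideoDecodeH265CapabilitiesKHR>( profileInfo );
  return chain.get<vk::VideoDecodeH265CapabilitiesKHR>().maxLevelIdc;
}
// --------------------------------------------------------------------------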
width_ ) VULKAN_HPP_NOEXCEPT + VideoDecodeH265DpbSlotInfoKHR & operator=( VideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeH265DpbSlotInfoKHR & operator=( VkVideoDecodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - width = width_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - height = height_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommand2KHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoKHR & + setPStdReferenceInfo( const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT { - depth = depth_; + pStdReferenceInfo = pStdReferenceInfo_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkTraceRaysIndirectCommand2KHR const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH265DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkTraceRaysIndirectCommand2KHR &() VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH265DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( raygenShaderRecordAddress, - raygenShaderRecordSize, - missShaderBindingTableAddress, - missShaderBindingTableSize, - missShaderBindingTableStride, - hitShaderBindingTableAddress, - hitShaderBindingTableSize, - hitShaderBindingTableStride, - callableShaderBindingTableAddress, - callableShaderBindingTableSize, - callableShaderBindingTableStride, - width, - height, - depth ); + return std::tie( sType, pNext, pStdReferenceInfo ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( TraceRaysIndirectCommand2KHR const & ) const = default; + auto operator<=>( VideoDecodeH265DpbSlotInfoKHR const & ) const = default; #else - bool operator==( TraceRaysIndirectCommand2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoDecodeH265DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( raygenShaderRecordAddress == rhs.raygenShaderRecordAddress ) && ( raygenShaderRecordSize == rhs.raygenShaderRecordSize ) && - ( missShaderBindingTableAddress == rhs.missShaderBindingTableAddress ) && ( missShaderBindingTableSize == rhs.missShaderBindingTableSize ) && - ( missShaderBindingTableStride == rhs.missShaderBindingTableStride ) && ( hitShaderBindingTableAddress == rhs.hitShaderBindingTableAddress ) && - ( hitShaderBindingTableSize == rhs.hitShaderBindingTableSize ) && ( hitShaderBindingTableStride == rhs.hitShaderBindingTableStride ) && - ( callableShaderBindingTableAddress == rhs.callableShaderBindingTableAddress ) && - ( callableShaderBindingTableSize == rhs.callableShaderBindingTableSize ) && - ( callableShaderBindingTableStride == rhs.callableShaderBindingTableStride ) && ( width == rhs.width ) && ( height == rhs.height ) && - ( depth 
== rhs.depth ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo ); # endif } - bool operator!=( TraceRaysIndirectCommand2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoDecodeH265DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::DeviceAddress raygenShaderRecordAddress = {}; - VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderRecordSize = {}; - VULKAN_HPP_NAMESPACE::DeviceAddress missShaderBindingTableAddress = {}; - VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableSize = {}; - VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingTableStride = {}; - VULKAN_HPP_NAMESPACE::DeviceAddress hitShaderBindingTableAddress = {}; - VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableSize = {}; - VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingTableStride = {}; - VULKAN_HPP_NAMESPACE::DeviceAddress callableShaderBindingTableAddress = {}; - VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableSize = {}; - VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingTableStride = {}; - uint32_t width = {}; - uint32_t height = {}; - uint32_t depth = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265DpbSlotInfoKHR; + const void * pNext = {}; + const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo = {}; }; - struct TraceRaysIndirectCommandKHR + template <> + struct CppType { - using NativeType = VkTraceRaysIndirectCommandKHR; + using Type = VideoDecodeH265DpbSlotInfoKHR; + }; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT - : width( width_ ) - , height( height_ ) - , depth( depth_ ) + struct VideoDecodeH265InlineSessionParametersInfoKHR + { + using NativeType = VkVideoDecodeH265InlineSessionParametersInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265InlineSessionParametersInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265InlineSessionParametersInfoKHR( const StdVideoH265VideoParameterSet * pStdVPS_ = {}, + const StdVideoH265SequenceParameterSet * pStdSPS_ = {}, + const StdVideoH265PictureParameterSet * pStdPPS_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdVPS{ pStdVPS_ } + , pStdSPS{ pStdSPS_ } + , pStdPPS{ pStdPPS_ } { } - VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + VideoDecodeH265InlineSessionParametersInfoKHR( VideoDecodeH265InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - TraceRaysIndirectCommandKHR( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : TraceRaysIndirectCommandKHR( *reinterpret_cast( &rhs ) ) + VideoDecodeH265InlineSessionParametersInfoKHR( VkVideoDecodeH265InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265InlineSessionParametersInfoKHR( *reinterpret_cast( &rhs ) ) { } - explicit TraceRaysIndirectCommandKHR( Extent2D const & extent2D, uint32_t depth_ = {} ) - : width( extent2D.width ), height( extent2D.height ), depth( depth_ ) + VideoDecodeH265InlineSessionParametersInfoKHR & operator=( VideoDecodeH265InlineSessionParametersInfoKHR const & rhs ) 
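// --------------------------------------------------------------------------
// [Editor's sketch - not part of the generated diff] Describing one H.265 DPB
// slot: the structure only stores a pointer, so the referenced
// StdVideoDecodeH265ReferenceInfo must remain valid while the slot info is in
// use. Helper name is illustrative.
#include <vulkan/vulkan.hpp>

static vk::VideoDecodeH265DpbSlotInfoKHR makeH265DpbSlot( StdVideoDecodeH265ReferenceInfo const & refInfo )
{
  return vk::VideoDecodeH265DpbSlotInfoKHR{}.setPStdReferenceInfo( &refInfo );
}
// --------------------------------------------------------------------------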
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoDecodeH265InlineSessionParametersInfoKHR & operator=( VkVideoDecodeH265InlineSessionParametersInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - TraceRaysIndirectCommandKHR & operator=( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - TraceRaysIndirectCommandKHR & operator=( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265InlineSessionParametersInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + pNext = pNext_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265InlineSessionParametersInfoKHR & setPStdVPS( const StdVideoH265VideoParameterSet * pStdVPS_ ) VULKAN_HPP_NOEXCEPT { - width = width_; + pStdVPS = pStdVPS_; return *this; } - VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265InlineSessionParametersInfoKHR & setPStdSPS( const StdVideoH265SequenceParameterSet * pStdSPS_ ) VULKAN_HPP_NOEXCEPT { - height = height_; + pStdSPS = pStdSPS_; return *this; } - VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265InlineSessionParametersInfoKHR & setPStdPPS( const StdVideoH265PictureParameterSet * pStdPPS_ ) VULKAN_HPP_NOEXCEPT { - depth = depth_; + pStdPPS = pStdPPS_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkTraceRaysIndirectCommandKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH265InlineSessionParametersInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkTraceRaysIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH265InlineSessionParametersInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( width, height, depth ); + return std::tie( sType, pNext, pStdVPS, pStdSPS, pStdPPS ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( TraceRaysIndirectCommandKHR const & ) const = default; + auto operator<=>( VideoDecodeH265InlineSessionParametersInfoKHR const & ) const = default; #else - bool operator==( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoDecodeH265InlineSessionParametersInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( width == rhs.width ) && ( height == rhs.height ) && ( depth == rhs.depth ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdVPS == rhs.pStdVPS ) && ( pStdSPS == rhs.pStdSPS ) && ( pStdPPS == rhs.pStdPPS ); # endif } - bool operator!=( TraceRaysIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoDecodeH265InlineSessionParametersInfoKHR 
const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - uint32_t width = {}; - uint32_t height = {}; - uint32_t depth = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265InlineSessionParametersInfoKHR; + const void * pNext = {}; + const StdVideoH265VideoParameterSet * pStdVPS = {}; + const StdVideoH265SequenceParameterSet * pStdSPS = {}; + const StdVideoH265PictureParameterSet * pStdPPS = {}; }; - struct ValidationCacheCreateInfoEXT + template <> + struct CppType { - using NativeType = VkValidationCacheCreateInfoEXT; + using Type = VideoDecodeH265InlineSessionParametersInfoKHR; + }; + + struct VideoDecodeH265PictureInfoKHR + { + using NativeType = VkVideoDecodeH265PictureInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationCacheCreateInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265PictureInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {}, - size_t initialDataSize_ = {}, - const void * pInitialData_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , initialDataSize( initialDataSize_ ) - , pInitialData( pInitialData_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoKHR( const StdVideoDecodeH265PictureInfo * pStdPictureInfo_ = {}, + uint32_t sliceSegmentCount_ = {}, + const uint32_t * pSliceSegmentOffsets_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdPictureInfo{ pStdPictureInfo_ } + , sliceSegmentCount{ sliceSegmentCount_ } + , pSliceSegmentOffsets{ pSliceSegmentOffsets_ } { } - VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoKHR( VideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ValidationCacheCreateInfoEXT( *reinterpret_cast( &rhs ) ) + VideoDecodeH265PictureInfoKHR( VkVideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265PictureInfoKHR( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - template - ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), flags( flags_ ), initialDataSize( initialData_.size() * sizeof( T ) ), pInitialData( initialData_.data() ) + VideoDecodeH265PictureInfoKHR( const StdVideoDecodeH265PictureInfo * pStdPictureInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sliceSegmentOffsets_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , pStdPictureInfo( pStdPictureInfo_ ) + , sliceSegmentCount( static_cast( sliceSegmentOffsets_.size() ) ) + , pSliceSegmentOffsets( sliceSegmentOffsets_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ValidationCacheCreateInfoEXT & operator=( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoDecodeH265PictureInfoKHR & operator=( 
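// --------------------------------------------------------------------------
// [Editor's sketch - not part of the generated diff] The inline variant lets a
// decode operation reference application-owned VPS/SPS/PPS directly instead of
// a pre-populated session-parameters object; any of the three pointers may be
// left null. The parameter sets are borrowed, not copied, so they must outlive
// the structure. Names are illustrative.
#include <vulkan/vulkan.hpp>

static vk::VideoDecodeH265InlineSessionParametersInfoKHR
  makeH265InlineParameters( StdVideoH265VideoParameterSet const &    vps,
                            StdVideoH265SequenceParameterSet const & sps,
                            StdVideoH265PictureParameterSet const &  pps )
{
  return vk::VideoDecodeH265InlineSessionParametersInfoKHR{}
    .setPStdVPS( &vps )
    .setPStdSPS( &sps )
    .setPStdPPS( &pps );
}
// --------------------------------------------------------------------------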
VideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ValidationCacheCreateInfoEXT & operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoDecodeH265PictureInfoKHR & operator=( VkVideoDecodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setPStdPictureInfo( const StdVideoDecodeH265PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + pStdPictureInfo = pStdPictureInfo_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setSliceSegmentCount( uint32_t sliceSegmentCount_ ) VULKAN_HPP_NOEXCEPT { - initialDataSize = initialDataSize_; + sliceSegmentCount = sliceSegmentCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & setPInitialData( const void * pInitialData_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoKHR & setPSliceSegmentOffsets( const uint32_t * pSliceSegmentOffsets_ ) VULKAN_HPP_NOEXCEPT { - pInitialData = pInitialData_; + pSliceSegmentOffsets = pSliceSegmentOffsets_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - template - ValidationCacheCreateInfoEXT & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialData_ ) VULKAN_HPP_NOEXCEPT + VideoDecodeH265PictureInfoKHR & + setSliceSegmentOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & sliceSegmentOffsets_ ) VULKAN_HPP_NOEXCEPT { - initialDataSize = initialData_.size() * sizeof( T ); - pInitialData = initialData_.data(); + sliceSegmentCount = static_cast( sliceSegmentOffsets_.size() ); + pSliceSegmentOffsets = sliceSegmentOffsets_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkValidationCacheCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH265PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH265PictureInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -89233,420 +130807,427 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + const StdVideoDecodeH265PictureInfo * const &, + uint32_t const &, + const uint32_t * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, initialDataSize, pInitialData ); + return std::tie( sType, pNext, pStdPictureInfo, sliceSegmentCount, pSliceSegmentOffsets ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( 
ValidationCacheCreateInfoEXT const & ) const = default; + auto operator<=>( VideoDecodeH265PictureInfoKHR const & ) const = default; #else - bool operator==( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoDecodeH265PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( initialDataSize == rhs.initialDataSize ) && - ( pInitialData == rhs.pInitialData ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && ( sliceSegmentCount == rhs.sliceSegmentCount ) && + ( pSliceSegmentOffsets == rhs.pSliceSegmentOffsets ); # endif } - bool operator!=( ValidationCacheCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoDecodeH265PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationCacheCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags = {}; - size_t initialDataSize = {}; - const void * pInitialData = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265PictureInfoKHR; + const void * pNext = {}; + const StdVideoDecodeH265PictureInfo * pStdPictureInfo = {}; + uint32_t sliceSegmentCount = {}; + const uint32_t * pSliceSegmentOffsets = {}; }; template <> - struct CppType + struct CppType { - using Type = ValidationCacheCreateInfoEXT; + using Type = VideoDecodeH265PictureInfoKHR; }; - struct ValidationFeaturesEXT + struct VideoDecodeH265ProfileInfoKHR { - using NativeType = VkValidationFeaturesEXT; + using NativeType = VkVideoDecodeH265ProfileInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265ProfileInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( uint32_t enabledValidationFeatureCount_ = {}, - const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ = {}, - uint32_t disabledValidationFeatureCount_ = {}, - const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , enabledValidationFeatureCount( enabledValidationFeatureCount_ ) - , pEnabledValidationFeatures( pEnabledValidationFeatures_ ) - , disabledValidationFeatureCount( disabledValidationFeatureCount_ ) - , pDisabledValidationFeatures( pDisabledValidationFeatures_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileInfoKHR( StdVideoH265ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stdProfileIdc{ stdProfileIdc_ } { } - VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ValidationFeaturesEXT( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileInfoKHR( VideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( 
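// --------------------------------------------------------------------------
// [Editor's sketch - not part of the generated diff] Per-picture decode info:
// the enhanced-mode setter shown above derives sliceSegmentCount from the
// offset array, which holds the offsets of the slice segments within the
// source bitstream buffer range. The vector is borrowed and must outlive the
// structure; names are illustrative.
#include <vector>
#include <vulkan/vulkan.hpp>

static vk::VideoDecodeH265PictureInfoKHR
  makeH265PictureInfo( StdVideoDecodeH265PictureInfo const & stdPictureInfo,
                       std::vector<uint32_t> const &         sliceSegmentOffsets )
{
  return vk::VideoDecodeH265PictureInfoKHR{}
    .setPStdPictureInfo( &stdPictureInfo )
    .setSliceSegmentOffsets( sliceSegmentOffsets );
}
// --------------------------------------------------------------------------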
VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ValidationFeaturesEXT( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & enabledValidationFeatures_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & disabledValidationFeatures_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , enabledValidationFeatureCount( static_cast( enabledValidationFeatures_.size() ) ) - , pEnabledValidationFeatures( enabledValidationFeatures_.data() ) - , disabledValidationFeatureCount( static_cast( disabledValidationFeatures_.size() ) ) - , pDisabledValidationFeatures( disabledValidationFeatures_.data() ) + VideoDecodeH265ProfileInfoKHR( VkVideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265ProfileInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ValidationFeaturesEXT & operator=( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoDecodeH265ProfileInfoKHR & operator=( VideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ValidationFeaturesEXT & operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoDecodeH265ProfileInfoKHR & operator=( VkVideoDecodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setEnabledValidationFeatureCount( uint32_t enabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT - { - enabledValidationFeatureCount = enabledValidationFeatureCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & - setPEnabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT - { - pEnabledValidationFeatures = pEnabledValidationFeatures_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ValidationFeaturesEXT & setEnabledValidationFeatures( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & enabledValidationFeatures_ ) - VULKAN_HPP_NOEXCEPT - { - enabledValidationFeatureCount = static_cast( enabledValidationFeatures_.size() ); - pEnabledValidationFeatures = enabledValidationFeatures_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT - { - disabledValidationFeatureCount = disabledValidationFeatureCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & - setPDisabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileInfoKHR & setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT { - pDisabledValidationFeatures = pDisabledValidationFeatures_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ValidationFeaturesEXT & setDisabledValidationFeatures( - 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & disabledValidationFeatures_ ) - VULKAN_HPP_NOEXCEPT - { - disabledValidationFeatureCount = static_cast( disabledValidationFeatures_.size() ); - pDisabledValidationFeatures = disabledValidationFeatures_.data(); + stdProfileIdc = stdProfileIdc_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkValidationFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH265ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkValidationFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH265ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, enabledValidationFeatureCount, pEnabledValidationFeatures, disabledValidationFeatureCount, pDisabledValidationFeatures ); + return std::tie( sType, pNext, stdProfileIdc ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ValidationFeaturesEXT const & ) const = default; -#else - bool operator==( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + std::strong_ordering operator<=>( VideoDecodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( enabledValidationFeatureCount == rhs.enabledValidationFeatureCount ) && - ( pEnabledValidationFeatures == rhs.pEnabledValidationFeatures ) && ( disabledValidationFeatureCount == rhs.disabledValidationFeatureCount ) && - ( pDisabledValidationFeatures == rhs.pDisabledValidationFeatures ); -# endif + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; } +#endif - bool operator!=( ValidationFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoDecodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 ); + } + + bool operator!=( VideoDecodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFeaturesEXT; - const void * pNext = {}; - uint32_t enabledValidationFeatureCount = {}; - const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT * pEnabledValidationFeatures = {}; - uint32_t disabledValidationFeatureCount = {}; - const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265ProfileInfoKHR; + const void * pNext = {}; + StdVideoH265ProfileIdc stdProfileIdc = {}; }; template <> - struct CppType + struct CppType { - using Type = ValidationFeaturesEXT; + using Type = VideoDecodeH265ProfileInfoKHR; }; - struct ValidationFlagsEXT + struct VideoDecodeH265SessionParametersAddInfoKHR { - using NativeType = VkValidationFlagsEXT; + using NativeType = VkVideoDecodeH265SessionParametersAddInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFlagsEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265SessionParametersAddInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = {}, - const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoKHR( uint32_t stdVPSCount_ = {}, + const StdVideoH265VideoParameterSet * pStdVPSs_ = {}, + uint32_t stdSPSCount_ = {}, + const StdVideoH265SequenceParameterSet * pStdSPSs_ = {}, + uint32_t stdPPSCount_ = {}, + const StdVideoH265PictureParameterSet * pStdPPSs_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stdVPSCount{ stdVPSCount_ } + , pStdVPSs{ pStdVPSs_ } + , stdSPSCount{ stdSPSCount_ } + , pStdSPSs{ pStdSPSs_ } + , stdPPSCount{ stdPPSCount_ } + , pStdPPSs{ pStdPPSs_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoKHR( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoDecodeH265SessionParametersAddInfoKHR( VkVideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265SessionParametersAddInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265SessionParametersAddInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdVPSs_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdSPSs_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdPPSs_ = {}, + const void * pNext_ = nullptr ) : pNext( pNext_ ) - , disabledValidationCheckCount( disabledValidationCheckCount_ ) - , pDisabledValidationChecks( pDisabledValidationChecks_ ) + , 
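// --------------------------------------------------------------------------
// [Editor's sketch - not part of the generated diff] Filling the H.265 decode
// profile; Main profile is an illustrative choice. As in the generated code
// above, equality on this wrapper compares the embedded StdVideoH265ProfileIdc
// bytewise via memcmp.
#include <vulkan/vulkan.hpp>

static vk::VideoDecodeH265ProfileInfoKHR makeH265DecodeProfile()
{
  return vk::VideoDecodeH265ProfileInfoKHR{}.setStdProfileIdc( STD_VIDEO_H265_PROFILE_IDC_MAIN );
}
// --------------------------------------------------------------------------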
stdVPSCount( static_cast( stdVPSs_.size() ) ) + , pStdVPSs( stdVPSs_.data() ) + , stdSPSCount( static_cast( stdSPSs_.size() ) ) + , pStdSPSs( stdSPSs_.data() ) + , stdPPSCount( static_cast( stdPPSs_.size() ) ) + , pStdPPSs( stdPPSs_.data() ) { } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoDecodeH265SessionParametersAddInfoKHR & operator=( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ValidationFlagsEXT( *reinterpret_cast( &rhs ) ) {} + VideoDecodeH265SessionParametersAddInfoKHR & operator=( VkVideoDecodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setStdVPSCount( uint32_t stdVPSCount_ ) VULKAN_HPP_NOEXCEPT + { + stdVPSCount = stdVPSCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setPStdVPSs( const StdVideoH265VideoParameterSet * pStdVPSs_ ) VULKAN_HPP_NOEXCEPT + { + pStdVPSs = pStdVPSs_; + return *this; + } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ValidationFlagsEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & disabledValidationChecks_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , disabledValidationCheckCount( static_cast( disabledValidationChecks_.size() ) ) - , pDisabledValidationChecks( disabledValidationChecks_.data() ) + VideoDecodeH265SessionParametersAddInfoKHR & + setStdVPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdVPSs_ ) VULKAN_HPP_NOEXCEPT { + stdVPSCount = static_cast( stdVPSs_.size() ); + pStdVPSs = stdVPSs_.data(); + return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ValidationFlagsEXT & operator=( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT + { + stdSPSCount = stdSPSCount_; + return *this; + } - ValidationFlagsEXT & operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setPStdSPSs( const StdVideoH265SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + pStdSPSs = pStdSPSs_; return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeH265SessionParametersAddInfoKHR & + setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + stdSPSCount = static_cast( stdSPSs_.size() ); + pStdSPSs = stdSPSs_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
VideoDecodeH265SessionParametersAddInfoKHR & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT { - disabledValidationCheckCount = disabledValidationCheckCount_; + stdPPSCount = stdPPSCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & - setPDisabledValidationChecks( const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoKHR & setPStdPPSs( const StdVideoH265PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT { - pDisabledValidationChecks = pDisabledValidationChecks_; + pStdPPSs = pStdPPSs_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ValidationFlagsEXT & setDisabledValidationChecks( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & disabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT + VideoDecodeH265SessionParametersAddInfoKHR & + setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT { - disabledValidationCheckCount = static_cast( disabledValidationChecks_.size() ); - pDisabledValidationChecks = disabledValidationChecks_.data(); + stdPPSCount = static_cast( stdPPSs_.size() ); + pStdPPSs = stdPPSs_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkValidationFlagsEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH265SessionParametersAddInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkValidationFlagsEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH265SessionParametersAddInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, disabledValidationCheckCount, pDisabledValidationChecks ); + return std::tie( sType, pNext, stdVPSCount, pStdVPSs, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ValidationFlagsEXT const & ) const = default; + auto operator<=>( VideoDecodeH265SessionParametersAddInfoKHR const & ) const = default; #else - bool operator==( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount ) && - ( pDisabledValidationChecks == rhs.pDisabledValidationChecks ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stdVPSCount == rhs.stdVPSCount ) && ( pStdVPSs == rhs.pStdVPSs ) && + ( stdSPSCount == rhs.stdSPSCount ) && ( pStdSPSs == rhs.pStdSPSs ) && ( stdPPSCount == rhs.stdPPSCount ) && ( pStdPPSs == rhs.pStdPPSs ); # endif } - bool operator!=( ValidationFlagsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoDecodeH265SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFlagsEXT; - const void * pNext = {}; - uint32_t disabledValidationCheckCount = {}; - const 
VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersAddInfoKHR; + const void * pNext = {}; + uint32_t stdVPSCount = {}; + const StdVideoH265VideoParameterSet * pStdVPSs = {}; + uint32_t stdSPSCount = {}; + const StdVideoH265SequenceParameterSet * pStdSPSs = {}; + uint32_t stdPPSCount = {}; + const StdVideoH265PictureParameterSet * pStdPPSs = {}; }; template <> - struct CppType + struct CppType { - using Type = ValidationFlagsEXT; + using Type = VideoDecodeH265SessionParametersAddInfoKHR; }; - struct VertexInputAttributeDescription2EXT + struct VideoDecodeH265SessionParametersCreateInfoKHR { - using NativeType = VkVertexInputAttributeDescription2EXT; + using NativeType = VkVideoDecodeH265SessionParametersCreateInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVertexInputAttributeDescription2EXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265SessionParametersCreateInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( uint32_t location_ = {}, - uint32_t binding_ = {}, - VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - uint32_t offset_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , location( location_ ) - , binding( binding_ ) - , format( format_ ) - , offset( offset_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoDecodeH265SessionParametersCreateInfoKHR( uint32_t maxStdVPSCount_ = {}, + uint32_t maxStdSPSCount_ = {}, + uint32_t maxStdPPSCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxStdVPSCount{ maxStdVPSCount_ } + , maxStdSPSCount{ maxStdSPSCount_ } + , maxStdPPSCount{ maxStdPPSCount_ } + , pParametersAddInfo{ pParametersAddInfo_ } { } - VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription2EXT( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + VideoDecodeH265SessionParametersCreateInfoKHR( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VertexInputAttributeDescription2EXT( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VertexInputAttributeDescription2EXT( *reinterpret_cast( &rhs ) ) + VideoDecodeH265SessionParametersCreateInfoKHR( VkVideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeH265SessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VertexInputAttributeDescription2EXT & operator=( VertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoDecodeH265SessionParametersCreateInfoKHR & operator=( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VertexInputAttributeDescription2EXT & operator=( VkVertexInputAttributeDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoDecodeH265SessionParametersCreateInfoKHR & operator=( VkVideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this 
= *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setMaxStdVPSCount( uint32_t maxStdVPSCount_ ) VULKAN_HPP_NOEXCEPT { - location = location_; + maxStdVPSCount = maxStdVPSCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT { - binding = binding_; + maxStdSPSCount = maxStdSPSCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT { - format = format_; + maxStdPPSCount = maxStdPPSCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription2EXT & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoKHR & + setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT { - offset = offset_; + pParametersAddInfo = pParametersAddInfo_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVertexInputAttributeDescription2EXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH265SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVertexInputAttributeDescription2EXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeH265SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -89654,129 +131235,193 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + uint32_t const &, + const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, location, binding, format, offset ); + return std::tie( sType, pNext, maxStdVPSCount, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VertexInputAttributeDescription2EXT const & ) const = default; + auto operator<=>( VideoDecodeH265SessionParametersCreateInfoKHR const & ) const = default; #else - bool operator==( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( location == rhs.location ) && ( binding == rhs.binding ) && ( format == rhs.format ) 
&& - ( offset == rhs.offset ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxStdVPSCount == rhs.maxStdVPSCount ) && ( maxStdSPSCount == rhs.maxStdSPSCount ) && + ( maxStdPPSCount == rhs.maxStdPPSCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo ); # endif } - bool operator!=( VertexInputAttributeDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoDecodeH265SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputAttributeDescription2EXT; - void * pNext = {}; - uint32_t location = {}; - uint32_t binding = {}; - VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; - uint32_t offset = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersCreateInfoKHR; + const void * pNext = {}; + uint32_t maxStdVPSCount = {}; + uint32_t maxStdSPSCount = {}; + uint32_t maxStdPPSCount = {}; + const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR * pParametersAddInfo = {}; }; template <> - struct CppType + struct CppType { - using Type = VertexInputAttributeDescription2EXT; + using Type = VideoDecodeH265SessionParametersCreateInfoKHR; }; - struct VertexInputBindingDescription2EXT + struct VideoDecodeInfoKHR { - using NativeType = VkVertexInputBindingDescription2EXT; + using NativeType = VkVideoDecodeInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVertexInputBindingDescription2EXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeInfoKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT( uint32_t binding_ = {}, - uint32_t stride_ = {}, - VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex, - uint32_t divisor_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , binding( binding_ ) - , stride( stride_ ) - , inputRate( inputRate_ ) - , divisor( divisor_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ = {}, + VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR dstPictureResource_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ = {}, + uint32_t referenceSlotCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , srcBuffer{ srcBuffer_ } + , srcBufferOffset{ srcBufferOffset_ } + , srcBufferRange{ srcBufferRange_ } + , dstPictureResource{ dstPictureResource_ } + , pSetupReferenceSlot{ pSetupReferenceSlot_ } + , referenceSlotCount{ referenceSlotCount_ } + , pReferenceSlots{ pReferenceSlots_ } { } - VULKAN_HPP_CONSTEXPR VertexInputBindingDescription2EXT( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VertexInputBindingDescription2EXT( VkVertexInputBindingDescription2EXT const & rhs ) 
VULKAN_HPP_NOEXCEPT - : VertexInputBindingDescription2EXT( *reinterpret_cast( &rhs ) ) + VideoDecodeInfoKHR( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : VideoDecodeInfoKHR( *reinterpret_cast( &rhs ) ) {} + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_, + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_, + VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR dstPictureResource_, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceSlots_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , srcBuffer( srcBuffer_ ) + , srcBufferOffset( srcBufferOffset_ ) + , srcBufferRange( srcBufferRange_ ) + , dstPictureResource( dstPictureResource_ ) + , pSetupReferenceSlot( pSetupReferenceSlot_ ) + , referenceSlotCount( static_cast( referenceSlots_.size() ) ) + , pReferenceSlots( referenceSlots_.data() ) { } -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VertexInputBindingDescription2EXT & operator=( VertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoDecodeInfoKHR & operator=( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VertexInputBindingDescription2EXT & operator=( VkVertexInputBindingDescription2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoDecodeInfoKHR & operator=( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { - binding = binding_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT { - stride = stride_; + srcBuffer = srcBuffer_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ ) VULKAN_HPP_NOEXCEPT { - inputRate = inputRate_; + srcBufferOffset = srcBufferOffset_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription2EXT & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBufferRange( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ ) VULKAN_HPP_NOEXCEPT { - divisor = divisor_; + srcBufferRange = srcBufferRange_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkVertexInputBindingDescription2EXT const &() const VULKAN_HPP_NOEXCEPT + 
VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & + setDstPictureResource( VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const & dstPictureResource_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + dstPictureResource = dstPictureResource_; + return *this; } - operator VkVertexInputBindingDescription2EXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & + setPSetupReferenceSlot( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pSetupReferenceSlot = pSetupReferenceSlot_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = referenceSlotCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & + setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + pReferenceSlots = pReferenceSlots_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoDecodeInfoKHR & setReferenceSlots( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT + { + referenceSlotCount = static_cast( referenceSlots_.size() ); + pReferenceSlots = referenceSlots_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoDecodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoDecodeInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -89784,6877 +131429,7035 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, binding, stride, inputRate, divisor ); + return std::tie( + sType, pNext, flags, srcBuffer, srcBufferOffset, srcBufferRange, dstPictureResource, pSetupReferenceSlot, referenceSlotCount, pReferenceSlots ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VertexInputBindingDescription2EXT const & ) const = default; + auto operator<=>( VideoDecodeInfoKHR const & ) const = default; #else - bool operator==( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( binding == rhs.binding ) && ( stride == rhs.stride ) && ( inputRate == rhs.inputRate ) && - ( divisor == rhs.divisor ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( srcBuffer == rhs.srcBuffer ) && + ( srcBufferOffset == rhs.srcBufferOffset ) && ( srcBufferRange == rhs.srcBufferRange ) && ( dstPictureResource == rhs.dstPictureResource ) && + ( pSetupReferenceSlot == rhs.pSetupReferenceSlot ) && ( referenceSlotCount == rhs.referenceSlotCount ) && + ( pReferenceSlots == rhs.pReferenceSlots ); # endif } - bool operator!=( VertexInputBindingDescription2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVertexInputBindingDescription2EXT; - void * pNext = {}; - uint32_t 
binding = {}; - uint32_t stride = {}; - VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex; - uint32_t divisor = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange = {}; + VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR dstPictureResource = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot = {}; + uint32_t referenceSlotCount = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots = {}; }; template <> - struct CppType + struct CppType { - using Type = VertexInputBindingDescription2EXT; + using Type = VideoDecodeInfoKHR; }; -#if defined( VK_USE_PLATFORM_VI_NN ) - struct ViSurfaceCreateInfoNN + struct VideoDecodeUsageInfoKHR { - using NativeType = VkViSurfaceCreateInfoNN; + using NativeType = VkVideoDecodeUsageInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eViSurfaceCreateInfoNN; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeUsageInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - ViSurfaceCreateInfoNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {}, void * window_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , window( window_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoDecodeUsageInfoKHR( VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR videoUsageHints_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , videoUsageHints{ videoUsageHints_ } { } - VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoDecodeUsageInfoKHR( VideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT - : ViSurfaceCreateInfoNN( *reinterpret_cast( &rhs ) ) + VideoDecodeUsageInfoKHR( VkVideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoDecodeUsageInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ViSurfaceCreateInfoNN & operator=( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoDecodeUsageInfoKHR & operator=( VideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - ViSurfaceCreateInfoNN & operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT + VideoDecodeUsageInfoKHR & operator=( VkVideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoDecodeUsageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setFlags( 
VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & setWindow( void * window_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoDecodeUsageInfoKHR & setVideoUsageHints( VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR videoUsageHints_ ) VULKAN_HPP_NOEXCEPT { - window = window_; + videoUsageHints = videoUsageHints_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkViSurfaceCreateInfoNN const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeUsageInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkViSurfaceCreateInfoNN &() VULKAN_HPP_NOEXCEPT + operator VkVideoDecodeUsageInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, window ); + return std::tie( sType, pNext, videoUsageHints ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ViSurfaceCreateInfoNN const & ) const = default; -# else - bool operator==( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoDecodeUsageInfoKHR const & ) const = default; +#else + bool operator==( VideoDecodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( window == rhs.window ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoUsageHints == rhs.videoUsageHints ); +# endif } - bool operator!=( ViSurfaceCreateInfoNN const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoDecodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eViSurfaceCreateInfoNN; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags = {}; - void * window = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeUsageInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR videoUsageHints = {}; }; template <> - struct CppType + struct CppType { - using Type = ViSurfaceCreateInfoNN; + using Type = VideoDecodeUsageInfoKHR; }; -#endif /*VK_USE_PLATFORM_VI_NN*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoPictureResourceInfoKHR + struct VideoEncodeAV1CapabilitiesKHR { - using NativeType = VkVideoPictureResourceInfoKHR; + using NativeType = VkVideoEncodeAV1CapabilitiesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoPictureResourceInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1CapabilitiesKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoPictureResourceInfoKHR( VULKAN_HPP_NAMESPACE::Offset2D codedOffset_ = {}, - 
VULKAN_HPP_NAMESPACE::Extent2D codedExtent_ = {}, - uint32_t baseArrayLayer_ = {}, - VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , codedOffset( codedOffset_ ) - , codedExtent( codedExtent_ ) - , baseArrayLayer( baseArrayLayer_ ) - , imageViewBinding( imageViewBinding_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1CapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilityFlagsKHR flags_ = {}, + StdVideoAV1Level maxLevel_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D codedPictureAlignment_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxTiles_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minTileSize_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxTileSize_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1SuperblockSizeFlagsKHR superblockSizes_ = {}, + uint32_t maxSingleReferenceCount_ = {}, + uint32_t singleReferenceNameMask_ = {}, + uint32_t maxUnidirectionalCompoundReferenceCount_ = {}, + uint32_t maxUnidirectionalCompoundGroup1ReferenceCount_ = {}, + uint32_t unidirectionalCompoundReferenceNameMask_ = {}, + uint32_t maxBidirectionalCompoundReferenceCount_ = {}, + uint32_t maxBidirectionalCompoundGroup1ReferenceCount_ = {}, + uint32_t maxBidirectionalCompoundGroup2ReferenceCount_ = {}, + uint32_t bidirectionalCompoundReferenceNameMask_ = {}, + uint32_t maxTemporalLayerCount_ = {}, + uint32_t maxSpatialLayerCount_ = {}, + uint32_t maxOperatingPoints_ = {}, + uint32_t minQIndex_ = {}, + uint32_t maxQIndex_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 prefersGopRemainingFrames_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1StdFlagsKHR stdSyntaxFlags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , maxLevel{ maxLevel_ } + , codedPictureAlignment{ codedPictureAlignment_ } + , maxTiles{ maxTiles_ } + , minTileSize{ minTileSize_ } + , maxTileSize{ maxTileSize_ } + , superblockSizes{ superblockSizes_ } + , maxSingleReferenceCount{ maxSingleReferenceCount_ } + , singleReferenceNameMask{ singleReferenceNameMask_ } + , maxUnidirectionalCompoundReferenceCount{ maxUnidirectionalCompoundReferenceCount_ } + , maxUnidirectionalCompoundGroup1ReferenceCount{ maxUnidirectionalCompoundGroup1ReferenceCount_ } + , unidirectionalCompoundReferenceNameMask{ unidirectionalCompoundReferenceNameMask_ } + , maxBidirectionalCompoundReferenceCount{ maxBidirectionalCompoundReferenceCount_ } + , maxBidirectionalCompoundGroup1ReferenceCount{ maxBidirectionalCompoundGroup1ReferenceCount_ } + , maxBidirectionalCompoundGroup2ReferenceCount{ maxBidirectionalCompoundGroup2ReferenceCount_ } + , bidirectionalCompoundReferenceNameMask{ bidirectionalCompoundReferenceNameMask_ } + , maxTemporalLayerCount{ maxTemporalLayerCount_ } + , maxSpatialLayerCount{ maxSpatialLayerCount_ } + , maxOperatingPoints{ maxOperatingPoints_ } + , minQIndex{ minQIndex_ } + , maxQIndex{ maxQIndex_ } + , prefersGopRemainingFrames{ prefersGopRemainingFrames_ } + , requiresGopRemainingFrames{ requiresGopRemainingFrames_ } + , stdSyntaxFlags{ stdSyntaxFlags_ } { } - VULKAN_HPP_CONSTEXPR VideoPictureResourceInfoKHR( VideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeAV1CapabilitiesKHR( VideoEncodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoPictureResourceInfoKHR( VkVideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT 
- : VideoPictureResourceInfoKHR( *reinterpret_cast( &rhs ) ) + VideoEncodeAV1CapabilitiesKHR( VkVideoEncodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1CapabilitiesKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoPictureResourceInfoKHR & operator=( VideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeAV1CapabilitiesKHR & operator=( VideoEncodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoPictureResourceInfoKHR & operator=( VkVideoPictureResourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeAV1CapabilitiesKHR & operator=( VkVideoEncodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeAV1CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setCodedOffset( VULKAN_HPP_NAMESPACE::Offset2D const & codedOffset_ ) VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeAV1CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - codedOffset = codedOffset_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setCodedExtent( VULKAN_HPP_NAMESPACE::Extent2D const & codedExtent_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - codedExtent = codedExtent_; + return std::tie( sType, + pNext, + flags, + maxLevel, + codedPictureAlignment, + maxTiles, + minTileSize, + maxTileSize, + superblockSizes, + maxSingleReferenceCount, + singleReferenceNameMask, + maxUnidirectionalCompoundReferenceCount, + maxUnidirectionalCompoundGroup1ReferenceCount, + unidirectionalCompoundReferenceNameMask, + maxBidirectionalCompoundReferenceCount, + maxBidirectionalCompoundGroup1ReferenceCount, + maxBidirectionalCompoundGroup2ReferenceCount, + bidirectionalCompoundReferenceNameMask, + maxTemporalLayerCount, + maxSpatialLayerCount, + maxOperatingPoints, + minQIndex, + maxQIndex, + prefersGopRemainingFrames, + requiresGopRemainingFrames, + stdSyntaxFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = codedPictureAlignment <=> rhs.codedPictureAlignment; cmp != 0 ) + return cmp; + if ( auto cmp = maxTiles <=> rhs.maxTiles; cmp != 0 ) + return cmp; + if ( auto cmp = minTileSize <=> rhs.minTileSize; cmp != 0 ) + return cmp; + if ( auto cmp = maxTileSize <=> rhs.maxTileSize; cmp != 0 ) + return cmp; + if ( auto cmp = superblockSizes <=> rhs.superblockSizes; cmp != 0 ) + return cmp; + if ( auto cmp = maxSingleReferenceCount <=> rhs.maxSingleReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = singleReferenceNameMask <=> rhs.singleReferenceNameMask; cmp != 0 ) + return cmp; + if ( auto cmp = maxUnidirectionalCompoundReferenceCount <=> rhs.maxUnidirectionalCompoundReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxUnidirectionalCompoundGroup1ReferenceCount <=> rhs.maxUnidirectionalCompoundGroup1ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = unidirectionalCompoundReferenceNameMask <=> rhs.unidirectionalCompoundReferenceNameMask; cmp != 0 ) + return cmp; + if ( auto cmp = maxBidirectionalCompoundReferenceCount <=> rhs.maxBidirectionalCompoundReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxBidirectionalCompoundGroup1ReferenceCount <=> rhs.maxBidirectionalCompoundGroup1ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxBidirectionalCompoundGroup2ReferenceCount <=> rhs.maxBidirectionalCompoundGroup2ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = bidirectionalCompoundReferenceNameMask <=> rhs.bidirectionalCompoundReferenceNameMask; cmp != 0 ) + return cmp; + if ( auto cmp = maxTemporalLayerCount <=> rhs.maxTemporalLayerCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxSpatialLayerCount <=> rhs.maxSpatialLayerCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxOperatingPoints <=> rhs.maxOperatingPoints; cmp != 0 ) + return cmp; + if ( auto cmp = minQIndex <=> rhs.minQIndex; cmp != 0 ) + return cmp; + if ( auto cmp = maxQIndex <=> rhs.maxQIndex; cmp != 0 ) + return cmp; + if ( auto cmp = prefersGopRemainingFrames <=> rhs.prefersGopRemainingFrames; cmp != 0 ) + return cmp; + if ( auto cmp = requiresGopRemainingFrames <=> rhs.requiresGopRemainingFrames; cmp != 0 ) + return cmp; + if ( auto cmp = stdSyntaxFlags <=> rhs.stdSyntaxFlags; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoEncodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ) == 0 ) && ( codedPictureAlignment == rhs.codedPictureAlignment ) && + ( maxTiles == rhs.maxTiles ) && ( minTileSize == rhs.minTileSize ) && ( maxTileSize == rhs.maxTileSize ) && + ( superblockSizes == rhs.superblockSizes ) && ( maxSingleReferenceCount == rhs.maxSingleReferenceCount ) && + ( singleReferenceNameMask == rhs.singleReferenceNameMask ) && + ( maxUnidirectionalCompoundReferenceCount == rhs.maxUnidirectionalCompoundReferenceCount ) && + ( maxUnidirectionalCompoundGroup1ReferenceCount == rhs.maxUnidirectionalCompoundGroup1ReferenceCount ) && + ( unidirectionalCompoundReferenceNameMask == rhs.unidirectionalCompoundReferenceNameMask ) && + ( maxBidirectionalCompoundReferenceCount == rhs.maxBidirectionalCompoundReferenceCount ) && + ( maxBidirectionalCompoundGroup1ReferenceCount == rhs.maxBidirectionalCompoundGroup1ReferenceCount ) && + ( maxBidirectionalCompoundGroup2ReferenceCount == 
rhs.maxBidirectionalCompoundGroup2ReferenceCount ) && + ( bidirectionalCompoundReferenceNameMask == rhs.bidirectionalCompoundReferenceNameMask ) && + ( maxTemporalLayerCount == rhs.maxTemporalLayerCount ) && ( maxSpatialLayerCount == rhs.maxSpatialLayerCount ) && + ( maxOperatingPoints == rhs.maxOperatingPoints ) && ( minQIndex == rhs.minQIndex ) && ( maxQIndex == rhs.maxQIndex ) && + ( prefersGopRemainingFrames == rhs.prefersGopRemainingFrames ) && ( requiresGopRemainingFrames == rhs.requiresGopRemainingFrames ) && + ( stdSyntaxFlags == rhs.stdSyntaxFlags ); + } + + bool operator!=( VideoEncodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1CapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilityFlagsKHR flags = {}; + StdVideoAV1Level maxLevel = {}; + VULKAN_HPP_NAMESPACE::Extent2D codedPictureAlignment = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxTiles = {}; + VULKAN_HPP_NAMESPACE::Extent2D minTileSize = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxTileSize = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1SuperblockSizeFlagsKHR superblockSizes = {}; + uint32_t maxSingleReferenceCount = {}; + uint32_t singleReferenceNameMask = {}; + uint32_t maxUnidirectionalCompoundReferenceCount = {}; + uint32_t maxUnidirectionalCompoundGroup1ReferenceCount = {}; + uint32_t unidirectionalCompoundReferenceNameMask = {}; + uint32_t maxBidirectionalCompoundReferenceCount = {}; + uint32_t maxBidirectionalCompoundGroup1ReferenceCount = {}; + uint32_t maxBidirectionalCompoundGroup2ReferenceCount = {}; + uint32_t bidirectionalCompoundReferenceNameMask = {}; + uint32_t maxTemporalLayerCount = {}; + uint32_t maxSpatialLayerCount = {}; + uint32_t maxOperatingPoints = {}; + uint32_t minQIndex = {}; + uint32_t maxQIndex = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersGopRemainingFrames = {}; + VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1StdFlagsKHR stdSyntaxFlags = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeAV1CapabilitiesKHR; + }; + + struct VideoEncodeAV1DpbSlotInfoKHR + { + using NativeType = VkVideoEncodeAV1DpbSlotInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1DpbSlotInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1DpbSlotInfoKHR( const StdVideoEncodeAV1ReferenceInfo * pStdReferenceInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdReferenceInfo{ pStdReferenceInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1DpbSlotInfoKHR( VideoEncodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1DpbSlotInfoKHR( VkVideoEncodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1DpbSlotInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1DpbSlotInfoKHR & operator=( VideoEncodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeAV1DpbSlotInfoKHR & operator=( VkVideoEncodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS 
) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - baseArrayLayer = baseArrayLayer_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoPictureResourceInfoKHR & setImageViewBinding( VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1DpbSlotInfoKHR & setPStdReferenceInfo( const StdVideoEncodeAV1ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT { - imageViewBinding = imageViewBinding_; + pStdReferenceInfo = pStdReferenceInfo_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoPictureResourceInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeAV1DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoPictureResourceInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeAV1DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, codedOffset, codedExtent, baseArrayLayer, imageViewBinding ); + return std::tie( sType, pNext, pStdReferenceInfo ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoPictureResourceInfoKHR const & ) const = default; -# else - bool operator==( VideoPictureResourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1DpbSlotInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( codedOffset == rhs.codedOffset ) && ( codedExtent == rhs.codedExtent ) && - ( baseArrayLayer == rhs.baseArrayLayer ) && ( imageViewBinding == rhs.imageViewBinding ); -# endif +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo ); +# endif } - bool operator!=( VideoPictureResourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeAV1DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoPictureResourceInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Offset2D codedOffset = {}; - VULKAN_HPP_NAMESPACE::Extent2D codedExtent = {}; - uint32_t baseArrayLayer = {}; - VULKAN_HPP_NAMESPACE::ImageView imageViewBinding = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1DpbSlotInfoKHR; + const void * pNext = {}; + const StdVideoEncodeAV1ReferenceInfo * pStdReferenceInfo = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoPictureResourceInfoKHR; + using Type = VideoEncodeAV1DpbSlotInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoReferenceSlotInfoKHR + struct 
VideoEncodeAV1FrameSizeKHR { - using NativeType = VkVideoReferenceSlotInfoKHR; + using NativeType = VkVideoEncodeAV1FrameSizeKHR; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoReferenceSlotInfoKHR; - -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoReferenceSlotInfoKHR( int8_t slotIndex_ = {}, - const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , slotIndex( slotIndex_ ) - , pPictureResource( pPictureResource_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeAV1FrameSizeKHR( uint32_t intraFrameSize_ = {}, uint32_t predictiveFrameSize_ = {}, uint32_t bipredictiveFrameSize_ = {} ) VULKAN_HPP_NOEXCEPT + : intraFrameSize{ intraFrameSize_ } + , predictiveFrameSize{ predictiveFrameSize_ } + , bipredictiveFrameSize{ bipredictiveFrameSize_ } { } - VULKAN_HPP_CONSTEXPR VideoReferenceSlotInfoKHR( VideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeAV1FrameSizeKHR( VideoEncodeAV1FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoReferenceSlotInfoKHR( VkVideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoReferenceSlotInfoKHR( *reinterpret_cast( &rhs ) ) + VideoEncodeAV1FrameSizeKHR( VkVideoEncodeAV1FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1FrameSizeKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoReferenceSlotInfoKHR & operator=( VideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeAV1FrameSizeKHR & operator=( VideoEncodeAV1FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoReferenceSlotInfoKHR & operator=( VkVideoReferenceSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeAV1FrameSizeKHR & operator=( VkVideoEncodeAV1FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1FrameSizeKHR & setIntraFrameSize( uint32_t intraFrameSize_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + intraFrameSize = intraFrameSize_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & setSlotIndex( int8_t slotIndex_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1FrameSizeKHR & setPredictiveFrameSize( uint32_t predictiveFrameSize_ ) VULKAN_HPP_NOEXCEPT { - slotIndex = slotIndex_; + predictiveFrameSize = predictiveFrameSize_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoReferenceSlotInfoKHR & - setPPictureResource( const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1FrameSizeKHR & setBipredictiveFrameSize( uint32_t bipredictiveFrameSize_ ) VULKAN_HPP_NOEXCEPT { - pPictureResource = pPictureResource_; + bipredictiveFrameSize = bipredictiveFrameSize_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoReferenceSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT + 
operator VkVideoEncodeAV1FrameSizeKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoReferenceSlotInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeAV1FrameSizeKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, slotIndex, pPictureResource ); + return std::tie( intraFrameSize, predictiveFrameSize, bipredictiveFrameSize ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoReferenceSlotInfoKHR const & ) const = default; -# else - bool operator==( VideoReferenceSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1FrameSizeKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( slotIndex == rhs.slotIndex ) && ( pPictureResource == rhs.pPictureResource ); -# endif +# else + return ( intraFrameSize == rhs.intraFrameSize ) && ( predictiveFrameSize == rhs.predictiveFrameSize ) && + ( bipredictiveFrameSize == rhs.bipredictiveFrameSize ); +# endif } - bool operator!=( VideoReferenceSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeAV1FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoReferenceSlotInfoKHR; - const void * pNext = {}; - int8_t slotIndex = {}; - const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource = {}; - }; - - template <> - struct CppType - { - using Type = VideoReferenceSlotInfoKHR; + uint32_t intraFrameSize = {}; + uint32_t predictiveFrameSize = {}; + uint32_t bipredictiveFrameSize = {}; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoBeginCodingInfoKHR + struct VideoEncodeAV1GopRemainingFrameInfoKHR { - using NativeType = VkVideoBeginCodingInfoKHR; + using NativeType = VkVideoEncodeAV1GopRemainingFrameInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoBeginCodingInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1GopRemainingFrameInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ = {}, - VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {}, - VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ = {}, - uint32_t referenceSlotCount_ = {}, - const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , videoSession( videoSession_ ) - , videoSessionParameters( videoSessionParameters_ ) - , referenceSlotCount( referenceSlotCount_ ) - , pReferenceSlots( pReferenceSlots_ ) 
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1GopRemainingFrameInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ = {}, + uint32_t gopRemainingIntra_ = {}, + uint32_t gopRemainingPredictive_ = {}, + uint32_t gopRemainingBipredictive_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , useGopRemainingFrames{ useGopRemainingFrames_ } + , gopRemainingIntra{ gopRemainingIntra_ } + , gopRemainingPredictive{ gopRemainingPredictive_ } + , gopRemainingBipredictive{ gopRemainingBipredictive_ } { } - VULKAN_HPP_CONSTEXPR VideoBeginCodingInfoKHR( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - VideoBeginCodingInfoKHR( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoBeginCodingInfoKHR( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR VideoEncodeAV1GopRemainingFrameInfoKHR( VideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoBeginCodingInfoKHR( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_, - VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_, - VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceSlots_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , videoSession( videoSession_ ) - , videoSessionParameters( videoSessionParameters_ ) - , referenceSlotCount( static_cast( referenceSlots_.size() ) ) - , pReferenceSlots( referenceSlots_.data() ) + VideoEncodeAV1GopRemainingFrameInfoKHR( VkVideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1GopRemainingFrameInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoBeginCodingInfoKHR & operator=( VideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeAV1GopRemainingFrameInfoKHR & operator=( VideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoBeginCodingInfoKHR & operator=( VkVideoBeginCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeAV1GopRemainingFrameInfoKHR & operator=( VkVideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setVideoSession( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ ) VULKAN_HPP_NOEXCEPT - { - videoSession = videoSession_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & - setVideoSessionParameters( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & + 
setUseGopRemainingFrames( VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ ) VULKAN_HPP_NOEXCEPT { - videoSessionParameters = videoSessionParameters_; + useGopRemainingFrames = useGopRemainingFrames_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & setGopRemainingIntra( uint32_t gopRemainingIntra_ ) VULKAN_HPP_NOEXCEPT { - referenceSlotCount = referenceSlotCount_; + gopRemainingIntra = gopRemainingIntra_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoBeginCodingInfoKHR & - setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & setGopRemainingPredictive( uint32_t gopRemainingPredictive_ ) VULKAN_HPP_NOEXCEPT { - pReferenceSlots = pReferenceSlots_; + gopRemainingPredictive = gopRemainingPredictive_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoBeginCodingInfoKHR & setReferenceSlots( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & setGopRemainingBipredictive( uint32_t gopRemainingBipredictive_ ) VULKAN_HPP_NOEXCEPT { - referenceSlotCount = static_cast( referenceSlots_.size() ); - pReferenceSlots = referenceSlots_.data(); + gopRemainingBipredictive = gopRemainingBipredictive_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoBeginCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeAV1GopRemainingFrameInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoBeginCodingInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeAV1GopRemainingFrameInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + uint32_t const &, + uint32_t const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, videoSession, videoSessionParameters, referenceSlotCount, pReferenceSlots ); + return std::tie( sType, pNext, useGopRemainingFrames, gopRemainingIntra, gopRemainingPredictive, gopRemainingBipredictive ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoBeginCodingInfoKHR const & ) const = default; -# else - bool operator==( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1GopRemainingFrameInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( videoSession == rhs.videoSession ) && - ( videoSessionParameters == rhs.videoSessionParameters ) && ( referenceSlotCount == rhs.referenceSlotCount ) && - ( pReferenceSlots == rhs.pReferenceSlots ); -# endif +# else + return ( 
sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useGopRemainingFrames == rhs.useGopRemainingFrames ) && + ( gopRemainingIntra == rhs.gopRemainingIntra ) && ( gopRemainingPredictive == rhs.gopRemainingPredictive ) && + ( gopRemainingBipredictive == rhs.gopRemainingBipredictive ); +# endif } - bool operator!=( VideoBeginCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoBeginCodingInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR flags = {}; - VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession = {}; - VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters = {}; - uint32_t referenceSlotCount = {}; - const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1GopRemainingFrameInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames = {}; + uint32_t gopRemainingIntra = {}; + uint32_t gopRemainingPredictive = {}; + uint32_t gopRemainingBipredictive = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoBeginCodingInfoKHR; + using Type = VideoEncodeAV1GopRemainingFrameInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoCapabilitiesKHR + struct VideoEncodeAV1PictureInfoKHR { - using NativeType = VkVideoCapabilitiesKHR; + using NativeType = VkVideoEncodeAV1PictureInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCapabilitiesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1PictureInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 VideoCapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR capabilityFlags_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D videoPictureExtentGranularity_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D minExtent_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D maxExtent_ = {}, - uint32_t maxReferencePicturesSlotsCount_ = {}, - uint32_t maxReferencePicturesActiveCount_ = {}, - VULKAN_HPP_NAMESPACE::ExtensionProperties stdHeaderVersion_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , capabilityFlags( capabilityFlags_ ) - , minBitstreamBufferOffsetAlignment( minBitstreamBufferOffsetAlignment_ ) - , minBitstreamBufferSizeAlignment( minBitstreamBufferSizeAlignment_ ) - , videoPictureExtentGranularity( videoPictureExtentGranularity_ ) - , minExtent( minExtent_ ) - , maxExtent( maxExtent_ ) - , maxReferencePicturesSlotsCount( maxReferencePicturesSlotsCount_ ) - , maxReferencePicturesActiveCount( maxReferencePicturesActiveCount_ ) - , stdHeaderVersion( stdHeaderVersion_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR( + VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR predictionMode_ = VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR::eIntraOnly, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR rateControlGroup_ = 
VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR::eIntra, + uint32_t constantQIndex_ = {}, + const StdVideoEncodeAV1PictureInfo * pStdPictureInfo_ = {}, + std::array const & referenceNameSlotIndices_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primaryReferenceCdfOnly_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 generateObuExtensionHeader_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , predictionMode{ predictionMode_ } + , rateControlGroup{ rateControlGroup_ } + , constantQIndex{ constantQIndex_ } + , pStdPictureInfo{ pStdPictureInfo_ } + , referenceNameSlotIndices{ referenceNameSlotIndices_ } + , primaryReferenceCdfOnly{ primaryReferenceCdfOnly_ } + , generateObuExtensionHeader{ generateObuExtensionHeader_ } { } - VULKAN_HPP_CONSTEXPR_14 VideoCapabilitiesKHR( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR( VideoEncodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoCapabilitiesKHR( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + VideoEncodeAV1PictureInfoKHR( VkVideoEncodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1PictureInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoCapabilitiesKHR & operator=( VideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeAV1PictureInfoKHR & operator=( VideoEncodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoCapabilitiesKHR & operator=( VkVideoCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeAV1PictureInfoKHR & operator=( VkVideoEncodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkVideoCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkVideoCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & + setPredictionMode( VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR predictionMode_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + predictionMode = predictionMode_; + return *this; } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & + setRateControlGroup( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR rateControlGroup_ ) VULKAN_HPP_NOEXCEPT + { + rateControlGroup = rateControlGroup_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setConstantQIndex( uint32_t constantQIndex_ ) VULKAN_HPP_NOEXCEPT + { + constantQIndex = constantQIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setPStdPictureInfo( const StdVideoEncodeAV1PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdPictureInfo = pStdPictureInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & + setReferenceNameSlotIndices( std::array referenceNameSlotIndices_ ) VULKAN_HPP_NOEXCEPT + { + referenceNameSlotIndices = referenceNameSlotIndices_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & 
+ setPrimaryReferenceCdfOnly( VULKAN_HPP_NAMESPACE::Bool32 primaryReferenceCdfOnly_ ) VULKAN_HPP_NOEXCEPT + { + primaryReferenceCdfOnly = primaryReferenceCdfOnly_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & + setGenerateObuExtensionHeader( VULKAN_HPP_NAMESPACE::Bool32 generateObuExtensionHeader_ ) VULKAN_HPP_NOEXCEPT + { + generateObuExtensionHeader = generateObuExtensionHeader_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeAV1PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1PictureInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + const StdVideoEncodeAV1PictureInfo * const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, - capabilityFlags, - minBitstreamBufferOffsetAlignment, - minBitstreamBufferSizeAlignment, - videoPictureExtentGranularity, - minExtent, - maxExtent, - maxReferencePicturesSlotsCount, - maxReferencePicturesActiveCount, - stdHeaderVersion ); + predictionMode, + rateControlGroup, + constantQIndex, + pStdPictureInfo, + referenceNameSlotIndices, + primaryReferenceCdfOnly, + generateObuExtensionHeader ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoCapabilitiesKHR const & ) const = default; -# else - bool operator==( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1PictureInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( capabilityFlags == rhs.capabilityFlags ) && - ( minBitstreamBufferOffsetAlignment == rhs.minBitstreamBufferOffsetAlignment ) && - ( minBitstreamBufferSizeAlignment == rhs.minBitstreamBufferSizeAlignment ) && - ( videoPictureExtentGranularity == rhs.videoPictureExtentGranularity ) && ( minExtent == rhs.minExtent ) && ( maxExtent == rhs.maxExtent ) && - ( maxReferencePicturesSlotsCount == rhs.maxReferencePicturesSlotsCount ) && - ( maxReferencePicturesActiveCount == rhs.maxReferencePicturesActiveCount ) && ( stdHeaderVersion == rhs.stdHeaderVersion ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( predictionMode == rhs.predictionMode ) && ( rateControlGroup == rhs.rateControlGroup ) && + ( constantQIndex == rhs.constantQIndex ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && + ( referenceNameSlotIndices == rhs.referenceNameSlotIndices ) && ( primaryReferenceCdfOnly == rhs.primaryReferenceCdfOnly ) && + ( generateObuExtensionHeader == rhs.generateObuExtensionHeader ); +# endif } - bool operator!=( VideoCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeAV1PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoCapabilitiesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR capabilityFlags = {}; - 
VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferOffsetAlignment = {}; - VULKAN_HPP_NAMESPACE::DeviceSize minBitstreamBufferSizeAlignment = {}; - VULKAN_HPP_NAMESPACE::Extent2D videoPictureExtentGranularity = {}; - VULKAN_HPP_NAMESPACE::Extent2D minExtent = {}; - VULKAN_HPP_NAMESPACE::Extent2D maxExtent = {}; - uint32_t maxReferencePicturesSlotsCount = {}; - uint32_t maxReferencePicturesActiveCount = {}; - VULKAN_HPP_NAMESPACE::ExtensionProperties stdHeaderVersion = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1PictureInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR predictionMode = VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR::eIntraOnly; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR rateControlGroup = VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR::eIntra; + uint32_t constantQIndex = {}; + const StdVideoEncodeAV1PictureInfo * pStdPictureInfo = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D referenceNameSlotIndices = {}; + VULKAN_HPP_NAMESPACE::Bool32 primaryReferenceCdfOnly = {}; + VULKAN_HPP_NAMESPACE::Bool32 generateObuExtensionHeader = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoCapabilitiesKHR; + using Type = VideoEncodeAV1PictureInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoCodingControlInfoKHR + struct VideoEncodeAV1ProfileInfoKHR { - using NativeType = VkVideoCodingControlInfoKHR; + using NativeType = VkVideoEncodeAV1ProfileInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoCodingControlInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1ProfileInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoCodingControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1ProfileInfoKHR( StdVideoAV1Profile stdProfile_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stdProfile{ stdProfile_ } { } - VULKAN_HPP_CONSTEXPR VideoCodingControlInfoKHR( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeAV1ProfileInfoKHR( VideoEncodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoCodingControlInfoKHR( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoCodingControlInfoKHR( *reinterpret_cast( &rhs ) ) + VideoEncodeAV1ProfileInfoKHR( VkVideoEncodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1ProfileInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoCodingControlInfoKHR & operator=( VideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeAV1ProfileInfoKHR & operator=( VideoEncodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoCodingControlInfoKHR & operator=( VkVideoCodingControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeAV1ProfileInfoKHR & operator=( VkVideoEncodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; 
} -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoCodingControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1ProfileInfoKHR & setStdProfile( StdVideoAV1Profile stdProfile_ ) VULKAN_HPP_NOEXCEPT { - flags = flags_; + stdProfile = stdProfile_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoCodingControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeAV1ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoCodingControlInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeAV1ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags ); + return std::tie( sType, pNext, stdProfile ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoCodingControlInfoKHR const & ) const = default; -# else - bool operator==( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); -# endif + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &stdProfile, &rhs.stdProfile, sizeof( StdVideoAV1Profile ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; } +#endif - bool operator!=( VideoCodingControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoEncodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfile, &rhs.stdProfile, sizeof( StdVideoAV1Profile ) ) == 0 ); + } + + bool operator!=( VideoEncodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoCodingControlInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1ProfileInfoKHR; + const void * pNext = {}; + StdVideoAV1Profile stdProfile = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoCodingControlInfoKHR; + using Type = VideoEncodeAV1ProfileInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoDecodeCapabilitiesKHR + struct VideoEncodeAV1QIndexKHR { - using NativeType = VkVideoDecodeCapabilitiesKHR; + using NativeType = VkVideoEncodeAV1QIndexKHR; - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeCapabilitiesKHR; +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeAV1QIndexKHR( uint32_t intraQIndex_ = {}, uint32_t predictiveQIndex_ = {}, uint32_t bipredictiveQIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : intraQIndex{ intraQIndex_ } + , predictiveQIndex{ predictiveQIndex_ } + , bipredictiveQIndex{ bipredictiveQIndex_ } + { + } -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoDecodeCapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR flags_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1QIndexKHR( VideoEncodeAV1QIndexKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1QIndexKHR( VkVideoEncodeAV1QIndexKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1QIndexKHR( *reinterpret_cast( &rhs ) ) { } - VULKAN_HPP_CONSTEXPR VideoDecodeCapabilitiesKHR( VideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeAV1QIndexKHR & operator=( VideoEncodeAV1QIndexKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoDecodeCapabilitiesKHR( VkVideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoDecodeCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + VideoEncodeAV1QIndexKHR & operator=( VkVideoEncodeAV1QIndexKHR const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoDecodeCapabilitiesKHR & operator=( VideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1QIndexKHR & setIntraQIndex( uint32_t intraQIndex_ ) VULKAN_HPP_NOEXCEPT + { + intraQIndex = intraQIndex_; + return *this; + } - VideoDecodeCapabilitiesKHR & operator=( VkVideoDecodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1QIndexKHR & setPredictiveQIndex( uint32_t 
predictiveQIndex_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + predictiveQIndex = predictiveQIndex_; return *this; } - operator VkVideoDecodeCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1QIndexKHR & setBipredictiveQIndex( uint32_t bipredictiveQIndex_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + bipredictiveQIndex = bipredictiveQIndex_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoDecodeCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeAV1QIndexKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkVideoEncodeAV1QIndexKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags ); + return std::tie( intraQIndex, predictiveQIndex, bipredictiveQIndex ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoDecodeCapabilitiesKHR const & ) const = default; -# else - bool operator==( VideoDecodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1QIndexKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1QIndexKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); -# endif +# else + return ( intraQIndex == rhs.intraQIndex ) && ( predictiveQIndex == rhs.predictiveQIndex ) && ( bipredictiveQIndex == rhs.bipredictiveQIndex ); +# endif } - bool operator!=( VideoDecodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeAV1QIndexKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeCapabilitiesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR flags = {}; - }; - - template <> - struct CppType - { - using Type = VideoDecodeCapabilitiesKHR; + uint32_t intraQIndex = {}; + uint32_t predictiveQIndex = {}; + uint32_t bipredictiveQIndex = {}; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoDecodeH264CapabilitiesEXT + struct VideoEncodeAV1QualityLevelPropertiesKHR { - using NativeType = VkVideoDecodeH264CapabilitiesEXT; + using NativeType = VkVideoEncodeAV1QualityLevelPropertiesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264CapabilitiesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1QualityLevelPropertiesKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoDecodeH264CapabilitiesEXT( StdVideoH264Level maxLevel_ = {}, - VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxLevel( maxLevel_ ) - , fieldOffsetGranularity( fieldOffsetGranularity_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) 
&& !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1QualityLevelPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR preferredRateControlFlags_ = {}, + uint32_t preferredGopFrameCount_ = {}, + uint32_t preferredKeyFramePeriod_ = {}, + uint32_t preferredConsecutiveBipredictiveFrameCount_ = {}, + uint32_t preferredTemporalLayerCount_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR preferredConstantQIndex_ = {}, + uint32_t preferredMaxSingleReferenceCount_ = {}, + uint32_t preferredSingleReferenceNameMask_ = {}, + uint32_t preferredMaxUnidirectionalCompoundReferenceCount_ = {}, + uint32_t preferredMaxUnidirectionalCompoundGroup1ReferenceCount_ = {}, + uint32_t preferredUnidirectionalCompoundReferenceNameMask_ = {}, + uint32_t preferredMaxBidirectionalCompoundReferenceCount_ = {}, + uint32_t preferredMaxBidirectionalCompoundGroup1ReferenceCount_ = {}, + uint32_t preferredMaxBidirectionalCompoundGroup2ReferenceCount_ = {}, + uint32_t preferredBidirectionalCompoundReferenceNameMask_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , preferredRateControlFlags{ preferredRateControlFlags_ } + , preferredGopFrameCount{ preferredGopFrameCount_ } + , preferredKeyFramePeriod{ preferredKeyFramePeriod_ } + , preferredConsecutiveBipredictiveFrameCount{ preferredConsecutiveBipredictiveFrameCount_ } + , preferredTemporalLayerCount{ preferredTemporalLayerCount_ } + , preferredConstantQIndex{ preferredConstantQIndex_ } + , preferredMaxSingleReferenceCount{ preferredMaxSingleReferenceCount_ } + , preferredSingleReferenceNameMask{ preferredSingleReferenceNameMask_ } + , preferredMaxUnidirectionalCompoundReferenceCount{ preferredMaxUnidirectionalCompoundReferenceCount_ } + , preferredMaxUnidirectionalCompoundGroup1ReferenceCount{ preferredMaxUnidirectionalCompoundGroup1ReferenceCount_ } + , preferredUnidirectionalCompoundReferenceNameMask{ preferredUnidirectionalCompoundReferenceNameMask_ } + , preferredMaxBidirectionalCompoundReferenceCount{ preferredMaxBidirectionalCompoundReferenceCount_ } + , preferredMaxBidirectionalCompoundGroup1ReferenceCount{ preferredMaxBidirectionalCompoundGroup1ReferenceCount_ } + , preferredMaxBidirectionalCompoundGroup2ReferenceCount{ preferredMaxBidirectionalCompoundGroup2ReferenceCount_ } + , preferredBidirectionalCompoundReferenceNameMask{ preferredBidirectionalCompoundReferenceNameMask_ } { } - VULKAN_HPP_CONSTEXPR VideoDecodeH264CapabilitiesEXT( VideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeAV1QualityLevelPropertiesKHR( VideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoDecodeH264CapabilitiesEXT( VkVideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoDecodeH264CapabilitiesEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeAV1QualityLevelPropertiesKHR( VkVideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1QualityLevelPropertiesKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoDecodeH264CapabilitiesEXT & operator=( VideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeAV1QualityLevelPropertiesKHR & operator=( VideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoDecodeH264CapabilitiesEXT & operator=( VkVideoDecodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + 
VideoEncodeAV1QualityLevelPropertiesKHR & operator=( VkVideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkVideoDecodeH264CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeAV1QualityLevelPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoDecodeH264CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeAV1QualityLevelPropertiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, maxLevel, fieldOffsetGranularity ); + return std::tie( sType, + pNext, + preferredRateControlFlags, + preferredGopFrameCount, + preferredKeyFramePeriod, + preferredConsecutiveBipredictiveFrameCount, + preferredTemporalLayerCount, + preferredConstantQIndex, + preferredMaxSingleReferenceCount, + preferredSingleReferenceNameMask, + preferredMaxUnidirectionalCompoundReferenceCount, + preferredMaxUnidirectionalCompoundGroup1ReferenceCount, + preferredUnidirectionalCompoundReferenceNameMask, + preferredMaxBidirectionalCompoundReferenceCount, + preferredMaxBidirectionalCompoundGroup1ReferenceCount, + preferredMaxBidirectionalCompoundGroup2ReferenceCount, + preferredBidirectionalCompoundReferenceNameMask ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - std::strong_ordering operator<=>( VideoDecodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1QualityLevelPropertiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) - return cmp; - if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) - return cmp; - if ( auto cmp = memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoH264Level ) ); cmp != 0 ) - return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; - if ( auto cmp = fieldOffsetGranularity <=> rhs.fieldOffsetGranularity; cmp != 0 ) - return cmp; - - return std::strong_ordering::equivalent; - } +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( preferredRateControlFlags == rhs.preferredRateControlFlags ) && + ( preferredGopFrameCount == rhs.preferredGopFrameCount ) && ( preferredKeyFramePeriod == rhs.preferredKeyFramePeriod ) && + ( preferredConsecutiveBipredictiveFrameCount == rhs.preferredConsecutiveBipredictiveFrameCount ) && + ( preferredTemporalLayerCount == rhs.preferredTemporalLayerCount ) && ( preferredConstantQIndex == rhs.preferredConstantQIndex ) && + ( preferredMaxSingleReferenceCount == rhs.preferredMaxSingleReferenceCount ) && + ( preferredSingleReferenceNameMask == rhs.preferredSingleReferenceNameMask ) && + ( preferredMaxUnidirectionalCompoundReferenceCount == rhs.preferredMaxUnidirectionalCompoundReferenceCount ) && + ( preferredMaxUnidirectionalCompoundGroup1ReferenceCount == rhs.preferredMaxUnidirectionalCompoundGroup1ReferenceCount ) && + ( preferredUnidirectionalCompoundReferenceNameMask == rhs.preferredUnidirectionalCompoundReferenceNameMask ) && + ( preferredMaxBidirectionalCompoundReferenceCount == rhs.preferredMaxBidirectionalCompoundReferenceCount ) && + ( preferredMaxBidirectionalCompoundGroup1ReferenceCount == rhs.preferredMaxBidirectionalCompoundGroup1ReferenceCount ) && + ( preferredMaxBidirectionalCompoundGroup2ReferenceCount == rhs.preferredMaxBidirectionalCompoundGroup2ReferenceCount ) && + ( preferredBidirectionalCompoundReferenceNameMask == rhs.preferredBidirectionalCompoundReferenceNameMask ); # endif - - bool operator==( VideoDecodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoH264Level ) ) == 0 ) && - ( fieldOffsetGranularity == rhs.fieldOffsetGranularity ); } - bool operator!=( VideoDecodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264CapabilitiesEXT; - void * pNext = {}; - StdVideoH264Level maxLevel = {}; - VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1QualityLevelPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR preferredRateControlFlags = {}; + uint32_t preferredGopFrameCount = {}; + uint32_t preferredKeyFramePeriod = {}; + uint32_t preferredConsecutiveBipredictiveFrameCount = {}; + uint32_t preferredTemporalLayerCount = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR preferredConstantQIndex = {}; + uint32_t preferredMaxSingleReferenceCount = {}; + uint32_t preferredSingleReferenceNameMask = {}; + uint32_t preferredMaxUnidirectionalCompoundReferenceCount = {}; + uint32_t preferredMaxUnidirectionalCompoundGroup1ReferenceCount = {}; + uint32_t preferredUnidirectionalCompoundReferenceNameMask = {}; + uint32_t preferredMaxBidirectionalCompoundReferenceCount = {}; + uint32_t preferredMaxBidirectionalCompoundGroup1ReferenceCount = {}; + uint32_t preferredMaxBidirectionalCompoundGroup2ReferenceCount = {}; + uint32_t 
preferredBidirectionalCompoundReferenceNameMask = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoDecodeH264CapabilitiesEXT; + using Type = VideoEncodeAV1QualityLevelPropertiesKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoDecodeH264DpbSlotInfoEXT + struct VideoEncodeAV1QuantizationMapCapabilitiesKHR { - using NativeType = VkVideoDecodeH264DpbSlotInfoEXT; + using NativeType = VkVideoEncodeAV1QuantizationMapCapabilitiesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264DpbSlotInfoEXT; - -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoEXT( const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pStdReferenceInfo( pStdReferenceInfo_ ) - { - } - - VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoEXT( VideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1QuantizationMapCapabilitiesKHR; - VideoDecodeH264DpbSlotInfoEXT( VkVideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoDecodeH264DpbSlotInfoEXT( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeAV1QuantizationMapCapabilitiesKHR( int32_t minQIndexDelta_ = {}, int32_t maxQIndexDelta_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minQIndexDelta{ minQIndexDelta_ } + , maxQIndexDelta{ maxQIndexDelta_ } { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoDecodeH264DpbSlotInfoEXT & operator=( VideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeAV1QuantizationMapCapabilitiesKHR( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoDecodeH264DpbSlotInfoEXT & operator=( VkVideoDecodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeAV1QuantizationMapCapabilitiesKHR( VkVideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1QuantizationMapCapabilitiesKHR( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + VideoEncodeAV1QuantizationMapCapabilitiesKHR & operator=( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264DpbSlotInfoEXT & - setPStdReferenceInfo( const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT + VideoEncodeAV1QuantizationMapCapabilitiesKHR & operator=( VkVideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - pStdReferenceInfo = pStdReferenceInfo_; + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkVideoDecodeH264DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeAV1QuantizationMapCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator 
VkVideoDecodeH264DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeAV1QuantizationMapCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pStdReferenceInfo ); + return std::tie( sType, pNext, minQIndexDelta, maxQIndexDelta ); } -# endif - -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoDecodeH264DpbSlotInfoEXT const & ) const = default; -# else - bool operator==( VideoDecodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minQIndexDelta == rhs.minQIndexDelta ) && ( maxQIndexDelta == rhs.maxQIndexDelta ); +# endif } - bool operator!=( VideoDecodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264DpbSlotInfoEXT; - const void * pNext = {}; - const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1QuantizationMapCapabilitiesKHR; + void * pNext = {}; + int32_t minQIndexDelta = {}; + int32_t maxQIndexDelta = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoDecodeH264DpbSlotInfoEXT; + using Type = VideoEncodeAV1QuantizationMapCapabilitiesKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoDecodeH264MvcInfoEXT + struct VideoEncodeAV1RateControlInfoKHR { - using NativeType = VkVideoDecodeH264MvcInfoEXT; + using NativeType = VkVideoEncodeAV1RateControlInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264MvcInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1RateControlInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoDecodeH264MvcInfoEXT( const StdVideoDecodeH264Mvc * pStdMvc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pStdMvc( pStdMvc_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1RateControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR flags_ = {}, + uint32_t gopFrameCount_ = {}, + uint32_t keyFramePeriod_ = {}, + uint32_t consecutiveBipredictiveFrameCount_ = {}, + uint32_t temporalLayerCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , 
gopFrameCount{ gopFrameCount_ } + , keyFramePeriod{ keyFramePeriod_ } + , consecutiveBipredictiveFrameCount{ consecutiveBipredictiveFrameCount_ } + , temporalLayerCount{ temporalLayerCount_ } { } - VULKAN_HPP_CONSTEXPR VideoDecodeH264MvcInfoEXT( VideoDecodeH264MvcInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeAV1RateControlInfoKHR( VideoEncodeAV1RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoDecodeH264MvcInfoEXT( VkVideoDecodeH264MvcInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoDecodeH264MvcInfoEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeAV1RateControlInfoKHR( VkVideoEncodeAV1RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1RateControlInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoDecodeH264MvcInfoEXT & operator=( VideoDecodeH264MvcInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeAV1RateControlInfoKHR & operator=( VideoEncodeAV1RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoDecodeH264MvcInfoEXT & operator=( VkVideoDecodeH264MvcInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeAV1RateControlInfoKHR & operator=( VkVideoEncodeAV1RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264MvcInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264MvcInfoEXT & setPStdMvc( const StdVideoDecodeH264Mvc * pStdMvc_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { - pStdMvc = pStdMvc_; + flags = flags_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkVideoDecodeH264MvcInfoEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + gopFrameCount = gopFrameCount_; + return *this; } - operator VkVideoDecodeH264MvcInfoEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setKeyFramePeriod( uint32_t keyFramePeriod_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + keyFramePeriod = keyFramePeriod_; + return *this; } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & + setConsecutiveBipredictiveFrameCount( uint32_t consecutiveBipredictiveFrameCount_ ) VULKAN_HPP_NOEXCEPT + { + consecutiveBipredictiveFrameCount = consecutiveBipredictiveFrameCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setTemporalLayerCount( uint32_t temporalLayerCount_ ) VULKAN_HPP_NOEXCEPT + { + temporalLayerCount = temporalLayerCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeAV1RateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1RateControlInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pStdMvc ); + return std::tie( sType, pNext, flags, gopFrameCount, keyFramePeriod, consecutiveBipredictiveFrameCount, temporalLayerCount ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoDecodeH264MvcInfoEXT const & ) const = default; -# else - bool operator==( VideoDecodeH264MvcInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1RateControlInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdMvc == rhs.pStdMvc ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( gopFrameCount == rhs.gopFrameCount ) && + ( keyFramePeriod == rhs.keyFramePeriod ) && ( consecutiveBipredictiveFrameCount == rhs.consecutiveBipredictiveFrameCount ) && + ( temporalLayerCount == rhs.temporalLayerCount ); +# endif } - bool operator!=( VideoDecodeH264MvcInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeAV1RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264MvcInfoEXT; - const void * pNext = {}; - const StdVideoDecodeH264Mvc * pStdMvc = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1RateControlInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR flags = {}; + uint32_t gopFrameCount = {}; + uint32_t keyFramePeriod = {}; + uint32_t consecutiveBipredictiveFrameCount = {}; + uint32_t temporalLayerCount = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoDecodeH264MvcInfoEXT; + using Type = VideoEncodeAV1RateControlInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoDecodeH264PictureInfoEXT + struct VideoEncodeAV1RateControlLayerInfoKHR { - using NativeType = VkVideoDecodeH264PictureInfoEXT; + using NativeType = VkVideoEncodeAV1RateControlLayerInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264PictureInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1RateControlLayerInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoEXT( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ = {}, - uint32_t slicesCount_ = {}, - const uint32_t * pSlicesDataOffsets_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pStdPictureInfo( pStdPictureInfo_ ) - , slicesCount( slicesCount_ ) - , pSlicesDataOffsets( pSlicesDataOffsets_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1RateControlLayerInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useMinQIndex_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR minQIndex_ = 
{}, + VULKAN_HPP_NAMESPACE::Bool32 useMaxQIndex_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR maxQIndex_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR maxFrameSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , useMinQIndex{ useMinQIndex_ } + , minQIndex{ minQIndex_ } + , useMaxQIndex{ useMaxQIndex_ } + , maxQIndex{ maxQIndex_ } + , useMaxFrameSize{ useMaxFrameSize_ } + , maxFrameSize{ maxFrameSize_ } { } - VULKAN_HPP_CONSTEXPR VideoDecodeH264PictureInfoEXT( VideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeAV1RateControlLayerInfoKHR( VideoEncodeAV1RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoDecodeH264PictureInfoEXT( VkVideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoDecodeH264PictureInfoEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeAV1RateControlLayerInfoKHR( VkVideoEncodeAV1RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1RateControlLayerInfoKHR( *reinterpret_cast( &rhs ) ) { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoDecodeH264PictureInfoEXT( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & slicesDataOffsets_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , pStdPictureInfo( pStdPictureInfo_ ) - , slicesCount( static_cast( slicesDataOffsets_.size() ) ) - , pSlicesDataOffsets( slicesDataOffsets_.data() ) + VideoEncodeAV1RateControlLayerInfoKHR & operator=( VideoEncodeAV1RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeAV1RateControlLayerInfoKHR & operator=( VkVideoEncodeAV1RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoDecodeH264PictureInfoEXT & operator=( VideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } - VideoDecodeH264PictureInfoEXT & operator=( VkVideoDecodeH264PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setUseMinQIndex( VULKAN_HPP_NAMESPACE::Bool32 useMinQIndex_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + useMinQIndex = useMinQIndex_; return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & + setMinQIndex( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR const & minQIndex_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + minQIndex = minQIndex_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT & setPStdPictureInfo( const StdVideoDecodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setUseMaxQIndex( VULKAN_HPP_NAMESPACE::Bool32 useMaxQIndex_ ) VULKAN_HPP_NOEXCEPT { - pStdPictureInfo = pStdPictureInfo_; + useMaxQIndex = useMaxQIndex_; return *this; } - VULKAN_HPP_CONSTEXPR_14 
VideoDecodeH264PictureInfoEXT & setSlicesCount( uint32_t slicesCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & + setMaxQIndex( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR const & maxQIndex_ ) VULKAN_HPP_NOEXCEPT { - slicesCount = slicesCount_; + maxQIndex = maxQIndex_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264PictureInfoEXT & setPSlicesDataOffsets( const uint32_t * pSlicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT { - pSlicesDataOffsets = pSlicesDataOffsets_; + useMaxFrameSize = useMaxFrameSize_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoDecodeH264PictureInfoEXT & - setSlicesDataOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & slicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & + setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT { - slicesCount = static_cast( slicesDataOffsets_.size() ); - pSlicesDataOffsets = slicesDataOffsets_.data(); + maxFrameSize = maxFrameSize_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoDecodeH264PictureInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeAV1RateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoDecodeH264PictureInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeAV1RateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pStdPictureInfo, slicesCount, pSlicesDataOffsets ); + return std::tie( sType, pNext, useMinQIndex, minQIndex, useMaxQIndex, maxQIndex, useMaxFrameSize, maxFrameSize ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoDecodeH264PictureInfoEXT const & ) const = default; -# else - bool operator==( VideoDecodeH264PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1RateControlLayerInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && ( slicesCount == rhs.slicesCount ) && - ( pSlicesDataOffsets == rhs.pSlicesDataOffsets ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMinQIndex == rhs.useMinQIndex ) && ( minQIndex == rhs.minQIndex ) && + ( 
useMaxQIndex == rhs.useMaxQIndex ) && ( maxQIndex == rhs.maxQIndex ) && ( useMaxFrameSize == rhs.useMaxFrameSize ) && + ( maxFrameSize == rhs.maxFrameSize ); +# endif } - bool operator!=( VideoDecodeH264PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeAV1RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264PictureInfoEXT; - const void * pNext = {}; - const StdVideoDecodeH264PictureInfo * pStdPictureInfo = {}; - uint32_t slicesCount = {}; - const uint32_t * pSlicesDataOffsets = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1RateControlLayerInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMinQIndex = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR minQIndex = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxQIndex = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR maxQIndex = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR maxFrameSize = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoDecodeH264PictureInfoEXT; + using Type = VideoEncodeAV1RateControlLayerInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoDecodeH264ProfileInfoEXT + struct VideoEncodeAV1SessionCreateInfoKHR { - using NativeType = VkVideoDecodeH264ProfileInfoEXT; + using NativeType = VkVideoEncodeAV1SessionCreateInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264ProfileInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1SessionCreateInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileInfoEXT( StdVideoH264ProfileIdc stdProfileIdc_ = {}, - VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagsEXT pictureLayout_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stdProfileIdc( stdProfileIdc_ ) - , pictureLayout( pictureLayout_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1SessionCreateInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useMaxLevel_ = {}, + StdVideoAV1Level maxLevel_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , useMaxLevel{ useMaxLevel_ } + , maxLevel{ maxLevel_ } { } - VULKAN_HPP_CONSTEXPR VideoDecodeH264ProfileInfoEXT( VideoDecodeH264ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeAV1SessionCreateInfoKHR( VideoEncodeAV1SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoDecodeH264ProfileInfoEXT( VkVideoDecodeH264ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoDecodeH264ProfileInfoEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeAV1SessionCreateInfoKHR( VkVideoEncodeAV1SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1SessionCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoDecodeH264ProfileInfoEXT & operator=( VideoDecodeH264ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeAV1SessionCreateInfoKHR & operator=( VideoEncodeAV1SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - 
VideoDecodeH264ProfileInfoEXT & operator=( VkVideoDecodeH264ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeAV1SessionCreateInfoKHR & operator=( VkVideoEncodeAV1SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileInfoEXT & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionCreateInfoKHR & setUseMaxLevel( VULKAN_HPP_NAMESPACE::Bool32 useMaxLevel_ ) VULKAN_HPP_NOEXCEPT { - stdProfileIdc = stdProfileIdc_; + useMaxLevel = useMaxLevel_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264ProfileInfoEXT & - setPictureLayout( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagsEXT pictureLayout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionCreateInfoKHR & setMaxLevel( StdVideoAV1Level maxLevel_ ) VULKAN_HPP_NOEXCEPT { - pictureLayout = pictureLayout_; + maxLevel = maxLevel_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoDecodeH264ProfileInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeAV1SessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoDecodeH264ProfileInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeAV1SessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, stdProfileIdc, pictureLayout ); + return std::tie( sType, pNext, useMaxLevel, maxLevel ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - std::strong_ordering operator<=>( VideoDecodeH264ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeAV1SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; - if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ); cmp != 0 ) - return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; - if ( auto cmp = pictureLayout <=> rhs.pictureLayout; cmp != 0 ) + if ( auto cmp = useMaxLevel <=> rhs.useMaxLevel; cmp != 0 ) return cmp; + if ( auto cmp = memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } -# endif +#endif - bool operator==( VideoDecodeH264ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoEncodeAV1SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 ) && - ( pictureLayout == rhs.pictureLayout ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMaxLevel == rhs.useMaxLevel ) && + ( memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ) == 0 ); } - bool operator!=( VideoDecodeH264ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeAV1SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264ProfileInfoEXT; - const void * pNext = {}; - StdVideoH264ProfileIdc stdProfileIdc = {}; - VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagsEXT pictureLayout = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1SessionCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxLevel = {}; + StdVideoAV1Level maxLevel = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoDecodeH264ProfileInfoEXT; + using Type = VideoEncodeAV1SessionCreateInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoDecodeH264SessionParametersAddInfoEXT + struct VideoEncodeAV1SessionParametersCreateInfoKHR { - using NativeType = VkVideoDecodeH264SessionParametersAddInfoEXT; + using NativeType = VkVideoEncodeAV1SessionParametersCreateInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264SessionParametersAddInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1SessionParametersCreateInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoEXT( uint32_t spsStdCount_ = {}, - const StdVideoH264SequenceParameterSet * pSpsStd_ = {}, - uint32_t ppsStdCount_ = {}, - const StdVideoH264PictureParameterSet * pPpsStd_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , spsStdCount( spsStdCount_ ) - , pSpsStd( pSpsStd_ ) - , ppsStdCount( ppsStdCount_ ) - , pPpsStd( pPpsStd_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1SessionParametersCreateInfoKHR( const StdVideoAV1SequenceHeader * pStdSequenceHeader_ = {}, + const StdVideoEncodeAV1DecoderModelInfo * pStdDecoderModelInfo_ = {}, + uint32_t stdOperatingPointCount_ = {}, + const StdVideoEncodeAV1OperatingPointInfo * pStdOperatingPoints_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdSequenceHeader{ pStdSequenceHeader_ } + , pStdDecoderModelInfo{ pStdDecoderModelInfo_ } + , stdOperatingPointCount{ stdOperatingPointCount_ } + , pStdOperatingPoints{ pStdOperatingPoints_ } { } - VULKAN_HPP_CONSTEXPR VideoDecodeH264SessionParametersAddInfoEXT( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeAV1SessionParametersCreateInfoKHR( 
VideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoDecodeH264SessionParametersAddInfoEXT( VkVideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoDecodeH264SessionParametersAddInfoEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeAV1SessionParametersCreateInfoKHR( VkVideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1SessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoDecodeH264SessionParametersAddInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ = {}, - const void * pNext_ = nullptr ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeAV1SessionParametersCreateInfoKHR( + const StdVideoAV1SequenceHeader * pStdSequenceHeader_, + const StdVideoEncodeAV1DecoderModelInfo * pStdDecoderModelInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdOperatingPoints_, + const void * pNext_ = nullptr ) : pNext( pNext_ ) - , spsStdCount( static_cast( spsStd_.size() ) ) - , pSpsStd( spsStd_.data() ) - , ppsStdCount( static_cast( ppsStd_.size() ) ) - , pPpsStd( ppsStd_.data() ) + , pStdSequenceHeader( pStdSequenceHeader_ ) + , pStdDecoderModelInfo( pStdDecoderModelInfo_ ) + , stdOperatingPointCount( static_cast( stdOperatingPoints_.size() ) ) + , pStdOperatingPoints( stdOperatingPoints_.data() ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VideoDecodeH264SessionParametersAddInfoEXT & operator=( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeAV1SessionParametersCreateInfoKHR & operator=( VideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoDecodeH264SessionParametersAddInfoEXT & operator=( VkVideoDecodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeAV1SessionParametersCreateInfoKHR & operator=( VkVideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT & setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT - { - spsStdCount = spsStdCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT & setPSpsStd( const StdVideoH264SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR & + setPStdSequenceHeader( const StdVideoAV1SequenceHeader * pStdSequenceHeader_ ) VULKAN_HPP_NOEXCEPT { - pSpsStd = pSpsStd_; + pStdSequenceHeader = pStdSequenceHeader_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoDecodeH264SessionParametersAddInfoEXT & - setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_ ) VULKAN_HPP_NOEXCEPT + 
VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR & + setPStdDecoderModelInfo( const StdVideoEncodeAV1DecoderModelInfo * pStdDecoderModelInfo_ ) VULKAN_HPP_NOEXCEPT { - spsStdCount = static_cast( spsStd_.size() ); - pSpsStd = spsStd_.data(); + pStdDecoderModelInfo = pStdDecoderModelInfo_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT & setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR & setStdOperatingPointCount( uint32_t stdOperatingPointCount_ ) VULKAN_HPP_NOEXCEPT { - ppsStdCount = ppsStdCount_; + stdOperatingPointCount = stdOperatingPointCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersAddInfoEXT & setPPpsStd( const StdVideoH264PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR & + setPStdOperatingPoints( const StdVideoEncodeAV1OperatingPointInfo * pStdOperatingPoints_ ) VULKAN_HPP_NOEXCEPT { - pPpsStd = pPpsStd_; + pStdOperatingPoints = pStdOperatingPoints_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoDecodeH264SessionParametersAddInfoEXT & - setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeAV1SessionParametersCreateInfoKHR & setStdOperatingPoints( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdOperatingPoints_ ) VULKAN_HPP_NOEXCEPT { - ppsStdCount = static_cast( ppsStd_.size() ); - pPpsStd = ppsStd_.data(); + stdOperatingPointCount = static_cast( stdOperatingPoints_.size() ); + pStdOperatingPoints = stdOperatingPoints_.data(); return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoDecodeH264SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeAV1SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoDecodeH264SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeAV1SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + const StdVideoEncodeAV1OperatingPointInfo * const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, spsStdCount, pSpsStd, ppsStdCount, pPpsStd ); + return std::tie( sType, pNext, pStdSequenceHeader, pStdDecoderModelInfo, stdOperatingPointCount, pStdOperatingPoints ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoDecodeH264SessionParametersAddInfoEXT const & ) const = default; -# else - bool operator==( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1SessionParametersCreateInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( 
VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsStdCount == rhs.spsStdCount ) && ( pSpsStd == rhs.pSpsStd ) && - ( ppsStdCount == rhs.ppsStdCount ) && ( pPpsStd == rhs.pPpsStd ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdSequenceHeader == rhs.pStdSequenceHeader ) && + ( pStdDecoderModelInfo == rhs.pStdDecoderModelInfo ) && ( stdOperatingPointCount == rhs.stdOperatingPointCount ) && + ( pStdOperatingPoints == rhs.pStdOperatingPoints ); +# endif } - bool operator!=( VideoDecodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersAddInfoEXT; - const void * pNext = {}; - uint32_t spsStdCount = {}; - const StdVideoH264SequenceParameterSet * pSpsStd = {}; - uint32_t ppsStdCount = {}; - const StdVideoH264PictureParameterSet * pPpsStd = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1SessionParametersCreateInfoKHR; + const void * pNext = {}; + const StdVideoAV1SequenceHeader * pStdSequenceHeader = {}; + const StdVideoEncodeAV1DecoderModelInfo * pStdDecoderModelInfo = {}; + uint32_t stdOperatingPointCount = {}; + const StdVideoEncodeAV1OperatingPointInfo * pStdOperatingPoints = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoDecodeH264SessionParametersAddInfoEXT; + using Type = VideoEncodeAV1SessionParametersCreateInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoDecodeH264SessionParametersCreateInfoEXT + struct VideoEncodeCapabilitiesKHR { - using NativeType = VkVideoDecodeH264SessionParametersCreateInfoEXT; + using NativeType = VkVideoEncodeCapabilitiesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT; - -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - VideoDecodeH264SessionParametersCreateInfoEXT( uint32_t maxSpsStdCount_ = {}, - uint32_t maxPpsStdCount_ = {}, - const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxSpsStdCount( maxSpsStdCount_ ) - , maxPpsStdCount( maxPpsStdCount_ ) - , pParametersAddInfo( pParametersAddInfo_ ) - { - } - - VULKAN_HPP_CONSTEXPR - VideoDecodeH264SessionParametersCreateInfoEXT( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - VideoDecodeH264SessionParametersCreateInfoEXT( VkVideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoDecodeH264SessionParametersCreateInfoEXT( *reinterpret_cast( &rhs ) ) - { - } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - VideoDecodeH264SessionParametersCreateInfoEXT & operator=( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeCapabilitiesKHR; - VideoDecodeH264SessionParametersCreateInfoEXT & operator=( VkVideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && 
!defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeCapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR rateControlModes_ = {}, + uint32_t maxRateControlLayers_ = {}, + uint64_t maxBitrate_ = {}, + uint32_t maxQualityLevels_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D encodeInputPictureGranularity_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR supportedEncodeFeedbackFlags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , rateControlModes{ rateControlModes_ } + , maxRateControlLayers{ maxRateControlLayers_ } + , maxBitrate{ maxBitrate_ } + , maxQualityLevels{ maxQualityLevels_ } + , encodeInputPictureGranularity{ encodeInputPictureGranularity_ } + , supportedEncodeFeedbackFlags{ supportedEncodeFeedbackFlags_ } { - *this = *reinterpret_cast( &rhs ); - return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + VULKAN_HPP_CONSTEXPR VideoEncodeCapabilitiesKHR( VideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT & setMaxSpsStdCount( uint32_t maxSpsStdCount_ ) VULKAN_HPP_NOEXCEPT + VideoEncodeCapabilitiesKHR( VkVideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeCapabilitiesKHR( *reinterpret_cast( &rhs ) ) { - maxSpsStdCount = maxSpsStdCount_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT & setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT - { - maxPpsStdCount = maxPpsStdCount_; - return *this; - } + VideoEncodeCapabilitiesKHR & operator=( VideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH264SessionParametersCreateInfoEXT & - setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT + VideoEncodeCapabilitiesKHR & operator=( VkVideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - pParametersAddInfo = pParametersAddInfo_; + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkVideoDecodeH264SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoDecodeH264SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + VULKAN_HPP_NAMESPACE::Extent2D const &, + VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, maxSpsStdCount, maxPpsStdCount, pParametersAddInfo ); + return std::tie( sType, + pNext, + flags, + rateControlModes, + maxRateControlLayers, + maxBitrate, + maxQualityLevels, + encodeInputPictureGranularity, + supportedEncodeFeedbackFlags ); } -# endif +#endif 
-# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoDecodeH264SessionParametersCreateInfoEXT const & ) const = default; -# else - bool operator==( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeCapabilitiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSpsStdCount == rhs.maxSpsStdCount ) && ( maxPpsStdCount == rhs.maxPpsStdCount ) && - ( pParametersAddInfo == rhs.pParametersAddInfo ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rateControlModes == rhs.rateControlModes ) && + ( maxRateControlLayers == rhs.maxRateControlLayers ) && ( maxBitrate == rhs.maxBitrate ) && ( maxQualityLevels == rhs.maxQualityLevels ) && + ( encodeInputPictureGranularity == rhs.encodeInputPictureGranularity ) && ( supportedEncodeFeedbackFlags == rhs.supportedEncodeFeedbackFlags ); +# endif } - bool operator!=( VideoDecodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT; - const void * pNext = {}; - uint32_t maxSpsStdCount = {}; - uint32_t maxPpsStdCount = {}; - const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoEXT * pParametersAddInfo = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeCapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR rateControlModes = {}; + uint32_t maxRateControlLayers = {}; + uint64_t maxBitrate = {}; + uint32_t maxQualityLevels = {}; + VULKAN_HPP_NAMESPACE::Extent2D encodeInputPictureGranularity = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR supportedEncodeFeedbackFlags = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoDecodeH264SessionParametersCreateInfoEXT; + using Type = VideoEncodeCapabilitiesKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoDecodeH265CapabilitiesEXT + struct VideoEncodeH264CapabilitiesKHR { - using NativeType = VkVideoDecodeH265CapabilitiesEXT; + using NativeType = VkVideoEncodeH264CapabilitiesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265CapabilitiesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264CapabilitiesKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoDecodeH265CapabilitiesEXT( StdVideoH265Level maxLevel_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxLevel( maxLevel_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsKHR flags_ = {}, + StdVideoH264LevelIdc maxLevelIdc_ = {}, + uint32_t maxSliceCount_ = {}, + 
uint32_t maxPPictureL0ReferenceCount_ = {}, + uint32_t maxBPictureL0ReferenceCount_ = {}, + uint32_t maxL1ReferenceCount_ = {}, + uint32_t maxTemporalLayerCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 expectDyadicTemporalLayerPattern_ = {}, + int32_t minQp_ = {}, + int32_t maxQp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 prefersGopRemainingFrames_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264StdFlagsKHR stdSyntaxFlags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , maxLevelIdc{ maxLevelIdc_ } + , maxSliceCount{ maxSliceCount_ } + , maxPPictureL0ReferenceCount{ maxPPictureL0ReferenceCount_ } + , maxBPictureL0ReferenceCount{ maxBPictureL0ReferenceCount_ } + , maxL1ReferenceCount{ maxL1ReferenceCount_ } + , maxTemporalLayerCount{ maxTemporalLayerCount_ } + , expectDyadicTemporalLayerPattern{ expectDyadicTemporalLayerPattern_ } + , minQp{ minQp_ } + , maxQp{ maxQp_ } + , prefersGopRemainingFrames{ prefersGopRemainingFrames_ } + , requiresGopRemainingFrames{ requiresGopRemainingFrames_ } + , stdSyntaxFlags{ stdSyntaxFlags_ } { } - VULKAN_HPP_CONSTEXPR VideoDecodeH265CapabilitiesEXT( VideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesKHR( VideoEncodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoDecodeH265CapabilitiesEXT( VkVideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoDecodeH265CapabilitiesEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeH264CapabilitiesKHR( VkVideoEncodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264CapabilitiesKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoDecodeH265CapabilitiesEXT & operator=( VideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH264CapabilitiesKHR & operator=( VideoEncodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoDecodeH265CapabilitiesEXT & operator=( VkVideoDecodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH264CapabilitiesKHR & operator=( VkVideoEncodeH264CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkVideoDecodeH265CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoDecodeH265CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, maxLevel ); + return std::tie( sType, + pNext, + flags, + maxLevelIdc, + maxSliceCount, + maxPPictureL0ReferenceCount, + maxBPictureL0ReferenceCount, + maxL1ReferenceCount, + maxTemporalLayerCount, + expectDyadicTemporalLayerPattern, + minQp, + maxQp, + prefersGopRemainingFrames, + requiresGopRemainingFrames, + stdSyntaxFlags ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - std::strong_ordering 
operator<=>( VideoDecodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; - if ( auto cmp = memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoH265Level ) ); cmp != 0 ) + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ); cmp != 0 ) return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = maxSliceCount <=> rhs.maxSliceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxPPictureL0ReferenceCount <=> rhs.maxPPictureL0ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxBPictureL0ReferenceCount <=> rhs.maxBPictureL0ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxL1ReferenceCount <=> rhs.maxL1ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxTemporalLayerCount <=> rhs.maxTemporalLayerCount; cmp != 0 ) + return cmp; + if ( auto cmp = expectDyadicTemporalLayerPattern <=> rhs.expectDyadicTemporalLayerPattern; cmp != 0 ) + return cmp; + if ( auto cmp = minQp <=> rhs.minQp; cmp != 0 ) + return cmp; + if ( auto cmp = maxQp <=> rhs.maxQp; cmp != 0 ) + return cmp; + if ( auto cmp = prefersGopRemainingFrames <=> rhs.prefersGopRemainingFrames; cmp != 0 ) + return cmp; + if ( auto cmp = requiresGopRemainingFrames <=> rhs.requiresGopRemainingFrames; cmp != 0 ) + return cmp; + if ( auto cmp = stdSyntaxFlags <=> rhs.stdSyntaxFlags; cmp != 0 ) + return cmp; return std::strong_ordering::equivalent; } -# endif +#endif - bool operator==( VideoDecodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoEncodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoH265Level ) ) == 0 ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ) == 0 ) && ( maxSliceCount == rhs.maxSliceCount ) && + ( maxPPictureL0ReferenceCount == rhs.maxPPictureL0ReferenceCount ) && ( maxBPictureL0ReferenceCount == rhs.maxBPictureL0ReferenceCount ) && + ( maxL1ReferenceCount == rhs.maxL1ReferenceCount ) && ( maxTemporalLayerCount == rhs.maxTemporalLayerCount ) && + ( expectDyadicTemporalLayerPattern == rhs.expectDyadicTemporalLayerPattern ) && ( minQp == rhs.minQp ) && ( maxQp == rhs.maxQp ) && + ( prefersGopRemainingFrames == rhs.prefersGopRemainingFrames ) && ( requiresGopRemainingFrames == rhs.requiresGopRemainingFrames ) && + ( stdSyntaxFlags == rhs.stdSyntaxFlags ); } - bool operator!=( VideoDecodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH264CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265CapabilitiesEXT; - void * pNext = {}; - StdVideoH265Level maxLevel = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264CapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsKHR flags = {}; + StdVideoH264LevelIdc maxLevelIdc = {}; + uint32_t maxSliceCount = {}; + uint32_t 
maxPPictureL0ReferenceCount = {}; + uint32_t maxBPictureL0ReferenceCount = {}; + uint32_t maxL1ReferenceCount = {}; + uint32_t maxTemporalLayerCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 expectDyadicTemporalLayerPattern = {}; + int32_t minQp = {}; + int32_t maxQp = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersGopRemainingFrames = {}; + VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264StdFlagsKHR stdSyntaxFlags = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoDecodeH265CapabilitiesEXT; + using Type = VideoEncodeH264CapabilitiesKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoDecodeH265DpbSlotInfoEXT + struct VideoEncodeH264DpbSlotInfoKHR { - using NativeType = VkVideoDecodeH265DpbSlotInfoEXT; + using NativeType = VkVideoEncodeH264DpbSlotInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265DpbSlotInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264DpbSlotInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoEXT( const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ = {}, +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264DpbSlotInfoKHR( const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pStdReferenceInfo( pStdReferenceInfo_ ) + : pNext{ pNext_ } + , pStdReferenceInfo{ pStdReferenceInfo_ } { } - VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoEXT( VideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeH264DpbSlotInfoKHR( VideoEncodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoDecodeH265DpbSlotInfoEXT( VkVideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoDecodeH265DpbSlotInfoEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeH264DpbSlotInfoKHR( VkVideoEncodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264DpbSlotInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoDecodeH265DpbSlotInfoEXT & operator=( VideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH264DpbSlotInfoKHR & operator=( VideoEncodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoDecodeH265DpbSlotInfoEXT & operator=( VkVideoDecodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH264DpbSlotInfoKHR & operator=( VkVideoEncodeH264DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265DpbSlotInfoEXT & - setPStdReferenceInfo( const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoKHR 
& + setPStdReferenceInfo( const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT { pStdReferenceInfo = pStdReferenceInfo_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoDecodeH265DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoDecodeH265DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pStdReferenceInfo ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoDecodeH265DpbSlotInfoEXT const & ) const = default; -# else - bool operator==( VideoDecodeH265DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264DpbSlotInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else +# else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo ); -# endif +# endif } - bool operator!=( VideoDecodeH265DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH264DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265DpbSlotInfoEXT; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264DpbSlotInfoKHR; const void * pNext = {}; - const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo = {}; + const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoDecodeH265DpbSlotInfoEXT; + using Type = VideoEncodeH264DpbSlotInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoDecodeH265PictureInfoEXT + struct VideoEncodeH264FrameSizeKHR { - using NativeType = VkVideoDecodeH265PictureInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265PictureInfoEXT; - -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoEXT( StdVideoDecodeH265PictureInfo * pStdPictureInfo_ = {}, - uint32_t slicesCount_ = {}, - const uint32_t * pSlicesDataOffsets_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pStdPictureInfo( pStdPictureInfo_ ) - , slicesCount( slicesCount_ ) - , pSlicesDataOffsets( pSlicesDataOffsets_ ) - { - } - - VULKAN_HPP_CONSTEXPR VideoDecodeH265PictureInfoEXT( VideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - VideoDecodeH265PictureInfoEXT( VkVideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoDecodeH265PictureInfoEXT( *reinterpret_cast( &rhs ) ) - 
{ - } + using NativeType = VkVideoEncodeH264FrameSizeKHR; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoDecodeH265PictureInfoEXT( StdVideoDecodeH265PictureInfo * pStdPictureInfo_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & slicesDataOffsets_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , pStdPictureInfo( pStdPictureInfo_ ) - , slicesCount( static_cast( slicesDataOffsets_.size() ) ) - , pSlicesDataOffsets( slicesDataOffsets_.data() ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264FrameSizeKHR( uint32_t frameISize_ = {}, uint32_t framePSize_ = {}, uint32_t frameBSize_ = {} ) VULKAN_HPP_NOEXCEPT + : frameISize{ frameISize_ } + , framePSize{ framePSize_ } + , frameBSize{ frameBSize_ } { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoDecodeH265PictureInfoEXT & operator=( VideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeH264FrameSizeKHR( VideoEncodeH264FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoDecodeH265PictureInfoEXT & operator=( VkVideoDecodeH265PictureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH264FrameSizeKHR( VkVideoEncodeH264FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264FrameSizeKHR( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + VideoEncodeH264FrameSizeKHR & operator=( VideoEncodeH264FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT & setPStdPictureInfo( StdVideoDecodeH265PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT + VideoEncodeH264FrameSizeKHR & operator=( VkVideoEncodeH264FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - pStdPictureInfo = pStdPictureInfo_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT & setSlicesCount( uint32_t slicesCount_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeKHR & setFrameISize( uint32_t frameISize_ ) VULKAN_HPP_NOEXCEPT { - slicesCount = slicesCount_; + frameISize = frameISize_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265PictureInfoEXT & setPSlicesDataOffsets( const uint32_t * pSlicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeKHR & setFramePSize( uint32_t framePSize_ ) VULKAN_HPP_NOEXCEPT { - pSlicesDataOffsets = pSlicesDataOffsets_; + framePSize = framePSize_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoDecodeH265PictureInfoEXT & - setSlicesDataOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & slicesDataOffsets_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeKHR & setFrameBSize( uint32_t frameBSize_ ) VULKAN_HPP_NOEXCEPT { - slicesCount = static_cast( slicesDataOffsets_.size() ); - pSlicesDataOffsets = slicesDataOffsets_.data(); + frameBSize = frameBSize_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoDecodeH265PictureInfoEXT 
const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264FrameSizeKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoDecodeH265PictureInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264FrameSizeKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pStdPictureInfo, slicesCount, pSlicesDataOffsets ); + return std::tie( frameISize, framePSize, frameBSize ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoDecodeH265PictureInfoEXT const & ) const = default; -# else - bool operator==( VideoDecodeH265PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264FrameSizeKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && ( slicesCount == rhs.slicesCount ) && - ( pSlicesDataOffsets == rhs.pSlicesDataOffsets ); -# endif +# else + return ( frameISize == rhs.frameISize ) && ( framePSize == rhs.framePSize ) && ( frameBSize == rhs.frameBSize ); +# endif } - bool operator!=( VideoDecodeH265PictureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH264FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265PictureInfoEXT; - const void * pNext = {}; - StdVideoDecodeH265PictureInfo * pStdPictureInfo = {}; - uint32_t slicesCount = {}; - const uint32_t * pSlicesDataOffsets = {}; - }; - - template <> - struct CppType - { - using Type = VideoDecodeH265PictureInfoEXT; + uint32_t frameISize = {}; + uint32_t framePSize = {}; + uint32_t frameBSize = {}; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoDecodeH265ProfileInfoEXT + struct VideoEncodeH264GopRemainingFrameInfoKHR { - using NativeType = VkVideoDecodeH265ProfileInfoEXT; + using NativeType = VkVideoEncodeH264GopRemainingFrameInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265ProfileInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264GopRemainingFrameInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileInfoEXT( StdVideoH265ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stdProfileIdc( stdProfileIdc_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264GopRemainingFrameInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ = {}, + uint32_t gopRemainingI_ = {}, + uint32_t gopRemainingP_ = {}, + uint32_t gopRemainingB_ = {}, + const void * pNext_ = nullptr 
) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , useGopRemainingFrames{ useGopRemainingFrames_ } + , gopRemainingI{ gopRemainingI_ } + , gopRemainingP{ gopRemainingP_ } + , gopRemainingB{ gopRemainingB_ } { } - VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileInfoEXT( VideoDecodeH265ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeH264GopRemainingFrameInfoKHR( VideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoDecodeH265ProfileInfoEXT( VkVideoDecodeH265ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoDecodeH265ProfileInfoEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeH264GopRemainingFrameInfoKHR( VkVideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264GopRemainingFrameInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoDecodeH265ProfileInfoEXT & operator=( VideoDecodeH265ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH264GopRemainingFrameInfoKHR & operator=( VideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoDecodeH265ProfileInfoEXT & operator=( VkVideoDecodeH265ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH264GopRemainingFrameInfoKHR & operator=( VkVideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265ProfileInfoEXT & setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & + setUseGopRemainingFrames( VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ ) VULKAN_HPP_NOEXCEPT { - stdProfileIdc = stdProfileIdc_; + useGopRemainingFrames = useGopRemainingFrames_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkVideoDecodeH265ProfileInfoEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & setGopRemainingI( uint32_t gopRemainingI_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + gopRemainingI = gopRemainingI_; + return *this; } - operator VkVideoDecodeH265ProfileInfoEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & setGopRemainingP( uint32_t gopRemainingP_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + gopRemainingP = gopRemainingP_; + return *this; } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264GopRemainingFrameInfoKHR & setGopRemainingB( uint32_t gopRemainingB_ ) VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, stdProfileIdc ); + gopRemainingB = gopRemainingB_; + return *this; } -# endif +#endif /*VULKAN_HPP_NO_SETTERS*/ -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - std::strong_ordering operator<=>( VideoDecodeH265ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + operator 
VkVideoEncodeH264GopRemainingFrameInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) - return cmp; - if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) - return cmp; - if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ); cmp != 0 ) - return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + return *reinterpret_cast( this ); + } - return std::strong_ordering::equivalent; + operator VkVideoEncodeH264GopRemainingFrameInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple # endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, useGopRemainingFrames, gopRemainingI, gopRemainingP, gopRemainingB ); + } +#endif - bool operator==( VideoDecodeH265ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264GopRemainingFrameInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 ); +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useGopRemainingFrames == rhs.useGopRemainingFrames ) && + ( gopRemainingI == rhs.gopRemainingI ) && ( gopRemainingP == rhs.gopRemainingP ) && ( gopRemainingB == rhs.gopRemainingB ); +# endif } - bool operator!=( VideoDecodeH265ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH264GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265ProfileInfoEXT; - const void * pNext = {}; - StdVideoH265ProfileIdc stdProfileIdc = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264GopRemainingFrameInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames = {}; + uint32_t gopRemainingI = {}; + uint32_t gopRemainingP = {}; + uint32_t gopRemainingB = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoDecodeH265ProfileInfoEXT; + using Type = VideoEncodeH264GopRemainingFrameInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoDecodeH265SessionParametersAddInfoEXT + struct VideoEncodeH264NaluSliceInfoKHR { - using NativeType = VkVideoDecodeH265SessionParametersAddInfoEXT; + using NativeType = VkVideoEncodeH264NaluSliceInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265SessionParametersAddInfoEXT; - -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoEXT( uint32_t vpsStdCount_ = {}, - const StdVideoH265VideoParameterSet * pVpsStd_ = {}, - uint32_t spsStdCount_ = {}, - const StdVideoH265SequenceParameterSet * pSpsStd_ = {}, - uint32_t ppsStdCount_ = {}, - const StdVideoH265PictureParameterSet * pPpsStd_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , vpsStdCount( vpsStdCount_ ) - , pVpsStd( pVpsStd_ ) - , spsStdCount( spsStdCount_ ) - , 
pSpsStd( pSpsStd_ ) - , ppsStdCount( ppsStdCount_ ) - , pPpsStd( pPpsStd_ ) - { - } - - VULKAN_HPP_CONSTEXPR VideoDecodeH265SessionParametersAddInfoEXT( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - VideoDecodeH265SessionParametersAddInfoEXT( VkVideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoDecodeH265SessionParametersAddInfoEXT( *reinterpret_cast( &rhs ) ) - { - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoDecodeH265SessionParametersAddInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vpsStd_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_ = {}, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , vpsStdCount( static_cast( vpsStd_.size() ) ) - , pVpsStd( vpsStd_.data() ) - , spsStdCount( static_cast( spsStd_.size() ) ) - , pSpsStd( spsStd_.data() ) - , ppsStdCount( static_cast( ppsStd_.size() ) ) - , pPpsStd( ppsStd_.data() ) - { - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - VideoDecodeH265SessionParametersAddInfoEXT & operator=( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - VideoDecodeH265SessionParametersAddInfoEXT & operator=( VkVideoDecodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & setVpsStdCount( uint32_t vpsStdCount_ ) VULKAN_HPP_NOEXCEPT - { - vpsStdCount = vpsStdCount_; - return *this; - } + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264NaluSliceInfoKHR; - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & setPVpsStd( const StdVideoH265VideoParameterSet * pVpsStd_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceInfoKHR( int32_t constantQp_ = {}, + const StdVideoEncodeH264SliceHeader * pStdSliceHeader_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , constantQp{ constantQp_ } + , pStdSliceHeader{ pStdSliceHeader_ } { - pVpsStd = pVpsStd_; - return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoDecodeH265SessionParametersAddInfoEXT & - setVpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vpsStd_ ) VULKAN_HPP_NOEXCEPT - { - vpsStdCount = static_cast( vpsStd_.size() ); - pVpsStd = vpsStd_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceInfoKHR( VideoEncodeH264NaluSliceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT + VideoEncodeH264NaluSliceInfoKHR( VkVideoEncodeH264NaluSliceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264NaluSliceInfoKHR( *reinterpret_cast( &rhs ) ) { - spsStdCount = spsStdCount_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & setPSpsStd( const StdVideoH265SequenceParameterSet * pSpsStd_ ) 
VULKAN_HPP_NOEXCEPT - { - pSpsStd = pSpsStd_; - return *this; - } + VideoEncodeH264NaluSliceInfoKHR & operator=( VideoEncodeH264NaluSliceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoDecodeH265SessionParametersAddInfoEXT & - setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_ ) VULKAN_HPP_NOEXCEPT + VideoEncodeH264NaluSliceInfoKHR & operator=( VkVideoEncodeH264NaluSliceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - spsStdCount = static_cast( spsStd_.size() ); - pSpsStd = spsStd_.data(); + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - ppsStdCount = ppsStdCount_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersAddInfoEXT & setPPpsStd( const StdVideoH265PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoKHR & setConstantQp( int32_t constantQp_ ) VULKAN_HPP_NOEXCEPT { - pPpsStd = pPpsStd_; + constantQp = constantQp_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoDecodeH265SessionParametersAddInfoEXT & - setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoKHR & setPStdSliceHeader( const StdVideoEncodeH264SliceHeader * pStdSliceHeader_ ) VULKAN_HPP_NOEXCEPT { - ppsStdCount = static_cast( ppsStd_.size() ); - pPpsStd = ppsStd_.data(); + pStdSliceHeader = pStdSliceHeader_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoDecodeH265SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264NaluSliceInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoDecodeH265SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264NaluSliceInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, vpsStdCount, pVpsStd, spsStdCount, pSpsStd, ppsStdCount, pPpsStd ); + return std::tie( sType, pNext, constantQp, pStdSliceHeader ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoDecodeH265SessionParametersAddInfoEXT const & ) const = default; -# else - bool operator==( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264NaluSliceInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264NaluSliceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == 
rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vpsStdCount == rhs.vpsStdCount ) && ( pVpsStd == rhs.pVpsStd ) && - ( spsStdCount == rhs.spsStdCount ) && ( pSpsStd == rhs.pSpsStd ) && ( ppsStdCount == rhs.ppsStdCount ) && ( pPpsStd == rhs.pPpsStd ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( constantQp == rhs.constantQp ) && ( pStdSliceHeader == rhs.pStdSliceHeader ); +# endif } - bool operator!=( VideoDecodeH265SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH264NaluSliceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersAddInfoEXT; - const void * pNext = {}; - uint32_t vpsStdCount = {}; - const StdVideoH265VideoParameterSet * pVpsStd = {}; - uint32_t spsStdCount = {}; - const StdVideoH265SequenceParameterSet * pSpsStd = {}; - uint32_t ppsStdCount = {}; - const StdVideoH265PictureParameterSet * pPpsStd = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264NaluSliceInfoKHR; + const void * pNext = {}; + int32_t constantQp = {}; + const StdVideoEncodeH264SliceHeader * pStdSliceHeader = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoDecodeH265SessionParametersAddInfoEXT; + using Type = VideoEncodeH264NaluSliceInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoDecodeH265SessionParametersCreateInfoEXT + struct VideoEncodeH264PictureInfoKHR { - using NativeType = VkVideoDecodeH265SessionParametersCreateInfoEXT; + using NativeType = VkVideoEncodeH264PictureInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264PictureInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - VideoDecodeH265SessionParametersCreateInfoEXT( uint32_t maxVpsStdCount_ = {}, - uint32_t maxSpsStdCount_ = {}, - uint32_t maxPpsStdCount_ = {}, - const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxVpsStdCount( maxVpsStdCount_ ) - , maxSpsStdCount( maxSpsStdCount_ ) - , maxPpsStdCount( maxPpsStdCount_ ) - , pParametersAddInfo( pParametersAddInfo_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264PictureInfoKHR( uint32_t naluSliceEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR * pNaluSliceEntries_ = {}, + const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 generatePrefixNalu_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , naluSliceEntryCount{ naluSliceEntryCount_ } + , pNaluSliceEntries{ pNaluSliceEntries_ } + , pStdPictureInfo{ pStdPictureInfo_ } + , generatePrefixNalu{ generatePrefixNalu_ } { } - VULKAN_HPP_CONSTEXPR - VideoDecodeH265SessionParametersCreateInfoEXT( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeH264PictureInfoKHR( VideoEncodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
VideoEncodeH264PictureInfoKHR( VkVideoEncodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264PictureInfoKHR( *reinterpret_cast( &rhs ) ) + { + } - VideoDecodeH265SessionParametersCreateInfoEXT( VkVideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoDecodeH265SessionParametersCreateInfoEXT( *reinterpret_cast( &rhs ) ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264PictureInfoKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & naluSliceEntries_, + const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 generatePrefixNalu_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , naluSliceEntryCount( static_cast( naluSliceEntries_.size() ) ) + , pNaluSliceEntries( naluSliceEntries_.data() ) + , pStdPictureInfo( pStdPictureInfo_ ) + , generatePrefixNalu( generatePrefixNalu_ ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VideoDecodeH265SessionParametersCreateInfoEXT & operator=( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH264PictureInfoKHR & operator=( VideoEncodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoDecodeH265SessionParametersCreateInfoEXT & operator=( VkVideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH264PictureInfoKHR & operator=( VkVideoEncodeH264PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT & setMaxVpsStdCount( uint32_t maxVpsStdCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & setNaluSliceEntryCount( uint32_t naluSliceEntryCount_ ) VULKAN_HPP_NOEXCEPT { - maxVpsStdCount = maxVpsStdCount_; + naluSliceEntryCount = naluSliceEntryCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT & setMaxSpsStdCount( uint32_t maxSpsStdCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & + setPNaluSliceEntries( const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR * pNaluSliceEntries_ ) VULKAN_HPP_NOEXCEPT { - maxSpsStdCount = maxSpsStdCount_; + pNaluSliceEntries = pNaluSliceEntries_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT & setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264PictureInfoKHR & setNaluSliceEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & naluSliceEntries_ ) VULKAN_HPP_NOEXCEPT { - maxPpsStdCount = maxPpsStdCount_; + naluSliceEntryCount = static_cast( naluSliceEntries_.size() ); + pNaluSliceEntries = naluSliceEntries_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 VideoDecodeH265SessionParametersCreateInfoEXT & - setPParametersAddInfo( const 
VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & setPStdPictureInfo( const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT { - pParametersAddInfo = pParametersAddInfo_; + pStdPictureInfo = pStdPictureInfo_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkVideoDecodeH265SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264PictureInfoKHR & setGeneratePrefixNalu( VULKAN_HPP_NAMESPACE::Bool32 generatePrefixNalu_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + generatePrefixNalu = generatePrefixNalu_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoDecodeH265SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkVideoEncodeH264PictureInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR * const &, + const StdVideoEncodeH264PictureInfo * const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, maxVpsStdCount, maxSpsStdCount, maxPpsStdCount, pParametersAddInfo ); + return std::tie( sType, pNext, naluSliceEntryCount, pNaluSliceEntries, pStdPictureInfo, generatePrefixNalu ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoDecodeH265SessionParametersCreateInfoEXT const & ) const = default; -# else - bool operator==( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264PictureInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVpsStdCount == rhs.maxVpsStdCount ) && ( maxSpsStdCount == rhs.maxSpsStdCount ) && - ( maxPpsStdCount == rhs.maxPpsStdCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( naluSliceEntryCount == rhs.naluSliceEntryCount ) && + ( pNaluSliceEntries == rhs.pNaluSliceEntries ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && ( generatePrefixNalu == rhs.generatePrefixNalu ); +# endif } - bool operator!=( VideoDecodeH265SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH264PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT; - const void * pNext = {}; - uint32_t maxVpsStdCount = {}; - uint32_t maxSpsStdCount = {}; - uint32_t maxPpsStdCount = {}; - const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoEXT * pParametersAddInfo = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eVideoEncodeH264PictureInfoKHR; + const void * pNext = {}; + uint32_t naluSliceEntryCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoKHR * pNaluSliceEntries = {}; + const StdVideoEncodeH264PictureInfo * pStdPictureInfo = {}; + VULKAN_HPP_NAMESPACE::Bool32 generatePrefixNalu = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoDecodeH265SessionParametersCreateInfoEXT; + using Type = VideoEncodeH264PictureInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoDecodeInfoKHR + struct VideoEncodeH264ProfileInfoKHR { - using NativeType = VkVideoDecodeInfoKHR; + using NativeType = VkVideoEncodeH264ProfileInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264ProfileInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ = {}, - VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ = {}, - VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR dstPictureResource_ = {}, - const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ = {}, - uint32_t referenceSlotCount_ = {}, - const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , srcBuffer( srcBuffer_ ) - , srcBufferOffset( srcBufferOffset_ ) - , srcBufferRange( srcBufferRange_ ) - , dstPictureResource( dstPictureResource_ ) - , pSetupReferenceSlot( pSetupReferenceSlot_ ) - , referenceSlotCount( referenceSlotCount_ ) - , pReferenceSlots( pReferenceSlots_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileInfoKHR( StdVideoH264ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stdProfileIdc{ stdProfileIdc_ } { } - VULKAN_HPP_CONSTEXPR VideoDecodeInfoKHR( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - VideoDecodeInfoKHR( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : VideoDecodeInfoKHR( *reinterpret_cast( &rhs ) ) {} + VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileInfoKHR( VideoEncodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoDecodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_, - VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, - VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_, - VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_, - VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR dstPictureResource_, - const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceSlots_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , srcBuffer( srcBuffer_ ) - , srcBufferOffset( srcBufferOffset_ ) - , srcBufferRange( srcBufferRange_ ) - , dstPictureResource( dstPictureResource_ ) - , pSetupReferenceSlot( pSetupReferenceSlot_ ) - , referenceSlotCount( static_cast( referenceSlots_.size() ) ) - , pReferenceSlots( referenceSlots_.data() ) + VideoEncodeH264ProfileInfoKHR( 
VkVideoEncodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264ProfileInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoDecodeInfoKHR & operator=( VideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH264ProfileInfoKHR & operator=( VideoEncodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoDecodeInfoKHR & operator=( VkVideoDecodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH264ProfileInfoKHR & operator=( VkVideoEncodeH264ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT - { - flags = flags_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT - { - srcBuffer = srcBuffer_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset_ ) VULKAN_HPP_NOEXCEPT - { - srcBufferOffset = srcBufferOffset_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setSrcBufferRange( VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange_ ) VULKAN_HPP_NOEXCEPT - { - srcBufferRange = srcBufferRange_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & - setDstPictureResource( VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const & dstPictureResource_ ) VULKAN_HPP_NOEXCEPT - { - dstPictureResource = dstPictureResource_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & - setPSetupReferenceSlot( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT - { - pSetupReferenceSlot = pSetupReferenceSlot_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileInfoKHR & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT { - referenceSlotCount = referenceSlotCount_; + stdProfileIdc = stdProfileIdc_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 VideoDecodeInfoKHR & - setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - pReferenceSlots = pReferenceSlots_; - return *this; + return *reinterpret_cast( this ); } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoDecodeInfoKHR & setReferenceSlots( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT { - referenceSlotCount = static_cast( referenceSlots_.size() ); - pReferenceSlots = referenceSlots_.data(); - return *this; + return *reinterpret_cast( this ); } -# endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkVideoDecodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return std::tie( sType, pNext, stdProfileIdc ); } +#endif - operator VkVideoDecodeInfoKHR &() VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); - } + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( - sType, pNext, flags, srcBuffer, srcBufferOffset, srcBufferRange, dstPictureResource, pSetupReferenceSlot, referenceSlotCount, pReferenceSlots ); + return std::strong_ordering::equivalent; } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoDecodeInfoKHR const & ) const = default; -# else - bool operator==( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoEncodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( srcBuffer == rhs.srcBuffer ) && - ( srcBufferOffset == rhs.srcBufferOffset ) && ( srcBufferRange == rhs.srcBufferRange ) && ( dstPictureResource == rhs.dstPictureResource ) && - ( pSetupReferenceSlot == rhs.pSetupReferenceSlot ) && ( referenceSlotCount == rhs.referenceSlotCount ) && - ( pReferenceSlots == rhs.pReferenceSlots ); -# endif + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 ); } - bool operator!=( VideoDecodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH264ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR flags = {}; - VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize srcBufferOffset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize srcBufferRange = {}; - VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR dstPictureResource = {}; - const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot = {}; - uint32_t referenceSlotCount = {}; - const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264ProfileInfoKHR; + const void * pNext = {}; + StdVideoH264ProfileIdc stdProfileIdc = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoDecodeInfoKHR; + using Type = VideoEncodeH264ProfileInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoDecodeUsageInfoKHR + struct 
VideoEncodeH264QpKHR
   {
-    using NativeType = VkVideoDecodeUsageInfoKHR;
-
-    static const bool allowDuplicate = false;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoDecodeUsageInfoKHR;
+    using NativeType = VkVideoEncodeH264QpKHR;
-# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
-    VULKAN_HPP_CONSTEXPR VideoDecodeUsageInfoKHR( VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR videoUsageHints_ = {},
-                                                  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
-      : pNext( pNext_ )
-      , videoUsageHints( videoUsageHints_ )
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264QpKHR( int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {} ) VULKAN_HPP_NOEXCEPT
+      : qpI{ qpI_ }
+      , qpP{ qpP_ }
+      , qpB{ qpB_ }
     {
     }
-    VULKAN_HPP_CONSTEXPR VideoDecodeUsageInfoKHR( VideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VULKAN_HPP_CONSTEXPR VideoEncodeH264QpKHR( VideoEncodeH264QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-    VideoDecodeUsageInfoKHR( VkVideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
-      : VideoDecodeUsageInfoKHR( *reinterpret_cast<VideoDecodeUsageInfoKHR const *>( &rhs ) )
+    VideoEncodeH264QpKHR( VkVideoEncodeH264QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeH264QpKHR( *reinterpret_cast<VideoEncodeH264QpKHR const *>( &rhs ) )
     {
     }
-# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
-    VideoDecodeUsageInfoKHR & operator=( VideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+    VideoEncodeH264QpKHR & operator=( VideoEncodeH264QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
-    VideoDecodeUsageInfoKHR & operator=( VkVideoDecodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    VideoEncodeH264QpKHR & operator=( VkVideoEncodeH264QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
-      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR const *>( &rhs );
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR const *>( &rhs );
       return *this;
     }
-# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
-    VULKAN_HPP_CONSTEXPR_14 VideoDecodeUsageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpKHR & setQpI( int32_t qpI_ ) VULKAN_HPP_NOEXCEPT
     {
-      pNext = pNext_;
+      qpI = qpI_;
       return *this;
     }
-    VULKAN_HPP_CONSTEXPR_14 VideoDecodeUsageInfoKHR & setVideoUsageHints( VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR videoUsageHints_ ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpKHR & setQpP( int32_t qpP_ ) VULKAN_HPP_NOEXCEPT
     {
-      videoUsageHints = videoUsageHints_;
+      qpP = qpP_;
       return *this;
     }
-# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
-    operator VkVideoDecodeUsageInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpKHR & setQpB( int32_t qpB_ ) VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<const VkVideoDecodeUsageInfoKHR *>( this );
+      qpB = qpB_;
+      return *this;
     }
+#endif /*VULKAN_HPP_NO_SETTERS*/
-    operator VkVideoDecodeUsageInfoKHR &() VULKAN_HPP_NOEXCEPT
+    operator VkVideoEncodeH264QpKHR const &() const VULKAN_HPP_NOEXCEPT
     {
-      return *reinterpret_cast<VkVideoDecodeUsageInfoKHR *>( this );
+      return *reinterpret_cast<const VkVideoEncodeH264QpKHR *>( this );
     }
-# if defined( VULKAN_HPP_USE_REFLECT )
-# if 14 <= VULKAN_HPP_CPP_VERSION
+    operator VkVideoEncodeH264QpKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeH264QpKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
     auto
-# else
-    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR const &>
-# endif
+# else
+    std::tuple<int32_t const &, int32_t const &, int32_t const &>
+# endif
     reflect() const VULKAN_HPP_NOEXCEPT
     {
-      return std::tie( sType, pNext, videoUsageHints );
+      return
std::tie( qpI, qpP, qpB ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoDecodeUsageInfoKHR const & ) const = default; -# else - bool operator==( VideoDecodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264QpKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264QpKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoUsageHints == rhs.videoUsageHints ); -# endif +# else + return ( qpI == rhs.qpI ) && ( qpP == rhs.qpP ) && ( qpB == rhs.qpB ); +# endif } - bool operator!=( VideoDecodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH264QpKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoDecodeUsageInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR videoUsageHints = {}; - }; - - template <> - struct CppType - { - using Type = VideoDecodeUsageInfoKHR; + int32_t qpI = {}; + int32_t qpP = {}; + int32_t qpB = {}; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeCapabilitiesKHR + struct VideoEncodeH264QualityLevelPropertiesKHR { - using NativeType = VkVideoEncodeCapabilitiesKHR; + using NativeType = VkVideoEncodeH264QualityLevelPropertiesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeCapabilitiesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264QualityLevelPropertiesKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeCapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR flags_ = {}, - VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR rateControlModes_ = {}, - uint8_t rateControlLayerCount_ = {}, - uint8_t qualityLevelCount_ = {}, - VULKAN_HPP_NAMESPACE::Extent2D inputImageDataFillAlignment_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , rateControlModes( rateControlModes_ ) - , rateControlLayerCount( rateControlLayerCount_ ) - , qualityLevelCount( qualityLevelCount_ ) - , inputImageDataFillAlignment( inputImageDataFillAlignment_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264QualityLevelPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR preferredRateControlFlags_ = {}, + uint32_t preferredGopFrameCount_ = {}, + uint32_t preferredIdrPeriod_ = {}, + uint32_t preferredConsecutiveBFrameCount_ = {}, + uint32_t preferredTemporalLayerCount_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR preferredConstantQp_ = {}, + uint32_t preferredMaxL0ReferenceCount_ = {}, + uint32_t preferredMaxL1ReferenceCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 preferredStdEntropyCodingModeFlag_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , preferredRateControlFlags{ preferredRateControlFlags_ } + , preferredGopFrameCount{ preferredGopFrameCount_ } + , preferredIdrPeriod{ preferredIdrPeriod_ } + , preferredConsecutiveBFrameCount{ 
preferredConsecutiveBFrameCount_ } + , preferredTemporalLayerCount{ preferredTemporalLayerCount_ } + , preferredConstantQp{ preferredConstantQp_ } + , preferredMaxL0ReferenceCount{ preferredMaxL0ReferenceCount_ } + , preferredMaxL1ReferenceCount{ preferredMaxL1ReferenceCount_ } + , preferredStdEntropyCodingModeFlag{ preferredStdEntropyCodingModeFlag_ } { } - VULKAN_HPP_CONSTEXPR VideoEncodeCapabilitiesKHR( VideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeH264QualityLevelPropertiesKHR( VideoEncodeH264QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoEncodeCapabilitiesKHR( VkVideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + VideoEncodeH264QualityLevelPropertiesKHR( VkVideoEncodeH264QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264QualityLevelPropertiesKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeCapabilitiesKHR & operator=( VideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH264QualityLevelPropertiesKHR & operator=( VideoEncodeH264QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoEncodeCapabilitiesKHR & operator=( VkVideoEncodeCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH264QualityLevelPropertiesKHR & operator=( VkVideoEncodeH264QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkVideoEncodeCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264QualityLevelPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoEncodeCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264QualityLevelPropertiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR const &, + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, rateControlModes, rateControlLayerCount, qualityLevelCount, inputImageDataFillAlignment ); + return std::tie( sType, + pNext, + preferredRateControlFlags, + preferredGopFrameCount, + preferredIdrPeriod, + preferredConsecutiveBFrameCount, + preferredTemporalLayerCount, + preferredConstantQp, + preferredMaxL0ReferenceCount, + preferredMaxL1ReferenceCount, + preferredStdEntropyCodingModeFlag ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeCapabilitiesKHR const & ) const = default; -# else - bool operator==( VideoEncodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264QualityLevelPropertiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT 
{ -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rateControlModes == rhs.rateControlModes ) && - ( rateControlLayerCount == rhs.rateControlLayerCount ) && ( qualityLevelCount == rhs.qualityLevelCount ) && - ( inputImageDataFillAlignment == rhs.inputImageDataFillAlignment ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( preferredRateControlFlags == rhs.preferredRateControlFlags ) && + ( preferredGopFrameCount == rhs.preferredGopFrameCount ) && ( preferredIdrPeriod == rhs.preferredIdrPeriod ) && + ( preferredConsecutiveBFrameCount == rhs.preferredConsecutiveBFrameCount ) && ( preferredTemporalLayerCount == rhs.preferredTemporalLayerCount ) && + ( preferredConstantQp == rhs.preferredConstantQp ) && ( preferredMaxL0ReferenceCount == rhs.preferredMaxL0ReferenceCount ) && + ( preferredMaxL1ReferenceCount == rhs.preferredMaxL1ReferenceCount ) && + ( preferredStdEntropyCodingModeFlag == rhs.preferredStdEntropyCodingModeFlag ); +# endif } - bool operator!=( VideoEncodeCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH264QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeCapabilitiesKHR; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR flags = {}; - VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR rateControlModes = {}; - uint8_t rateControlLayerCount = {}; - uint8_t qualityLevelCount = {}; - VULKAN_HPP_NAMESPACE::Extent2D inputImageDataFillAlignment = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264QualityLevelPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR preferredRateControlFlags = {}; + uint32_t preferredGopFrameCount = {}; + uint32_t preferredIdrPeriod = {}; + uint32_t preferredConsecutiveBFrameCount = {}; + uint32_t preferredTemporalLayerCount = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR preferredConstantQp = {}; + uint32_t preferredMaxL0ReferenceCount = {}; + uint32_t preferredMaxL1ReferenceCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 preferredStdEntropyCodingModeFlag = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoEncodeCapabilitiesKHR; + using Type = VideoEncodeH264QualityLevelPropertiesKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH264CapabilitiesEXT + struct VideoEncodeH264QuantizationMapCapabilitiesKHR { - using NativeType = VkVideoEncodeH264CapabilitiesEXT; + using NativeType = VkVideoEncodeH264QuantizationMapCapabilitiesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264CapabilitiesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264QuantizationMapCapabilitiesKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesEXT( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags_ = {}, - VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags_ = {}, - uint8_t maxPPictureL0ReferenceCount_ = {}, - uint8_t 
maxBPictureL0ReferenceCount_ = {}, - uint8_t maxL1ReferenceCount_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 motionVectorsOverPicBoundariesFlag_ = {}, - uint32_t maxBytesPerPicDenom_ = {}, - uint32_t maxBitsPerMbDenom_ = {}, - uint32_t log2MaxMvLengthHorizontal_ = {}, - uint32_t log2MaxMvLengthVertical_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , inputModeFlags( inputModeFlags_ ) - , outputModeFlags( outputModeFlags_ ) - , maxPPictureL0ReferenceCount( maxPPictureL0ReferenceCount_ ) - , maxBPictureL0ReferenceCount( maxBPictureL0ReferenceCount_ ) - , maxL1ReferenceCount( maxL1ReferenceCount_ ) - , motionVectorsOverPicBoundariesFlag( motionVectorsOverPicBoundariesFlag_ ) - , maxBytesPerPicDenom( maxBytesPerPicDenom_ ) - , maxBitsPerMbDenom( maxBitsPerMbDenom_ ) - , log2MaxMvLengthHorizontal( log2MaxMvLengthHorizontal_ ) - , log2MaxMvLengthVertical( log2MaxMvLengthVertical_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeH264QuantizationMapCapabilitiesKHR( int32_t minQpDelta_ = {}, int32_t maxQpDelta_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minQpDelta{ minQpDelta_ } + , maxQpDelta{ maxQpDelta_ } { } - VULKAN_HPP_CONSTEXPR VideoEncodeH264CapabilitiesEXT( VideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + VideoEncodeH264QuantizationMapCapabilitiesKHR( VideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoEncodeH264CapabilitiesEXT( VkVideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH264CapabilitiesEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeH264QuantizationMapCapabilitiesKHR( VkVideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264QuantizationMapCapabilitiesKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeH264CapabilitiesEXT & operator=( VideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH264QuantizationMapCapabilitiesKHR & operator=( VideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoEncodeH264CapabilitiesEXT & operator=( VkVideoEncodeH264CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH264QuantizationMapCapabilitiesKHR & operator=( VkVideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkVideoEncodeH264CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264QuantizationMapCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoEncodeH264CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264QuantizationMapCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - flags, - inputModeFlags, - outputModeFlags, - maxPPictureL0ReferenceCount, - maxBPictureL0ReferenceCount, - 
maxL1ReferenceCount, - motionVectorsOverPicBoundariesFlag, - maxBytesPerPicDenom, - maxBitsPerMbDenom, - log2MaxMvLengthHorizontal, - log2MaxMvLengthVertical ); + return std::tie( sType, pNext, minQpDelta, maxQpDelta ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeH264CapabilitiesEXT const & ) const = default; -# else - bool operator==( VideoEncodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264QuantizationMapCapabilitiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( inputModeFlags == rhs.inputModeFlags ) && - ( outputModeFlags == rhs.outputModeFlags ) && ( maxPPictureL0ReferenceCount == rhs.maxPPictureL0ReferenceCount ) && - ( maxBPictureL0ReferenceCount == rhs.maxBPictureL0ReferenceCount ) && ( maxL1ReferenceCount == rhs.maxL1ReferenceCount ) && - ( motionVectorsOverPicBoundariesFlag == rhs.motionVectorsOverPicBoundariesFlag ) && ( maxBytesPerPicDenom == rhs.maxBytesPerPicDenom ) && - ( maxBitsPerMbDenom == rhs.maxBitsPerMbDenom ) && ( log2MaxMvLengthHorizontal == rhs.log2MaxMvLengthHorizontal ) && - ( log2MaxMvLengthVertical == rhs.log2MaxMvLengthVertical ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minQpDelta == rhs.minQpDelta ) && ( maxQpDelta == rhs.maxQpDelta ); +# endif } - bool operator!=( VideoEncodeH264CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264CapabilitiesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsEXT flags = {}; - VULKAN_HPP_NAMESPACE::VideoEncodeH264InputModeFlagsEXT inputModeFlags = {}; - VULKAN_HPP_NAMESPACE::VideoEncodeH264OutputModeFlagsEXT outputModeFlags = {}; - uint8_t maxPPictureL0ReferenceCount = {}; - uint8_t maxBPictureL0ReferenceCount = {}; - uint8_t maxL1ReferenceCount = {}; - VULKAN_HPP_NAMESPACE::Bool32 motionVectorsOverPicBoundariesFlag = {}; - uint32_t maxBytesPerPicDenom = {}; - uint32_t maxBitsPerMbDenom = {}; - uint32_t log2MaxMvLengthHorizontal = {}; - uint32_t log2MaxMvLengthVertical = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264QuantizationMapCapabilitiesKHR; + void * pNext = {}; + int32_t minQpDelta = {}; + int32_t maxQpDelta = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoEncodeH264CapabilitiesEXT; + using Type = VideoEncodeH264QuantizationMapCapabilitiesKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH264DpbSlotInfoEXT + struct VideoEncodeH264RateControlInfoKHR { - using NativeType = VkVideoEncodeH264DpbSlotInfoEXT; + using NativeType = VkVideoEncodeH264RateControlInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264DpbSlotInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264RateControlInfoKHR; 
-# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeH264DpbSlotInfoEXT( int8_t slotIndex_ = {}, - const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , slotIndex( slotIndex_ ) - , pStdReferenceInfo( pStdReferenceInfo_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR flags_ = {}, + uint32_t gopFrameCount_ = {}, + uint32_t idrPeriod_ = {}, + uint32_t consecutiveBFrameCount_ = {}, + uint32_t temporalLayerCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , gopFrameCount{ gopFrameCount_ } + , idrPeriod{ idrPeriod_ } + , consecutiveBFrameCount{ consecutiveBFrameCount_ } + , temporalLayerCount{ temporalLayerCount_ } { } - VULKAN_HPP_CONSTEXPR VideoEncodeH264DpbSlotInfoEXT( VideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlInfoKHR( VideoEncodeH264RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoEncodeH264DpbSlotInfoEXT( VkVideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH264DpbSlotInfoEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeH264RateControlInfoKHR( VkVideoEncodeH264RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264RateControlInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeH264DpbSlotInfoEXT & operator=( VideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH264RateControlInfoKHR & operator=( VideoEncodeH264RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoEncodeH264DpbSlotInfoEXT & operator=( VkVideoEncodeH264DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH264RateControlInfoKHR & operator=( VkVideoEncodeH264RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoEXT & setSlotIndex( int8_t slotIndex_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { - slotIndex = slotIndex_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264DpbSlotInfoEXT & - setPStdReferenceInfo( const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT { - pStdReferenceInfo = pStdReferenceInfo_; + gopFrameCount = gopFrameCount_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkVideoEncodeH264DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setIdrPeriod( uint32_t idrPeriod_ ) 
VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + idrPeriod = idrPeriod_; + return *this; } - operator VkVideoEncodeH264DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setConsecutiveBFrameCount( uint32_t consecutiveBFrameCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + consecutiveBFrameCount = consecutiveBFrameCount_; + return *this; } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoKHR & setTemporalLayerCount( uint32_t temporalLayerCount_ ) VULKAN_HPP_NOEXCEPT + { + temporalLayerCount = temporalLayerCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH264RateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264RateControlInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, slotIndex, pStdReferenceInfo ); + return std::tie( sType, pNext, flags, gopFrameCount, idrPeriod, consecutiveBFrameCount, temporalLayerCount ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeH264DpbSlotInfoEXT const & ) const = default; -# else - bool operator==( VideoEncodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264RateControlInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( slotIndex == rhs.slotIndex ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( gopFrameCount == rhs.gopFrameCount ) && + ( idrPeriod == rhs.idrPeriod ) && ( consecutiveBFrameCount == rhs.consecutiveBFrameCount ) && ( temporalLayerCount == rhs.temporalLayerCount ); +# endif } - bool operator!=( VideoEncodeH264DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH264RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264DpbSlotInfoEXT; - const void * pNext = {}; - int8_t slotIndex = {}; - const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264RateControlInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsKHR flags = {}; + uint32_t gopFrameCount = {}; + uint32_t idrPeriod = {}; + uint32_t consecutiveBFrameCount = {}; + uint32_t temporalLayerCount = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoEncodeH264DpbSlotInfoEXT; + using Type = VideoEncodeH264RateControlInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH264EmitPictureParametersInfoEXT + struct VideoEncodeH264RateControlLayerInfoKHR { - using NativeType = 
VkVideoEncodeH264EmitPictureParametersInfoEXT; + using NativeType = VkVideoEncodeH264RateControlLayerInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264EmitPictureParametersInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264RateControlLayerInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeH264EmitPictureParametersInfoEXT( uint8_t spsId_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ = {}, - uint32_t ppsIdEntryCount_ = {}, - const uint8_t * ppsIdEntries_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , spsId( spsId_ ) - , emitSpsEnable( emitSpsEnable_ ) - , ppsIdEntryCount( ppsIdEntryCount_ ) - , ppsIdEntries( ppsIdEntries_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlLayerInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR minQp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR maxQp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR maxFrameSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , useMinQp{ useMinQp_ } + , minQp{ minQp_ } + , useMaxQp{ useMaxQp_ } + , maxQp{ maxQp_ } + , useMaxFrameSize{ useMaxFrameSize_ } + , maxFrameSize{ maxFrameSize_ } { } - VULKAN_HPP_CONSTEXPR VideoEncodeH264EmitPictureParametersInfoEXT( VideoEncodeH264EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - VideoEncodeH264EmitPictureParametersInfoEXT( VkVideoEncodeH264EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH264EmitPictureParametersInfoEXT( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlLayerInfoKHR( VideoEncodeH264RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoEncodeH264EmitPictureParametersInfoEXT( uint8_t spsId_, - VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & psIdEntries_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , spsId( spsId_ ) - , emitSpsEnable( emitSpsEnable_ ) - , ppsIdEntryCount( static_cast( psIdEntries_.size() ) ) - , ppsIdEntries( psIdEntries_.data() ) + VideoEncodeH264RateControlLayerInfoKHR( VkVideoEncodeH264RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264RateControlLayerInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeH264EmitPictureParametersInfoEXT & operator=( VideoEncodeH264EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH264RateControlLayerInfoKHR & operator=( VideoEncodeH264RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoEncodeH264EmitPictureParametersInfoEXT & operator=( VkVideoEncodeH264EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH264RateControlLayerInfoKHR & operator=( VkVideoEncodeH264RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersInfoEXT & setSpsId( uint8_t spsId_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setUseMinQp( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ ) VULKAN_HPP_NOEXCEPT { - spsId = spsId_; + useMinQp = useMinQp_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersInfoEXT & setEmitSpsEnable( VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setMinQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR const & minQp_ ) VULKAN_HPP_NOEXCEPT { - emitSpsEnable = emitSpsEnable_; + minQp = minQp_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersInfoEXT & setPpsIdEntryCount( uint32_t ppsIdEntryCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setUseMaxQp( VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ ) VULKAN_HPP_NOEXCEPT { - ppsIdEntryCount = ppsIdEntryCount_; + useMaxQp = useMaxQp_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264EmitPictureParametersInfoEXT & setPpsIdEntries( const uint8_t * ppsIdEntries_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setMaxQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR const & maxQp_ ) VULKAN_HPP_NOEXCEPT { - ppsIdEntries = ppsIdEntries_; + maxQp = maxQp_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoEncodeH264EmitPictureParametersInfoEXT & - setPsIdEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & psIdEntries_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT { - ppsIdEntryCount = static_cast( psIdEntries_.size() ); - ppsIdEntries = psIdEntries_.data(); + useMaxFrameSize = useMaxFrameSize_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkVideoEncodeH264EmitPictureParametersInfoEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoKHR & + setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + maxFrameSize = maxFrameSize_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoEncodeH264EmitPictureParametersInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264RateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkVideoEncodeH264RateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR const &, + VULKAN_HPP_NAMESPACE::Bool32 
const &, + VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, spsId, emitSpsEnable, ppsIdEntryCount, ppsIdEntries ); + return std::tie( sType, pNext, useMinQp, minQp, useMaxQp, maxQp, useMaxFrameSize, maxFrameSize ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeH264EmitPictureParametersInfoEXT const & ) const = default; -# else - bool operator==( VideoEncodeH264EmitPictureParametersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264RateControlLayerInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsId == rhs.spsId ) && ( emitSpsEnable == rhs.emitSpsEnable ) && - ( ppsIdEntryCount == rhs.ppsIdEntryCount ) && ( ppsIdEntries == rhs.ppsIdEntries ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMinQp == rhs.useMinQp ) && ( minQp == rhs.minQp ) && ( useMaxQp == rhs.useMaxQp ) && + ( maxQp == rhs.maxQp ) && ( useMaxFrameSize == rhs.useMaxFrameSize ) && ( maxFrameSize == rhs.maxFrameSize ); +# endif } - bool operator!=( VideoEncodeH264EmitPictureParametersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH264RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264EmitPictureParametersInfoEXT; - const void * pNext = {}; - uint8_t spsId = {}; - VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable = {}; - uint32_t ppsIdEntryCount = {}; - const uint8_t * ppsIdEntries = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264RateControlLayerInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMinQp = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR minQp = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxQp = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264QpKHR maxQp = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR maxFrameSize = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoEncodeH264EmitPictureParametersInfoEXT; + using Type = VideoEncodeH264RateControlLayerInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH264FrameSizeEXT + struct VideoEncodeH264SessionCreateInfoKHR { - using NativeType = VkVideoEncodeH264FrameSizeEXT; + using NativeType = VkVideoEncodeH264SessionCreateInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeH264FrameSizeEXT( uint32_t frameISize_ = {}, uint32_t framePSize_ = {}, uint32_t frameBSize_ = {} ) VULKAN_HPP_NOEXCEPT - : frameISize( frameISize_ ) - , framePSize( framePSize_ ) - , frameBSize( frameBSize_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionCreateInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc_ 
= {}, + StdVideoH264LevelIdc maxLevelIdc_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , useMaxLevelIdc{ useMaxLevelIdc_ } + , maxLevelIdc{ maxLevelIdc_ } { } - VULKAN_HPP_CONSTEXPR VideoEncodeH264FrameSizeEXT( VideoEncodeH264FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionCreateInfoKHR( VideoEncodeH264SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoEncodeH264FrameSizeEXT( VkVideoEncodeH264FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH264FrameSizeEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeH264SessionCreateInfoKHR( VkVideoEncodeH264SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264SessionCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeH264FrameSizeEXT & operator=( VideoEncodeH264FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH264SessionCreateInfoKHR & operator=( VideoEncodeH264SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoEncodeH264FrameSizeEXT & operator=( VkVideoEncodeH264FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH264SessionCreateInfoKHR & operator=( VkVideoEncodeH264SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeEXT & setFrameISize( uint32_t frameISize_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - frameISize = frameISize_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeEXT & setFramePSize( uint32_t framePSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoKHR & setUseMaxLevelIdc( VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc_ ) VULKAN_HPP_NOEXCEPT { - framePSize = framePSize_; + useMaxLevelIdc = useMaxLevelIdc_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264FrameSizeEXT & setFrameBSize( uint32_t frameBSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionCreateInfoKHR & setMaxLevelIdc( StdVideoH264LevelIdc maxLevelIdc_ ) VULKAN_HPP_NOEXCEPT { - frameBSize = frameBSize_; + maxLevelIdc = maxLevelIdc_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoEncodeH264FrameSizeEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264SessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoEncodeH264FrameSizeEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264SessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( frameISize, framePSize, frameBSize ); + return std::tie( sType, pNext, useMaxLevelIdc, maxLevelIdc ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( 
VideoEncodeH264FrameSizeEXT const & ) const = default; -# else - bool operator==( VideoEncodeH264FrameSizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeH264SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( frameISize == rhs.frameISize ) && ( framePSize == rhs.framePSize ) && ( frameBSize == rhs.frameBSize ); -# endif + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = useMaxLevelIdc <=> rhs.useMaxLevelIdc; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoEncodeH264SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMaxLevelIdc == rhs.useMaxLevelIdc ) && + ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH264LevelIdc ) ) == 0 ); } - bool operator!=( VideoEncodeH264FrameSizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH264SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif public: - uint32_t frameISize = {}; - uint32_t framePSize = {}; - uint32_t frameBSize = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc = {}; + StdVideoH264LevelIdc maxLevelIdc = {}; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH264ReferenceListsInfoEXT + template <> + struct CppType + { + using Type = VideoEncodeH264SessionCreateInfoKHR; + }; + + struct VideoEncodeH264SessionParametersAddInfoKHR { - using NativeType = VkVideoEncodeH264ReferenceListsInfoEXT; + using NativeType = VkVideoEncodeH264SessionParametersAddInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264ReferenceListsInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersAddInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeH264ReferenceListsInfoEXT( uint8_t referenceList0EntryCount_ = {}, - const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList0Entries_ = {}, - uint8_t referenceList1EntryCount_ = {}, - const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList1Entries_ = {}, - const StdVideoEncodeH264RefMemMgmtCtrlOperations * pMemMgmtCtrlOperations_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , referenceList0EntryCount( referenceList0EntryCount_ ) - , pReferenceList0Entries( pReferenceList0Entries_ ) - , referenceList1EntryCount( referenceList1EntryCount_ ) - , pReferenceList1Entries( pReferenceList1Entries_ ) - , pMemMgmtCtrlOperations( pMemMgmtCtrlOperations_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoKHR( uint32_t stdSPSCount_ = {}, + const StdVideoH264SequenceParameterSet * 
pStdSPSs_ = {}, + uint32_t stdPPSCount_ = {}, + const StdVideoH264PictureParameterSet * pStdPPSs_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stdSPSCount{ stdSPSCount_ } + , pStdSPSs{ pStdSPSs_ } + , stdPPSCount{ stdPPSCount_ } + , pStdPPSs{ pStdPPSs_ } { } - VULKAN_HPP_CONSTEXPR VideoEncodeH264ReferenceListsInfoEXT( VideoEncodeH264ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoKHR( VideoEncodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoEncodeH264ReferenceListsInfoEXT( VkVideoEncodeH264ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH264ReferenceListsInfoEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeH264SessionParametersAddInfoKHR( VkVideoEncodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264SessionParametersAddInfoKHR( *reinterpret_cast( &rhs ) ) { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoEncodeH264ReferenceListsInfoEXT( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceList0Entries_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceList1Entries_ = {}, - const StdVideoEncodeH264RefMemMgmtCtrlOperations * pMemMgmtCtrlOperations_ = {}, - const void * pNext_ = nullptr ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264SessionParametersAddInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdSPSs_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdPPSs_ = {}, + const void * pNext_ = nullptr ) : pNext( pNext_ ) - , referenceList0EntryCount( static_cast( referenceList0Entries_.size() ) ) - , pReferenceList0Entries( referenceList0Entries_.data() ) - , referenceList1EntryCount( static_cast( referenceList1Entries_.size() ) ) - , pReferenceList1Entries( referenceList1Entries_.data() ) - , pMemMgmtCtrlOperations( pMemMgmtCtrlOperations_ ) + , stdSPSCount( static_cast( stdSPSs_.size() ) ) + , pStdSPSs( stdSPSs_.data() ) + , stdPPSCount( static_cast( stdPPSs_.size() ) ) + , pStdPPSs( stdPPSs_.data() ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VideoEncodeH264ReferenceListsInfoEXT & operator=( VideoEncodeH264ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH264SessionParametersAddInfoKHR & operator=( VideoEncodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoEncodeH264ReferenceListsInfoEXT & operator=( VkVideoEncodeH264ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH264SessionParametersAddInfoKHR & operator=( VkVideoEncodeH264SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsInfoEXT & setReferenceList0EntryCount( uint8_t referenceList0EntryCount_ ) VULKAN_HPP_NOEXCEPT - { - referenceList0EntryCount = 
referenceList0EntryCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsInfoEXT & - setPReferenceList0Entries( const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList0Entries_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT { - pReferenceList0Entries = pReferenceList0Entries_; + stdSPSCount = stdSPSCount_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoEncodeH264ReferenceListsInfoEXT & setReferenceList0Entries( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceList0Entries_ ) - VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setPStdSPSs( const StdVideoH264SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT { - referenceList0EntryCount = static_cast( referenceList0Entries_.size() ); - pReferenceList0Entries = referenceList0Entries_.data(); + pStdSPSs = pStdSPSs_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsInfoEXT & setReferenceList1EntryCount( uint8_t referenceList1EntryCount_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264SessionParametersAddInfoKHR & + setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT { - referenceList1EntryCount = referenceList1EntryCount_; + stdSPSCount = static_cast( stdSPSs_.size() ); + pStdSPSs = stdSPSs_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsInfoEXT & - setPReferenceList1Entries( const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList1Entries_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT { - pReferenceList1Entries = pReferenceList1Entries_; + stdPPSCount = stdPPSCount_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoEncodeH264ReferenceListsInfoEXT & setReferenceList1Entries( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceList1Entries_ ) - VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoKHR & setPStdPPSs( const StdVideoH264PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT { - referenceList1EntryCount = static_cast( referenceList1Entries_.size() ); - pReferenceList1Entries = referenceList1Entries_.data(); + pStdPPSs = pStdPPSs_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ReferenceListsInfoEXT & - setPMemMgmtCtrlOperations( const StdVideoEncodeH264RefMemMgmtCtrlOperations * pMemMgmtCtrlOperations_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH264SessionParametersAddInfoKHR & + setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT { - pMemMgmtCtrlOperations = pMemMgmtCtrlOperations_; + stdPPSCount = static_cast( stdPPSs_.size() ); + pStdPPSs = stdPPSs_.data(); return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoEncodeH264ReferenceListsInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264SessionParametersAddInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( 
this ); } - operator VkVideoEncodeH264ReferenceListsInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264SessionParametersAddInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + uint32_t const &, + const StdVideoH264SequenceParameterSet * const &, + uint32_t const &, + const StdVideoH264PictureParameterSet * const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( - sType, pNext, referenceList0EntryCount, pReferenceList0Entries, referenceList1EntryCount, pReferenceList1Entries, pMemMgmtCtrlOperations ); + return std::tie( sType, pNext, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeH264ReferenceListsInfoEXT const & ) const = default; -# else - bool operator==( VideoEncodeH264ReferenceListsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264SessionParametersAddInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( referenceList0EntryCount == rhs.referenceList0EntryCount ) && - ( pReferenceList0Entries == rhs.pReferenceList0Entries ) && ( referenceList1EntryCount == rhs.referenceList1EntryCount ) && - ( pReferenceList1Entries == rhs.pReferenceList1Entries ) && ( pMemMgmtCtrlOperations == rhs.pMemMgmtCtrlOperations ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stdSPSCount == rhs.stdSPSCount ) && ( pStdSPSs == rhs.pStdSPSs ) && + ( stdPPSCount == rhs.stdPPSCount ) && ( pStdPPSs == rhs.pStdPPSs ); +# endif } - bool operator!=( VideoEncodeH264ReferenceListsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH264SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264ReferenceListsInfoEXT; - const void * pNext = {}; - uint8_t referenceList0EntryCount = {}; - const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList0Entries = {}; - uint8_t referenceList1EntryCount = {}; - const VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT * pReferenceList1Entries = {}; - const StdVideoEncodeH264RefMemMgmtCtrlOperations * pMemMgmtCtrlOperations = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersAddInfoKHR; + const void * pNext = {}; + uint32_t stdSPSCount = {}; + const StdVideoH264SequenceParameterSet * pStdSPSs = {}; + uint32_t stdPPSCount = {}; + const StdVideoH264PictureParameterSet * pStdPPSs = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoEncodeH264ReferenceListsInfoEXT; + using Type = VideoEncodeH264SessionParametersAddInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH264NaluSliceInfoEXT + struct VideoEncodeH264SessionParametersCreateInfoKHR { - using NativeType = VkVideoEncodeH264NaluSliceInfoEXT; + using NativeType = 
VkVideoEncodeH264SessionParametersCreateInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264NaluSliceInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersCreateInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceInfoEXT( uint32_t mbCount_ = {}, - const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * pReferenceFinalLists_ = {}, - const StdVideoEncodeH264SliceHeader * pSliceHeaderStd_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , mbCount( mbCount_ ) - , pReferenceFinalLists( pReferenceFinalLists_ ) - , pSliceHeaderStd( pSliceHeaderStd_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeH264SessionParametersCreateInfoKHR( uint32_t maxStdSPSCount_ = {}, + uint32_t maxStdPPSCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxStdSPSCount{ maxStdSPSCount_ } + , maxStdPPSCount{ maxStdPPSCount_ } + , pParametersAddInfo{ pParametersAddInfo_ } { } - VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceInfoEXT( VideoEncodeH264NaluSliceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + VideoEncodeH264SessionParametersCreateInfoKHR( VideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoEncodeH264NaluSliceInfoEXT( VkVideoEncodeH264NaluSliceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH264NaluSliceInfoEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeH264SessionParametersCreateInfoKHR( VkVideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264SessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeH264NaluSliceInfoEXT & operator=( VideoEncodeH264NaluSliceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH264SessionParametersCreateInfoKHR & operator=( VideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoEncodeH264NaluSliceInfoEXT & operator=( VkVideoEncodeH264NaluSliceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH264SessionParametersCreateInfoKHR & operator=( VkVideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoEXT & setMbCount( uint32_t mbCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoKHR & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT { - mbCount = mbCount_; + maxStdSPSCount = maxStdSPSCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoEXT & - 
setPReferenceFinalLists( const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * pReferenceFinalLists_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoKHR & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT { - pReferenceFinalLists = pReferenceFinalLists_; + maxStdPPSCount = maxStdPPSCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264NaluSliceInfoEXT & setPSliceHeaderStd( const StdVideoEncodeH264SliceHeader * pSliceHeaderStd_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoKHR & + setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT { - pSliceHeaderStd = pSliceHeaderStd_; + pParametersAddInfo = pParametersAddInfo_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoEncodeH264NaluSliceInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoEncodeH264NaluSliceInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + uint32_t const &, + const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR * const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, mbCount, pReferenceFinalLists, pSliceHeaderStd ); + return std::tie( sType, pNext, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeH264NaluSliceInfoEXT const & ) const = default; -# else - bool operator==( VideoEncodeH264NaluSliceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264SessionParametersCreateInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mbCount == rhs.mbCount ) && ( pReferenceFinalLists == rhs.pReferenceFinalLists ) && - ( pSliceHeaderStd == rhs.pSliceHeaderStd ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxStdSPSCount == rhs.maxStdSPSCount ) && ( maxStdPPSCount == rhs.maxStdPPSCount ) && + ( pParametersAddInfo == rhs.pParametersAddInfo ); +# endif } - bool operator!=( VideoEncodeH264NaluSliceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH264SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264NaluSliceInfoEXT; - const void * pNext = {}; - uint32_t mbCount = {}; - const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * pReferenceFinalLists = {}; - const StdVideoEncodeH264SliceHeader * pSliceHeaderStd = {}; + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersCreateInfoKHR; + const void * pNext = {}; + uint32_t maxStdSPSCount = {}; + uint32_t maxStdPPSCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR * pParametersAddInfo = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoEncodeH264NaluSliceInfoEXT; + using Type = VideoEncodeH264SessionParametersCreateInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH264ProfileInfoEXT + struct VideoEncodeH264SessionParametersFeedbackInfoKHR { - using NativeType = VkVideoEncodeH264ProfileInfoEXT; + using NativeType = VkVideoEncodeH264SessionParametersFeedbackInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264ProfileInfoEXT; - -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileInfoEXT( StdVideoH264ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stdProfileIdc( stdProfileIdc_ ) - { - } + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersFeedbackInfoKHR; - VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileInfoEXT( VideoEncodeH264ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - VideoEncodeH264ProfileInfoEXT( VkVideoEncodeH264ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH264ProfileInfoEXT( *reinterpret_cast( &rhs ) ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersFeedbackInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 hasStdSPSOverrides_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 hasStdPPSOverrides_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , hasStdSPSOverrides{ hasStdSPSOverrides_ } + , hasStdPPSOverrides{ hasStdPPSOverrides_ } { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeH264ProfileInfoEXT & operator=( VideoEncodeH264ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + VideoEncodeH264SessionParametersFeedbackInfoKHR( VideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoEncodeH264ProfileInfoEXT & operator=( VkVideoEncodeH264ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH264SessionParametersFeedbackInfoKHR( VkVideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264SessionParametersFeedbackInfoKHR( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); - return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + VideoEncodeH264SessionParametersFeedbackInfoKHR & operator=( VideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264ProfileInfoEXT & setStdProfileIdc( StdVideoH264ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT + VideoEncodeH264SessionParametersFeedbackInfoKHR & operator=( VkVideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - stdProfileIdc = stdProfileIdc_; + *this = *reinterpret_cast( &rhs ); return *this; } -# endif 
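  //
  //  Illustrative usage sketch (not generated code): populating the KHR-suffixed H.264 encode
  //  session parameter structures above with the setters they expose. The StdVideo SPS/PPS
  //  payloads, the counts, and the enclosing VideoSessionParametersCreateInfoKHR chain are
  //  application-side assumptions here.
  //
  //    StdVideoH264SequenceParameterSet stdSps{};  // filled in by the application
  //    StdVideoH264PictureParameterSet  stdPps{};  // filled in by the application
  //
  //    VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR addInfo{};
  //    addInfo.setStdSPSCount( 1 ).setPStdSPSs( &stdSps ).setStdPPSCount( 1 ).setPStdPPSs( &stdPps );
  //
  //    VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoKHR h264Params{};
  //    h264Params.setMaxStdSPSCount( 1 ).setMaxStdPPSCount( 1 ).setPParametersAddInfo( &addInfo );
  //
  //  h264Params would then be chained into the pNext of the video session parameters create info
  //  used to create the encode session parameters object.
  //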
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkVideoEncodeH264ProfileInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264SessionParametersFeedbackInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoEncodeH264ProfileInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH264SessionParametersFeedbackInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, stdProfileIdc ); + return std::tie( sType, pNext, hasStdSPSOverrides, hasStdPPSOverrides ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - std::strong_ordering operator<=>( VideoEncodeH264ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264SessionParametersFeedbackInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) - return cmp; - if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) - return cmp; - if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ); cmp != 0 ) - return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; - - return std::strong_ordering::equivalent; - } +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hasStdSPSOverrides == rhs.hasStdSPSOverrides ) && + ( hasStdPPSOverrides == rhs.hasStdPPSOverrides ); # endif - - bool operator==( VideoEncodeH264ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH264ProfileIdc ) ) == 0 ); } - bool operator!=( VideoEncodeH264ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH264SessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264ProfileInfoEXT; - const void * pNext = {}; - StdVideoH264ProfileIdc stdProfileIdc = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersFeedbackInfoKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasStdSPSOverrides = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasStdPPSOverrides = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoEncodeH264ProfileInfoEXT; + using Type = VideoEncodeH264SessionParametersFeedbackInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH264QpEXT + struct VideoEncodeH264SessionParametersGetInfoKHR { - using NativeType = VkVideoEncodeH264QpEXT; + using NativeType = VkVideoEncodeH264SessionParametersGetInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersGetInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR 
VideoEncodeH264QpEXT( int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {} ) VULKAN_HPP_NOEXCEPT - : qpI( qpI_ ) - , qpP( qpP_ ) - , qpB( qpB_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersGetInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS_ = {}, + uint32_t stdSPSId_ = {}, + uint32_t stdPPSId_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , writeStdSPS{ writeStdSPS_ } + , writeStdPPS{ writeStdPPS_ } + , stdSPSId{ stdSPSId_ } + , stdPPSId{ stdPPSId_ } { } - VULKAN_HPP_CONSTEXPR VideoEncodeH264QpEXT( VideoEncodeH264QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersGetInfoKHR( VideoEncodeH264SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoEncodeH264QpEXT( VkVideoEncodeH264QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH264QpEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeH264SessionParametersGetInfoKHR( VkVideoEncodeH264SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264SessionParametersGetInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeH264QpEXT & operator=( VideoEncodeH264QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH264SessionParametersGetInfoKHR & operator=( VideoEncodeH264SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoEncodeH264QpEXT & operator=( VkVideoEncodeH264QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH264SessionParametersGetInfoKHR & operator=( VkVideoEncodeH264SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpEXT & setQpI( int32_t qpI_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - qpI = qpI_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpEXT & setQpP( int32_t qpP_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setWriteStdSPS( VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS_ ) VULKAN_HPP_NOEXCEPT { - qpP = qpP_; + writeStdSPS = writeStdSPS_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264QpEXT & setQpB( int32_t qpB_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setWriteStdPPS( VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS_ ) VULKAN_HPP_NOEXCEPT { - qpB = qpB_; + writeStdPPS = writeStdPPS_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkVideoEncodeH264QpEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setStdSPSId( uint32_t stdSPSId_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + stdSPSId = stdSPSId_; + return *this; } - operator VkVideoEncodeH264QpEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersGetInfoKHR & setStdPPSId( uint32_t stdPPSId_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + stdPPSId = stdPPSId_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ -# if defined( 
VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkVideoEncodeH264SessionParametersGetInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264SessionParametersGetInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( qpI, qpP, qpB ); + return std::tie( sType, pNext, writeStdSPS, writeStdPPS, stdSPSId, stdPPSId ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeH264QpEXT const & ) const = default; -# else - bool operator==( VideoEncodeH264QpEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264SessionParametersGetInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264SessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( qpI == rhs.qpI ) && ( qpP == rhs.qpP ) && ( qpB == rhs.qpB ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( writeStdSPS == rhs.writeStdSPS ) && ( writeStdPPS == rhs.writeStdPPS ) && + ( stdSPSId == rhs.stdSPSId ) && ( stdPPSId == rhs.stdPPSId ); +# endif } - bool operator!=( VideoEncodeH264QpEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH264SessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - int32_t qpI = {}; - int32_t qpP = {}; - int32_t qpB = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersGetInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS = {}; + VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS = {}; + uint32_t stdSPSId = {}; + uint32_t stdPPSId = {}; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH264RateControlInfoEXT + template <> + struct CppType { - using NativeType = VkVideoEncodeH264RateControlInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264RateControlInfoEXT; - -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlInfoEXT( uint32_t gopFrameCount_ = {}, - uint32_t idrPeriod_ = {}, - uint32_t consecutiveBFrameCount_ = {}, - VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureEXT rateControlStructure_ = - VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureEXT::eUnknown, - uint8_t temporalLayerCount_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , gopFrameCount( gopFrameCount_ ) - , idrPeriod( idrPeriod_ ) - , consecutiveBFrameCount( consecutiveBFrameCount_ ) - , rateControlStructure( rateControlStructure_ ) - , temporalLayerCount( temporalLayerCount_ ) - { - } - - VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlInfoEXT( VideoEncodeH264RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - VideoEncodeH264RateControlInfoEXT( VkVideoEncodeH264RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH264RateControlInfoEXT( *reinterpret_cast( &rhs ) ) - { - } -# endif 
/*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - VideoEncodeH264RateControlInfoEXT & operator=( VideoEncodeH264RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + using Type = VideoEncodeH264SessionParametersGetInfoKHR; + }; - VideoEncodeH264RateControlInfoEXT & operator=( VkVideoEncodeH264RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } + struct VideoEncodeH265CapabilitiesKHR + { + using NativeType = VkVideoEncodeH265CapabilitiesKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265CapabilitiesKHR; - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265CapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsKHR flags_ = {}, + StdVideoH265LevelIdc maxLevelIdc_ = {}, + uint32_t maxSliceSegmentCount_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxTiles_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsKHR ctbSizes_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsKHR transformBlockSizes_ = {}, + uint32_t maxPPictureL0ReferenceCount_ = {}, + uint32_t maxBPictureL0ReferenceCount_ = {}, + uint32_t maxL1ReferenceCount_ = {}, + uint32_t maxSubLayerCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 expectDyadicTemporalSubLayerPattern_ = {}, + int32_t minQp_ = {}, + int32_t maxQp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 prefersGopRemainingFrames_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265StdFlagsKHR stdSyntaxFlags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , maxLevelIdc{ maxLevelIdc_ } + , maxSliceSegmentCount{ maxSliceSegmentCount_ } + , maxTiles{ maxTiles_ } + , ctbSizes{ ctbSizes_ } + , transformBlockSizes{ transformBlockSizes_ } + , maxPPictureL0ReferenceCount{ maxPPictureL0ReferenceCount_ } + , maxBPictureL0ReferenceCount{ maxBPictureL0ReferenceCount_ } + , maxL1ReferenceCount{ maxL1ReferenceCount_ } + , maxSubLayerCount{ maxSubLayerCount_ } + , expectDyadicTemporalSubLayerPattern{ expectDyadicTemporalSubLayerPattern_ } + , minQp{ minQp_ } + , maxQp{ maxQp_ } + , prefersGopRemainingFrames{ prefersGopRemainingFrames_ } + , requiresGopRemainingFrames{ requiresGopRemainingFrames_ } + , stdSyntaxFlags{ stdSyntaxFlags_ } { - gopFrameCount = gopFrameCount_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setIdrPeriod( uint32_t idrPeriod_ ) VULKAN_HPP_NOEXCEPT - { - idrPeriod = idrPeriod_; - return *this; - } + VULKAN_HPP_CONSTEXPR VideoEncodeH265CapabilitiesKHR( VideoEncodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setConsecutiveBFrameCount( uint32_t consecutiveBFrameCount_ ) VULKAN_HPP_NOEXCEPT + VideoEncodeH265CapabilitiesKHR( VkVideoEncodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265CapabilitiesKHR( *reinterpret_cast( &rhs ) ) { - consecutiveBFrameCount = consecutiveBFrameCount_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 
VideoEncodeH264RateControlInfoEXT & - setRateControlStructure( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureEXT rateControlStructure_ ) VULKAN_HPP_NOEXCEPT - { - rateControlStructure = rateControlStructure_; - return *this; - } + VideoEncodeH265CapabilitiesKHR & operator=( VideoEncodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlInfoEXT & setTemporalLayerCount( uint8_t temporalLayerCount_ ) VULKAN_HPP_NOEXCEPT + VideoEncodeH265CapabilitiesKHR & operator=( VkVideoEncodeH265CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - temporalLayerCount = temporalLayerCount_; + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkVideoEncodeH264RateControlInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoEncodeH264RateControlInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + int32_t const &, + int32_t const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::VideoEncodeH265StdFlagsKHR const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, gopFrameCount, idrPeriod, consecutiveBFrameCount, rateControlStructure, temporalLayerCount ); + return std::tie( sType, + pNext, + flags, + maxLevelIdc, + maxSliceSegmentCount, + maxTiles, + ctbSizes, + transformBlockSizes, + maxPPictureL0ReferenceCount, + maxBPictureL0ReferenceCount, + maxL1ReferenceCount, + maxSubLayerCount, + expectDyadicTemporalSubLayerPattern, + minQp, + maxQp, + prefersGopRemainingFrames, + requiresGopRemainingFrames, + stdSyntaxFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = maxSliceSegmentCount <=> rhs.maxSliceSegmentCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxTiles <=> rhs.maxTiles; cmp != 0 ) + return cmp; + if ( auto cmp = ctbSizes <=> rhs.ctbSizes; cmp != 0 ) + return cmp; + if ( auto cmp = transformBlockSizes <=> rhs.transformBlockSizes; cmp != 0 ) + return cmp; + if ( auto cmp = maxPPictureL0ReferenceCount <=> rhs.maxPPictureL0ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxBPictureL0ReferenceCount <=> rhs.maxBPictureL0ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxL1ReferenceCount <=> rhs.maxL1ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxSubLayerCount <=> rhs.maxSubLayerCount; cmp != 0 ) + return cmp; + if ( auto cmp = expectDyadicTemporalSubLayerPattern <=> rhs.expectDyadicTemporalSubLayerPattern; cmp != 0 ) + return cmp; + if ( auto cmp = minQp <=> rhs.minQp; cmp != 0 ) + return cmp; + if ( auto cmp = maxQp <=> rhs.maxQp; cmp != 0 ) + return cmp; + if ( auto cmp = prefersGopRemainingFrames <=> rhs.prefersGopRemainingFrames; cmp != 0 ) + return cmp; + if ( auto cmp = requiresGopRemainingFrames <=> rhs.requiresGopRemainingFrames; cmp != 0 ) + return cmp; + if ( auto cmp = stdSyntaxFlags <=> rhs.stdSyntaxFlags; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeH264RateControlInfoEXT const & ) const = default; -# else - bool operator==( VideoEncodeH264RateControlInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoEncodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( gopFrameCount == rhs.gopFrameCount ) && ( idrPeriod == rhs.idrPeriod ) && - ( consecutiveBFrameCount == rhs.consecutiveBFrameCount ) && ( rateControlStructure == rhs.rateControlStructure ) && - ( temporalLayerCount == rhs.temporalLayerCount ); -# endif + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ) == 0 ) && ( maxSliceSegmentCount == rhs.maxSliceSegmentCount ) && + ( maxTiles == rhs.maxTiles ) && ( ctbSizes == rhs.ctbSizes ) && ( transformBlockSizes == rhs.transformBlockSizes ) && + ( maxPPictureL0ReferenceCount == rhs.maxPPictureL0ReferenceCount ) && ( maxBPictureL0ReferenceCount == rhs.maxBPictureL0ReferenceCount ) && + ( maxL1ReferenceCount == rhs.maxL1ReferenceCount ) && ( maxSubLayerCount == rhs.maxSubLayerCount ) && + ( expectDyadicTemporalSubLayerPattern == rhs.expectDyadicTemporalSubLayerPattern ) && ( minQp == rhs.minQp ) && ( maxQp == rhs.maxQp ) && + ( prefersGopRemainingFrames == rhs.prefersGopRemainingFrames ) && ( requiresGopRemainingFrames == rhs.requiresGopRemainingFrames ) && + ( stdSyntaxFlags == rhs.stdSyntaxFlags ); } - bool operator!=( VideoEncodeH264RateControlInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH265CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264RateControlInfoEXT; - const void * pNext = {}; - uint32_t gopFrameCount = {}; - uint32_t idrPeriod = {}; - uint32_t consecutiveBFrameCount = {}; - VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureEXT 
rateControlStructure = VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlStructureEXT::eUnknown; - uint8_t temporalLayerCount = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265CapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsKHR flags = {}; + StdVideoH265LevelIdc maxLevelIdc = {}; + uint32_t maxSliceSegmentCount = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxTiles = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsKHR ctbSizes = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsKHR transformBlockSizes = {}; + uint32_t maxPPictureL0ReferenceCount = {}; + uint32_t maxBPictureL0ReferenceCount = {}; + uint32_t maxL1ReferenceCount = {}; + uint32_t maxSubLayerCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 expectDyadicTemporalSubLayerPattern = {}; + int32_t minQp = {}; + int32_t maxQp = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersGopRemainingFrames = {}; + VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265StdFlagsKHR stdSyntaxFlags = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoEncodeH264RateControlInfoEXT; + using Type = VideoEncodeH265CapabilitiesKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH264RateControlLayerInfoEXT + struct VideoEncodeH265DpbSlotInfoKHR { - using NativeType = VkVideoEncodeH264RateControlLayerInfoEXT; + using NativeType = VkVideoEncodeH265DpbSlotInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264RateControlLayerInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265DpbSlotInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlLayerInfoEXT( uint8_t temporalLayerId_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ = {}, - VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT initialRcQp_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ = {}, - VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT minQp_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ = {}, - VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT maxQp_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {}, - VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT maxFrameSize_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , temporalLayerId( temporalLayerId_ ) - , useInitialRcQp( useInitialRcQp_ ) - , initialRcQp( initialRcQp_ ) - , useMinQp( useMinQp_ ) - , minQp( minQp_ ) - , useMaxQp( useMaxQp_ ) - , maxQp( maxQp_ ) - , useMaxFrameSize( useMaxFrameSize_ ) - , maxFrameSize( maxFrameSize_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265DpbSlotInfoKHR( const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdReferenceInfo{ pStdReferenceInfo_ } { } - VULKAN_HPP_CONSTEXPR VideoEncodeH264RateControlLayerInfoEXT( VideoEncodeH264RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeH265DpbSlotInfoKHR( VideoEncodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoEncodeH264RateControlLayerInfoEXT( VkVideoEncodeH264RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH264RateControlLayerInfoEXT( *reinterpret_cast( &rhs ) ) + 
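  //
  //  Illustrative usage sketch (not generated code): reading VideoEncodeH265CapabilitiesKHR back
  //  from a capability query. It assumes an already-initialized physicalDevice, a profileInfo
  //  describing an H.265 encode operation (set up elsewhere), and the StructureChain-returning
  //  overload of PhysicalDevice::getVideoCapabilitiesKHR.
  //
  //    VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR profileInfo{};  // codec operation, chroma subsampling, bit depths...
  //    auto chain = physicalDevice.getVideoCapabilitiesKHR<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR,
  //                                                        VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR,
  //                                                        VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesKHR>( profileInfo );
  //    auto const & h265Caps = chain.get<VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesKHR>();
  //    // e.g. clamp any requested QP values to the reported [h265Caps.minQp, h265Caps.maxQp] range
  //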
VideoEncodeH265DpbSlotInfoKHR( VkVideoEncodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265DpbSlotInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeH264RateControlLayerInfoEXT & operator=( VideoEncodeH264RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH265DpbSlotInfoKHR & operator=( VideoEncodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoEncodeH264RateControlLayerInfoEXT & operator=( VkVideoEncodeH264RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH265DpbSlotInfoKHR & operator=( VkVideoEncodeH265DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setTemporalLayerId( uint8_t temporalLayerId_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoKHR & + setPStdReferenceInfo( const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT { - temporalLayerId = temporalLayerId_; + pStdReferenceInfo = pStdReferenceInfo_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setUseInitialRcQp( VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ ) VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - useInitialRcQp = useInitialRcQp_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & - setInitialRcQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const & initialRcQp_ ) VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT { - initialRcQp = initialRcQp_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setUseMinQp( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - useMinQp = useMinQp_; - return *this; + return std::tie( sType, pNext, pStdReferenceInfo ); } +#endif - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setMinQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const & minQp_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265DpbSlotInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - minQp = minQp_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo ); +# endif } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setUseMaxQp( VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH265DpbSlotInfoKHR const & rhs ) const 
VULKAN_HPP_NOEXCEPT { - useMaxQp = useMaxQp_; + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265DpbSlotInfoKHR; + const void * pNext = {}; + const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265DpbSlotInfoKHR; + }; + + struct VideoEncodeH265FrameSizeKHR + { + using NativeType = VkVideoEncodeH265FrameSizeKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265FrameSizeKHR( uint32_t frameISize_ = {}, uint32_t framePSize_ = {}, uint32_t frameBSize_ = {} ) VULKAN_HPP_NOEXCEPT + : frameISize{ frameISize_ } + , framePSize{ framePSize_ } + , frameBSize{ frameBSize_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265FrameSizeKHR( VideoEncodeH265FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265FrameSizeKHR( VkVideoEncodeH265FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265FrameSizeKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265FrameSizeKHR & operator=( VideoEncodeH265FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH265FrameSizeKHR & operator=( VkVideoEncodeH265FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setMaxQp( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT const & maxQp_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeKHR & setFrameISize( uint32_t frameISize_ ) VULKAN_HPP_NOEXCEPT { - maxQp = maxQp_; + frameISize = frameISize_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeKHR & setFramePSize( uint32_t framePSize_ ) VULKAN_HPP_NOEXCEPT { - useMaxFrameSize = useMaxFrameSize_; + framePSize = framePSize_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264RateControlLayerInfoEXT & - setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeKHR & setFrameBSize( uint32_t frameBSize_ ) VULKAN_HPP_NOEXCEPT { - maxFrameSize = maxFrameSize_; + frameBSize = frameBSize_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoEncodeH264RateControlLayerInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265FrameSizeKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoEncodeH264RateControlLayerInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265FrameSizeKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, temporalLayerId, useInitialRcQp, initialRcQp, useMinQp, minQp, useMaxQp, maxQp, useMaxFrameSize, maxFrameSize ); + return std::tie( frameISize, framePSize, 
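// [Editorial example - not part of the generated header or of this patch]
// VideoEncodeH265DpbSlotInfoKHR above only carries a pointer to the codec-standard reference
// information, so the application keeps the StdVideoEncodeH265ReferenceInfo alive and chains the
// Vulkan struct into the reference slot it describes. Chaining into vk::VideoReferenceSlotInfoKHR
// is an assumption about typical usage of the encode DPB structures; a minimal sketch:
#include <vulkan/vulkan.hpp>

// Caller keeps pictureResource, stdReferenceInfo and both output structs alive for the whole
// video coding scope.
void fillH265ReferenceSlot( vk::VideoReferenceSlotInfoKHR &         referenceSlot,
                            vk::VideoEncodeH265DpbSlotInfoKHR &     h265DpbSlot,      // caller-owned storage
                            vk::VideoPictureResourceInfoKHR const & pictureResource,
                            StdVideoEncodeH265ReferenceInfo const & stdReferenceInfo,
                            int32_t                                 slotIndex )
{
  // Codec-specific part: only a pointer to the std reference info is stored.
  h265DpbSlot = vk::VideoEncodeH265DpbSlotInfoKHR{}.setPStdReferenceInfo( &stdReferenceInfo );

  // Generic part, with the H.265 struct chained in via pNext (assumed chaining target).
  referenceSlot = vk::VideoReferenceSlotInfoKHR{}
                    .setPNext( &h265DpbSlot )
                    .setSlotIndex( slotIndex )
                    .setPPictureResource( &pictureResource );
}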
frameBSize ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeH264RateControlLayerInfoEXT const & ) const = default; -# else - bool operator==( VideoEncodeH264RateControlLayerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265FrameSizeKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( temporalLayerId == rhs.temporalLayerId ) && ( useInitialRcQp == rhs.useInitialRcQp ) && - ( initialRcQp == rhs.initialRcQp ) && ( useMinQp == rhs.useMinQp ) && ( minQp == rhs.minQp ) && ( useMaxQp == rhs.useMaxQp ) && - ( maxQp == rhs.maxQp ) && ( useMaxFrameSize == rhs.useMaxFrameSize ) && ( maxFrameSize == rhs.maxFrameSize ); -# endif +# else + return ( frameISize == rhs.frameISize ) && ( framePSize == rhs.framePSize ) && ( frameBSize == rhs.frameBSize ); +# endif } - bool operator!=( VideoEncodeH264RateControlLayerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH265FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264RateControlLayerInfoEXT; - const void * pNext = {}; - uint8_t temporalLayerId = {}; - VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp = {}; - VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT initialRcQp = {}; - VULKAN_HPP_NAMESPACE::Bool32 useMinQp = {}; - VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT minQp = {}; - VULKAN_HPP_NAMESPACE::Bool32 useMaxQp = {}; - VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT maxQp = {}; - VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {}; - VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT maxFrameSize = {}; - }; - - template <> - struct CppType - { - using Type = VideoEncodeH264RateControlLayerInfoEXT; + uint32_t frameISize = {}; + uint32_t framePSize = {}; + uint32_t frameBSize = {}; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH264SessionParametersAddInfoEXT + struct VideoEncodeH265GopRemainingFrameInfoKHR { - using NativeType = VkVideoEncodeH264SessionParametersAddInfoEXT; + using NativeType = VkVideoEncodeH265GopRemainingFrameInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersAddInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265GopRemainingFrameInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoEXT( uint32_t spsStdCount_ = {}, - const StdVideoH264SequenceParameterSet * pSpsStd_ = {}, - uint32_t ppsStdCount_ = {}, - const StdVideoH264PictureParameterSet * pPpsStd_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , spsStdCount( spsStdCount_ ) - , pSpsStd( pSpsStd_ ) - , ppsStdCount( ppsStdCount_ ) - , pPpsStd( pPpsStd_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265GopRemainingFrameInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ = {}, + uint32_t gopRemainingI_ = {}, + uint32_t gopRemainingP_ 
= {}, + uint32_t gopRemainingB_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , useGopRemainingFrames{ useGopRemainingFrames_ } + , gopRemainingI{ gopRemainingI_ } + , gopRemainingP{ gopRemainingP_ } + , gopRemainingB{ gopRemainingB_ } { } - VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersAddInfoEXT( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - VideoEncodeH264SessionParametersAddInfoEXT( VkVideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH264SessionParametersAddInfoEXT( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR VideoEncodeH265GopRemainingFrameInfoKHR( VideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoEncodeH264SessionParametersAddInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , spsStdCount( static_cast( spsStd_.size() ) ) - , pSpsStd( spsStd_.data() ) - , ppsStdCount( static_cast( ppsStd_.size() ) ) - , pPpsStd( ppsStd_.data() ) + VideoEncodeH265GopRemainingFrameInfoKHR( VkVideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265GopRemainingFrameInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeH264SessionParametersAddInfoEXT & operator=( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH265GopRemainingFrameInfoKHR & operator=( VideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoEncodeH264SessionParametersAddInfoEXT & operator=( VkVideoEncodeH264SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH265GopRemainingFrameInfoKHR & operator=( VkVideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & + setUseGopRemainingFrames( VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ ) VULKAN_HPP_NOEXCEPT { - spsStdCount = spsStdCount_; + useGopRemainingFrames = useGopRemainingFrames_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setPSpsStd( const StdVideoH264SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & setGopRemainingI( uint32_t gopRemainingI_ ) VULKAN_HPP_NOEXCEPT { - pSpsStd = pSpsStd_; + gopRemainingI = gopRemainingI_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoEncodeH264SessionParametersAddInfoEXT & - setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_ ) 
VULKAN_HPP_NOEXCEPT - { - spsStdCount = static_cast( spsStd_.size() ); - pSpsStd = spsStd_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT - { - ppsStdCount = ppsStdCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersAddInfoEXT & setPPpsStd( const StdVideoH264PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & setGopRemainingP( uint32_t gopRemainingP_ ) VULKAN_HPP_NOEXCEPT { - pPpsStd = pPpsStd_; + gopRemainingP = gopRemainingP_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoEncodeH264SessionParametersAddInfoEXT & - setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265GopRemainingFrameInfoKHR & setGopRemainingB( uint32_t gopRemainingB_ ) VULKAN_HPP_NOEXCEPT { - ppsStdCount = static_cast( ppsStd_.size() ); - pPpsStd = ppsStd_.data(); + gopRemainingB = gopRemainingB_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoEncodeH264SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265GopRemainingFrameInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoEncodeH264SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265GopRemainingFrameInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + uint32_t const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, spsStdCount, pSpsStd, ppsStdCount, pPpsStd ); + return std::tie( sType, pNext, useGopRemainingFrames, gopRemainingI, gopRemainingP, gopRemainingB ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeH264SessionParametersAddInfoEXT const & ) const = default; -# else - bool operator==( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265GopRemainingFrameInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spsStdCount == rhs.spsStdCount ) && ( pSpsStd == rhs.pSpsStd ) && - ( ppsStdCount == rhs.ppsStdCount ) && ( pPpsStd == rhs.pPpsStd ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useGopRemainingFrames == rhs.useGopRemainingFrames ) && + ( gopRemainingI == rhs.gopRemainingI ) && ( gopRemainingP == rhs.gopRemainingP ) && ( gopRemainingB == rhs.gopRemainingB ); +# endif } - bool operator!=( VideoEncodeH264SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH265GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return 
!operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersAddInfoEXT; - const void * pNext = {}; - uint32_t spsStdCount = {}; - const StdVideoH264SequenceParameterSet * pSpsStd = {}; - uint32_t ppsStdCount = {}; - const StdVideoH264PictureParameterSet * pPpsStd = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265GopRemainingFrameInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames = {}; + uint32_t gopRemainingI = {}; + uint32_t gopRemainingP = {}; + uint32_t gopRemainingB = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoEncodeH264SessionParametersAddInfoEXT; + using Type = VideoEncodeH265GopRemainingFrameInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH264SessionParametersCreateInfoEXT + struct VideoEncodeH265NaluSliceSegmentInfoKHR { - using NativeType = VkVideoEncodeH264SessionParametersCreateInfoEXT; + using NativeType = VkVideoEncodeH265NaluSliceSegmentInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265NaluSliceSegmentInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - VideoEncodeH264SessionParametersCreateInfoEXT( uint32_t maxSpsStdCount_ = {}, - uint32_t maxPpsStdCount_ = {}, - const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxSpsStdCount( maxSpsStdCount_ ) - , maxPpsStdCount( maxPpsStdCount_ ) - , pParametersAddInfo( pParametersAddInfo_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceSegmentInfoKHR( int32_t constantQp_ = {}, + const StdVideoEncodeH265SliceSegmentHeader * pStdSliceSegmentHeader_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , constantQp{ constantQp_ } + , pStdSliceSegmentHeader{ pStdSliceSegmentHeader_ } { } - VULKAN_HPP_CONSTEXPR - VideoEncodeH264SessionParametersCreateInfoEXT( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceSegmentInfoKHR( VideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoEncodeH264SessionParametersCreateInfoEXT( VkVideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH264SessionParametersCreateInfoEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeH265NaluSliceSegmentInfoKHR( VkVideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265NaluSliceSegmentInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeH264SessionParametersCreateInfoEXT & operator=( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH265NaluSliceSegmentInfoKHR & operator=( VideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoEncodeH264SessionParametersCreateInfoEXT & operator=( VkVideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) 
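// [Editorial example - not part of the generated header or of this patch]
// When VideoEncodeH265CapabilitiesKHR reports prefersGopRemainingFrames or
// requiresGopRemainingFrames, the application passes the number of frames of each type still left
// in the current GOP via VideoEncodeH265GopRemainingFrameInfoKHR. Chaining it into
// vk::VideoBeginCodingInfoKHR is an assumption about the usual encode flow; a minimal sketch:
#include <vulkan/vulkan.hpp>

// gopInfo is caller-owned and must outlive beginInfo; the remaining-frame counts are illustrative.
void addH265GopRemainingFrameHints( vk::VideoBeginCodingInfoKHR &                 beginInfo,
                                    vk::VideoEncodeH265GopRemainingFrameInfoKHR & gopInfo,
                                    uint32_t remainingI, uint32_t remainingP, uint32_t remainingB )
{
  gopInfo = vk::VideoEncodeH265GopRemainingFrameInfoKHR{}
              .setUseGopRemainingFrames( VK_TRUE )
              .setGopRemainingI( remainingI )
              .setGopRemainingP( remainingP )
              .setGopRemainingB( remainingB )
              .setPNext( beginInfo.pNext );  // keep any existing chain intact
  beginInfo.pNext = &gopInfo;
}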
VULKAN_HPP_NOEXCEPT + VideoEncodeH265NaluSliceSegmentInfoKHR & operator=( VkVideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT & setMaxSpsStdCount( uint32_t maxSpsStdCount_ ) VULKAN_HPP_NOEXCEPT - { - maxSpsStdCount = maxSpsStdCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT & setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoKHR & setConstantQp( int32_t constantQp_ ) VULKAN_HPP_NOEXCEPT { - maxPpsStdCount = maxPpsStdCount_; + constantQp = constantQp_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264SessionParametersCreateInfoEXT & - setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoKHR & + setPStdSliceSegmentHeader( const StdVideoEncodeH265SliceSegmentHeader * pStdSliceSegmentHeader_ ) VULKAN_HPP_NOEXCEPT { - pParametersAddInfo = pParametersAddInfo_; + pStdSliceSegmentHeader = pStdSliceSegmentHeader_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoEncodeH264SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265NaluSliceSegmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoEncodeH264SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265NaluSliceSegmentInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, maxSpsStdCount, maxPpsStdCount, pParametersAddInfo ); + return std::tie( sType, pNext, constantQp, pStdSliceSegmentHeader ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeH264SessionParametersCreateInfoEXT const & ) const = default; -# else - bool operator==( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265NaluSliceSegmentInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSpsStdCount == rhs.maxSpsStdCount ) && ( maxPpsStdCount == rhs.maxPpsStdCount ) && - ( pParametersAddInfo == rhs.pParametersAddInfo ); -# endif +# 
else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( constantQp == rhs.constantQp ) && ( pStdSliceSegmentHeader == rhs.pStdSliceSegmentHeader ); +# endif } - bool operator!=( VideoEncodeH264SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH265NaluSliceSegmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT; - const void * pNext = {}; - uint32_t maxSpsStdCount = {}; - uint32_t maxPpsStdCount = {}; - const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT * pParametersAddInfo = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265NaluSliceSegmentInfoKHR; + const void * pNext = {}; + int32_t constantQp = {}; + const StdVideoEncodeH265SliceSegmentHeader * pStdSliceSegmentHeader = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoEncodeH264SessionParametersCreateInfoEXT; + using Type = VideoEncodeH265NaluSliceSegmentInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH264VclFrameInfoEXT + struct VideoEncodeH265PictureInfoKHR { - using NativeType = VkVideoEncodeH264VclFrameInfoEXT; + using NativeType = VkVideoEncodeH265PictureInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264VclFrameInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265PictureInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeH264VclFrameInfoEXT( const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * pReferenceFinalLists_ = {}, - uint32_t naluSliceEntryCount_ = {}, - const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT * pNaluSliceEntries_ = {}, - const StdVideoEncodeH264PictureInfo * pCurrentPictureInfo_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pReferenceFinalLists( pReferenceFinalLists_ ) - , naluSliceEntryCount( naluSliceEntryCount_ ) - , pNaluSliceEntries( pNaluSliceEntries_ ) - , pCurrentPictureInfo( pCurrentPictureInfo_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265PictureInfoKHR( uint32_t naluSliceSegmentEntryCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR * pNaluSliceSegmentEntries_ = {}, + const StdVideoEncodeH265PictureInfo * pStdPictureInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , naluSliceSegmentEntryCount{ naluSliceSegmentEntryCount_ } + , pNaluSliceSegmentEntries{ pNaluSliceSegmentEntries_ } + , pStdPictureInfo{ pStdPictureInfo_ } { } - VULKAN_HPP_CONSTEXPR VideoEncodeH264VclFrameInfoEXT( VideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeH265PictureInfoKHR( VideoEncodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoEncodeH264VclFrameInfoEXT( VkVideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH264VclFrameInfoEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeH265PictureInfoKHR( VkVideoEncodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265PictureInfoKHR( *reinterpret_cast( &rhs ) ) { } -# if !defined( 
VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoEncodeH264VclFrameInfoEXT( - const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * pReferenceFinalLists_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & naluSliceEntries_, - const StdVideoEncodeH264PictureInfo * pCurrentPictureInfo_ = {}, - const void * pNext_ = nullptr ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265PictureInfoKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & naluSliceSegmentEntries_, + const StdVideoEncodeH265PictureInfo * pStdPictureInfo_ = {}, + const void * pNext_ = nullptr ) : pNext( pNext_ ) - , pReferenceFinalLists( pReferenceFinalLists_ ) - , naluSliceEntryCount( static_cast( naluSliceEntries_.size() ) ) - , pNaluSliceEntries( naluSliceEntries_.data() ) - , pCurrentPictureInfo( pCurrentPictureInfo_ ) + , naluSliceSegmentEntryCount( static_cast( naluSliceSegmentEntries_.size() ) ) + , pNaluSliceSegmentEntries( naluSliceSegmentEntries_.data() ) + , pStdPictureInfo( pStdPictureInfo_ ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VideoEncodeH264VclFrameInfoEXT & operator=( VideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH265PictureInfoKHR & operator=( VideoEncodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoEncodeH264VclFrameInfoEXT & operator=( VkVideoEncodeH264VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH265PictureInfoKHR & operator=( VkVideoEncodeH265PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & - setPReferenceFinalLists( const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * pReferenceFinalLists_ ) VULKAN_HPP_NOEXCEPT - { - pReferenceFinalLists = pReferenceFinalLists_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & setNaluSliceEntryCount( uint32_t naluSliceEntryCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265PictureInfoKHR & setNaluSliceSegmentEntryCount( uint32_t naluSliceSegmentEntryCount_ ) VULKAN_HPP_NOEXCEPT { - naluSliceEntryCount = naluSliceEntryCount_; + naluSliceSegmentEntryCount = naluSliceSegmentEntryCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & - setPNaluSliceEntries( const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT * pNaluSliceEntries_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265PictureInfoKHR & + setPNaluSliceSegmentEntries( const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR * pNaluSliceSegmentEntries_ ) VULKAN_HPP_NOEXCEPT { - pNaluSliceEntries = pNaluSliceEntries_; + pNaluSliceSegmentEntries = pNaluSliceSegmentEntries_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoEncodeH264VclFrameInfoEXT & setNaluSliceEntries( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & naluSliceEntries_ ) VULKAN_HPP_NOEXCEPT +# if !defined( 
VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265PictureInfoKHR & setNaluSliceSegmentEntries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & naluSliceSegmentEntries_ ) + VULKAN_HPP_NOEXCEPT { - naluSliceEntryCount = static_cast( naluSliceEntries_.size() ); - pNaluSliceEntries = naluSliceEntries_.data(); + naluSliceSegmentEntryCount = static_cast( naluSliceSegmentEntries_.size() ); + pNaluSliceSegmentEntries = naluSliceSegmentEntries_.data(); return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH264VclFrameInfoEXT & - setPCurrentPictureInfo( const StdVideoEncodeH264PictureInfo * pCurrentPictureInfo_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265PictureInfoKHR & setPStdPictureInfo( const StdVideoEncodeH265PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT { - pCurrentPictureInfo = pCurrentPictureInfo_; + pStdPictureInfo = pStdPictureInfo_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoEncodeH264VclFrameInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoEncodeH264VclFrameInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265PictureInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR * const &, + const StdVideoEncodeH265PictureInfo * const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pReferenceFinalLists, naluSliceEntryCount, pNaluSliceEntries, pCurrentPictureInfo ); + return std::tie( sType, pNext, naluSliceSegmentEntryCount, pNaluSliceSegmentEntries, pStdPictureInfo ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeH264VclFrameInfoEXT const & ) const = default; -# else - bool operator==( VideoEncodeH264VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265PictureInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pReferenceFinalLists == rhs.pReferenceFinalLists ) && - ( naluSliceEntryCount == rhs.naluSliceEntryCount ) && ( pNaluSliceEntries == rhs.pNaluSliceEntries ) && - ( pCurrentPictureInfo == rhs.pCurrentPictureInfo ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( naluSliceSegmentEntryCount == rhs.naluSliceSegmentEntryCount ) && + ( pNaluSliceSegmentEntries == rhs.pNaluSliceSegmentEntries ) && ( pStdPictureInfo == rhs.pStdPictureInfo ); +# endif } - bool operator!=( VideoEncodeH264VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH265PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eVideoEncodeH264VclFrameInfoEXT; - const void * pNext = {}; - const VULKAN_HPP_NAMESPACE::VideoEncodeH264ReferenceListsInfoEXT * pReferenceFinalLists = {}; - uint32_t naluSliceEntryCount = {}; - const VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT * pNaluSliceEntries = {}; - const StdVideoEncodeH264PictureInfo * pCurrentPictureInfo = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265PictureInfoKHR; + const void * pNext = {}; + uint32_t naluSliceSegmentEntryCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR * pNaluSliceSegmentEntries = {}; + const StdVideoEncodeH265PictureInfo * pStdPictureInfo = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoEncodeH264VclFrameInfoEXT; + using Type = VideoEncodeH265PictureInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH265CapabilitiesEXT - { - using NativeType = VkVideoEncodeH265CapabilitiesEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265CapabilitiesEXT; - -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeH265CapabilitiesEXT( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::VideoEncodeH265InputModeFlagsEXT inputModeFlags_ = {}, - VULKAN_HPP_NAMESPACE::VideoEncodeH265OutputModeFlagsEXT outputModeFlags_ = {}, - VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsEXT ctbSizes_ = {}, - VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsEXT transformBlockSizes_ = {}, - uint8_t maxPPictureL0ReferenceCount_ = {}, - uint8_t maxBPictureL0ReferenceCount_ = {}, - uint8_t maxL1ReferenceCount_ = {}, - uint8_t maxSubLayersCount_ = {}, - uint8_t minLog2MinLumaCodingBlockSizeMinus3_ = {}, - uint8_t maxLog2MinLumaCodingBlockSizeMinus3_ = {}, - uint8_t minLog2MinLumaTransformBlockSizeMinus2_ = {}, - uint8_t maxLog2MinLumaTransformBlockSizeMinus2_ = {}, - uint8_t minMaxTransformHierarchyDepthInter_ = {}, - uint8_t maxMaxTransformHierarchyDepthInter_ = {}, - uint8_t minMaxTransformHierarchyDepthIntra_ = {}, - uint8_t maxMaxTransformHierarchyDepthIntra_ = {}, - uint8_t maxDiffCuQpDeltaDepth_ = {}, - uint8_t minMaxNumMergeCand_ = {}, - uint8_t maxMaxNumMergeCand_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , inputModeFlags( inputModeFlags_ ) - , outputModeFlags( outputModeFlags_ ) - , ctbSizes( ctbSizes_ ) - , transformBlockSizes( transformBlockSizes_ ) - , maxPPictureL0ReferenceCount( maxPPictureL0ReferenceCount_ ) - , maxBPictureL0ReferenceCount( maxBPictureL0ReferenceCount_ ) - , maxL1ReferenceCount( maxL1ReferenceCount_ ) - , maxSubLayersCount( maxSubLayersCount_ ) - , minLog2MinLumaCodingBlockSizeMinus3( minLog2MinLumaCodingBlockSizeMinus3_ ) - , maxLog2MinLumaCodingBlockSizeMinus3( maxLog2MinLumaCodingBlockSizeMinus3_ ) - , minLog2MinLumaTransformBlockSizeMinus2( minLog2MinLumaTransformBlockSizeMinus2_ ) - , maxLog2MinLumaTransformBlockSizeMinus2( maxLog2MinLumaTransformBlockSizeMinus2_ ) - , minMaxTransformHierarchyDepthInter( minMaxTransformHierarchyDepthInter_ ) - , maxMaxTransformHierarchyDepthInter( maxMaxTransformHierarchyDepthInter_ ) - , minMaxTransformHierarchyDepthIntra( minMaxTransformHierarchyDepthIntra_ ) - , maxMaxTransformHierarchyDepthIntra( maxMaxTransformHierarchyDepthIntra_ ) - , maxDiffCuQpDeltaDepth( maxDiffCuQpDeltaDepth_ ) - , 
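// [Editorial example - not part of the generated header or of this patch]
// VideoEncodeH265PictureInfoKHR bundles the per-picture std parameters with the slice-segment
// entries defined above. Chaining it into vk::VideoEncodeInfoKHR, and the use of constantQp
// (which is only honoured when rate control is disabled), are assumptions about typical usage;
// a minimal sketch:
#include <vulkan/vulkan.hpp>

// pictureInfo and sliceSegment are caller-owned and must outlive encodeInfo; constantQp = 30 is an
// arbitrary example value.
void attachH265PictureInfo( vk::VideoEncodeInfoKHR &                     encodeInfo,
                            vk::VideoEncodeH265PictureInfoKHR &          pictureInfo,
                            vk::VideoEncodeH265NaluSliceSegmentInfoKHR & sliceSegment,
                            StdVideoEncodeH265PictureInfo const &        stdPictureInfo,
                            StdVideoEncodeH265SliceSegmentHeader const & stdSliceSegmentHeader )
{
  sliceSegment = vk::VideoEncodeH265NaluSliceSegmentInfoKHR{}
                   .setConstantQp( 30 )
                   .setPStdSliceSegmentHeader( &stdSliceSegmentHeader );

  pictureInfo = vk::VideoEncodeH265PictureInfoKHR{}
                  .setNaluSliceSegmentEntryCount( 1 )
                  .setPNaluSliceSegmentEntries( &sliceSegment )
                  .setPStdPictureInfo( &stdPictureInfo )
                  .setPNext( encodeInfo.pNext );  // keep any existing chain intact
  encodeInfo.pNext = &pictureInfo;
}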
minMaxNumMergeCand( minMaxNumMergeCand_ ) - , maxMaxNumMergeCand( maxMaxNumMergeCand_ ) + struct VideoEncodeH265ProfileInfoKHR + { + using NativeType = VkVideoEncodeH265ProfileInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265ProfileInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265ProfileInfoKHR( StdVideoH265ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stdProfileIdc{ stdProfileIdc_ } { } - VULKAN_HPP_CONSTEXPR VideoEncodeH265CapabilitiesEXT( VideoEncodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeH265ProfileInfoKHR( VideoEncodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoEncodeH265CapabilitiesEXT( VkVideoEncodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH265CapabilitiesEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeH265ProfileInfoKHR( VkVideoEncodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265ProfileInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeH265CapabilitiesEXT & operator=( VideoEncodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH265ProfileInfoKHR & operator=( VideoEncodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoEncodeH265CapabilitiesEXT & operator=( VkVideoEncodeH265CapabilitiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH265ProfileInfoKHR & operator=( VkVideoEncodeH265ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkVideoEncodeH265CapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } - operator VkVideoEncodeH265CapabilitiesEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileInfoKHR & setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + stdProfileIdc = stdProfileIdc_; + return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkVideoEncodeH265ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - flags, - inputModeFlags, - outputModeFlags, - ctbSizes, - transformBlockSizes, - maxPPictureL0ReferenceCount, - maxBPictureL0ReferenceCount, - maxL1ReferenceCount, - maxSubLayersCount, - minLog2MinLumaCodingBlockSizeMinus3, - maxLog2MinLumaCodingBlockSizeMinus3, - minLog2MinLumaTransformBlockSizeMinus2, - maxLog2MinLumaTransformBlockSizeMinus2, - minMaxTransformHierarchyDepthInter, - maxMaxTransformHierarchyDepthInter, - minMaxTransformHierarchyDepthIntra, - 
maxMaxTransformHierarchyDepthIntra, - maxDiffCuQpDeltaDepth, - minMaxNumMergeCand, - maxMaxNumMergeCand ); + return std::tie( sType, pNext, stdProfileIdc ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeH265CapabilitiesEXT const & ) const = default; -# else - bool operator==( VideoEncodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( inputModeFlags == rhs.inputModeFlags ) && - ( outputModeFlags == rhs.outputModeFlags ) && ( ctbSizes == rhs.ctbSizes ) && ( transformBlockSizes == rhs.transformBlockSizes ) && - ( maxPPictureL0ReferenceCount == rhs.maxPPictureL0ReferenceCount ) && ( maxBPictureL0ReferenceCount == rhs.maxBPictureL0ReferenceCount ) && - ( maxL1ReferenceCount == rhs.maxL1ReferenceCount ) && ( maxSubLayersCount == rhs.maxSubLayersCount ) && - ( minLog2MinLumaCodingBlockSizeMinus3 == rhs.minLog2MinLumaCodingBlockSizeMinus3 ) && - ( maxLog2MinLumaCodingBlockSizeMinus3 == rhs.maxLog2MinLumaCodingBlockSizeMinus3 ) && - ( minLog2MinLumaTransformBlockSizeMinus2 == rhs.minLog2MinLumaTransformBlockSizeMinus2 ) && - ( maxLog2MinLumaTransformBlockSizeMinus2 == rhs.maxLog2MinLumaTransformBlockSizeMinus2 ) && - ( minMaxTransformHierarchyDepthInter == rhs.minMaxTransformHierarchyDepthInter ) && - ( maxMaxTransformHierarchyDepthInter == rhs.maxMaxTransformHierarchyDepthInter ) && - ( minMaxTransformHierarchyDepthIntra == rhs.minMaxTransformHierarchyDepthIntra ) && - ( maxMaxTransformHierarchyDepthIntra == rhs.maxMaxTransformHierarchyDepthIntra ) && ( maxDiffCuQpDeltaDepth == rhs.maxDiffCuQpDeltaDepth ) && - ( minMaxNumMergeCand == rhs.minMaxNumMergeCand ) && ( maxMaxNumMergeCand == rhs.maxMaxNumMergeCand ); -# endif + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoEncodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 ); } - bool operator!=( VideoEncodeH265CapabilitiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH265ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265CapabilitiesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsEXT flags = {}; - VULKAN_HPP_NAMESPACE::VideoEncodeH265InputModeFlagsEXT inputModeFlags = {}; - VULKAN_HPP_NAMESPACE::VideoEncodeH265OutputModeFlagsEXT outputModeFlags = {}; - VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsEXT ctbSizes = {}; - VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsEXT transformBlockSizes = {}; - uint8_t maxPPictureL0ReferenceCount = {}; - uint8_t maxBPictureL0ReferenceCount = {}; - uint8_t maxL1ReferenceCount = {}; - uint8_t maxSubLayersCount = {}; - uint8_t minLog2MinLumaCodingBlockSizeMinus3 = {}; - uint8_t maxLog2MinLumaCodingBlockSizeMinus3 = {}; - uint8_t minLog2MinLumaTransformBlockSizeMinus2 = {}; - uint8_t maxLog2MinLumaTransformBlockSizeMinus2 = {}; - uint8_t minMaxTransformHierarchyDepthInter = {}; - uint8_t maxMaxTransformHierarchyDepthInter = {}; - uint8_t minMaxTransformHierarchyDepthIntra = {}; - uint8_t maxMaxTransformHierarchyDepthIntra = {}; - uint8_t maxDiffCuQpDeltaDepth = {}; - uint8_t minMaxNumMergeCand = {}; - uint8_t maxMaxNumMergeCand = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265ProfileInfoKHR; + const void * pNext = {}; + StdVideoH265ProfileIdc stdProfileIdc = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoEncodeH265CapabilitiesEXT; + using Type = VideoEncodeH265ProfileInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH265DpbSlotInfoEXT + struct VideoEncodeH265QpKHR { - using NativeType = VkVideoEncodeH265DpbSlotInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265DpbSlotInfoEXT; + using NativeType = VkVideoEncodeH265QpKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeH265DpbSlotInfoEXT( int8_t slotIndex_ = {}, - const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , slotIndex( slotIndex_ ) - , pStdReferenceInfo( pStdReferenceInfo_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265QpKHR( int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {} ) VULKAN_HPP_NOEXCEPT + : qpI{ qpI_ } + , qpP{ qpP_ } + , qpB{ qpB_ } { } - VULKAN_HPP_CONSTEXPR VideoEncodeH265DpbSlotInfoEXT( VideoEncodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeH265QpKHR( VideoEncodeH265QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoEncodeH265DpbSlotInfoEXT( VkVideoEncodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH265DpbSlotInfoEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeH265QpKHR( 
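// [Editorial example - not part of the generated header or of this patch]
// VideoEncodeH265ProfileInfoKHR only wraps the codec-standard profile IDC; it is chained into the
// generic vk::VideoProfileInfoKHR describing the whole video profile. The Main-profile / 4:2:0 /
// 8-bit combination below is just an example; a minimal sketch:
#include <vulkan/vulkan.hpp>

// h265Profile must stay alive for as long as the returned profile is in use.
vk::VideoProfileInfoKHR makeH265MainEncodeProfile( vk::VideoEncodeH265ProfileInfoKHR const & h265Profile )
{
  return vk::VideoProfileInfoKHR{}
    .setPNext( &h265Profile )
    .setVideoCodecOperation( vk::VideoCodecOperationFlagBitsKHR::eEncodeH265 )
    .setChromaSubsampling( vk::VideoChromaSubsamplingFlagBitsKHR::e420 )
    .setLumaBitDepth( vk::VideoComponentBitDepthFlagBitsKHR::e8 )
    .setChromaBitDepth( vk::VideoComponentBitDepthFlagBitsKHR::e8 );
}

// Typical caller-side setup:
//   vk::VideoEncodeH265ProfileInfoKHR h265Profile{};
//   h265Profile.setStdProfileIdc( STD_VIDEO_H265_PROFILE_IDC_MAIN );
//   vk::VideoProfileInfoKHR profile = makeH265MainEncodeProfile( h265Profile );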
VkVideoEncodeH265QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265QpKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeH265DpbSlotInfoEXT & operator=( VideoEncodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH265QpKHR & operator=( VideoEncodeH265QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoEncodeH265DpbSlotInfoEXT & operator=( VkVideoEncodeH265DpbSlotInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH265QpKHR & operator=( VkVideoEncodeH265QpKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpKHR & setQpI( int32_t qpI_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + qpI = qpI_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoEXT & setSlotIndex( int8_t slotIndex_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpKHR & setQpP( int32_t qpP_ ) VULKAN_HPP_NOEXCEPT { - slotIndex = slotIndex_; + qpP = qpP_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265DpbSlotInfoEXT & - setPStdReferenceInfo( const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpKHR & setQpB( int32_t qpB_ ) VULKAN_HPP_NOEXCEPT { - pStdReferenceInfo = pStdReferenceInfo_; + qpB = qpB_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoEncodeH265DpbSlotInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265QpKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoEncodeH265DpbSlotInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265QpKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, slotIndex, pStdReferenceInfo ); + return std::tie( qpI, qpP, qpB ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeH265DpbSlotInfoEXT const & ) const = default; -# else - bool operator==( VideoEncodeH265DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265QpKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265QpKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( slotIndex == rhs.slotIndex ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo ); -# endif +# else + return ( qpI == rhs.qpI ) && ( qpP == rhs.qpP ) && ( qpB == rhs.qpB ); +# endif } - bool operator!=( VideoEncodeH265DpbSlotInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH265QpKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return 
!operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265DpbSlotInfoEXT; - const void * pNext = {}; - int8_t slotIndex = {}; - const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo = {}; - }; - - template <> - struct CppType - { - using Type = VideoEncodeH265DpbSlotInfoEXT; + int32_t qpI = {}; + int32_t qpP = {}; + int32_t qpB = {}; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH265EmitPictureParametersInfoEXT + struct VideoEncodeH265QualityLevelPropertiesKHR { - using NativeType = VkVideoEncodeH265EmitPictureParametersInfoEXT; + using NativeType = VkVideoEncodeH265QualityLevelPropertiesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265EmitPictureParametersInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265QualityLevelPropertiesKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeH265EmitPictureParametersInfoEXT( uint8_t vpsId_ = {}, - uint8_t spsId_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ = {}, - uint32_t ppsIdEntryCount_ = {}, - const uint8_t * ppsIdEntries_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , vpsId( vpsId_ ) - , spsId( spsId_ ) - , emitVpsEnable( emitVpsEnable_ ) - , emitSpsEnable( emitSpsEnable_ ) - , ppsIdEntryCount( ppsIdEntryCount_ ) - , ppsIdEntries( ppsIdEntries_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265QualityLevelPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR preferredRateControlFlags_ = {}, + uint32_t preferredGopFrameCount_ = {}, + uint32_t preferredIdrPeriod_ = {}, + uint32_t preferredConsecutiveBFrameCount_ = {}, + uint32_t preferredSubLayerCount_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR preferredConstantQp_ = {}, + uint32_t preferredMaxL0ReferenceCount_ = {}, + uint32_t preferredMaxL1ReferenceCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , preferredRateControlFlags{ preferredRateControlFlags_ } + , preferredGopFrameCount{ preferredGopFrameCount_ } + , preferredIdrPeriod{ preferredIdrPeriod_ } + , preferredConsecutiveBFrameCount{ preferredConsecutiveBFrameCount_ } + , preferredSubLayerCount{ preferredSubLayerCount_ } + , preferredConstantQp{ preferredConstantQp_ } + , preferredMaxL0ReferenceCount{ preferredMaxL0ReferenceCount_ } + , preferredMaxL1ReferenceCount{ preferredMaxL1ReferenceCount_ } { } - VULKAN_HPP_CONSTEXPR VideoEncodeH265EmitPictureParametersInfoEXT( VideoEncodeH265EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - VideoEncodeH265EmitPictureParametersInfoEXT( VkVideoEncodeH265EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH265EmitPictureParametersInfoEXT( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR VideoEncodeH265QualityLevelPropertiesKHR( VideoEncodeH265QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoEncodeH265EmitPictureParametersInfoEXT( uint8_t vpsId_, - uint8_t spsId_, - VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable_, - VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & 
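// [Editorial example - not part of the generated header or of this patch]
// VideoEncodeH265QpKHR groups per-frame-type QP values. It is used, for example, for the min/max
// QP clamps of the per-layer rate control structure. This sketch assumes that
// VideoEncodeH265RateControlLayerInfoKHR (not part of this hunk) mirrors the H.264 layer struct
// removed above, i.e. exposes useMinQp/minQp/useMaxQp/maxQp; the QP values are illustrative and
// must respect the minQp/maxQp range reported in VideoEncodeH265CapabilitiesKHR.
#include <vulkan/vulkan.hpp>

void clampH265LayerQp( vk::VideoEncodeH265RateControlLayerInfoKHR & layer )
{
  layer.setUseMinQp( VK_TRUE )
       .setMinQp( vk::VideoEncodeH265QpKHR{}.setQpI( 18 ).setQpP( 20 ).setQpB( 22 ) )
       .setUseMaxQp( VK_TRUE )
       .setMaxQp( vk::VideoEncodeH265QpKHR{}.setQpI( 42 ).setQpP( 44 ).setQpB( 46 ) );
}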
psIdEntries_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , vpsId( vpsId_ ) - , spsId( spsId_ ) - , emitVpsEnable( emitVpsEnable_ ) - , emitSpsEnable( emitSpsEnable_ ) - , ppsIdEntryCount( static_cast( psIdEntries_.size() ) ) - , ppsIdEntries( psIdEntries_.data() ) + VideoEncodeH265QualityLevelPropertiesKHR( VkVideoEncodeH265QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265QualityLevelPropertiesKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeH265EmitPictureParametersInfoEXT & operator=( VideoEncodeH265EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH265QualityLevelPropertiesKHR & operator=( VideoEncodeH265QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoEncodeH265EmitPictureParametersInfoEXT & operator=( VkVideoEncodeH265EmitPictureParametersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH265QualityLevelPropertiesKHR & operator=( VkVideoEncodeH265QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265QualityLevelPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersInfoEXT & setVpsId( uint8_t vpsId_ ) VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265QualityLevelPropertiesKHR &() VULKAN_HPP_NOEXCEPT { - vpsId = vpsId_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersInfoEXT & setSpsId( uint8_t spsId_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - spsId = spsId_; - return *this; + return std::tie( sType, + pNext, + preferredRateControlFlags, + preferredGopFrameCount, + preferredIdrPeriod, + preferredConsecutiveBFrameCount, + preferredSubLayerCount, + preferredConstantQp, + preferredMaxL0ReferenceCount, + preferredMaxL1ReferenceCount ); } +#endif - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersInfoEXT & setEmitVpsEnable( VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265QualityLevelPropertiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - emitVpsEnable = emitVpsEnable_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( preferredRateControlFlags == rhs.preferredRateControlFlags ) && + ( preferredGopFrameCount == rhs.preferredGopFrameCount ) && ( preferredIdrPeriod == rhs.preferredIdrPeriod ) && + ( preferredConsecutiveBFrameCount == rhs.preferredConsecutiveBFrameCount ) && ( preferredSubLayerCount == rhs.preferredSubLayerCount ) && + ( preferredConstantQp == rhs.preferredConstantQp ) && ( preferredMaxL0ReferenceCount == rhs.preferredMaxL0ReferenceCount ) && + ( preferredMaxL1ReferenceCount == 
rhs.preferredMaxL1ReferenceCount ); +# endif } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersInfoEXT & setEmitSpsEnable( VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH265QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - emitSpsEnable = emitSpsEnable_; - return *this; + return !operator==( rhs ); } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265QualityLevelPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR preferredRateControlFlags = {}; + uint32_t preferredGopFrameCount = {}; + uint32_t preferredIdrPeriod = {}; + uint32_t preferredConsecutiveBFrameCount = {}; + uint32_t preferredSubLayerCount = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR preferredConstantQp = {}; + uint32_t preferredMaxL0ReferenceCount = {}; + uint32_t preferredMaxL1ReferenceCount = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265QualityLevelPropertiesKHR; + }; + + struct VideoEncodeH265QuantizationMapCapabilitiesKHR + { + using NativeType = VkVideoEncodeH265QuantizationMapCapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265QuantizationMapCapabilitiesKHR; - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersInfoEXT & setPpsIdEntryCount( uint32_t ppsIdEntryCount_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeH265QuantizationMapCapabilitiesKHR( int32_t minQpDelta_ = {}, int32_t maxQpDelta_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minQpDelta{ minQpDelta_ } + , maxQpDelta{ maxQpDelta_ } { - ppsIdEntryCount = ppsIdEntryCount_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265EmitPictureParametersInfoEXT & setPpsIdEntries( const uint8_t * ppsIdEntries_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR + VideoEncodeH265QuantizationMapCapabilitiesKHR( VideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265QuantizationMapCapabilitiesKHR( VkVideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265QuantizationMapCapabilitiesKHR( *reinterpret_cast( &rhs ) ) { - ppsIdEntries = ppsIdEntries_; - return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoEncodeH265EmitPictureParametersInfoEXT & - setPsIdEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & psIdEntries_ ) VULKAN_HPP_NOEXCEPT + VideoEncodeH265QuantizationMapCapabilitiesKHR & operator=( VideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH265QuantizationMapCapabilitiesKHR & operator=( VkVideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - ppsIdEntryCount = static_cast( psIdEntries_.size() ); - ppsIdEntries = psIdEntries_.data(); + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkVideoEncodeH265EmitPictureParametersInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265QuantizationMapCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator 
VkVideoEncodeH265EmitPictureParametersInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265QuantizationMapCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, vpsId, spsId, emitVpsEnable, emitSpsEnable, ppsIdEntryCount, ppsIdEntries ); + return std::tie( sType, pNext, minQpDelta, maxQpDelta ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeH265EmitPictureParametersInfoEXT const & ) const = default; -# else - bool operator==( VideoEncodeH265EmitPictureParametersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265QuantizationMapCapabilitiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vpsId == rhs.vpsId ) && ( spsId == rhs.spsId ) && ( emitVpsEnable == rhs.emitVpsEnable ) && - ( emitSpsEnable == rhs.emitSpsEnable ) && ( ppsIdEntryCount == rhs.ppsIdEntryCount ) && ( ppsIdEntries == rhs.ppsIdEntries ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minQpDelta == rhs.minQpDelta ) && ( maxQpDelta == rhs.maxQpDelta ); +# endif } - bool operator!=( VideoEncodeH265EmitPictureParametersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265EmitPictureParametersInfoEXT; - const void * pNext = {}; - uint8_t vpsId = {}; - uint8_t spsId = {}; - VULKAN_HPP_NAMESPACE::Bool32 emitVpsEnable = {}; - VULKAN_HPP_NAMESPACE::Bool32 emitSpsEnable = {}; - uint32_t ppsIdEntryCount = {}; - const uint8_t * ppsIdEntries = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265QuantizationMapCapabilitiesKHR; + void * pNext = {}; + int32_t minQpDelta = {}; + int32_t maxQpDelta = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoEncodeH265EmitPictureParametersInfoEXT; + using Type = VideoEncodeH265QuantizationMapCapabilitiesKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH265FrameSizeEXT + struct VideoEncodeH265RateControlInfoKHR { - using NativeType = VkVideoEncodeH265FrameSizeEXT; + using NativeType = VkVideoEncodeH265RateControlInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeH265FrameSizeEXT( uint32_t frameISize_ = {}, uint32_t framePSize_ = {}, uint32_t frameBSize_ = {} ) VULKAN_HPP_NOEXCEPT - : frameISize( frameISize_ ) - , framePSize( framePSize_ ) - , frameBSize( frameBSize_ ) + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265RateControlInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + 
VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR flags_ = {}, + uint32_t gopFrameCount_ = {}, + uint32_t idrPeriod_ = {}, + uint32_t consecutiveBFrameCount_ = {}, + uint32_t subLayerCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , gopFrameCount{ gopFrameCount_ } + , idrPeriod{ idrPeriod_ } + , consecutiveBFrameCount{ consecutiveBFrameCount_ } + , subLayerCount{ subLayerCount_ } { } - VULKAN_HPP_CONSTEXPR VideoEncodeH265FrameSizeEXT( VideoEncodeH265FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlInfoKHR( VideoEncodeH265RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoEncodeH265FrameSizeEXT( VkVideoEncodeH265FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH265FrameSizeEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeH265RateControlInfoKHR( VkVideoEncodeH265RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265RateControlInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeH265FrameSizeEXT & operator=( VideoEncodeH265FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH265RateControlInfoKHR & operator=( VideoEncodeH265RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoEncodeH265FrameSizeEXT & operator=( VkVideoEncodeH265FrameSizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH265RateControlInfoKHR & operator=( VkVideoEncodeH265RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeEXT & setFrameISize( uint32_t frameISize_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - frameISize = frameISize_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeEXT & setFramePSize( uint32_t framePSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { - framePSize = framePSize_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265FrameSizeEXT & setFrameBSize( uint32_t frameBSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT { - frameBSize = frameBSize_; + gopFrameCount = gopFrameCount_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkVideoEncodeH265FrameSizeEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setIdrPeriod( uint32_t idrPeriod_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + idrPeriod = idrPeriod_; + return *this; } - operator VkVideoEncodeH265FrameSizeEXT &() VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setConsecutiveBFrameCount( uint32_t consecutiveBFrameCount_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + consecutiveBFrameCount = consecutiveBFrameCount_; + return *this; } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + 
VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoKHR & setSubLayerCount( uint32_t subLayerCount_ ) VULKAN_HPP_NOEXCEPT + { + subLayerCount = subLayerCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeH265RateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265RateControlInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( frameISize, framePSize, frameBSize ); + return std::tie( sType, pNext, flags, gopFrameCount, idrPeriod, consecutiveBFrameCount, subLayerCount ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeH265FrameSizeEXT const & ) const = default; -# else - bool operator==( VideoEncodeH265FrameSizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265RateControlInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( frameISize == rhs.frameISize ) && ( framePSize == rhs.framePSize ) && ( frameBSize == rhs.frameBSize ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( gopFrameCount == rhs.gopFrameCount ) && + ( idrPeriod == rhs.idrPeriod ) && ( consecutiveBFrameCount == rhs.consecutiveBFrameCount ) && ( subLayerCount == rhs.subLayerCount ); +# endif } - bool operator!=( VideoEncodeH265FrameSizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH265RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - uint32_t frameISize = {}; - uint32_t framePSize = {}; - uint32_t frameBSize = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265RateControlInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsKHR flags = {}; + uint32_t gopFrameCount = {}; + uint32_t idrPeriod = {}; + uint32_t consecutiveBFrameCount = {}; + uint32_t subLayerCount = {}; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH265ReferenceListsInfoEXT + template <> + struct CppType + { + using Type = VideoEncodeH265RateControlInfoKHR; + }; + + struct VideoEncodeH265RateControlLayerInfoKHR { - using NativeType = VkVideoEncodeH265ReferenceListsInfoEXT; + using NativeType = VkVideoEncodeH265RateControlLayerInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265ReferenceListsInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265RateControlLayerInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeH265ReferenceListsInfoEXT( uint8_t referenceList0EntryCount_ = {}, - const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList0Entries_ = {}, - uint8_t referenceList1EntryCount_ = {}, - const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList1Entries_ = {}, - const 
StdVideoEncodeH265ReferenceModifications * pReferenceModifications_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , referenceList0EntryCount( referenceList0EntryCount_ ) - , pReferenceList0Entries( pReferenceList0Entries_ ) - , referenceList1EntryCount( referenceList1EntryCount_ ) - , pReferenceList1Entries( pReferenceList1Entries_ ) - , pReferenceModifications( pReferenceModifications_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR minQp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR maxQp_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR maxFrameSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , useMinQp{ useMinQp_ } + , minQp{ minQp_ } + , useMaxQp{ useMaxQp_ } + , maxQp{ maxQp_ } + , useMaxFrameSize{ useMaxFrameSize_ } + , maxFrameSize{ maxFrameSize_ } { } - VULKAN_HPP_CONSTEXPR VideoEncodeH265ReferenceListsInfoEXT( VideoEncodeH265ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - VideoEncodeH265ReferenceListsInfoEXT( VkVideoEncodeH265ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH265ReferenceListsInfoEXT( *reinterpret_cast( &rhs ) ) - { - } + VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoKHR( VideoEncodeH265RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoEncodeH265ReferenceListsInfoEXT( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceList0Entries_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceList1Entries_ = {}, - const StdVideoEncodeH265ReferenceModifications * pReferenceModifications_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , referenceList0EntryCount( static_cast( referenceList0Entries_.size() ) ) - , pReferenceList0Entries( referenceList0Entries_.data() ) - , referenceList1EntryCount( static_cast( referenceList1Entries_.size() ) ) - , pReferenceList1Entries( referenceList1Entries_.data() ) - , pReferenceModifications( pReferenceModifications_ ) + VideoEncodeH265RateControlLayerInfoKHR( VkVideoEncodeH265RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265RateControlLayerInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeH265ReferenceListsInfoEXT & operator=( VideoEncodeH265ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH265RateControlLayerInfoKHR & operator=( VideoEncodeH265RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoEncodeH265ReferenceListsInfoEXT & operator=( VkVideoEncodeH265ReferenceListsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH265RateControlLayerInfoKHR & operator=( VkVideoEncodeH265RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS 
) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsInfoEXT & setReferenceList0EntryCount( uint8_t referenceList0EntryCount_ ) VULKAN_HPP_NOEXCEPT - { - referenceList0EntryCount = referenceList0EntryCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsInfoEXT & - setPReferenceList0Entries( const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList0Entries_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setUseMinQp( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ ) VULKAN_HPP_NOEXCEPT { - pReferenceList0Entries = pReferenceList0Entries_; + useMinQp = useMinQp_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoEncodeH265ReferenceListsInfoEXT & setReferenceList0Entries( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceList0Entries_ ) - VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setMinQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR const & minQp_ ) VULKAN_HPP_NOEXCEPT { - referenceList0EntryCount = static_cast( referenceList0Entries_.size() ); - pReferenceList0Entries = referenceList0Entries_.data(); + minQp = minQp_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsInfoEXT & setReferenceList1EntryCount( uint8_t referenceList1EntryCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setUseMaxQp( VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ ) VULKAN_HPP_NOEXCEPT { - referenceList1EntryCount = referenceList1EntryCount_; + useMaxQp = useMaxQp_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsInfoEXT & - setPReferenceList1Entries( const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList1Entries_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setMaxQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR const & maxQp_ ) VULKAN_HPP_NOEXCEPT { - pReferenceList1Entries = pReferenceList1Entries_; + maxQp = maxQp_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoEncodeH265ReferenceListsInfoEXT & setReferenceList1Entries( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceList1Entries_ ) - VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT { - referenceList1EntryCount = static_cast( referenceList1Entries_.size() ); - pReferenceList1Entries = referenceList1Entries_.data(); + useMaxFrameSize = useMaxFrameSize_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ReferenceListsInfoEXT & - setPReferenceModifications( const StdVideoEncodeH265ReferenceModifications * pReferenceModifications_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoKHR & + setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT { - pReferenceModifications = pReferenceModifications_; + maxFrameSize = maxFrameSize_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoEncodeH265ReferenceListsInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265RateControlLayerInfoKHR const 
&() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoEncodeH265ReferenceListsInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265RateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( - sType, pNext, referenceList0EntryCount, pReferenceList0Entries, referenceList1EntryCount, pReferenceList1Entries, pReferenceModifications ); + return std::tie( sType, pNext, useMinQp, minQp, useMaxQp, maxQp, useMaxFrameSize, maxFrameSize ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeH265ReferenceListsInfoEXT const & ) const = default; -# else - bool operator==( VideoEncodeH265ReferenceListsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265RateControlLayerInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( referenceList0EntryCount == rhs.referenceList0EntryCount ) && - ( pReferenceList0Entries == rhs.pReferenceList0Entries ) && ( referenceList1EntryCount == rhs.referenceList1EntryCount ) && - ( pReferenceList1Entries == rhs.pReferenceList1Entries ) && ( pReferenceModifications == rhs.pReferenceModifications ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMinQp == rhs.useMinQp ) && ( minQp == rhs.minQp ) && ( useMaxQp == rhs.useMaxQp ) && + ( maxQp == rhs.maxQp ) && ( useMaxFrameSize == rhs.useMaxFrameSize ) && ( maxFrameSize == rhs.maxFrameSize ); +# endif } - bool operator!=( VideoEncodeH265ReferenceListsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH265RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265ReferenceListsInfoEXT; - const void * pNext = {}; - uint8_t referenceList0EntryCount = {}; - const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList0Entries = {}; - uint8_t referenceList1EntryCount = {}; - const VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT * pReferenceList1Entries = {}; - const StdVideoEncodeH265ReferenceModifications * pReferenceModifications = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265RateControlLayerInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMinQp = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR minQp = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxQp = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265QpKHR maxQp = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {}; + 
VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR maxFrameSize = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoEncodeH265ReferenceListsInfoEXT; + using Type = VideoEncodeH265RateControlLayerInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH265NaluSliceSegmentInfoEXT + struct VideoEncodeH265SessionCreateInfoKHR { - using NativeType = VkVideoEncodeH265NaluSliceSegmentInfoEXT; + using NativeType = VkVideoEncodeH265SessionCreateInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265NaluSliceSegmentInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionCreateInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceSegmentInfoEXT( uint32_t ctbCount_ = {}, - const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * pReferenceFinalLists_ = {}, - const StdVideoEncodeH265SliceSegmentHeader * pSliceSegmentHeaderStd_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , ctbCount( ctbCount_ ) - , pReferenceFinalLists( pReferenceFinalLists_ ) - , pSliceSegmentHeaderStd( pSliceSegmentHeaderStd_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionCreateInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc_ = {}, + StdVideoH265LevelIdc maxLevelIdc_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , useMaxLevelIdc{ useMaxLevelIdc_ } + , maxLevelIdc{ maxLevelIdc_ } { } - VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceSegmentInfoEXT( VideoEncodeH265NaluSliceSegmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionCreateInfoKHR( VideoEncodeH265SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoEncodeH265NaluSliceSegmentInfoEXT( VkVideoEncodeH265NaluSliceSegmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH265NaluSliceSegmentInfoEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeH265SessionCreateInfoKHR( VkVideoEncodeH265SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265SessionCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeH265NaluSliceSegmentInfoEXT & operator=( VideoEncodeH265NaluSliceSegmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH265SessionCreateInfoKHR & operator=( VideoEncodeH265SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoEncodeH265NaluSliceSegmentInfoEXT & operator=( VkVideoEncodeH265NaluSliceSegmentInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH265SessionCreateInfoKHR & operator=( VkVideoEncodeH265SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoEXT 
& setCtbCount( uint32_t ctbCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoKHR & setUseMaxLevelIdc( VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc_ ) VULKAN_HPP_NOEXCEPT { - ctbCount = ctbCount_; + useMaxLevelIdc = useMaxLevelIdc_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoEXT & - setPReferenceFinalLists( const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * pReferenceFinalLists_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionCreateInfoKHR & setMaxLevelIdc( StdVideoH265LevelIdc maxLevelIdc_ ) VULKAN_HPP_NOEXCEPT { - pReferenceFinalLists = pReferenceFinalLists_; + maxLevelIdc = maxLevelIdc_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265NaluSliceSegmentInfoEXT & - setPSliceSegmentHeaderStd( const StdVideoEncodeH265SliceSegmentHeader * pSliceSegmentHeaderStd_ ) VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265SessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - pSliceSegmentHeaderStd = pSliceSegmentHeaderStd_; - return *this; + return *reinterpret_cast( this ); } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkVideoEncodeH265NaluSliceSegmentInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265SessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoEncodeH265NaluSliceSegmentInfoEXT &() VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return std::tie( sType, pNext, useMaxLevelIdc, maxLevelIdc ); } +#endif -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeH265SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, ctbCount, pReferenceFinalLists, pSliceSegmentHeaderStd ); + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = useMaxLevelIdc <=> rhs.useMaxLevelIdc; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeH265NaluSliceSegmentInfoEXT const & ) const = default; -# else - bool operator==( VideoEncodeH265NaluSliceSegmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VideoEncodeH265SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ctbCount == rhs.ctbCount ) && ( pReferenceFinalLists == rhs.pReferenceFinalLists ) && - ( pSliceSegmentHeaderStd == rhs.pSliceSegmentHeaderStd ); -# endif + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMaxLevelIdc == rhs.useMaxLevelIdc ) && + ( memcmp( &maxLevelIdc, &rhs.maxLevelIdc, sizeof( StdVideoH265LevelIdc ) ) == 0 ); } - bool operator!=( VideoEncodeH265NaluSliceSegmentInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH265SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265NaluSliceSegmentInfoEXT; - const void * pNext = {}; - uint32_t ctbCount = {}; - const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * pReferenceFinalLists = {}; - const StdVideoEncodeH265SliceSegmentHeader * pSliceSegmentHeaderStd = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc = {}; + StdVideoH265LevelIdc maxLevelIdc = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoEncodeH265NaluSliceSegmentInfoEXT; + using Type = VideoEncodeH265SessionCreateInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH265ProfileInfoEXT + struct VideoEncodeH265SessionParametersAddInfoKHR { - using NativeType = VkVideoEncodeH265ProfileInfoEXT; + using NativeType = VkVideoEncodeH265SessionParametersAddInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265ProfileInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersAddInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeH265ProfileInfoEXT( StdVideoH265ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stdProfileIdc( stdProfileIdc_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoKHR( uint32_t stdVPSCount_ = {}, + const StdVideoH265VideoParameterSet * pStdVPSs_ = {}, + uint32_t stdSPSCount_ = {}, + const StdVideoH265SequenceParameterSet * pStdSPSs_ = {}, + uint32_t stdPPSCount_ = {}, + const StdVideoH265PictureParameterSet * pStdPPSs_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stdVPSCount{ stdVPSCount_ } + , pStdVPSs{ pStdVPSs_ } + , stdSPSCount{ stdSPSCount_ } + , pStdSPSs{ pStdSPSs_ } + , stdPPSCount{ stdPPSCount_ } + , pStdPPSs{ pStdPPSs_ } { } - VULKAN_HPP_CONSTEXPR VideoEncodeH265ProfileInfoEXT( VideoEncodeH265ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; 
+ VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoKHR( VideoEncodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoEncodeH265ProfileInfoEXT( VkVideoEncodeH265ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH265ProfileInfoEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeH265SessionParametersAddInfoKHR( VkVideoEncodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265SessionParametersAddInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeH265ProfileInfoEXT & operator=( VideoEncodeH265ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - VideoEncodeH265ProfileInfoEXT & operator=( VkVideoEncodeH265ProfileInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265SessionParametersAddInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdVPSs_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdSPSs_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdPPSs_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , stdVPSCount( static_cast( stdVPSs_.size() ) ) + , pStdVPSs( stdVPSs_.data() ) + , stdSPSCount( static_cast( stdSPSs_.size() ) ) + , pStdSPSs( stdSPSs_.data() ) + , stdPPSCount( static_cast( stdPPSs_.size() ) ) + , pStdPPSs( stdPPSs_.data() ) { - *this = *reinterpret_cast( &rhs ); - return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } + VideoEncodeH265SessionParametersAddInfoKHR & operator=( VideoEncodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265ProfileInfoEXT & setStdProfileIdc( StdVideoH265ProfileIdc stdProfileIdc_ ) VULKAN_HPP_NOEXCEPT + VideoEncodeH265SessionParametersAddInfoKHR & operator=( VkVideoEncodeH265SessionParametersAddInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - stdProfileIdc = stdProfileIdc_; + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkVideoEncodeH265ProfileInfoEXT const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkVideoEncodeH265ProfileInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, stdProfileIdc ); - } -# endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - std::strong_ordering operator<=>( VideoEncodeH265ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) - return cmp; - if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) - return cmp; - if ( auto cmp = memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ); cmp != 0 ) - return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; - - return std::strong_ordering::equivalent; + pNext = pNext_; + return *this; } -# endif - bool operator==( VideoEncodeH265ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setStdVPSCount( uint32_t stdVPSCount_ ) VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfileIdc, &rhs.stdProfileIdc, sizeof( StdVideoH265ProfileIdc ) ) == 0 ); + stdVPSCount = stdVPSCount_; + return *this; } - bool operator!=( VideoEncodeH265ProfileInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setPStdVPSs( const StdVideoH265VideoParameterSet * pStdVPSs_ ) VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + pStdVPSs = pStdVPSs_; + return *this; } - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265ProfileInfoEXT; - const void * pNext = {}; - StdVideoH265ProfileIdc stdProfileIdc = {}; - }; - - template <> - struct CppType - { - using Type = VideoEncodeH265ProfileInfoEXT; - }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH265QpEXT - { - using NativeType = VkVideoEncodeH265QpEXT; - -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeH265QpEXT( int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {} ) VULKAN_HPP_NOEXCEPT - : qpI( qpI_ ) - , qpP( qpP_ ) - , qpB( qpB_ ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265SessionParametersAddInfoKHR & + setStdVPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdVPSs_ ) VULKAN_HPP_NOEXCEPT { + stdVPSCount = static_cast( stdVPSs_.size() ); + pStdVPSs = stdVPSs_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR VideoEncodeH265QpEXT( VideoEncodeH265QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - VideoEncodeH265QpEXT( VkVideoEncodeH265QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH265QpEXT( *reinterpret_cast( &rhs ) ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setStdSPSCount( uint32_t stdSPSCount_ ) VULKAN_HPP_NOEXCEPT { + stdSPSCount = stdSPSCount_; + return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeH265QpEXT & operator=( VideoEncodeH265QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setPStdSPSs( const StdVideoH265SequenceParameterSet * pStdSPSs_ ) VULKAN_HPP_NOEXCEPT + { + pStdSPSs = pStdSPSs_; + return *this; + } - VideoEncodeH265QpEXT & operator=( VkVideoEncodeH265QpEXT const & rhs ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265SessionParametersAddInfoKHR & + setStdSPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdSPSs_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + stdSPSCount = static_cast( stdSPSs_.size() ); + pStdSPSs = stdSPSs_.data(); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpEXT & setQpI( int32_t qpI_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setStdPPSCount( uint32_t stdPPSCount_ ) VULKAN_HPP_NOEXCEPT { - qpI = qpI_; + stdPPSCount = stdPPSCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpEXT & setQpP( int32_t qpP_ ) 
VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoKHR & setPStdPPSs( const StdVideoH265PictureParameterSet * pStdPPSs_ ) VULKAN_HPP_NOEXCEPT { - qpP = qpP_; + pStdPPSs = pStdPPSs_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265QpEXT & setQpB( int32_t qpB_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeH265SessionParametersAddInfoKHR & + setStdPPSs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdPPSs_ ) VULKAN_HPP_NOEXCEPT { - qpB = qpB_; + stdPPSCount = static_cast( stdPPSs_.size() ); + pStdPPSs = stdPPSs_.data(); return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoEncodeH265QpEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265SessionParametersAddInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoEncodeH265QpEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265SessionParametersAddInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( qpI, qpP, qpB ); + return std::tie( sType, pNext, stdVPSCount, pStdVPSs, stdSPSCount, pStdSPSs, stdPPSCount, pStdPPSs ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeH265QpEXT const & ) const = default; -# else - bool operator==( VideoEncodeH265QpEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265SessionParametersAddInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( qpI == rhs.qpI ) && ( qpP == rhs.qpP ) && ( qpB == rhs.qpB ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stdVPSCount == rhs.stdVPSCount ) && ( pStdVPSs == rhs.pStdVPSs ) && + ( stdSPSCount == rhs.stdSPSCount ) && ( pStdSPSs == rhs.pStdSPSs ) && ( stdPPSCount == rhs.stdPPSCount ) && ( pStdPPSs == rhs.pStdPPSs ); +# endif } - bool operator!=( VideoEncodeH265QpEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH265SessionParametersAddInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - int32_t qpI = {}; - int32_t qpP = {}; - int32_t qpB = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersAddInfoKHR; + const void * pNext = {}; + uint32_t stdVPSCount = {}; + const StdVideoH265VideoParameterSet * pStdVPSs = {}; + uint32_t stdSPSCount = {}; + const StdVideoH265SequenceParameterSet * pStdSPSs = {}; + uint32_t stdPPSCount = {}; + const StdVideoH265PictureParameterSet * pStdPPSs = {}; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH265RateControlInfoEXT + template <> + struct CppType + { + using Type = VideoEncodeH265SessionParametersAddInfoKHR; + }; + + struct VideoEncodeH265SessionParametersCreateInfoKHR { - using 
NativeType = VkVideoEncodeH265RateControlInfoEXT; + using NativeType = VkVideoEncodeH265SessionParametersCreateInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265RateControlInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersCreateInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlInfoEXT( uint32_t gopFrameCount_ = {}, - uint32_t idrPeriod_ = {}, - uint32_t consecutiveBFrameCount_ = {}, - VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureEXT rateControlStructure_ = - VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureEXT::eUnknown, - uint8_t subLayerCount_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , gopFrameCount( gopFrameCount_ ) - , idrPeriod( idrPeriod_ ) - , consecutiveBFrameCount( consecutiveBFrameCount_ ) - , rateControlStructure( rateControlStructure_ ) - , subLayerCount( subLayerCount_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeH265SessionParametersCreateInfoKHR( uint32_t maxStdVPSCount_ = {}, + uint32_t maxStdSPSCount_ = {}, + uint32_t maxStdPPSCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxStdVPSCount{ maxStdVPSCount_ } + , maxStdSPSCount{ maxStdSPSCount_ } + , maxStdPPSCount{ maxStdPPSCount_ } + , pParametersAddInfo{ pParametersAddInfo_ } { } - VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlInfoEXT( VideoEncodeH265RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + VideoEncodeH265SessionParametersCreateInfoKHR( VideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoEncodeH265RateControlInfoEXT( VkVideoEncodeH265RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH265RateControlInfoEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeH265SessionParametersCreateInfoKHR( VkVideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265SessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeH265RateControlInfoEXT & operator=( VideoEncodeH265RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH265SessionParametersCreateInfoKHR & operator=( VideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoEncodeH265RateControlInfoEXT & operator=( VkVideoEncodeH265RateControlInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH265SessionParametersCreateInfoKHR & operator=( VkVideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 
VideoEncodeH265RateControlInfoEXT & setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & setMaxStdVPSCount( uint32_t maxStdVPSCount_ ) VULKAN_HPP_NOEXCEPT { - gopFrameCount = gopFrameCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setIdrPeriod( uint32_t idrPeriod_ ) VULKAN_HPP_NOEXCEPT - { - idrPeriod = idrPeriod_; + maxStdVPSCount = maxStdVPSCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setConsecutiveBFrameCount( uint32_t consecutiveBFrameCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & setMaxStdSPSCount( uint32_t maxStdSPSCount_ ) VULKAN_HPP_NOEXCEPT { - consecutiveBFrameCount = consecutiveBFrameCount_; + maxStdSPSCount = maxStdSPSCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & - setRateControlStructure( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureEXT rateControlStructure_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & setMaxStdPPSCount( uint32_t maxStdPPSCount_ ) VULKAN_HPP_NOEXCEPT { - rateControlStructure = rateControlStructure_; + maxStdPPSCount = maxStdPPSCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlInfoEXT & setSubLayerCount( uint8_t subLayerCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoKHR & + setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT { - subLayerCount = subLayerCount_; + pParametersAddInfo = pParametersAddInfo_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoEncodeH265RateControlInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoEncodeH265RateControlInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR * const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, gopFrameCount, idrPeriod, consecutiveBFrameCount, rateControlStructure, subLayerCount ); + return std::tie( sType, pNext, maxStdVPSCount, maxStdSPSCount, maxStdPPSCount, pParametersAddInfo ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeH265RateControlInfoEXT const & ) const = default; -# else - bool operator==( VideoEncodeH265RateControlInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265SessionParametersCreateInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == 
rhs.pNext ) && ( gopFrameCount == rhs.gopFrameCount ) && ( idrPeriod == rhs.idrPeriod ) && - ( consecutiveBFrameCount == rhs.consecutiveBFrameCount ) && ( rateControlStructure == rhs.rateControlStructure ) && - ( subLayerCount == rhs.subLayerCount ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxStdVPSCount == rhs.maxStdVPSCount ) && ( maxStdSPSCount == rhs.maxStdSPSCount ) && + ( maxStdPPSCount == rhs.maxStdPPSCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo ); +# endif } - bool operator!=( VideoEncodeH265RateControlInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH265SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265RateControlInfoEXT; - const void * pNext = {}; - uint32_t gopFrameCount = {}; - uint32_t idrPeriod = {}; - uint32_t consecutiveBFrameCount = {}; - VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureEXT rateControlStructure = VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlStructureEXT::eUnknown; - uint8_t subLayerCount = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersCreateInfoKHR; + const void * pNext = {}; + uint32_t maxStdVPSCount = {}; + uint32_t maxStdSPSCount = {}; + uint32_t maxStdPPSCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR * pParametersAddInfo = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoEncodeH265RateControlInfoEXT; + using Type = VideoEncodeH265SessionParametersCreateInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH265RateControlLayerInfoEXT + struct VideoEncodeH265SessionParametersFeedbackInfoKHR { - using NativeType = VkVideoEncodeH265RateControlLayerInfoEXT; + using NativeType = VkVideoEncodeH265SessionParametersFeedbackInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265RateControlLayerInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersFeedbackInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoEXT( uint8_t temporalId_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ = {}, - VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT initialRcQp_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ = {}, - VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT minQp_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ = {}, - VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT maxQp_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {}, - VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT maxFrameSize_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , temporalId( temporalId_ ) - , useInitialRcQp( useInitialRcQp_ ) - , initialRcQp( initialRcQp_ ) - , useMinQp( useMinQp_ ) - , minQp( minQp_ ) - , useMaxQp( useMaxQp_ ) - , maxQp( maxQp_ ) - , useMaxFrameSize( useMaxFrameSize_ ) - , maxFrameSize( maxFrameSize_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersFeedbackInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 hasStdVPSOverrides_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 hasStdSPSOverrides_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 
hasStdPPSOverrides_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , hasStdVPSOverrides{ hasStdVPSOverrides_ } + , hasStdSPSOverrides{ hasStdSPSOverrides_ } + , hasStdPPSOverrides{ hasStdPPSOverrides_ } { } - VULKAN_HPP_CONSTEXPR VideoEncodeH265RateControlLayerInfoEXT( VideoEncodeH265RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR + VideoEncodeH265SessionParametersFeedbackInfoKHR( VideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoEncodeH265RateControlLayerInfoEXT( VkVideoEncodeH265RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH265RateControlLayerInfoEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeH265SessionParametersFeedbackInfoKHR( VkVideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265SessionParametersFeedbackInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeH265RateControlLayerInfoEXT & operator=( VideoEncodeH265RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeH265SessionParametersFeedbackInfoKHR & operator=( VideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoEncodeH265RateControlLayerInfoEXT & operator=( VkVideoEncodeH265RateControlLayerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeH265SessionParametersFeedbackInfoKHR & operator=( VkVideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265SessionParametersFeedbackInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setTemporalId( uint8_t temporalId_ ) VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265SessionParametersFeedbackInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hasStdVPSOverrides, hasStdSPSOverrides, hasStdPPSOverrides ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265SessionParametersFeedbackInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hasStdVPSOverrides == rhs.hasStdVPSOverrides ) && + ( hasStdSPSOverrides == rhs.hasStdSPSOverrides ) && ( hasStdPPSOverrides == rhs.hasStdPPSOverrides ); +# endif + } + + bool operator!=( VideoEncodeH265SessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersFeedbackInfoKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasStdVPSOverrides = {}; + VULKAN_HPP_NAMESPACE::Bool32 hasStdSPSOverrides = {}; 
+ VULKAN_HPP_NAMESPACE::Bool32 hasStdPPSOverrides = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265SessionParametersFeedbackInfoKHR; + }; + + struct VideoEncodeH265SessionParametersGetInfoKHR + { + using NativeType = VkVideoEncodeH265SessionParametersGetInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersGetInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersGetInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 writeStdVPS_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS_ = {}, + uint32_t stdVPSId_ = {}, + uint32_t stdSPSId_ = {}, + uint32_t stdPPSId_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , writeStdVPS{ writeStdVPS_ } + , writeStdSPS{ writeStdSPS_ } + , writeStdPPS{ writeStdPPS_ } + , stdVPSId{ stdVPSId_ } + , stdSPSId{ stdSPSId_ } + , stdPPSId{ stdPPSId_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersGetInfoKHR( VideoEncodeH265SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265SessionParametersGetInfoKHR( VkVideoEncodeH265SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265SessionParametersGetInfoKHR( *reinterpret_cast( &rhs ) ) { - temporalId = temporalId_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setUseInitialRcQp( VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp_ ) VULKAN_HPP_NOEXCEPT + VideoEncodeH265SessionParametersGetInfoKHR & operator=( VideoEncodeH265SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeH265SessionParametersGetInfoKHR & operator=( VkVideoEncodeH265SessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - useInitialRcQp = useInitialRcQp_; + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & - setInitialRcQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const & initialRcQp_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - initialRcQp = initialRcQp_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setUseMinQp( VULKAN_HPP_NAMESPACE::Bool32 useMinQp_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setWriteStdVPS( VULKAN_HPP_NAMESPACE::Bool32 writeStdVPS_ ) VULKAN_HPP_NOEXCEPT { - useMinQp = useMinQp_; + writeStdVPS = writeStdVPS_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setMinQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const & minQp_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setWriteStdSPS( VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS_ ) VULKAN_HPP_NOEXCEPT { - minQp = minQp_; + writeStdSPS = writeStdSPS_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setUseMaxQp( VULKAN_HPP_NAMESPACE::Bool32 useMaxQp_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setWriteStdPPS( VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS_ ) 
VULKAN_HPP_NOEXCEPT { - useMaxQp = useMaxQp_; + writeStdPPS = writeStdPPS_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setMaxQp( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT const & maxQp_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setStdVPSId( uint32_t stdVPSId_ ) VULKAN_HPP_NOEXCEPT { - maxQp = maxQp_; + stdVPSId = stdVPSId_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setStdSPSId( uint32_t stdSPSId_ ) VULKAN_HPP_NOEXCEPT { - useMaxFrameSize = useMaxFrameSize_; + stdSPSId = stdSPSId_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265RateControlLayerInfoEXT & - setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersGetInfoKHR & setStdPPSId( uint32_t stdPPSId_ ) VULKAN_HPP_NOEXCEPT { - maxFrameSize = maxFrameSize_; + stdPPSId = stdPPSId_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoEncodeH265RateControlLayerInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265SessionParametersGetInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoEncodeH265RateControlLayerInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeH265SessionParametersGetInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + uint32_t const &, + uint32_t const &, + uint32_t const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, temporalId, useInitialRcQp, initialRcQp, useMinQp, minQp, useMaxQp, maxQp, useMaxFrameSize, maxFrameSize ); + return std::tie( sType, pNext, writeStdVPS, writeStdSPS, writeStdPPS, stdVPSId, stdSPSId, stdPPSId ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeH265RateControlLayerInfoEXT const & ) const = default; -# else - bool operator==( VideoEncodeH265RateControlLayerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265SessionParametersGetInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265SessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( temporalId == rhs.temporalId ) && ( useInitialRcQp == rhs.useInitialRcQp ) && - ( initialRcQp == rhs.initialRcQp ) && ( useMinQp == rhs.useMinQp ) && ( minQp == rhs.minQp ) && ( useMaxQp == rhs.useMaxQp ) && - ( maxQp == rhs.maxQp ) && ( useMaxFrameSize == rhs.useMaxFrameSize ) && ( maxFrameSize == rhs.maxFrameSize ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( writeStdVPS == rhs.writeStdVPS ) && ( writeStdSPS == rhs.writeStdSPS ) && + ( writeStdPPS == rhs.writeStdPPS ) && ( stdVPSId == rhs.stdVPSId ) && ( stdSPSId == 
rhs.stdSPSId ) && ( stdPPSId == rhs.stdPPSId ); +# endif } - bool operator!=( VideoEncodeH265RateControlLayerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeH265SessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265RateControlLayerInfoEXT; - const void * pNext = {}; - uint8_t temporalId = {}; - VULKAN_HPP_NAMESPACE::Bool32 useInitialRcQp = {}; - VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT initialRcQp = {}; - VULKAN_HPP_NAMESPACE::Bool32 useMinQp = {}; - VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT minQp = {}; - VULKAN_HPP_NAMESPACE::Bool32 useMaxQp = {}; - VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT maxQp = {}; - VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {}; - VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT maxFrameSize = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersGetInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 writeStdVPS = {}; + VULKAN_HPP_NAMESPACE::Bool32 writeStdSPS = {}; + VULKAN_HPP_NAMESPACE::Bool32 writeStdPPS = {}; + uint32_t stdVPSId = {}; + uint32_t stdSPSId = {}; + uint32_t stdPPSId = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoEncodeH265RateControlLayerInfoEXT; + using Type = VideoEncodeH265SessionParametersGetInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH265SessionParametersAddInfoEXT + struct VideoEncodeInfoKHR { - using NativeType = VkVideoEncodeH265SessionParametersAddInfoEXT; + using NativeType = VkVideoEncodeInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersAddInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoEXT( uint32_t vpsStdCount_ = {}, - const StdVideoH265VideoParameterSet * pVpsStd_ = {}, - uint32_t spsStdCount_ = {}, - const StdVideoH265SequenceParameterSet * pSpsStd_ = {}, - uint32_t ppsStdCount_ = {}, - const StdVideoH265PictureParameterSet * pPpsStd_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , vpsStdCount( vpsStdCount_ ) - , pVpsStd( pVpsStd_ ) - , spsStdCount( spsStdCount_ ) - , pSpsStd( pSpsStd_ ) - , ppsStdCount( ppsStdCount_ ) - , pPpsStd( pPpsStd_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize dstBufferOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize dstBufferRange_ = {}, + VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR srcPictureResource_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ = {}, + uint32_t referenceSlotCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {}, + uint32_t precedingExternallyEncodedBytes_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , dstBuffer{ dstBuffer_ } + , dstBufferOffset{ dstBufferOffset_ } + , dstBufferRange{ dstBufferRange_ } + , srcPictureResource{ srcPictureResource_ } + , 
pSetupReferenceSlot{ pSetupReferenceSlot_ } + , referenceSlotCount{ referenceSlotCount_ } + , pReferenceSlots{ pReferenceSlots_ } + , precedingExternallyEncodedBytes{ precedingExternallyEncodedBytes_ } { } - VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionParametersAddInfoEXT( VideoEncodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoEncodeH265SessionParametersAddInfoEXT( VkVideoEncodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH265SessionParametersAddInfoEXT( *reinterpret_cast( &rhs ) ) - { - } + VideoEncodeInfoKHR( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : VideoEncodeInfoKHR( *reinterpret_cast( &rhs ) ) {} -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoEncodeH265SessionParametersAddInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vpsStd_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_ = {}, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ = {}, - const void * pNext_ = nullptr ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, + VULKAN_HPP_NAMESPACE::DeviceSize dstBufferOffset_, + VULKAN_HPP_NAMESPACE::DeviceSize dstBufferRange_, + VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR srcPictureResource_, + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceSlots_, + uint32_t precedingExternallyEncodedBytes_ = {}, + const void * pNext_ = nullptr ) : pNext( pNext_ ) - , vpsStdCount( static_cast( vpsStd_.size() ) ) - , pVpsStd( vpsStd_.data() ) - , spsStdCount( static_cast( spsStd_.size() ) ) - , pSpsStd( spsStd_.data() ) - , ppsStdCount( static_cast( ppsStd_.size() ) ) - , pPpsStd( ppsStd_.data() ) + , flags( flags_ ) + , dstBuffer( dstBuffer_ ) + , dstBufferOffset( dstBufferOffset_ ) + , dstBufferRange( dstBufferRange_ ) + , srcPictureResource( srcPictureResource_ ) + , pSetupReferenceSlot( pSetupReferenceSlot_ ) + , referenceSlotCount( static_cast( referenceSlots_.size() ) ) + , pReferenceSlots( referenceSlots_.data() ) + , precedingExternallyEncodedBytes( precedingExternallyEncodedBytes_ ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VideoEncodeH265SessionParametersAddInfoEXT & operator=( VideoEncodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeInfoKHR & operator=( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoEncodeH265SessionParametersAddInfoEXT & operator=( VkVideoEncodeH265SessionParametersAddInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeInfoKHR & operator=( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 
VideoEncodeH265SessionParametersAddInfoEXT & setVpsStdCount( uint32_t vpsStdCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { - vpsStdCount = vpsStdCount_; + flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setPVpsStd( const StdVideoH265VideoParameterSet * pVpsStd_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT { - pVpsStd = pVpsStd_; + dstBuffer = dstBuffer_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoEncodeH265SessionParametersAddInfoEXT & - setVpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vpsStd_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstBufferOffset_ ) VULKAN_HPP_NOEXCEPT { - vpsStdCount = static_cast( vpsStd_.size() ); - pVpsStd = vpsStd_.data(); + dstBufferOffset = dstBufferOffset_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setSpsStdCount( uint32_t spsStdCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBufferRange( VULKAN_HPP_NAMESPACE::DeviceSize dstBufferRange_ ) VULKAN_HPP_NOEXCEPT { - spsStdCount = spsStdCount_; + dstBufferRange = dstBufferRange_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setPSpsStd( const StdVideoH265SequenceParameterSet * pSpsStd_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & + setSrcPictureResource( VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const & srcPictureResource_ ) VULKAN_HPP_NOEXCEPT { - pSpsStd = pSpsStd_; + srcPictureResource = srcPictureResource_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoEncodeH265SessionParametersAddInfoEXT & - setSpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & spsStd_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & + setPSetupReferenceSlot( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT { - spsStdCount = static_cast( spsStd_.size() ); - pSpsStd = spsStd_.data(); + pSetupReferenceSlot = pSetupReferenceSlot_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setPpsStdCount( uint32_t ppsStdCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT { - ppsStdCount = ppsStdCount_; + referenceSlotCount = referenceSlotCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersAddInfoEXT & setPPpsStd( const StdVideoH265PictureParameterSet * pPpsStd_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & + setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT { - pPpsStd = pPpsStd_; + pReferenceSlots = pReferenceSlots_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoEncodeH265SessionParametersAddInfoEXT & - setPpsStd( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & ppsStd_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeInfoKHR & setReferenceSlots( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & 
referenceSlots_ ) VULKAN_HPP_NOEXCEPT { - ppsStdCount = static_cast( ppsStd_.size() ); - pPpsStd = ppsStd_.data(); + referenceSlotCount = static_cast( referenceSlots_.size() ); + pReferenceSlots = referenceSlots_.data(); return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPrecedingExternallyEncodedBytes( uint32_t precedingExternallyEncodedBytes_ ) VULKAN_HPP_NOEXCEPT + { + precedingExternallyEncodedBytes = precedingExternallyEncodedBytes_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoEncodeH265SessionParametersAddInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoEncodeH265SessionParametersAddInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * const &, + uint32_t const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, vpsStdCount, pVpsStd, spsStdCount, pSpsStd, ppsStdCount, pPpsStd ); + return std::tie( sType, + pNext, + flags, + dstBuffer, + dstBufferOffset, + dstBufferRange, + srcPictureResource, + pSetupReferenceSlot, + referenceSlotCount, + pReferenceSlots, + precedingExternallyEncodedBytes ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeH265SessionParametersAddInfoEXT const & ) const = default; -# else - bool operator==( VideoEncodeH265SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vpsStdCount == rhs.vpsStdCount ) && ( pVpsStd == rhs.pVpsStd ) && - ( spsStdCount == rhs.spsStdCount ) && ( pSpsStd == rhs.pSpsStd ) && ( ppsStdCount == rhs.ppsStdCount ) && ( pPpsStd == rhs.pPpsStd ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dstBuffer == rhs.dstBuffer ) && + ( dstBufferOffset == rhs.dstBufferOffset ) && ( dstBufferRange == rhs.dstBufferRange ) && ( srcPictureResource == rhs.srcPictureResource ) && + ( pSetupReferenceSlot == rhs.pSetupReferenceSlot ) && ( referenceSlotCount == rhs.referenceSlotCount ) && + ( pReferenceSlots == rhs.pReferenceSlots ) && ( precedingExternallyEncodedBytes == rhs.precedingExternallyEncodedBytes ); +# endif } - bool operator!=( VideoEncodeH265SessionParametersAddInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersAddInfoEXT; - const void * pNext = {}; - uint32_t vpsStdCount = {}; - const 
StdVideoH265VideoParameterSet * pVpsStd = {}; - uint32_t spsStdCount = {}; - const StdVideoH265SequenceParameterSet * pSpsStd = {}; - uint32_t ppsStdCount = {}; - const StdVideoH265PictureParameterSet * pPpsStd = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {}; + VULKAN_HPP_NAMESPACE::DeviceSize dstBufferOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize dstBufferRange = {}; + VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR srcPictureResource = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot = {}; + uint32_t referenceSlotCount = {}; + const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots = {}; + uint32_t precedingExternallyEncodedBytes = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoEncodeH265SessionParametersAddInfoEXT; + using Type = VideoEncodeInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH265SessionParametersCreateInfoEXT + struct VideoEncodeQualityLevelInfoKHR { - using NativeType = VkVideoEncodeH265SessionParametersCreateInfoEXT; + using NativeType = VkVideoEncodeQualityLevelInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265SessionParametersCreateInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeQualityLevelInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - VideoEncodeH265SessionParametersCreateInfoEXT( uint32_t maxVpsStdCount_ = {}, - uint32_t maxSpsStdCount_ = {}, - uint32_t maxPpsStdCount_ = {}, - const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxVpsStdCount( maxVpsStdCount_ ) - , maxSpsStdCount( maxSpsStdCount_ ) - , maxPpsStdCount( maxPpsStdCount_ ) - , pParametersAddInfo( pParametersAddInfo_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeQualityLevelInfoKHR( uint32_t qualityLevel_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , qualityLevel{ qualityLevel_ } { } - VULKAN_HPP_CONSTEXPR - VideoEncodeH265SessionParametersCreateInfoEXT( VideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeQualityLevelInfoKHR( VideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoEncodeH265SessionParametersCreateInfoEXT( VkVideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH265SessionParametersCreateInfoEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeQualityLevelInfoKHR( VkVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeQualityLevelInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeH265SessionParametersCreateInfoEXT & operator=( VideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoEncodeQualityLevelInfoKHR & operator=( VideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoEncodeH265SessionParametersCreateInfoEXT & operator=( 
VkVideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeQualityLevelInfoKHR & operator=( VkVideoEncodeQualityLevelInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeQualityLevelInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & setMaxVpsStdCount( uint32_t maxVpsStdCount_ ) VULKAN_HPP_NOEXCEPT - { - maxVpsStdCount = maxVpsStdCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & setMaxSpsStdCount( uint32_t maxSpsStdCount_ ) VULKAN_HPP_NOEXCEPT - { - maxSpsStdCount = maxSpsStdCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & setMaxPpsStdCount( uint32_t maxPpsStdCount_ ) VULKAN_HPP_NOEXCEPT - { - maxPpsStdCount = maxPpsStdCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265SessionParametersCreateInfoEXT & - setPParametersAddInfo( const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT * pParametersAddInfo_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeQualityLevelInfoKHR & setQualityLevel( uint32_t qualityLevel_ ) VULKAN_HPP_NOEXCEPT { - pParametersAddInfo = pParametersAddInfo_; + qualityLevel = qualityLevel_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoEncodeH265SessionParametersCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeQualityLevelInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoEncodeH265SessionParametersCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeQualityLevelInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, maxVpsStdCount, maxSpsStdCount, maxPpsStdCount, pParametersAddInfo ); + return std::tie( sType, pNext, qualityLevel ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeH265SessionParametersCreateInfoEXT const & ) const = default; -# else - bool operator==( VideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeQualityLevelInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeQualityLevelInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVpsStdCount == rhs.maxVpsStdCount ) && ( maxSpsStdCount == rhs.maxSpsStdCount ) && - ( maxPpsStdCount == rhs.maxPpsStdCount ) && ( pParametersAddInfo == rhs.pParametersAddInfo 
); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( qualityLevel == rhs.qualityLevel ); +# endif } - bool operator!=( VideoEncodeH265SessionParametersCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeQualityLevelInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265SessionParametersCreateInfoEXT; - const void * pNext = {}; - uint32_t maxVpsStdCount = {}; - uint32_t maxSpsStdCount = {}; - uint32_t maxPpsStdCount = {}; - const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT * pParametersAddInfo = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeQualityLevelInfoKHR; + const void * pNext = {}; + uint32_t qualityLevel = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoEncodeH265SessionParametersCreateInfoEXT; + using Type = VideoEncodeQualityLevelInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeH265VclFrameInfoEXT + struct VideoEncodeQualityLevelPropertiesKHR { - using NativeType = VkVideoEncodeH265VclFrameInfoEXT; + using NativeType = VkVideoEncodeQualityLevelPropertiesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265VclFrameInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeQualityLevelPropertiesKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeH265VclFrameInfoEXT( const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * pReferenceFinalLists_ = {}, - uint32_t naluSliceSegmentEntryCount_ = {}, - const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT * pNaluSliceSegmentEntries_ = {}, - const StdVideoEncodeH265PictureInfo * pCurrentPictureInfo_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pReferenceFinalLists( pReferenceFinalLists_ ) - , naluSliceSegmentEntryCount( naluSliceSegmentEntryCount_ ) - , pNaluSliceSegmentEntries( pNaluSliceSegmentEntries_ ) - , pCurrentPictureInfo( pCurrentPictureInfo_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeQualityLevelPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR preferredRateControlMode_ = + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eDefault, + uint32_t preferredRateControlLayerCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , preferredRateControlMode{ preferredRateControlMode_ } + , preferredRateControlLayerCount{ preferredRateControlLayerCount_ } { } - VULKAN_HPP_CONSTEXPR VideoEncodeH265VclFrameInfoEXT( VideoEncodeH265VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeQualityLevelPropertiesKHR( VideoEncodeQualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoEncodeH265VclFrameInfoEXT( VkVideoEncodeH265VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : VideoEncodeH265VclFrameInfoEXT( *reinterpret_cast( &rhs ) ) + VideoEncodeQualityLevelPropertiesKHR( VkVideoEncodeQualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeQualityLevelPropertiesKHR( *reinterpret_cast( &rhs ) ) { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - 
VideoEncodeH265VclFrameInfoEXT( - const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * pReferenceFinalLists_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & naluSliceSegmentEntries_, - const StdVideoEncodeH265PictureInfo * pCurrentPictureInfo_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , pReferenceFinalLists( pReferenceFinalLists_ ) - , naluSliceSegmentEntryCount( static_cast( naluSliceSegmentEntries_.size() ) ) - , pNaluSliceSegmentEntries( naluSliceSegmentEntries_.data() ) - , pCurrentPictureInfo( pCurrentPictureInfo_ ) + VideoEncodeQualityLevelPropertiesKHR & operator=( VideoEncodeQualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeQualityLevelPropertiesKHR & operator=( VkVideoEncodeQualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { + *this = *reinterpret_cast( &rhs ); + return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeH265VclFrameInfoEXT & operator=( VideoEncodeH265VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + operator VkVideoEncodeQualityLevelPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } - VideoEncodeH265VclFrameInfoEXT & operator=( VkVideoEncodeH265VclFrameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeQualityLevelPropertiesKHR &() VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); - return *this; + return *reinterpret_cast( this ); } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return std::tie( sType, pNext, preferredRateControlMode, preferredRateControlLayerCount ); } +#endif - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & - setPReferenceFinalLists( const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * pReferenceFinalLists_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeQualityLevelPropertiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeQualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - pReferenceFinalLists = pReferenceFinalLists_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( preferredRateControlMode == rhs.preferredRateControlMode ) && + ( preferredRateControlLayerCount == rhs.preferredRateControlLayerCount ); +# endif } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & setNaluSliceSegmentEntryCount( uint32_t naluSliceSegmentEntryCount_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeQualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - naluSliceSegmentEntryCount = naluSliceSegmentEntryCount_; - return *this; + return !operator==( rhs ); } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeQualityLevelPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR preferredRateControlMode = + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eDefault; + uint32_t preferredRateControlLayerCount = {}; + }; + + template <> + struct CppType + { + 
using Type = VideoEncodeQualityLevelPropertiesKHR; + }; - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & - setPNaluSliceSegmentEntries( const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT * pNaluSliceSegmentEntries_ ) VULKAN_HPP_NOEXCEPT + struct VideoEncodeQuantizationMapCapabilitiesKHR + { + using NativeType = VkVideoEncodeQuantizationMapCapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeQuantizationMapCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapCapabilitiesKHR( VULKAN_HPP_NAMESPACE::Extent2D maxQuantizationMapExtent_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxQuantizationMapExtent{ maxQuantizationMapExtent_ } { - pNaluSliceSegmentEntries = pNaluSliceSegmentEntries_; - return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoEncodeH265VclFrameInfoEXT & setNaluSliceSegmentEntries( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & naluSliceSegmentEntries_ ) - VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapCapabilitiesKHR( VideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeQuantizationMapCapabilitiesKHR( VkVideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeQuantizationMapCapabilitiesKHR( *reinterpret_cast( &rhs ) ) { - naluSliceSegmentEntryCount = static_cast( naluSliceSegmentEntries_.size() ); - pNaluSliceSegmentEntries = naluSliceSegmentEntries_.data(); - return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 VideoEncodeH265VclFrameInfoEXT & - setPCurrentPictureInfo( const StdVideoEncodeH265PictureInfo * pCurrentPictureInfo_ ) VULKAN_HPP_NOEXCEPT + VideoEncodeQuantizationMapCapabilitiesKHR & operator=( VideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeQuantizationMapCapabilitiesKHR & operator=( VkVideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - pCurrentPictureInfo = pCurrentPictureInfo_; + *this = *reinterpret_cast( &rhs ); return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkVideoEncodeH265VclFrameInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeQuantizationMapCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoEncodeH265VclFrameInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeQuantizationMapCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, pReferenceFinalLists, naluSliceSegmentEntryCount, pNaluSliceSegmentEntries, pCurrentPictureInfo ); + return std::tie( sType, pNext, maxQuantizationMapExtent ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeH265VclFrameInfoEXT const & ) const = default; -# else - bool operator==( VideoEncodeH265VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT 
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeQuantizationMapCapabilitiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pReferenceFinalLists == rhs.pReferenceFinalLists ) && - ( naluSliceSegmentEntryCount == rhs.naluSliceSegmentEntryCount ) && ( pNaluSliceSegmentEntries == rhs.pNaluSliceSegmentEntries ) && - ( pCurrentPictureInfo == rhs.pCurrentPictureInfo ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxQuantizationMapExtent == rhs.maxQuantizationMapExtent ); +# endif } - bool operator!=( VideoEncodeH265VclFrameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265VclFrameInfoEXT; - const void * pNext = {}; - const VULKAN_HPP_NAMESPACE::VideoEncodeH265ReferenceListsInfoEXT * pReferenceFinalLists = {}; - uint32_t naluSliceSegmentEntryCount = {}; - const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT * pNaluSliceSegmentEntries = {}; - const StdVideoEncodeH265PictureInfo * pCurrentPictureInfo = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeQuantizationMapCapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxQuantizationMapExtent = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoEncodeH265VclFrameInfoEXT; + using Type = VideoEncodeQuantizationMapCapabilitiesKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoEncodeInfoKHR + struct VideoEncodeQuantizationMapInfoKHR { - using NativeType = VkVideoEncodeInfoKHR; + using NativeType = VkVideoEncodeQuantizationMapInfoKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeInfoKHR; - -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ = {}, - uint32_t qualityLevel_ = {}, - VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_ = {}, - VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR srcPictureResource_ = {}, - const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ = {}, - uint32_t referenceSlotCount_ = {}, - const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {}, - uint32_t precedingExternallyEncodedBytes_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , qualityLevel( qualityLevel_ ) - , dstBitstreamBuffer( dstBitstreamBuffer_ ) - , dstBitstreamBufferOffset( dstBitstreamBufferOffset_ ) - , dstBitstreamBufferMaxRange( dstBitstreamBufferMaxRange_ ) - , srcPictureResource( srcPictureResource_ ) - , pSetupReferenceSlot( pSetupReferenceSlot_ ) - , referenceSlotCount( referenceSlotCount_ ) - , pReferenceSlots( pReferenceSlots_ ) - , precedingExternallyEncodedBytes( precedingExternallyEncodedBytes_ ) - { - } - - VULKAN_HPP_CONSTEXPR 
VideoEncodeInfoKHR( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - VideoEncodeInfoKHR( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : VideoEncodeInfoKHR( *reinterpret_cast( &rhs ) ) {} - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoEncodeInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_, - uint32_t qualityLevel_, - VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_, - VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_, - VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_, - VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR srcPictureResource_, - const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceSlots_, - uint32_t precedingExternallyEncodedBytes_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , flags( flags_ ) - , qualityLevel( qualityLevel_ ) - , dstBitstreamBuffer( dstBitstreamBuffer_ ) - , dstBitstreamBufferOffset( dstBitstreamBufferOffset_ ) - , dstBitstreamBufferMaxRange( dstBitstreamBufferMaxRange_ ) - , srcPictureResource( srcPictureResource_ ) - , pSetupReferenceSlot( pSetupReferenceSlot_ ) - , referenceSlotCount( static_cast( referenceSlots_.size() ) ) - , pReferenceSlots( referenceSlots_.data() ) - , precedingExternallyEncodedBytes( precedingExternallyEncodedBytes_ ) + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeQuantizationMapInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapInfoKHR( VULKAN_HPP_NAMESPACE::ImageView quantizationMap_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D quantizationMapExtent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , quantizationMap{ quantizationMap_ } + , quantizationMapExtent{ quantizationMapExtent_ } { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoEncodeInfoKHR & operator=( VideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapInfoKHR( VideoEncodeQuantizationMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoEncodeInfoKHR & operator=( VkVideoEncodeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VideoEncodeQuantizationMapInfoKHR( VkVideoEncodeQuantizationMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeQuantizationMapInfoKHR( *reinterpret_cast( &rhs ) ) { - *this = *reinterpret_cast( &rhs ); + } + + VideoEncodeQuantizationMapInfoKHR & operator=( VideoEncodeQuantizationMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeQuantizationMapInfoKHR & operator=( VkVideoEncodeQuantizationMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapInfoKHR & setQuantizationMap( VULKAN_HPP_NAMESPACE::ImageView quantizationMap_ ) 
VULKAN_HPP_NOEXCEPT { - flags = flags_; + quantizationMap = quantizationMap_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setQualityLevel( uint32_t qualityLevel_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapInfoKHR & + setQuantizationMapExtent( VULKAN_HPP_NAMESPACE::Extent2D const & quantizationMapExtent_ ) VULKAN_HPP_NOEXCEPT { - qualityLevel = qualityLevel_; + quantizationMapExtent = quantizationMapExtent_; return *this; } +#endif /*VULKAN_HPP_NO_SETTERS*/ - VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBitstreamBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer_ ) VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeQuantizationMapInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - dstBitstreamBuffer = dstBitstreamBuffer_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setDstBitstreamBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset_ ) VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeQuantizationMapInfoKHR &() VULKAN_HPP_NOEXCEPT { - dstBitstreamBufferOffset = dstBitstreamBufferOffset_; - return *this; + return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & - setDstBitstreamBufferMaxRange( VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { - dstBitstreamBufferMaxRange = dstBitstreamBufferMaxRange_; - return *this; + return std::tie( sType, pNext, quantizationMap, quantizationMapExtent ); } +#endif - VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & - setSrcPictureResource( VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR const & srcPictureResource_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeQuantizationMapInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeQuantizationMapInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - srcPictureResource = srcPictureResource_; - return *this; +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( quantizationMap == rhs.quantizationMap ) && + ( quantizationMapExtent == rhs.quantizationMapExtent ); +# endif } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & - setPSetupReferenceSlot( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot_ ) VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeQuantizationMapInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - pSetupReferenceSlot = pSetupReferenceSlot_; - return *this; + return !operator==( rhs ); } +#endif - VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setReferenceSlotCount( uint32_t referenceSlotCount_ ) VULKAN_HPP_NOEXCEPT + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeQuantizationMapInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageView quantizationMap = {}; + VULKAN_HPP_NAMESPACE::Extent2D quantizationMapExtent = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeQuantizationMapInfoKHR; + }; + + struct VideoEncodeQuantizationMapSessionParametersCreateInfoKHR + { + using NativeType = VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eVideoEncodeQuantizationMapSessionParametersCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapSessionParametersCreateInfoKHR( VULKAN_HPP_NAMESPACE::Extent2D quantizationMapTexelSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , quantizationMapTexelSize{ quantizationMapTexelSize_ } { - referenceSlotCount = referenceSlotCount_; - return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & - setPReferenceSlots( const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapSessionParametersCreateInfoKHR( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeQuantizationMapSessionParametersCreateInfoKHR( VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeQuantizationMapSessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) { - pReferenceSlots = pReferenceSlots_; + } + + VideoEncodeQuantizationMapSessionParametersCreateInfoKHR & + operator=( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeQuantizationMapSessionParametersCreateInfoKHR & + operator=( VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoEncodeInfoKHR & setReferenceSlots( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & referenceSlots_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapSessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - referenceSlotCount = static_cast( referenceSlots_.size() ); - pReferenceSlots = referenceSlots_.data(); + pNext = pNext_; return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 VideoEncodeInfoKHR & setPrecedingExternallyEncodedBytes( uint32_t precedingExternallyEncodedBytes_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapSessionParametersCreateInfoKHR & + setQuantizationMapTexelSize( VULKAN_HPP_NAMESPACE::Extent2D const & quantizationMapTexelSize_ ) VULKAN_HPP_NOEXCEPT { - precedingExternallyEncodedBytes = precedingExternallyEncodedBytes_; + quantizationMapTexelSize = quantizationMapTexelSize_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoEncodeInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoEncodeInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, - pNext, - flags, - qualityLevel, - 
dstBitstreamBuffer, - dstBitstreamBufferOffset, - dstBitstreamBufferMaxRange, - srcPictureResource, - pSetupReferenceSlot, - referenceSlotCount, - pReferenceSlots, - precedingExternallyEncodedBytes ); + return std::tie( sType, pNext, quantizationMapTexelSize ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoEncodeInfoKHR const & ) const = default; -# else - bool operator==( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( qualityLevel == rhs.qualityLevel ) && - ( dstBitstreamBuffer == rhs.dstBitstreamBuffer ) && ( dstBitstreamBufferOffset == rhs.dstBitstreamBufferOffset ) && - ( dstBitstreamBufferMaxRange == rhs.dstBitstreamBufferMaxRange ) && ( srcPictureResource == rhs.srcPictureResource ) && - ( pSetupReferenceSlot == rhs.pSetupReferenceSlot ) && ( referenceSlotCount == rhs.referenceSlotCount ) && - ( pReferenceSlots == rhs.pReferenceSlots ) && ( precedingExternallyEncodedBytes == rhs.precedingExternallyEncodedBytes ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( quantizationMapTexelSize == rhs.quantizationMapTexelSize ); +# endif } - bool operator!=( VideoEncodeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR flags = {}; - uint32_t qualityLevel = {}; - VULKAN_HPP_NAMESPACE::Buffer dstBitstreamBuffer = {}; - VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferOffset = {}; - VULKAN_HPP_NAMESPACE::DeviceSize dstBitstreamBufferMaxRange = {}; - VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR srcPictureResource = {}; - const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pSetupReferenceSlot = {}; - uint32_t referenceSlotCount = {}; - const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots = {}; - uint32_t precedingExternallyEncodedBytes = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeQuantizationMapSessionParametersCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D quantizationMapTexelSize = {}; }; template <> - struct CppType + struct CppType { - using Type = VideoEncodeInfoKHR; + using Type = VideoEncodeQuantizationMapSessionParametersCreateInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) struct VideoEncodeRateControlLayerInfoKHR { using NativeType = VkVideoEncodeRateControlLayerInfoKHR; @@ -96662,21 +138465,17 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeRateControlLayerInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoEncodeRateControlLayerInfoKHR( uint32_t averageBitrate_ = {}, - uint32_t maxBitrate_ = {}, - 
uint32_t frameRateNumerator_ = {},
-                                                            uint32_t frameRateDenominator_ = {},
-                                                            uint32_t virtualBufferSizeInMs_ = {},
-                                                            uint32_t initialVirtualBufferSizeInMs_ = {},
-                                                            const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
-      : pNext( pNext_ )
-      , averageBitrate( averageBitrate_ )
-      , maxBitrate( maxBitrate_ )
-      , frameRateNumerator( frameRateNumerator_ )
-      , frameRateDenominator( frameRateDenominator_ )
-      , virtualBufferSizeInMs( virtualBufferSizeInMs_ )
-      , initialVirtualBufferSizeInMs( initialVirtualBufferSizeInMs_ )
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoEncodeRateControlLayerInfoKHR( uint64_t     averageBitrate_       = {},
+                                                             uint64_t     maxBitrate_           = {},
+                                                             uint32_t     frameRateNumerator_   = {},
+                                                             uint32_t     frameRateDenominator_ = {},
+                                                             const void * pNext_                = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , averageBitrate{ averageBitrate_ }
+      , maxBitrate{ maxBitrate_ }
+      , frameRateNumerator{ frameRateNumerator_ }
+      , frameRateDenominator{ frameRateDenominator_ }
     {
     }
@@ -96686,9 +138485,9 @@ namespace VULKAN_HPP_NAMESPACE
       : VideoEncodeRateControlLayerInfoKHR( *reinterpret_cast<VideoEncodeRateControlLayerInfoKHR const *>( &rhs ) )
     {
     }
-# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/

     VideoEncodeRateControlLayerInfoKHR & operator=( VideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

     VideoEncodeRateControlLayerInfoKHR & operator=( VkVideoEncodeRateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
     {
@@ -96696,20 +138495,20 @@ namespace VULKAN_HPP_NAMESPACE
       return *this;
     }

-# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
     VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
     {
       pNext = pNext_;
       return *this;
     }

-    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setAverageBitrate( uint32_t averageBitrate_ ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setAverageBitrate( uint64_t averageBitrate_ ) VULKAN_HPP_NOEXCEPT
     {
       averageBitrate = averageBitrate_;
       return *this;
     }

-    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setMaxBitrate( uint32_t maxBitrate_ ) VULKAN_HPP_NOEXCEPT
+    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setMaxBitrate( uint64_t maxBitrate_ ) VULKAN_HPP_NOEXCEPT
     {
       maxBitrate = maxBitrate_;
       return *this;
@@ -96726,19 +138525,7 @@ namespace VULKAN_HPP_NAMESPACE
       frameRateDenominator = frameRateDenominator_;
       return *this;
     }
-
-    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setVirtualBufferSizeInMs( uint32_t virtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT
-    {
-      virtualBufferSizeInMs = virtualBufferSizeInMs_;
-      return *this;
-    }
-
-    VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlLayerInfoKHR & setInitialVirtualBufferSizeInMs( uint32_t initialVirtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT
-    {
-      initialVirtualBufferSizeInMs = initialVirtualBufferSizeInMs_;
-      return *this;
-    }
-# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+#endif /*VULKAN_HPP_NO_SETTERS*/

     operator VkVideoEncodeRateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT
     {
@@ -96750,55 +138537,44 @@ namespace VULKAN_HPP_NAMESPACE
       return *reinterpret_cast<VkVideoEncodeRateControlLayerInfoKHR *>( this );
     }

-# if defined( VULKAN_HPP_USE_REFLECT )
-# if 14 <= VULKAN_HPP_CPP_VERSION
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
     auto
-# else
-    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
-               const void * const &,
-               uint32_t const &,
-               uint32_t const &,
-               uint32_t const &,
-               uint32_t const &,
-               uint32_t const &,
-               uint32_t const &>
-# endif
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &,
+               const void * const &,
+               uint64_t const &,
+               uint64_t const &,
+               uint32_t const &,
+               uint32_t const &>
+# endif
      reflect() const VULKAN_HPP_NOEXCEPT
    {
-      return
std::tie( - sType, pNext, averageBitrate, maxBitrate, frameRateNumerator, frameRateDenominator, virtualBufferSizeInMs, initialVirtualBufferSizeInMs ); + return std::tie( sType, pNext, averageBitrate, maxBitrate, frameRateNumerator, frameRateDenominator ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( VideoEncodeRateControlLayerInfoKHR const & ) const = default; -# else +#else bool operator==( VideoEncodeRateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else +# else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( averageBitrate == rhs.averageBitrate ) && ( maxBitrate == rhs.maxBitrate ) && - ( frameRateNumerator == rhs.frameRateNumerator ) && ( frameRateDenominator == rhs.frameRateDenominator ) && - ( virtualBufferSizeInMs == rhs.virtualBufferSizeInMs ) && ( initialVirtualBufferSizeInMs == rhs.initialVirtualBufferSizeInMs ); -# endif + ( frameRateNumerator == rhs.frameRateNumerator ) && ( frameRateDenominator == rhs.frameRateDenominator ); +# endif } bool operator!=( VideoEncodeRateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeRateControlLayerInfoKHR; - const void * pNext = {}; - uint32_t averageBitrate = {}; - uint32_t maxBitrate = {}; - uint32_t frameRateNumerator = {}; - uint32_t frameRateDenominator = {}; - uint32_t virtualBufferSizeInMs = {}; - uint32_t initialVirtualBufferSizeInMs = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeRateControlLayerInfoKHR; + const void * pNext = {}; + uint64_t averageBitrate = {}; + uint64_t maxBitrate = {}; + uint32_t frameRateNumerator = {}; + uint32_t frameRateDenominator = {}; }; template <> @@ -96806,9 +138582,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = VideoEncodeRateControlLayerInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) struct VideoEncodeRateControlInfoKHR { using NativeType = VkVideoEncodeRateControlInfoKHR; @@ -96816,18 +138590,22 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeRateControlInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoEncodeRateControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_ = {}, - VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ = VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eNone, - uint8_t layerCount_ = {}, - const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayerConfigs_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , rateControlMode( rateControlMode_ ) - , layerCount( layerCount_ ) - , pLayerConfigs( pLayerConfigs_ ) + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ = VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eDefault, + uint32_t layerCount_ = {}, + const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayers_ = {}, + uint32_t virtualBufferSizeInMs_ = {}, + uint32_t initialVirtualBufferSizeInMs_ = {}, + 
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , rateControlMode{ rateControlMode_ } + , layerCount{ layerCount_ } + , pLayers{ pLayers_ } + , virtualBufferSizeInMs{ virtualBufferSizeInMs_ } + , initialVirtualBufferSizeInMs{ initialVirtualBufferSizeInMs_ } { } @@ -96838,23 +138616,27 @@ namespace VULKAN_HPP_NAMESPACE { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) VideoEncodeRateControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags_, VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & layerConfigs_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & layers_, + uint32_t virtualBufferSizeInMs_ = {}, + uint32_t initialVirtualBufferSizeInMs_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , rateControlMode( rateControlMode_ ) - , layerCount( static_cast( layerConfigs_.size() ) ) - , pLayerConfigs( layerConfigs_.data() ) + , layerCount( static_cast( layers_.size() ) ) + , pLayers( layers_.data() ) + , virtualBufferSizeInMs( virtualBufferSizeInMs_ ) + , initialVirtualBufferSizeInMs( initialVirtualBufferSizeInMs_ ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VideoEncodeRateControlInfoKHR & operator=( VideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ VideoEncodeRateControlInfoKHR & operator=( VkVideoEncodeRateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -96862,7 +138644,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -96876,35 +138658,47 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & - setRateControlMode( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ ) VULKAN_HPP_NOEXCEPT + setRateControlMode( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode_ ) VULKAN_HPP_NOEXCEPT { rateControlMode = rateControlMode_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setLayerCount( uint8_t layerCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT { layerCount = layerCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & - setPLayerConfigs( const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayerConfigs_ ) VULKAN_HPP_NOEXCEPT + setPLayers( const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayers_ ) VULKAN_HPP_NOEXCEPT { - pLayerConfigs = pLayerConfigs_; + pLayers = pLayers_; return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - VideoEncodeRateControlInfoKHR & setLayerConfigs( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & layerConfigs_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeRateControlInfoKHR & setLayers( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & layers_ ) VULKAN_HPP_NOEXCEPT { - layerCount = static_cast( layerConfigs_.size() ); - pLayerConfigs = layerConfigs_.data(); + layerCount = static_cast( 
layers_.size() ); + pLayers = layers_.data(); return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setVirtualBufferSizeInMs( uint32_t virtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT + { + virtualBufferSizeInMs = virtualBufferSizeInMs_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeRateControlInfoKHR & setInitialVirtualBufferSizeInMs( uint32_t initialVirtualBufferSizeInMs_ ) VULKAN_HPP_NOEXCEPT + { + initialVirtualBufferSizeInMs = initialVirtualBufferSizeInMs_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkVideoEncodeRateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -96916,49 +138710,54 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif + uint32_t const &, + const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * const &, + uint32_t const &, + uint32_t const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, flags, rateControlMode, layerCount, pLayerConfigs ); + return std::tie( sType, pNext, flags, rateControlMode, layerCount, pLayers, virtualBufferSizeInMs, initialVirtualBufferSizeInMs ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( VideoEncodeRateControlInfoKHR const & ) const = default; -# else +#else bool operator==( VideoEncodeRateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else +# else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rateControlMode == rhs.rateControlMode ) && - ( layerCount == rhs.layerCount ) && ( pLayerConfigs == rhs.pLayerConfigs ); -# endif + ( layerCount == rhs.layerCount ) && ( pLayers == rhs.pLayers ) && ( virtualBufferSizeInMs == rhs.virtualBufferSizeInMs ) && + ( initialVirtualBufferSizeInMs == rhs.initialVirtualBufferSizeInMs ); +# endif } bool operator!=( VideoEncodeRateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeRateControlInfoKHR; const void * pNext = {}; VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR flags = {}; - VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode = VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eNone; - uint8_t layerCount = {}; - const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayerConfigs = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR rateControlMode = VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eDefault; + uint32_t layerCount = {}; + const VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR * pLayers = {}; + uint32_t virtualBufferSizeInMs = {}; + uint32_t initialVirtualBufferSizeInMs = {}; }; template <> @@ -96966,9 +138765,188 @@ namespace VULKAN_HPP_NAMESPACE { using Type = VideoEncodeRateControlInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoEncodeSessionParametersFeedbackInfoKHR + { + using NativeType = 
VkVideoEncodeSessionParametersFeedbackInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeSessionParametersFeedbackInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoEncodeSessionParametersFeedbackInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 hasOverrides_ = {},
+                                                                      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , hasOverrides{ hasOverrides_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeSessionParametersFeedbackInfoKHR( VideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeSessionParametersFeedbackInfoKHR( VkVideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoEncodeSessionParametersFeedbackInfoKHR( *reinterpret_cast<VideoEncodeSessionParametersFeedbackInfoKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoEncodeSessionParametersFeedbackInfoKHR & operator=( VideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    VideoEncodeSessionParametersFeedbackInfoKHR & operator=( VkVideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkVideoEncodeSessionParametersFeedbackInfoKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoEncodeSessionParametersFeedbackInfoKHR *>( this );
+    }
+
+    operator VkVideoEncodeSessionParametersFeedbackInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::Bool32 const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, hasOverrides );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoEncodeSessionParametersFeedbackInfoKHR const & ) const = default;
+#else
+    bool operator==( VideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hasOverrides == rhs.hasOverrides );
+# endif
+    }
+
+    bool operator!=( VideoEncodeSessionParametersFeedbackInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeSessionParametersFeedbackInfoKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 hasOverrides = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoEncodeSessionParametersFeedbackInfoKHR>
+  {
+    using Type = VideoEncodeSessionParametersFeedbackInfoKHR;
+  };
+
+  struct VideoEncodeSessionParametersGetInfoKHR
+  {
+    using NativeType = VkVideoEncodeSessionParametersGetInfoKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeSessionParametersGetInfoKHR;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoEncodeSessionParametersGetInfoKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ = {},
+                                                                 const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , videoSessionParameters{ videoSessionParameters_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoEncodeSessionParametersGetInfoKHR( VideoEncodeSessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoEncodeSessionParametersGetInfoKHR( 
VkVideoEncodeSessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeSessionParametersGetInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeSessionParametersGetInfoKHR & operator=( VideoEncodeSessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoEncodeSessionParametersGetInfoKHR & operator=( VkVideoEncodeSessionParametersGetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeSessionParametersGetInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeSessionParametersGetInfoKHR & + setVideoSessionParameters( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ ) VULKAN_HPP_NOEXCEPT + { + videoSessionParameters = videoSessionParameters_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkVideoEncodeSessionParametersGetInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeSessionParametersGetInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, videoSessionParameters ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeSessionParametersGetInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeSessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoSessionParameters == rhs.videoSessionParameters ); +# endif + } + + bool operator!=( VideoEncodeSessionParametersGetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeSessionParametersGetInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeSessionParametersGetInfoKHR; + }; + struct VideoEncodeUsageInfoKHR { using NativeType = VkVideoEncodeUsageInfoKHR; @@ -96976,16 +138954,16 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeUsageInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoEncodeUsageInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagsKHR videoUsageHints_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR videoContentHints_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR tuningMode_ = VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR::eDefault, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , videoUsageHints( videoUsageHints_ ) - , videoContentHints( videoContentHints_ ) - , tuningMode( tuningMode_ ) + : pNext{ pNext_ } + , videoUsageHints{ videoUsageHints_ } + , videoContentHints{ videoContentHints_ } + , tuningMode{ tuningMode_ 
} { } @@ -96995,9 +138973,9 @@ namespace VULKAN_HPP_NAMESPACE : VideoEncodeUsageInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEncodeUsageInfoKHR & operator=( VideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ VideoEncodeUsageInfoKHR & operator=( VkVideoEncodeUsageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -97005,7 +138983,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -97019,7 +138997,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 VideoEncodeUsageInfoKHR & - setVideoContentHints( VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR videoContentHints_ ) VULKAN_HPP_NOEXCEPT + setVideoContentHints( VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR videoContentHints_ ) VULKAN_HPP_NOEXCEPT { videoContentHints = videoContentHints_; return *this; @@ -97030,7 +139008,7 @@ namespace VULKAN_HPP_NAMESPACE tuningMode = tuningMode_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkVideoEncodeUsageInfoKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -97042,40 +139020,40 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif +# endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, videoUsageHints, videoContentHints, tuningMode ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( VideoEncodeUsageInfoKHR const & ) const = default; -# else +#else bool operator==( VideoEncodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else +# else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoUsageHints == rhs.videoUsageHints ) && ( videoContentHints == rhs.videoContentHints ) && ( tuningMode == rhs.tuningMode ); -# endif +# endif } bool operator!=( VideoEncodeUsageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeUsageInfoKHR; @@ -97090,9 +139068,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = VideoEncodeUsageInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) struct VideoEndCodingInfoKHR { using NativeType = VkVideoEndCodingInfoKHR; @@ -97100,10 +139076,10 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEndCodingInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoEndCodingInfoKHR( VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) + : pNext{ pNext_ } + , flags{ flags_ } { } @@ -97113,9 +139089,9 @@ namespace VULKAN_HPP_NAMESPACE : 
VideoEndCodingInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoEndCodingInfoKHR & operator=( VideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ VideoEndCodingInfoKHR & operator=( VkVideoEndCodingInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -97123,7 +139099,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 VideoEndCodingInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -97135,7 +139111,7 @@ namespace VULKAN_HPP_NAMESPACE flags = flags_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkVideoEndCodingInfoKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -97147,35 +139123,35 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif +# endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( VideoEndCodingInfoKHR const & ) const = default; -# else +#else bool operator==( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else +# else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); -# endif +# endif } bool operator!=( VideoEndCodingInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEndCodingInfoKHR; @@ -97188,9 +139164,173 @@ namespace VULKAN_HPP_NAMESPACE { using Type = VideoEndCodingInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct VideoFormatAV1QuantizationMapPropertiesKHR + { + using NativeType = VkVideoFormatAV1QuantizationMapPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatAv1QuantizationMapPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoFormatAV1QuantizationMapPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeAV1SuperblockSizeFlagsKHR compatibleSuperblockSizes_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , compatibleSuperblockSizes{ compatibleSuperblockSizes_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoFormatAV1QuantizationMapPropertiesKHR( VideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoFormatAV1QuantizationMapPropertiesKHR( VkVideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoFormatAV1QuantizationMapPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoFormatAV1QuantizationMapPropertiesKHR & operator=( VideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoFormatAV1QuantizationMapPropertiesKHR & operator=( VkVideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) 
VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoFormatAV1QuantizationMapPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkVideoFormatAV1QuantizationMapPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoFormatAV1QuantizationMapPropertiesKHR *>( this );
+    }
+
+    operator VkVideoFormatAV1QuantizationMapPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoFormatAV1QuantizationMapPropertiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeAV1SuperblockSizeFlagsKHR const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, compatibleSuperblockSizes );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( VideoFormatAV1QuantizationMapPropertiesKHR const & ) const = default;
+#else
+    bool operator==( VideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compatibleSuperblockSizes == rhs.compatibleSuperblockSizes );
+# endif
+    }
+
+    bool operator!=( VideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoFormatAv1QuantizationMapPropertiesKHR;
+    void * pNext = {};
+    VULKAN_HPP_NAMESPACE::VideoEncodeAV1SuperblockSizeFlagsKHR compatibleSuperblockSizes = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eVideoFormatAv1QuantizationMapPropertiesKHR>
+  {
+    using Type = VideoFormatAV1QuantizationMapPropertiesKHR;
+  };
+
+  struct VideoFormatH265QuantizationMapPropertiesKHR
+  {
+    using NativeType = VkVideoFormatH265QuantizationMapPropertiesKHR;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatH265QuantizationMapPropertiesKHR;
+
+#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR VideoFormatH265QuantizationMapPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsKHR compatibleCtbSizes_ = {},
+                                                                      void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , compatibleCtbSizes{ compatibleCtbSizes_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR VideoFormatH265QuantizationMapPropertiesKHR( VideoFormatH265QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VideoFormatH265QuantizationMapPropertiesKHR( VkVideoFormatH265QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+      : VideoFormatH265QuantizationMapPropertiesKHR( *reinterpret_cast<VideoFormatH265QuantizationMapPropertiesKHR const *>( &rhs ) )
+    {
+    }
+
+    VideoFormatH265QuantizationMapPropertiesKHR & operator=( VideoFormatH265QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/
+
+    VideoFormatH265QuantizationMapPropertiesKHR & operator=( VkVideoFormatH265QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VideoFormatH265QuantizationMapPropertiesKHR const *>( &rhs );
+      return *this;
+    }
+
+    operator VkVideoFormatH265QuantizationMapPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVideoFormatH265QuantizationMapPropertiesKHR *>( this );
+    }
+
+    operator VkVideoFormatH265QuantizationMapPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVideoFormatH265QuantizationMapPropertiesKHR *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, void * const &, VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsKHR const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( sType, pNext, compatibleCtbSizes );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( 
VideoFormatH265QuantizationMapPropertiesKHR const & ) const = default; +#else + bool operator==( VideoFormatH265QuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compatibleCtbSizes == rhs.compatibleCtbSizes ); +# endif + } + + bool operator!=( VideoFormatH265QuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoFormatH265QuantizationMapPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsKHR compatibleCtbSizes = {}; + }; + + template <> + struct CppType + { + using Type = VideoFormatH265QuantizationMapPropertiesKHR; + }; + struct VideoFormatPropertiesKHR { using NativeType = VkVideoFormatPropertiesKHR; @@ -97198,7 +139338,7 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatPropertiesKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoFormatPropertiesKHR( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ComponentMapping componentMapping_ = {}, VULKAN_HPP_NAMESPACE::ImageCreateFlags imageCreateFlags_ = {}, @@ -97206,13 +139346,13 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageTiling imageTiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsageFlags_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , format( format_ ) - , componentMapping( componentMapping_ ) - , imageCreateFlags( imageCreateFlags_ ) - , imageType( imageType_ ) - , imageTiling( imageTiling_ ) - , imageUsageFlags( imageUsageFlags_ ) + : pNext{ pNext_ } + , format{ format_ } + , componentMapping{ componentMapping_ } + , imageCreateFlags{ imageCreateFlags_ } + , imageType{ imageType_ } + , imageTiling{ imageTiling_ } + , imageUsageFlags{ imageUsageFlags_ } { } @@ -97222,9 +139362,9 @@ namespace VULKAN_HPP_NAMESPACE : VideoFormatPropertiesKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoFormatPropertiesKHR & operator=( VideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ VideoFormatPropertiesKHR & operator=( VkVideoFormatPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -97242,10 +139382,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif +# endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, format, componentMapping, imageCreateFlags, imageType, imageTiling, imageUsageFlags ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( VideoFormatPropertiesKHR const & ) const = default; -# else +#else bool operator==( VideoFormatPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# 
else +# else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( componentMapping == rhs.componentMapping ) && ( imageCreateFlags == rhs.imageCreateFlags ) && ( imageType == rhs.imageType ) && ( imageTiling == rhs.imageTiling ) && ( imageUsageFlags == rhs.imageUsageFlags ); -# endif +# endif } bool operator!=( VideoFormatPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoFormatPropertiesKHR; @@ -97297,143 +139437,206 @@ namespace VULKAN_HPP_NAMESPACE { using Type = VideoFormatPropertiesKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - struct VideoProfileInfoKHR + struct VideoFormatQuantizationMapPropertiesKHR { - using NativeType = VkVideoProfileInfoKHR; + using NativeType = VkVideoFormatQuantizationMapPropertiesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfileInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatQuantizationMapPropertiesKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VideoProfileInfoKHR( - VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ = VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eNone, - VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ = {}, - VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ = {}, - VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , videoCodecOperation( videoCodecOperation_ ) - , chromaSubsampling( chromaSubsampling_ ) - , lumaBitDepth( lumaBitDepth_ ) - , chromaBitDepth( chromaBitDepth_ ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoFormatQuantizationMapPropertiesKHR( VULKAN_HPP_NAMESPACE::Extent2D quantizationMapTexelSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , quantizationMapTexelSize{ quantizationMapTexelSize_ } { } - VULKAN_HPP_CONSTEXPR VideoProfileInfoKHR( VideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VideoFormatQuantizationMapPropertiesKHR( VideoFormatQuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VideoProfileInfoKHR( VkVideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : VideoProfileInfoKHR( *reinterpret_cast( &rhs ) ) + VideoFormatQuantizationMapPropertiesKHR( VkVideoFormatQuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoFormatQuantizationMapPropertiesKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VideoProfileInfoKHR & operator=( VideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VideoFormatQuantizationMapPropertiesKHR & operator=( VideoFormatQuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ - VideoProfileInfoKHR & operator=( VkVideoProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VideoFormatQuantizationMapPropertiesKHR & operator=( VkVideoFormatQuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 
VideoProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + operator VkVideoFormatQuantizationMapPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + return *reinterpret_cast( this ); + } + + operator VkVideoFormatQuantizationMapPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, quantizationMapTexelSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoFormatQuantizationMapPropertiesKHR const & ) const = default; +#else + bool operator==( VideoFormatQuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( quantizationMapTexelSize == rhs.quantizationMapTexelSize ); +# endif + } + + bool operator!=( VideoFormatQuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoFormatQuantizationMapPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D quantizationMapTexelSize = {}; + }; + + template <> + struct CppType + { + using Type = VideoFormatQuantizationMapPropertiesKHR; + }; + + struct VideoInlineQueryInfoKHR + { + using NativeType = VkVideoInlineQueryInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoInlineQueryInfoKHR; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoInlineQueryInfoKHR( VULKAN_HPP_NAMESPACE::QueryPool queryPool_ = {}, + uint32_t firstQuery_ = {}, + uint32_t queryCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , queryPool{ queryPool_ } + , firstQuery{ firstQuery_ } + , queryCount{ queryCount_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoInlineQueryInfoKHR( VideoInlineQueryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoInlineQueryInfoKHR( VkVideoInlineQueryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoInlineQueryInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoInlineQueryInfoKHR & operator=( VideoInlineQueryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + VideoInlineQueryInfoKHR & operator=( VkVideoInlineQueryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & - setVideoCodecOperation( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation_ ) VULKAN_HPP_NOEXCEPT +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoInlineQueryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - videoCodecOperation = videoCodecOperation_; + pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & - setChromaSubsampling( VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoInlineQueryInfoKHR & setQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool_ ) VULKAN_HPP_NOEXCEPT { - chromaSubsampling = chromaSubsampling_; + queryPool = queryPool_; return 
*this; } - VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setLumaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoInlineQueryInfoKHR & setFirstQuery( uint32_t firstQuery_ ) VULKAN_HPP_NOEXCEPT { - lumaBitDepth = lumaBitDepth_; + firstQuery = firstQuery_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoProfileInfoKHR & setChromaBitDepth( VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoInlineQueryInfoKHR & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT { - chromaBitDepth = chromaBitDepth_; + queryCount = queryCount_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ - operator VkVideoProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkVideoInlineQueryInfoKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVideoProfileInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkVideoInlineQueryInfoKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, videoCodecOperation, chromaSubsampling, lumaBitDepth, chromaBitDepth ); + return std::tie( sType, pNext, queryPool, firstQuery, queryCount ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoProfileInfoKHR const & ) const = default; -# else - bool operator==( VideoProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoInlineQueryInfoKHR const & ) const = default; +#else + bool operator==( VideoInlineQueryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoCodecOperation == rhs.videoCodecOperation ) && - ( chromaSubsampling == rhs.chromaSubsampling ) && ( lumaBitDepth == rhs.lumaBitDepth ) && ( chromaBitDepth == rhs.chromaBitDepth ); -# endif +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queryPool == rhs.queryPool ) && ( firstQuery == rhs.firstQuery ) && + ( queryCount == rhs.queryCount ); +# endif } - bool operator!=( VideoProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VideoInlineQueryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfileInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR videoCodecOperation = VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR::eNone; - VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR chromaSubsampling = {}; - VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth = {}; - VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoInlineQueryInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueryPool queryPool = {}; + uint32_t firstQuery = {}; + uint32_t queryCount = {}; }; template 
<> - struct CppType + struct CppType { - using Type = VideoProfileInfoKHR; + using Type = VideoInlineQueryInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) struct VideoProfileListInfoKHR { using NativeType = VkVideoProfileListInfoKHR; @@ -97441,13 +139644,13 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoProfileListInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoProfileListInfoKHR( uint32_t profileCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pProfiles_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , profileCount( profileCount_ ) - , pProfiles( pProfiles_ ) + : pNext{ pNext_ } + , profileCount{ profileCount_ } + , pProfiles{ pProfiles_ } { } @@ -97458,16 +139661,16 @@ namespace VULKAN_HPP_NAMESPACE { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) VideoProfileListInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & profiles_, const void * pNext_ = nullptr ) : pNext( pNext_ ), profileCount( static_cast( profiles_.size() ) ), pProfiles( profiles_.data() ) { } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VideoProfileListInfoKHR & operator=( VideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ VideoProfileListInfoKHR & operator=( VkVideoProfileListInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -97475,7 +139678,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 VideoProfileListInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -97494,7 +139697,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) VideoProfileListInfoKHR & setProfiles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & profiles_ ) VULKAN_HPP_NOEXCEPT { @@ -97502,8 +139705,8 @@ namespace VULKAN_HPP_NAMESPACE pProfiles = profiles_.data(); return *this; } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkVideoProfileListInfoKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -97515,35 +139718,35 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif +# endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, profileCount, pProfiles ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( VideoProfileListInfoKHR const & ) const = default; -# else +#else bool operator==( VideoProfileListInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else +# else return ( 
sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( profileCount == rhs.profileCount ) && ( pProfiles == rhs.pProfiles ); -# endif +# endif } bool operator!=( VideoProfileListInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoProfileListInfoKHR; @@ -97557,9 +139760,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = VideoProfileListInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) struct VideoSessionCreateInfoKHR { using NativeType = VkVideoSessionCreateInfoKHR; @@ -97567,27 +139768,27 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionCreateInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR( uint32_t queueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags_ = {}, const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile_ = {}, - VULKAN_HPP_NAMESPACE::Format pictureFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent_ = {}, - VULKAN_HPP_NAMESPACE::Format referencePicturesFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - uint32_t maxReferencePicturesSlotsCount_ = {}, - uint32_t maxReferencePicturesActiveCount_ = {}, + VULKAN_HPP_NAMESPACE::Format pictureFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent_ = {}, + VULKAN_HPP_NAMESPACE::Format referencePictureFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + uint32_t maxDpbSlots_ = {}, + uint32_t maxActiveReferencePictures_ = {}, const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdHeaderVersion_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , queueFamilyIndex( queueFamilyIndex_ ) - , flags( flags_ ) - , pVideoProfile( pVideoProfile_ ) - , pictureFormat( pictureFormat_ ) - , maxCodedExtent( maxCodedExtent_ ) - , referencePicturesFormat( referencePicturesFormat_ ) - , maxReferencePicturesSlotsCount( maxReferencePicturesSlotsCount_ ) - , maxReferencePicturesActiveCount( maxReferencePicturesActiveCount_ ) - , pStdHeaderVersion( pStdHeaderVersion_ ) + : pNext{ pNext_ } + , queueFamilyIndex{ queueFamilyIndex_ } + , flags{ flags_ } + , pVideoProfile{ pVideoProfile_ } + , pictureFormat{ pictureFormat_ } + , maxCodedExtent{ maxCodedExtent_ } + , referencePictureFormat{ referencePictureFormat_ } + , maxDpbSlots{ maxDpbSlots_ } + , maxActiveReferencePictures{ maxActiveReferencePictures_ } + , pStdHeaderVersion{ pStdHeaderVersion_ } { } @@ -97597,9 +139798,9 @@ namespace VULKAN_HPP_NAMESPACE : VideoSessionCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoSessionCreateInfoKHR & operator=( VideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ VideoSessionCreateInfoKHR & operator=( VkVideoSessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -97607,7 +139808,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ 
-97644,31 +139845,31 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setReferencePicturesFormat( VULKAN_HPP_NAMESPACE::Format referencePicturesFormat_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setReferencePictureFormat( VULKAN_HPP_NAMESPACE::Format referencePictureFormat_ ) VULKAN_HPP_NOEXCEPT { - referencePicturesFormat = referencePicturesFormat_; + referencePictureFormat = referencePictureFormat_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxReferencePicturesSlotsCount( uint32_t maxReferencePicturesSlotsCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxDpbSlots( uint32_t maxDpbSlots_ ) VULKAN_HPP_NOEXCEPT { - maxReferencePicturesSlotsCount = maxReferencePicturesSlotsCount_; + maxDpbSlots = maxDpbSlots_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxReferencePicturesActiveCount( uint32_t maxReferencePicturesActiveCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & setMaxActiveReferencePictures( uint32_t maxActiveReferencePictures_ ) VULKAN_HPP_NOEXCEPT { - maxReferencePicturesActiveCount = maxReferencePicturesActiveCount_; + maxActiveReferencePictures = maxActiveReferencePictures_; return *this; } VULKAN_HPP_CONSTEXPR_14 VideoSessionCreateInfoKHR & - setPStdHeaderVersion( const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdHeaderVersion_ ) VULKAN_HPP_NOEXCEPT + setPStdHeaderVersion( const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdHeaderVersion_ ) VULKAN_HPP_NOEXCEPT { pStdHeaderVersion = pStdHeaderVersion_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkVideoSessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -97680,10 +139881,10 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif +# endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, @@ -97705,46 +139906,46 @@ namespace VULKAN_HPP_NAMESPACE pVideoProfile, pictureFormat, maxCodedExtent, - referencePicturesFormat, - maxReferencePicturesSlotsCount, - maxReferencePicturesActiveCount, + referencePictureFormat, + maxDpbSlots, + maxActiveReferencePictures, pStdHeaderVersion ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( VideoSessionCreateInfoKHR const & ) const = default; -# else +#else bool operator==( VideoSessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else +# else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && ( flags == rhs.flags ) && ( pVideoProfile == rhs.pVideoProfile ) && ( pictureFormat == rhs.pictureFormat ) && ( maxCodedExtent == rhs.maxCodedExtent ) && - ( referencePicturesFormat == rhs.referencePicturesFormat ) && ( maxReferencePicturesSlotsCount == rhs.maxReferencePicturesSlotsCount ) && - ( maxReferencePicturesActiveCount == rhs.maxReferencePicturesActiveCount ) && ( pStdHeaderVersion == rhs.pStdHeaderVersion ); -# endif + ( referencePictureFormat == rhs.referencePictureFormat ) && ( maxDpbSlots == rhs.maxDpbSlots ) && + ( 
maxActiveReferencePictures == rhs.maxActiveReferencePictures ) && ( pStdHeaderVersion == rhs.pStdHeaderVersion ); +# endif } bool operator!=( VideoSessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionCreateInfoKHR; - const void * pNext = {}; - uint32_t queueFamilyIndex = {}; - VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags = {}; - const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile = {}; - VULKAN_HPP_NAMESPACE::Format pictureFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; - VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent = {}; - VULKAN_HPP_NAMESPACE::Format referencePicturesFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; - uint32_t maxReferencePicturesSlotsCount = {}; - uint32_t maxReferencePicturesActiveCount = {}; - const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdHeaderVersion = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionCreateInfoKHR; + const void * pNext = {}; + uint32_t queueFamilyIndex = {}; + VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR flags = {}; + const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile = {}; + VULKAN_HPP_NAMESPACE::Format pictureFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Extent2D maxCodedExtent = {}; + VULKAN_HPP_NAMESPACE::Format referencePictureFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint32_t maxDpbSlots = {}; + uint32_t maxActiveReferencePictures = {}; + const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdHeaderVersion = {}; }; template <> @@ -97752,9 +139953,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = VideoSessionCreateInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) struct VideoSessionMemoryRequirementsKHR { using NativeType = VkVideoSessionMemoryRequirementsKHR; @@ -97762,13 +139961,13 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionMemoryRequirementsKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoSessionMemoryRequirementsKHR( uint32_t memoryBindIndex_ = {}, VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memoryBindIndex( memoryBindIndex_ ) - , memoryRequirements( memoryRequirements_ ) + : pNext{ pNext_ } + , memoryBindIndex{ memoryBindIndex_ } + , memoryRequirements{ memoryRequirements_ } { } @@ -97778,9 +139977,9 @@ namespace VULKAN_HPP_NAMESPACE : VideoSessionMemoryRequirementsKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoSessionMemoryRequirementsKHR & operator=( VideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ VideoSessionMemoryRequirementsKHR & operator=( VkVideoSessionMemoryRequirementsKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -97798,35 +139997,35 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif +# endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, memoryBindIndex, 
memoryRequirements ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( VideoSessionMemoryRequirementsKHR const & ) const = default; -# else +#else bool operator==( VideoSessionMemoryRequirementsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else +# else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryBindIndex == rhs.memoryBindIndex ) && ( memoryRequirements == rhs.memoryRequirements ); -# endif +# endif } bool operator!=( VideoSessionMemoryRequirementsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionMemoryRequirementsKHR; @@ -97840,9 +140039,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = VideoSessionMemoryRequirementsKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) struct VideoSessionParametersCreateInfoKHR { using NativeType = VkVideoSessionParametersCreateInfoKHR; @@ -97850,15 +140047,15 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionParametersCreateInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoSessionParametersCreateInfoKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ = {}, VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , videoSessionParametersTemplate( videoSessionParametersTemplate_ ) - , videoSession( videoSession_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , videoSessionParametersTemplate{ videoSessionParametersTemplate_ } + , videoSession{ videoSession_ } { } @@ -97868,9 +140065,9 @@ namespace VULKAN_HPP_NAMESPACE : VideoSessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoSessionParametersCreateInfoKHR & operator=( VideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ VideoSessionParametersCreateInfoKHR & operator=( VkVideoSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -97878,7 +140075,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -97886,7 +140083,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersCreateInfoKHR & - setFlags( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + setFlags( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; @@ -97904,7 +140101,7 @@ namespace VULKAN_HPP_NAMESPACE videoSession = videoSession_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkVideoSessionParametersCreateInfoKHR const &() const 
VULKAN_HPP_NOEXCEPT { @@ -97916,40 +140113,40 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif +# endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, videoSessionParametersTemplate, videoSession ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( VideoSessionParametersCreateInfoKHR const & ) const = default; -# else +#else bool operator==( VideoSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else +# else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( videoSessionParametersTemplate == rhs.videoSessionParametersTemplate ) && ( videoSession == rhs.videoSession ); -# endif +# endif } bool operator!=( VideoSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionParametersCreateInfoKHR; @@ -97964,9 +140161,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = VideoSessionParametersCreateInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) struct VideoSessionParametersUpdateInfoKHR { using NativeType = VkVideoSessionParametersUpdateInfoKHR; @@ -97974,10 +140169,10 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoSessionParametersUpdateInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR( uint32_t updateSequenceCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , updateSequenceCount( updateSequenceCount_ ) + : pNext{ pNext_ } + , updateSequenceCount{ updateSequenceCount_ } { } @@ -97987,9 +140182,9 @@ namespace VULKAN_HPP_NAMESPACE : VideoSessionParametersUpdateInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ VideoSessionParametersUpdateInfoKHR & operator=( VideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ VideoSessionParametersUpdateInfoKHR & operator=( VkVideoSessionParametersUpdateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -97997,7 +140192,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 VideoSessionParametersUpdateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -98009,7 +140204,7 @@ namespace VULKAN_HPP_NAMESPACE updateSequenceCount = updateSequenceCount_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkVideoSessionParametersUpdateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -98021,35 +140216,35 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if 
defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif +# endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, updateSequenceCount ); } -# endif +#endif -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( VideoSessionParametersUpdateInfoKHR const & ) const = default; -# else +#else bool operator==( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); -# else +# else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( updateSequenceCount == rhs.updateSequenceCount ); -# endif +# endif } bool operator!=( VideoSessionParametersUpdateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -# endif +#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoSessionParametersUpdateInfoKHR; @@ -98062,7 +140257,6 @@ namespace VULKAN_HPP_NAMESPACE { using Type = VideoSessionParametersUpdateInfoKHR; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined( VK_USE_PLATFORM_WAYLAND_KHR ) struct WaylandSurfaceCreateInfoKHR @@ -98072,15 +140266,15 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWaylandSurfaceCreateInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ = {}, struct wl_display * display_ = {}, struct wl_surface * surface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , display( display_ ) - , surface( surface_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , display{ display_ } + , surface{ surface_ } { } @@ -98090,9 +140284,9 @@ namespace VULKAN_HPP_NAMESPACE : WaylandSurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ WaylandSurfaceCreateInfoKHR & operator=( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ WaylandSurfaceCreateInfoKHR & operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -98100,7 +140294,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -98124,7 +140318,7 @@ namespace VULKAN_HPP_NAMESPACE surface = surface_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ operator VkWaylandSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -98193,7 +140387,7 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ = {}, 
const uint64_t * pAcquireKeys_ = {}, @@ -98202,14 +140396,14 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {}, const uint64_t * pReleaseKeys_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , acquireCount( acquireCount_ ) - , pAcquireSyncs( pAcquireSyncs_ ) - , pAcquireKeys( pAcquireKeys_ ) - , pAcquireTimeouts( pAcquireTimeouts_ ) - , releaseCount( releaseCount_ ) - , pReleaseSyncs( pReleaseSyncs_ ) - , pReleaseKeys( pReleaseKeys_ ) + : pNext{ pNext_ } + , acquireCount{ acquireCount_ } + , pAcquireSyncs{ pAcquireSyncs_ } + , pAcquireKeys{ pAcquireKeys_ } + , pAcquireTimeouts{ pAcquireTimeouts_ } + , releaseCount{ releaseCount_ } + , pReleaseSyncs{ pReleaseSyncs_ } + , pReleaseKeys{ pReleaseKeys_ } { } @@ -98269,9 +140463,9 @@ namespace VULKAN_HPP_NAMESPACE # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ Win32KeyedMutexAcquireReleaseInfoKHR & operator=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ Win32KeyedMutexAcquireReleaseInfoKHR & operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -98279,7 +140473,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -98293,7 +140487,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & - setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT + setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT { pAcquireSyncs = pAcquireSyncs_; return *this; @@ -98348,7 +140542,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & - setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT + setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT { pReleaseSyncs = pReleaseSyncs_; return *this; @@ -98379,7 +140573,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ operator VkWin32KeyedMutexAcquireReleaseInfoKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -98458,7 +140652,7 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ = {}, const uint64_t * pAcquireKeys_ = {}, @@ -98467,14 +140661,14 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {}, const uint64_t * pReleaseKeys_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , acquireCount( acquireCount_ ) - , pAcquireSyncs( pAcquireSyncs_ ) - , pAcquireKeys( pAcquireKeys_ ) - , pAcquireTimeoutMilliseconds( 
pAcquireTimeoutMilliseconds_ ) - , releaseCount( releaseCount_ ) - , pReleaseSyncs( pReleaseSyncs_ ) - , pReleaseKeys( pReleaseKeys_ ) + : pNext{ pNext_ } + , acquireCount{ acquireCount_ } + , pAcquireSyncs{ pAcquireSyncs_ } + , pAcquireKeys{ pAcquireKeys_ } + , pAcquireTimeoutMilliseconds{ pAcquireTimeoutMilliseconds_ } + , releaseCount{ releaseCount_ } + , pReleaseSyncs{ pReleaseSyncs_ } + , pReleaseKeys{ pReleaseKeys_ } { } @@ -98536,9 +140730,9 @@ namespace VULKAN_HPP_NAMESPACE # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ Win32KeyedMutexAcquireReleaseInfoNV & operator=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ Win32KeyedMutexAcquireReleaseInfoNV & operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -98546,7 +140740,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -98560,7 +140754,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & - setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT + setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT { pAcquireSyncs = pAcquireSyncs_; return *this; @@ -98593,7 +140787,7 @@ namespace VULKAN_HPP_NAMESPACE # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & - setPAcquireTimeoutMilliseconds( const uint32_t * pAcquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT + setPAcquireTimeoutMilliseconds( const uint32_t * pAcquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT { pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_; return *this; @@ -98616,7 +140810,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & - setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT + setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT { pReleaseSyncs = pReleaseSyncs_; return *this; @@ -98647,7 +140841,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ operator VkWin32KeyedMutexAcquireReleaseInfoNV const &() const VULKAN_HPP_NOEXCEPT { @@ -98726,15 +140920,15 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32SurfaceCreateInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ = {}, HINSTANCE hinstance_ = {}, HWND hwnd_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , hinstance( hinstance_ ) - , hwnd( hwnd_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , hinstance{ hinstance_ } + , hwnd{ hwnd_ } { } @@ -98744,9 +140938,9 @@ namespace VULKAN_HPP_NAMESPACE : Win32SurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) 
{ } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ Win32SurfaceCreateInfoKHR & operator=( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ Win32SurfaceCreateInfoKHR & operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -98754,7 +140948,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -98778,7 +140972,7 @@ namespace VULKAN_HPP_NAMESPACE hwnd = hwnd_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ operator VkWin32SurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -98839,237 +141033,6 @@ namespace VULKAN_HPP_NAMESPACE }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - struct WriteDescriptorSet - { - using NativeType = VkWriteDescriptorSet; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSet; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, - uint32_t dstBinding_ = {}, - uint32_t dstArrayElement_ = {}, - uint32_t descriptorCount_ = {}, - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, - const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ = {}, - const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , dstSet( dstSet_ ) - , dstBinding( dstBinding_ ) - , dstArrayElement( dstArrayElement_ ) - , descriptorCount( descriptorCount_ ) - , descriptorType( descriptorType_ ) - , pImageInfo( pImageInfo_ ) - , pBufferInfo( pBufferInfo_ ) - , pTexelBufferView( pTexelBufferView_ ) - { - } - - VULKAN_HPP_CONSTEXPR WriteDescriptorSet( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT : WriteDescriptorSet( *reinterpret_cast( &rhs ) ) {} - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_, - uint32_t dstBinding_, - uint32_t dstArrayElement_, - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageInfo_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferInfo_ = {}, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & texelBufferView_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , dstSet( dstSet_ ) - , dstBinding( dstBinding_ ) - , dstArrayElement( dstArrayElement_ ) - , descriptorCount( static_cast( !imageInfo_.empty() ? imageInfo_.size() - : !bufferInfo_.empty() ? 
bufferInfo_.size() - : texelBufferView_.size() ) ) - , descriptorType( descriptorType_ ) - , pImageInfo( imageInfo_.data() ) - , pBufferInfo( bufferInfo_.data() ) - , pTexelBufferView( texelBufferView_.data() ) - { -# ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) <= 1 ); -# else - if ( 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) ) - { - throw LogicError( VULKAN_HPP_NAMESPACE_STRING - "::WriteDescriptorSet::WriteDescriptorSet: 1 < ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() )" ); - } -# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - WriteDescriptorSet & operator=( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - WriteDescriptorSet & operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT - { - dstSet = dstSet_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT - { - dstBinding = dstBinding_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT - { - dstArrayElement = dstArrayElement_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT - { - descriptorCount = descriptorCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT - { - descriptorType = descriptorType_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo_ ) VULKAN_HPP_NOEXCEPT - { - pImageInfo = pImageInfo_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - WriteDescriptorSet & - setImageInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & imageInfo_ ) VULKAN_HPP_NOEXCEPT - { - descriptorCount = static_cast( imageInfo_.size() ); - pImageInfo = imageInfo_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ ) VULKAN_HPP_NOEXCEPT - { - pBufferInfo = pBufferInfo_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - WriteDescriptorSet & - setBufferInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & bufferInfo_ ) VULKAN_HPP_NOEXCEPT - { - descriptorCount = static_cast( bufferInfo_.size() ); - pBufferInfo = bufferInfo_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT - { - pTexelBufferView = pTexelBufferView_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - WriteDescriptorSet & - setTexelBufferView( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & 
texelBufferView_ ) VULKAN_HPP_NOEXCEPT - { - descriptorCount = static_cast( texelBufferView_.size() ); - pTexelBufferView = texelBufferView_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkWriteDescriptorSet const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, dstSet, dstBinding, dstArrayElement, descriptorCount, descriptorType, pImageInfo, pBufferInfo, pTexelBufferView ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( WriteDescriptorSet const & ) const = default; -#else - bool operator==( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dstSet == rhs.dstSet ) && ( dstBinding == rhs.dstBinding ) && - ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount ) && ( descriptorType == rhs.descriptorType ) && - ( pImageInfo == rhs.pImageInfo ) && ( pBufferInfo == rhs.pBufferInfo ) && ( pTexelBufferView == rhs.pTexelBufferView ); -# endif - } - - bool operator!=( WriteDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {}; - uint32_t dstBinding = {}; - uint32_t dstArrayElement = {}; - uint32_t descriptorCount = {}; - VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; - const VULKAN_HPP_NAMESPACE::DescriptorImageInfo * pImageInfo = {}; - const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo = {}; - const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView = {}; - }; - - template <> - struct CppType - { - using Type = WriteDescriptorSet; - }; - struct WriteDescriptorSetAccelerationStructureKHR { using NativeType = VkWriteDescriptorSetAccelerationStructureKHR; @@ -99077,13 +141040,13 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetAccelerationStructureKHR; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( uint32_t accelerationStructureCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , accelerationStructureCount( accelerationStructureCount_ ) - , pAccelerationStructures( pAccelerationStructures_ ) + : pNext{ pNext_ } + , accelerationStructureCount{ accelerationStructureCount_ } + , pAccelerationStructures{ pAccelerationStructures_ } { } @@ -99104,9 +141067,9 @@ namespace VULKAN_HPP_NAMESPACE { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ WriteDescriptorSetAccelerationStructureKHR & operator=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ WriteDescriptorSetAccelerationStructureKHR & operator=( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -99114,7 +141077,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -99122,14 +141085,14 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & - setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT + setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT { accelerationStructureCount = accelerationStructureCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & - setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT + setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT { pAccelerationStructures = pAccelerationStructures_; return *this; @@ -99144,7 +141107,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkWriteDescriptorSetAccelerationStructureKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -99208,13 +141171,13 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetAccelerationStructureNV; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( uint32_t accelerationStructureCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , accelerationStructureCount( accelerationStructureCount_ ) - , pAccelerationStructures( pAccelerationStructures_ ) + : pNext{ pNext_ } + , accelerationStructureCount{ accelerationStructureCount_ } + , pAccelerationStructures{ pAccelerationStructures_ } { } @@ -99235,9 +141198,9 @@ namespace VULKAN_HPP_NAMESPACE { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ WriteDescriptorSetAccelerationStructureNV & operator=( WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ WriteDescriptorSetAccelerationStructureNV & operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -99245,7 +141208,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -99253,14 +141216,14 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & - setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT + 
setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT { accelerationStructureCount = accelerationStructureCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & - setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT + setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT { pAccelerationStructures = pAccelerationStructures_; return *this; @@ -99275,7 +141238,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkWriteDescriptorSetAccelerationStructureNV const &() const VULKAN_HPP_NOEXCEPT { @@ -99339,12 +141302,12 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetInlineUniformBlock; -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlock( uint32_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , dataSize( dataSize_ ) - , pData( pData_ ) + : pNext{ pNext_ } + , dataSize{ dataSize_ } + , pData{ pData_ } { } @@ -99362,9 +141325,9 @@ namespace VULKAN_HPP_NAMESPACE { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ WriteDescriptorSetInlineUniformBlock & operator=( WriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ WriteDescriptorSetInlineUniformBlock & operator=( VkWriteDescriptorSetInlineUniformBlock const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -99372,7 +141335,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlock & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -99400,7 +141363,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkWriteDescriptorSetInlineUniformBlock const &() const VULKAN_HPP_NOEXCEPT { @@ -99454,8 +141417,352 @@ namespace VULKAN_HPP_NAMESPACE { using Type = WriteDescriptorSetInlineUniformBlock; }; + using WriteDescriptorSetInlineUniformBlockEXT = WriteDescriptorSetInlineUniformBlock; + struct WriteDescriptorSetPartitionedAccelerationStructureNV + { + using NativeType = VkWriteDescriptorSetPartitionedAccelerationStructureNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetPartitionedAccelerationStructureNV; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteDescriptorSetPartitionedAccelerationStructureNV( uint32_t accelerationStructureCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceAddress * pAccelerationStructures_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , accelerationStructureCount{ accelerationStructureCount_ } + , pAccelerationStructures{ 
pAccelerationStructures_ } + { + } + + VULKAN_HPP_CONSTEXPR + WriteDescriptorSetPartitionedAccelerationStructureNV( WriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteDescriptorSetPartitionedAccelerationStructureNV( VkWriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT + : WriteDescriptorSetPartitionedAccelerationStructureNV( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSetPartitionedAccelerationStructureNV( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & accelerationStructures_, void * pNext_ = nullptr ) + : pNext( pNext_ ) + , accelerationStructureCount( static_cast( accelerationStructures_.size() ) ) + , pAccelerationStructures( accelerationStructures_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + WriteDescriptorSetPartitionedAccelerationStructureNV & + operator=( WriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + WriteDescriptorSetPartitionedAccelerationStructureNV & operator=( VkWriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetPartitionedAccelerationStructureNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetPartitionedAccelerationStructureNV & + setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureCount = accelerationStructureCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetPartitionedAccelerationStructureNV & + setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::DeviceAddress * pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT + { + pAccelerationStructures = pAccelerationStructures_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + WriteDescriptorSetPartitionedAccelerationStructureNV & setAccelerationStructures( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & accelerationStructures_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructureCount = static_cast( accelerationStructures_.size() ); + pAccelerationStructures = accelerationStructures_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkWriteDescriptorSetPartitionedAccelerationStructureNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteDescriptorSetPartitionedAccelerationStructureNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, accelerationStructureCount, pAccelerationStructures ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteDescriptorSetPartitionedAccelerationStructureNV const & ) const = default; +#else + bool operator==( WriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) 
&& ( accelerationStructureCount == rhs.accelerationStructureCount ) && + ( pAccelerationStructures == rhs.pAccelerationStructures ); +# endif + } + + bool operator!=( WriteDescriptorSetPartitionedAccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetPartitionedAccelerationStructureNV; + void * pNext = {}; + uint32_t accelerationStructureCount = {}; + const VULKAN_HPP_NAMESPACE::DeviceAddress * pAccelerationStructures = {}; + }; + + template <> + struct CppType + { + using Type = WriteDescriptorSetPartitionedAccelerationStructureNV; + }; + + struct WriteIndirectExecutionSetPipelineEXT + { + using NativeType = VkWriteIndirectExecutionSetPipelineEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteIndirectExecutionSetPipelineEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteIndirectExecutionSetPipelineEXT( uint32_t index_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , index{ index_ } + , pipeline{ pipeline_ } + { + } + + VULKAN_HPP_CONSTEXPR WriteIndirectExecutionSetPipelineEXT( WriteIndirectExecutionSetPipelineEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteIndirectExecutionSetPipelineEXT( VkWriteIndirectExecutionSetPipelineEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : WriteIndirectExecutionSetPipelineEXT( *reinterpret_cast( &rhs ) ) + { + } + + WriteIndirectExecutionSetPipelineEXT & operator=( WriteIndirectExecutionSetPipelineEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + WriteIndirectExecutionSetPipelineEXT & operator=( VkWriteIndirectExecutionSetPipelineEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetPipelineEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetPipelineEXT & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT + { + index = index_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetPipelineEXT & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkWriteIndirectExecutionSetPipelineEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteIndirectExecutionSetPipelineEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, index, pipeline ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteIndirectExecutionSetPipelineEXT const & ) const = default; +#else + bool operator==( WriteIndirectExecutionSetPipelineEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( index == rhs.index ) && ( 
pipeline == rhs.pipeline ); +# endif + } + + bool operator!=( WriteIndirectExecutionSetPipelineEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteIndirectExecutionSetPipelineEXT; + const void * pNext = {}; + uint32_t index = {}; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + }; + + template <> + struct CppType + { + using Type = WriteIndirectExecutionSetPipelineEXT; + }; + + struct WriteIndirectExecutionSetShaderEXT + { + using NativeType = VkWriteIndirectExecutionSetShaderEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteIndirectExecutionSetShaderEXT; + +#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteIndirectExecutionSetShaderEXT( uint32_t index_ = {}, + VULKAN_HPP_NAMESPACE::ShaderEXT shader_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , index{ index_ } + , shader{ shader_ } + { + } + + VULKAN_HPP_CONSTEXPR WriteIndirectExecutionSetShaderEXT( WriteIndirectExecutionSetShaderEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteIndirectExecutionSetShaderEXT( VkWriteIndirectExecutionSetShaderEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : WriteIndirectExecutionSetShaderEXT( *reinterpret_cast( &rhs ) ) + { + } + + WriteIndirectExecutionSetShaderEXT & operator=( WriteIndirectExecutionSetShaderEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ + + WriteIndirectExecutionSetShaderEXT & operator=( VkWriteIndirectExecutionSetShaderEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetShaderEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetShaderEXT & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT + { + index = index_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetShaderEXT & setShader( VULKAN_HPP_NAMESPACE::ShaderEXT shader_ ) VULKAN_HPP_NOEXCEPT + { + shader = shader_; + return *this; + } +#endif /*VULKAN_HPP_NO_SETTERS*/ + + operator VkWriteIndirectExecutionSetShaderEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteIndirectExecutionSetShaderEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, index, shader ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteIndirectExecutionSetShaderEXT const & ) const = default; +#else + bool operator==( WriteIndirectExecutionSetShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( index == rhs.index ) && ( shader == rhs.shader ); +# endif + } + + bool operator!=( WriteIndirectExecutionSetShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::eWriteIndirectExecutionSetShaderEXT; + const void * pNext = {}; + uint32_t index = {}; + VULKAN_HPP_NAMESPACE::ShaderEXT shader = {}; + }; + + template <> + struct CppType + { + using Type = WriteIndirectExecutionSetShaderEXT; + }; + #if defined( VK_USE_PLATFORM_XCB_KHR ) struct XcbSurfaceCreateInfoKHR { @@ -99464,15 +141771,15 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXcbSurfaceCreateInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ = {}, xcb_connection_t * connection_ = {}, xcb_window_t window_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , connection( connection_ ) - , window( window_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , connection{ connection_ } + , window{ window_ } { } @@ -99482,9 +141789,9 @@ namespace VULKAN_HPP_NAMESPACE : XcbSurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ XcbSurfaceCreateInfoKHR & operator=( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ XcbSurfaceCreateInfoKHR & operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -99492,7 +141799,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -99516,7 +141823,7 @@ namespace VULKAN_HPP_NAMESPACE window = window_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ operator VkXcbSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -99596,15 +141903,15 @@ namespace VULKAN_HPP_NAMESPACE static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXlibSurfaceCreateInfoKHR; -# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) +# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ = {}, Display * dpy_ = {}, Window window_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , dpy( dpy_ ) - , window( window_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , dpy{ dpy_ } + , window{ window_ } { } @@ -99614,9 +141921,9 @@ namespace VULKAN_HPP_NAMESPACE : XlibSurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } -# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ XlibSurfaceCreateInfoKHR & operator=( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ XlibSurfaceCreateInfoKHR & operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -99624,7 +141931,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) +# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; @@ -99648,7 +141955,7 @@ namespace 
VULKAN_HPP_NAMESPACE window = window_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_SETTERS*/ operator VkXlibSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { diff --git a/src/libraries/vulkanheaders/vulkan_to_string.hpp b/src/libraries/vulkanheaders/vulkan_to_string.hpp index a38848c70..22b948ee6 100644 --- a/src/libraries/vulkanheaders/vulkan_to_string.hpp +++ b/src/libraries/vulkanheaders/vulkan_to_string.hpp @@ -1,4 +1,4 @@ -// Copyright 2015-2022 The Khronos Group Inc. +// Copyright 2015-2025 The Khronos Group Inc. // // SPDX-License-Identifier: Apache-2.0 OR MIT // @@ -8,16 +8,30 @@ #ifndef VULKAN_TO_STRING_HPP #define VULKAN_TO_STRING_HPP -#include +#include -#if __cpp_lib_format -# include // std::format +// ignore warnings on using deprecated enum values in this header +#if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# pragma GCC diagnostic ignored "-Wdeprecated-declarations" +#elif defined( _MSC_VER ) +# pragma warning( push ) +# pragma warning( disable : 4996 ) +#endif + +#if defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) +import VULKAN_HPP_STD_MODULE; #else -# include // std::stringstream +# if __cpp_lib_format +# include // std::format +# else +# include // std::stringstream +# endif #endif namespace VULKAN_HPP_NAMESPACE { + //========================== //=== BITMASKs to_string === //========================== @@ -76,12 +90,10 @@ namespace VULKAN_HPP_NAMESPACE result += "CositedChromaSamples | "; if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmax ) result += "SampledImageFilterMinmax | "; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) if ( value & FormatFeatureFlagBits::eVideoDecodeOutputKHR ) result += "VideoDecodeOutputKHR | "; if ( value & FormatFeatureFlagBits::eVideoDecodeDpbKHR ) result += "VideoDecodeDpbKHR | "; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ if ( value & FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) result += "AccelerationStructureVertexBufferKHR | "; if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicEXT ) @@ -90,12 +102,10 @@ namespace VULKAN_HPP_NAMESPACE result += "FragmentDensityMapEXT | "; if ( value & FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR ) result += "FragmentShadingRateAttachmentKHR | "; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) if ( value & FormatFeatureFlagBits::eVideoEncodeInputKHR ) result += "VideoEncodeInputKHR | "; if ( value & FormatFeatureFlagBits::eVideoEncodeDpbKHR ) result += "VideoEncodeDpbKHR | "; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -136,12 +146,16 @@ namespace VULKAN_HPP_NAMESPACE result += "SampleLocationsCompatibleDepthEXT | "; if ( value & ImageCreateFlagBits::eSubsampledEXT ) result += "SubsampledEXT | "; + if ( value & ImageCreateFlagBits::eDescriptorBufferCaptureReplayEXT ) + result += "DescriptorBufferCaptureReplayEXT | "; if ( value & ImageCreateFlagBits::eMultisampledRenderToSingleSampledEXT ) result += "MultisampledRenderToSingleSampledEXT | "; if ( value & ImageCreateFlagBits::e2DViewCompatibleEXT ) result += "2DViewCompatibleEXT | "; if ( value & ImageCreateFlagBits::eFragmentDensityMapOffsetQCOM ) result += "FragmentDensityMapOffsetQCOM | "; + if ( value & ImageCreateFlagBits::eVideoProfileIndependentKHR ) + result += "VideoProfileIndependentKHR | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -168,26 +182,24 @@ namespace VULKAN_HPP_NAMESPACE result += "TransientAttachment | "; if ( 
value & ImageUsageFlagBits::eInputAttachment ) result += "InputAttachment | "; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & ImageUsageFlagBits::eHostTransfer ) + result += "HostTransfer | "; if ( value & ImageUsageFlagBits::eVideoDecodeDstKHR ) result += "VideoDecodeDstKHR | "; if ( value & ImageUsageFlagBits::eVideoDecodeSrcKHR ) result += "VideoDecodeSrcKHR | "; if ( value & ImageUsageFlagBits::eVideoDecodeDpbKHR ) result += "VideoDecodeDpbKHR | "; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ if ( value & ImageUsageFlagBits::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | "; if ( value & ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR ) result += "FragmentShadingRateAttachmentKHR | "; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) if ( value & ImageUsageFlagBits::eVideoEncodeDstKHR ) result += "VideoEncodeDstKHR | "; if ( value & ImageUsageFlagBits::eVideoEncodeSrcKHR ) result += "VideoEncodeSrcKHR | "; if ( value & ImageUsageFlagBits::eVideoEncodeDpbKHR ) result += "VideoEncodeDpbKHR | "; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ if ( value & ImageUsageFlagBits::eAttachmentFeedbackLoopEXT ) result += "AttachmentFeedbackLoopEXT | "; if ( value & ImageUsageFlagBits::eInvocationMaskHUAWEI ) @@ -196,6 +208,10 @@ namespace VULKAN_HPP_NAMESPACE result += "SampleWeightQCOM | "; if ( value & ImageUsageFlagBits::eSampleBlockMatchQCOM ) result += "SampleBlockMatchQCOM | "; + if ( value & ImageUsageFlagBits::eVideoEncodeQuantizationDeltaMapKHR ) + result += "VideoEncodeQuantizationDeltaMapKHR | "; + if ( value & ImageUsageFlagBits::eVideoEncodeEmphasisMapKHR ) + result += "VideoEncodeEmphasisMapKHR | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -270,12 +286,12 @@ namespace VULKAN_HPP_NAMESPACE result += "SparseBinding | "; if ( value & QueueFlagBits::eProtected ) result += "Protected | "; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) if ( value & QueueFlagBits::eVideoDecodeKHR ) result += "VideoDecodeKHR | "; if ( value & QueueFlagBits::eVideoEncodeKHR ) result += "VideoEncodeKHR | "; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & QueueFlagBits::eOpticalFlowNV ) + result += "OpticalFlowNV | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -324,7 +340,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags value ) { if ( !value ) - return "{}"; + return "None"; std::string result; if ( value & PipelineStageFlagBits::eTopOfPipe ) @@ -383,15 +399,22 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } - VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags ) + VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags value ) { - return "{}"; + if ( !value ) + return "{}"; + + std::string result; + if ( value & MemoryMapFlagBits::ePlacedEXT ) + result += "PlacedEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } VULKAN_HPP_INLINE std::string to_string( ImageAspectFlags value ) { if ( !value ) - return "{}"; + return "None"; std::string result; if ( value & ImageAspectFlagBits::eColor ) @@ -509,6 +532,8 @@ namespace VULKAN_HPP_NAMESPACE result += "TaskShaderInvocationsEXT | "; if ( value & QueryPipelineStatisticFlagBits::eMeshShaderInvocationsEXT ) result += "MeshShaderInvocationsEXT | "; + if ( value & QueryPipelineStatisticFlagBits::eClusterCullingShaderInvocationsHUAWEI ) + result += "ClusterCullingShaderInvocationsHUAWEI | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -532,10 +557,8 @@ namespace VULKAN_HPP_NAMESPACE 
result += "WithAvailability | "; if ( value & QueryResultFlagBits::ePartial ) result += "Partial | "; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) if ( value & QueryResultFlagBits::eWithStatusKHR ) result += "WithStatusKHR | "; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -556,6 +579,10 @@ namespace VULKAN_HPP_NAMESPACE result += "Protected | "; if ( value & BufferCreateFlagBits::eDeviceAddressCaptureReplay ) result += "DeviceAddressCaptureReplay | "; + if ( value & BufferCreateFlagBits::eDescriptorBufferCaptureReplayEXT ) + result += "DescriptorBufferCaptureReplayEXT | "; + if ( value & BufferCreateFlagBits::eVideoProfileIndependentKHR ) + result += "VideoProfileIndependentKHR | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -586,30 +613,40 @@ namespace VULKAN_HPP_NAMESPACE result += "IndirectBuffer | "; if ( value & BufferUsageFlagBits::eShaderDeviceAddress ) result += "ShaderDeviceAddress | "; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) if ( value & BufferUsageFlagBits::eVideoDecodeSrcKHR ) result += "VideoDecodeSrcKHR | "; if ( value & BufferUsageFlagBits::eVideoDecodeDstKHR ) result += "VideoDecodeDstKHR | "; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ if ( value & BufferUsageFlagBits::eTransformFeedbackBufferEXT ) result += "TransformFeedbackBufferEXT | "; if ( value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) result += "TransformFeedbackCounterBufferEXT | "; if ( value & BufferUsageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & BufferUsageFlagBits::eExecutionGraphScratchAMDX ) + result += "ExecutionGraphScratchAMDX | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ if ( value & BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR ) result += "AccelerationStructureBuildInputReadOnlyKHR | "; if ( value & BufferUsageFlagBits::eAccelerationStructureStorageKHR ) result += "AccelerationStructureStorageKHR | "; if ( value & BufferUsageFlagBits::eShaderBindingTableKHR ) result += "ShaderBindingTableKHR | "; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) if ( value & BufferUsageFlagBits::eVideoEncodeDstKHR ) result += "VideoEncodeDstKHR | "; if ( value & BufferUsageFlagBits::eVideoEncodeSrcKHR ) result += "VideoEncodeSrcKHR | "; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & BufferUsageFlagBits::eSamplerDescriptorBufferEXT ) + result += "SamplerDescriptorBufferEXT | "; + if ( value & BufferUsageFlagBits::eResourceDescriptorBufferEXT ) + result += "ResourceDescriptorBufferEXT | "; + if ( value & BufferUsageFlagBits::ePushDescriptorsDescriptorBufferEXT ) + result += "PushDescriptorsDescriptorBufferEXT | "; + if ( value & BufferUsageFlagBits::eMicromapBuildInputReadOnlyEXT ) + result += "MicromapBuildInputReadOnlyEXT | "; + if ( value & BufferUsageFlagBits::eMicromapStorageEXT ) + result += "MicromapStorageEXT | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -627,6 +664,8 @@ namespace VULKAN_HPP_NAMESPACE std::string result; if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) result += "FragmentDensityMapDynamicEXT | "; + if ( value & ImageViewCreateFlagBits::eDescriptorBufferCaptureReplayEXT ) + result += "DescriptorBufferCaptureReplayEXT | "; if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT ) result += "FragmentDensityMapDeferredEXT | "; @@ -646,6 +685,8 @@ namespace VULKAN_HPP_NAMESPACE std::string result; if ( value & 
PipelineCacheCreateFlagBits::eExternallySynchronized ) result += "ExternallySynchronized | "; + if ( value & PipelineCacheCreateFlagBits::eInternallySynchronizedMergeKHR ) + result += "InternallySynchronizedMergeKHR | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -671,7 +712,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( CullModeFlags value ) { if ( !value ) - return "{}"; + return "None"; std::string result; if ( value & CullModeFlagBits::eFront ) @@ -714,10 +755,10 @@ namespace VULKAN_HPP_NAMESPACE result += "FailOnPipelineCompileRequired | "; if ( value & PipelineCreateFlagBits::eEarlyReturnOnFailure ) result += "EarlyReturnOnFailure | "; - if ( value & PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR ) - result += "RenderingFragmentShadingRateAttachmentKHR | "; - if ( value & PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT ) - result += "RenderingFragmentDensityMapAttachmentEXT | "; + if ( value & PipelineCreateFlagBits::eNoProtectedAccess ) + result += "NoProtectedAccess | "; + if ( value & PipelineCreateFlagBits::eProtectedAccessOnly ) + result += "ProtectedAccessOnly | "; if ( value & PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) result += "RayTracingNoNullAnyHitShadersKHR | "; if ( value & PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) @@ -734,6 +775,10 @@ namespace VULKAN_HPP_NAMESPACE result += "RayTracingShaderGroupHandleCaptureReplayKHR | "; if ( value & PipelineCreateFlagBits::eDeferCompileNV ) result += "DeferCompileNV | "; + if ( value & PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT ) + result += "RenderingFragmentDensityMapAttachmentEXT | "; + if ( value & PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR ) + result += "RenderingFragmentShadingRateAttachmentKHR | "; if ( value & PipelineCreateFlagBits::eCaptureStatisticsKHR ) result += "CaptureStatisticsKHR | "; if ( value & PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) @@ -742,6 +787,8 @@ namespace VULKAN_HPP_NAMESPACE result += "IndirectBindableNV | "; if ( value & PipelineCreateFlagBits::eLibraryKHR ) result += "LibraryKHR | "; + if ( value & PipelineCreateFlagBits::eDescriptorBufferEXT ) + result += "DescriptorBufferEXT | "; if ( value & PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT ) result += "RetainLinkTimeOptimizationInfoEXT | "; if ( value & PipelineCreateFlagBits::eLinkTimeOptimizationEXT ) @@ -752,6 +799,12 @@ namespace VULKAN_HPP_NAMESPACE result += "ColorAttachmentFeedbackLoopEXT | "; if ( value & PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT ) result += "DepthStencilAttachmentFeedbackLoopEXT | "; + if ( value & PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT ) + result += "RayTracingOpacityMicromapEXT | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & PipelineCreateFlagBits::eRayTracingDisplacementMicromapNV ) + result += "RayTracingDisplacementMicromapNV | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -867,6 +920,8 @@ namespace VULKAN_HPP_NAMESPACE result += "MeshEXT | "; if ( value & ShaderStageFlagBits::eSubpassShadingHUAWEI ) result += "SubpassShadingHUAWEI | "; + if ( value & ShaderStageFlagBits::eClusterCullingHUAWEI ) + result += "ClusterCullingHUAWEI | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -881,6 +936,8 @@ namespace VULKAN_HPP_NAMESPACE result += "SubsampledEXT | "; if ( value & 
SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT ) result += "SubsampledCoarseReconstructionEXT | "; + if ( value & SamplerCreateFlagBits::eDescriptorBufferCaptureReplayEXT ) + result += "DescriptorBufferCaptureReplayEXT | "; if ( value & SamplerCreateFlagBits::eNonSeamlessCubeMapEXT ) result += "NonSeamlessCubeMapEXT | "; if ( value & SamplerCreateFlagBits::eImageProcessingQCOM ) @@ -901,6 +958,10 @@ namespace VULKAN_HPP_NAMESPACE result += "UpdateAfterBind | "; if ( value & DescriptorPoolCreateFlagBits::eHostOnlyEXT ) result += "HostOnlyEXT | "; + if ( value & DescriptorPoolCreateFlagBits::eAllowOverallocationSetsNV ) + result += "AllowOverallocationSetsNV | "; + if ( value & DescriptorPoolCreateFlagBits::eAllowOverallocationPoolsNV ) + result += "AllowOverallocationPoolsNV | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -918,10 +979,18 @@ namespace VULKAN_HPP_NAMESPACE std::string result; if ( value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool ) result += "UpdateAfterBindPool | "; - if ( value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) - result += "PushDescriptorKHR | "; + if ( value & DescriptorSetLayoutCreateFlagBits::ePushDescriptor ) + result += "PushDescriptor | "; + if ( value & DescriptorSetLayoutCreateFlagBits::eDescriptorBufferEXT ) + result += "DescriptorBufferEXT | "; + if ( value & DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT ) + result += "EmbeddedImmutableSamplersEXT | "; + if ( value & DescriptorSetLayoutCreateFlagBits::eIndirectBindableNV ) + result += "IndirectBindableNV | "; if ( value & DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT ) result += "HostOnlyPoolEXT | "; + if ( value & DescriptorSetLayoutCreateFlagBits::ePerStageNV ) + result += "PerStageNV | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -929,7 +998,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( AccessFlags value ) { if ( !value ) - return "{}"; + return "None"; std::string result; if ( value & AccessFlagBits::eIndirectCommandRead ) @@ -1018,6 +1087,8 @@ namespace VULKAN_HPP_NAMESPACE result += "ViewLocal | "; if ( value & DependencyFlagBits::eFeedbackLoopEXT ) result += "FeedbackLoopEXT | "; + if ( value & DependencyFlagBits::eQueueFamilyOwnershipTransferUseAllStagesKHR ) + result += "QueueFamilyOwnershipTransferUseAllStagesKHR | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -1178,6 +1249,10 @@ namespace VULKAN_HPP_NAMESPACE result += "Clustered | "; if ( value & SubgroupFeatureFlagBits::eQuad ) result += "Quad | "; + if ( value & SubgroupFeatureFlagBits::eRotate ) + result += "Rotate | "; + if ( value & SubgroupFeatureFlagBits::eRotateClustered ) + result += "RotateClustered | "; if ( value & SubgroupFeatureFlagBits::ePartitionedNV ) result += "PartitionedNV | "; @@ -1264,6 +1339,18 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_FUCHSIA*/ if ( value & ExternalMemoryHandleTypeFlagBits::eRdmaAddressNV ) result += "RdmaAddressNV | "; +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + if ( value & ExternalMemoryHandleTypeFlagBits::eScreenBufferQNX ) + result += "ScreenBufferQNX | "; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ +#if defined( VK_USE_PLATFORM_METAL_EXT ) + if ( value & ExternalMemoryHandleTypeFlagBits::eMtlbufferEXT ) + result += "MtlbufferEXT | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eMtltextureEXT ) + result += "MtltextureEXT | "; + if ( value & ExternalMemoryHandleTypeFlagBits::eMtlheapEXT ) + result += "MtlheapEXT | "; 
+#endif /*VK_USE_PLATFORM_METAL_EXT*/ return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -1401,7 +1488,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( ResolveModeFlags value ) { if ( !value ) - return "{}"; + return "None"; std::string result; if ( value & ResolveModeFlagBits::eSampleZero ) @@ -1412,6 +1499,10 @@ namespace VULKAN_HPP_NAMESPACE result += "Min | "; if ( value & ResolveModeFlagBits::eMax ) result += "Max | "; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + if ( value & ResolveModeFlagBits::eExternalFormatDownsampleANDROID ) + result += "ExternalFormatDownsampleANDROID | "; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -1478,7 +1569,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags2 value ) { if ( !value ) - return "{}"; + return "None"; std::string result; if ( value & PipelineStageFlagBits2::eTopOfPipe ) @@ -1529,12 +1620,10 @@ namespace VULKAN_HPP_NAMESPACE result += "VertexAttributeInput | "; if ( value & PipelineStageFlagBits2::ePreRasterizationShaders ) result += "PreRasterizationShaders | "; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) if ( value & PipelineStageFlagBits2::eVideoDecodeKHR ) result += "VideoDecodeKHR | "; if ( value & PipelineStageFlagBits2::eVideoEncodeKHR ) result += "VideoEncodeKHR | "; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ if ( value & PipelineStageFlagBits2::eTransformFeedbackEXT ) result += "TransformFeedbackEXT | "; if ( value & PipelineStageFlagBits2::eConditionalRenderingEXT ) @@ -1553,12 +1642,20 @@ namespace VULKAN_HPP_NAMESPACE result += "TaskShaderEXT | "; if ( value & PipelineStageFlagBits2::eMeshShaderEXT ) result += "MeshShaderEXT | "; - if ( value & PipelineStageFlagBits2::eSubpassShadingHUAWEI ) - result += "SubpassShadingHUAWEI | "; + if ( value & PipelineStageFlagBits2::eSubpassShaderHUAWEI ) + result += "SubpassShaderHUAWEI | "; if ( value & PipelineStageFlagBits2::eInvocationMaskHUAWEI ) result += "InvocationMaskHUAWEI | "; if ( value & PipelineStageFlagBits2::eAccelerationStructureCopyKHR ) result += "AccelerationStructureCopyKHR | "; + if ( value & PipelineStageFlagBits2::eMicromapBuildEXT ) + result += "MicromapBuildEXT | "; + if ( value & PipelineStageFlagBits2::eClusterCullingShaderHUAWEI ) + result += "ClusterCullingShaderHUAWEI | "; + if ( value & PipelineStageFlagBits2::eOpticalFlowNV ) + result += "OpticalFlowNV | "; + if ( value & PipelineStageFlagBits2::eConvertCooperativeVectorMatrixNV ) + result += "ConvertCooperativeVectorMatrixNV | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -1566,7 +1663,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( AccessFlags2 value ) { if ( !value ) - return "{}"; + return "None"; std::string result; if ( value & AccessFlagBits2::eIndirectCommandRead ) @@ -1609,7 +1706,6 @@ namespace VULKAN_HPP_NAMESPACE result += "ShaderStorageRead | "; if ( value & AccessFlagBits2::eShaderStorageWrite ) result += "ShaderStorageWrite | "; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) if ( value & AccessFlagBits2::eVideoDecodeReadKHR ) result += "VideoDecodeReadKHR | "; if ( value & AccessFlagBits2::eVideoDecodeWriteKHR ) @@ -1618,7 +1714,6 @@ namespace VULKAN_HPP_NAMESPACE result += "VideoEncodeReadKHR | "; if ( value & AccessFlagBits2::eVideoEncodeWriteKHR ) result += "VideoEncodeWriteKHR | "; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ if ( value & AccessFlagBits2::eTransformFeedbackWriteEXT ) result += 
"TransformFeedbackWriteEXT | "; if ( value & AccessFlagBits2::eTransformFeedbackCounterReadEXT ) @@ -1641,10 +1736,20 @@ namespace VULKAN_HPP_NAMESPACE result += "FragmentDensityMapReadEXT | "; if ( value & AccessFlagBits2::eColorAttachmentReadNoncoherentEXT ) result += "ColorAttachmentReadNoncoherentEXT | "; + if ( value & AccessFlagBits2::eDescriptorBufferReadEXT ) + result += "DescriptorBufferReadEXT | "; if ( value & AccessFlagBits2::eInvocationMaskReadHUAWEI ) result += "InvocationMaskReadHUAWEI | "; if ( value & AccessFlagBits2::eShaderBindingTableReadKHR ) result += "ShaderBindingTableReadKHR | "; + if ( value & AccessFlagBits2::eMicromapReadEXT ) + result += "MicromapReadEXT | "; + if ( value & AccessFlagBits2::eMicromapWriteEXT ) + result += "MicromapWriteEXT | "; + if ( value & AccessFlagBits2::eOpticalFlowReadNV ) + result += "OpticalFlowReadNV | "; + if ( value & AccessFlagBits2::eOpticalFlowWriteNV ) + result += "OpticalFlowWriteNV | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -1675,6 +1780,8 @@ namespace VULKAN_HPP_NAMESPACE result += "Resuming | "; if ( value & RenderingFlagBits::eEnableLegacyDitheringEXT ) result += "EnableLegacyDitheringEXT | "; + if ( value & RenderingFlagBits::eContentsInlineKHR ) + result += "ContentsInlineKHR | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -1711,8 +1818,6 @@ namespace VULKAN_HPP_NAMESPACE result += "BlitDst | "; if ( value & FormatFeatureFlagBits2::eSampledImageFilterLinear ) result += "SampledImageFilterLinear | "; - if ( value & FormatFeatureFlagBits2::eSampledImageFilterCubic ) - result += "SampledImageFilterCubic | "; if ( value & FormatFeatureFlagBits2::eTransferSrc ) result += "TransferSrc | "; if ( value & FormatFeatureFlagBits2::eTransferDst ) @@ -1739,24 +1844,26 @@ namespace VULKAN_HPP_NAMESPACE result += "StorageWriteWithoutFormat | "; if ( value & FormatFeatureFlagBits2::eSampledImageDepthComparison ) result += "SampledImageDepthComparison | "; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & FormatFeatureFlagBits2::eSampledImageFilterCubic ) + result += "SampledImageFilterCubic | "; + if ( value & FormatFeatureFlagBits2::eHostImageTransfer ) + result += "HostImageTransfer | "; if ( value & FormatFeatureFlagBits2::eVideoDecodeOutputKHR ) result += "VideoDecodeOutputKHR | "; if ( value & FormatFeatureFlagBits2::eVideoDecodeDpbKHR ) result += "VideoDecodeDpbKHR | "; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ if ( value & FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR ) result += "AccelerationStructureVertexBufferKHR | "; if ( value & FormatFeatureFlagBits2::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | "; if ( value & FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR ) result += "FragmentShadingRateAttachmentKHR | "; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) if ( value & FormatFeatureFlagBits2::eVideoEncodeInputKHR ) result += "VideoEncodeInputKHR | "; if ( value & FormatFeatureFlagBits2::eVideoEncodeDpbKHR ) result += "VideoEncodeDpbKHR | "; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & FormatFeatureFlagBits2::eAccelerationStructureRadiusBufferNV ) + result += "AccelerationStructureRadiusBufferNV | "; if ( value & FormatFeatureFlagBits2::eLinearColorAttachmentNV ) result += "LinearColorAttachmentNV | "; if ( value & FormatFeatureFlagBits2::eWeightImageQCOM ) @@ -1767,6 +1874,194 @@ namespace VULKAN_HPP_NAMESPACE result += "BlockMatchingQCOM | "; if ( value & FormatFeatureFlagBits2::eBoxFilterSampledQCOM ) result += 
"BoxFilterSampledQCOM | "; + if ( value & FormatFeatureFlagBits2::eOpticalFlowImageNV ) + result += "OpticalFlowImageNV | "; + if ( value & FormatFeatureFlagBits2::eOpticalFlowVectorNV ) + result += "OpticalFlowVectorNV | "; + if ( value & FormatFeatureFlagBits2::eOpticalFlowCostNV ) + result += "OpticalFlowCostNV | "; + if ( value & FormatFeatureFlagBits2::eVideoEncodeQuantizationDeltaMapKHR ) + result += "VideoEncodeQuantizationDeltaMapKHR | "; + if ( value & FormatFeatureFlagBits2::eVideoEncodeEmphasisMapKHR ) + result += "VideoEncodeEmphasisMapKHR | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_VERSION_1_4 === + + VULKAN_HPP_INLINE std::string to_string( MemoryUnmapFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & MemoryUnmapFlagBits::eReserveEXT ) + result += "ReserveEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags2 value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PipelineCreateFlagBits2::eDisableOptimization ) + result += "DisableOptimization | "; + if ( value & PipelineCreateFlagBits2::eAllowDerivatives ) + result += "AllowDerivatives | "; + if ( value & PipelineCreateFlagBits2::eDerivative ) + result += "Derivative | "; + if ( value & PipelineCreateFlagBits2::eViewIndexFromDeviceIndex ) + result += "ViewIndexFromDeviceIndex | "; + if ( value & PipelineCreateFlagBits2::eDispatchBase ) + result += "DispatchBase | "; + if ( value & PipelineCreateFlagBits2::eFailOnPipelineCompileRequired ) + result += "FailOnPipelineCompileRequired | "; + if ( value & PipelineCreateFlagBits2::eEarlyReturnOnFailure ) + result += "EarlyReturnOnFailure | "; + if ( value & PipelineCreateFlagBits2::eNoProtectedAccess ) + result += "NoProtectedAccess | "; + if ( value & PipelineCreateFlagBits2::eProtectedAccessOnly ) + result += "ProtectedAccessOnly | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & PipelineCreateFlagBits2::eExecutionGraphAMDX ) + result += "ExecutionGraphAMDX | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & PipelineCreateFlagBits2::eRayTracingAllowSpheresAndLinearSweptSpheresNV ) + result += "RayTracingAllowSpheresAndLinearSweptSpheresNV | "; + if ( value & PipelineCreateFlagBits2::eEnableLegacyDitheringEXT ) + result += "EnableLegacyDitheringEXT | "; + if ( value & PipelineCreateFlagBits2::eDeferCompileNV ) + result += "DeferCompileNV | "; + if ( value & PipelineCreateFlagBits2::eCaptureStatisticsKHR ) + result += "CaptureStatisticsKHR | "; + if ( value & PipelineCreateFlagBits2::eCaptureInternalRepresentationsKHR ) + result += "CaptureInternalRepresentationsKHR | "; + if ( value & PipelineCreateFlagBits2::eLinkTimeOptimizationEXT ) + result += "LinkTimeOptimizationEXT | "; + if ( value & PipelineCreateFlagBits2::eRetainLinkTimeOptimizationInfoEXT ) + result += "RetainLinkTimeOptimizationInfoEXT | "; + if ( value & PipelineCreateFlagBits2::eLibraryKHR ) + result += "LibraryKHR | "; + if ( value & PipelineCreateFlagBits2::eRayTracingSkipTrianglesKHR ) + result += "RayTracingSkipTrianglesKHR | "; + if ( value & PipelineCreateFlagBits2::eRayTracingSkipAabbsKHR ) + result += "RayTracingSkipAabbsKHR | "; + if ( value & PipelineCreateFlagBits2::eRayTracingNoNullAnyHitShadersKHR ) + result += "RayTracingNoNullAnyHitShadersKHR | "; + if ( value & PipelineCreateFlagBits2::eRayTracingNoNullClosestHitShadersKHR ) + result += "RayTracingNoNullClosestHitShadersKHR | "; + if ( value & 
PipelineCreateFlagBits2::eRayTracingNoNullMissShadersKHR ) + result += "RayTracingNoNullMissShadersKHR | "; + if ( value & PipelineCreateFlagBits2::eRayTracingNoNullIntersectionShadersKHR ) + result += "RayTracingNoNullIntersectionShadersKHR | "; + if ( value & PipelineCreateFlagBits2::eRayTracingShaderGroupHandleCaptureReplayKHR ) + result += "RayTracingShaderGroupHandleCaptureReplayKHR | "; + if ( value & PipelineCreateFlagBits2::eIndirectBindableNV ) + result += "IndirectBindableNV | "; + if ( value & PipelineCreateFlagBits2::eRayTracingAllowMotionNV ) + result += "RayTracingAllowMotionNV | "; + if ( value & PipelineCreateFlagBits2::eRenderingFragmentShadingRateAttachmentKHR ) + result += "RenderingFragmentShadingRateAttachmentKHR | "; + if ( value & PipelineCreateFlagBits2::eRenderingFragmentDensityMapAttachmentEXT ) + result += "RenderingFragmentDensityMapAttachmentEXT | "; + if ( value & PipelineCreateFlagBits2::eRayTracingOpacityMicromapEXT ) + result += "RayTracingOpacityMicromapEXT | "; + if ( value & PipelineCreateFlagBits2::eColorAttachmentFeedbackLoopEXT ) + result += "ColorAttachmentFeedbackLoopEXT | "; + if ( value & PipelineCreateFlagBits2::eDepthStencilAttachmentFeedbackLoopEXT ) + result += "DepthStencilAttachmentFeedbackLoopEXT | "; + if ( value & PipelineCreateFlagBits2::eRayTracingDisplacementMicromapNV ) + result += "RayTracingDisplacementMicromapNV | "; + if ( value & PipelineCreateFlagBits2::eDescriptorBufferEXT ) + result += "DescriptorBufferEXT | "; + if ( value & PipelineCreateFlagBits2::eDisallowOpacityMicromapARM ) + result += "DisallowOpacityMicromapARM | "; + if ( value & PipelineCreateFlagBits2::eCaptureDataKHR ) + result += "CaptureDataKHR | "; + if ( value & PipelineCreateFlagBits2::eIndirectBindableEXT ) + result += "IndirectBindableEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags2 value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & BufferUsageFlagBits2::eTransferSrc ) + result += "TransferSrc | "; + if ( value & BufferUsageFlagBits2::eTransferDst ) + result += "TransferDst | "; + if ( value & BufferUsageFlagBits2::eUniformTexelBuffer ) + result += "UniformTexelBuffer | "; + if ( value & BufferUsageFlagBits2::eStorageTexelBuffer ) + result += "StorageTexelBuffer | "; + if ( value & BufferUsageFlagBits2::eUniformBuffer ) + result += "UniformBuffer | "; + if ( value & BufferUsageFlagBits2::eStorageBuffer ) + result += "StorageBuffer | "; + if ( value & BufferUsageFlagBits2::eIndexBuffer ) + result += "IndexBuffer | "; + if ( value & BufferUsageFlagBits2::eVertexBuffer ) + result += "VertexBuffer | "; + if ( value & BufferUsageFlagBits2::eIndirectBuffer ) + result += "IndirectBuffer | "; + if ( value & BufferUsageFlagBits2::eShaderDeviceAddress ) + result += "ShaderDeviceAddress | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & BufferUsageFlagBits2::eExecutionGraphScratchAMDX ) + result += "ExecutionGraphScratchAMDX | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & BufferUsageFlagBits2::eConditionalRenderingEXT ) + result += "ConditionalRenderingEXT | "; + if ( value & BufferUsageFlagBits2::eShaderBindingTableKHR ) + result += "ShaderBindingTableKHR | "; + if ( value & BufferUsageFlagBits2::eTransformFeedbackBufferEXT ) + result += "TransformFeedbackBufferEXT | "; + if ( value & BufferUsageFlagBits2::eTransformFeedbackCounterBufferEXT ) + result += "TransformFeedbackCounterBufferEXT | "; + if ( value & 
BufferUsageFlagBits2::eVideoDecodeSrcKHR ) + result += "VideoDecodeSrcKHR | "; + if ( value & BufferUsageFlagBits2::eVideoDecodeDstKHR ) + result += "VideoDecodeDstKHR | "; + if ( value & BufferUsageFlagBits2::eVideoEncodeDstKHR ) + result += "VideoEncodeDstKHR | "; + if ( value & BufferUsageFlagBits2::eVideoEncodeSrcKHR ) + result += "VideoEncodeSrcKHR | "; + if ( value & BufferUsageFlagBits2::eAccelerationStructureBuildInputReadOnlyKHR ) + result += "AccelerationStructureBuildInputReadOnlyKHR | "; + if ( value & BufferUsageFlagBits2::eAccelerationStructureStorageKHR ) + result += "AccelerationStructureStorageKHR | "; + if ( value & BufferUsageFlagBits2::eSamplerDescriptorBufferEXT ) + result += "SamplerDescriptorBufferEXT | "; + if ( value & BufferUsageFlagBits2::eResourceDescriptorBufferEXT ) + result += "ResourceDescriptorBufferEXT | "; + if ( value & BufferUsageFlagBits2::ePushDescriptorsDescriptorBufferEXT ) + result += "PushDescriptorsDescriptorBufferEXT | "; + if ( value & BufferUsageFlagBits2::eMicromapBuildInputReadOnlyEXT ) + result += "MicromapBuildInputReadOnlyEXT | "; + if ( value & BufferUsageFlagBits2::eMicromapStorageEXT ) + result += "MicromapStorageEXT | "; + if ( value & BufferUsageFlagBits2::ePreprocessBufferEXT ) + result += "PreprocessBufferEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( HostImageCopyFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & HostImageCopyFlagBits::eMemcpy ) + result += "Memcpy | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -1805,6 +2100,8 @@ namespace VULKAN_HPP_NAMESPACE result += "Protected | "; if ( value & SwapchainCreateFlagBitsKHR::eMutableFormat ) result += "MutableFormat | "; + if ( value & SwapchainCreateFlagBitsKHR::eDeferredMemoryAllocationEXT ) + result += "DeferredMemoryAllocationEXT | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -1952,25 +2249,26 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } -#if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_queue === VULKAN_HPP_INLINE std::string to_string( VideoCodecOperationFlagsKHR value ) { if ( !value ) - return "{}"; + return "None"; std::string result; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - if ( value & VideoCodecOperationFlagBitsKHR::eEncodeH264EXT ) - result += "EncodeH264EXT | "; - if ( value & VideoCodecOperationFlagBitsKHR::eEncodeH265EXT ) - result += "EncodeH265EXT | "; - if ( value & VideoCodecOperationFlagBitsKHR::eDecodeH264EXT ) - result += "DecodeH264EXT | "; - if ( value & VideoCodecOperationFlagBitsKHR::eDecodeH265EXT ) - result += "DecodeH265EXT | "; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & VideoCodecOperationFlagBitsKHR::eEncodeH264 ) + result += "EncodeH264 | "; + if ( value & VideoCodecOperationFlagBitsKHR::eEncodeH265 ) + result += "EncodeH265 | "; + if ( value & VideoCodecOperationFlagBitsKHR::eDecodeH264 ) + result += "DecodeH264 | "; + if ( value & VideoCodecOperationFlagBitsKHR::eDecodeH265 ) + result += "DecodeH265 | "; + if ( value & VideoCodecOperationFlagBitsKHR::eDecodeAv1 ) + result += "DecodeAv1 | "; + if ( value & VideoCodecOperationFlagBitsKHR::eEncodeAv1 ) + result += "EncodeAv1 | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -1978,7 +2276,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( VideoChromaSubsamplingFlagsKHR value ) { if ( !value ) - return "{}"; + return 
"Invalid"; std::string result; if ( value & VideoChromaSubsamplingFlagBitsKHR::eMonochrome ) @@ -1996,7 +2294,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( VideoComponentBitDepthFlagsKHR value ) { if ( !value ) - return "{}"; + return "Invalid"; std::string result; if ( value & VideoComponentBitDepthFlagBitsKHR::e8 ) @@ -2031,13 +2329,30 @@ namespace VULKAN_HPP_NAMESPACE std::string result; if ( value & VideoSessionCreateFlagBitsKHR::eProtectedContent ) result += "ProtectedContent | "; + if ( value & VideoSessionCreateFlagBitsKHR::eAllowEncodeParameterOptimizations ) + result += "AllowEncodeParameterOptimizations | "; + if ( value & VideoSessionCreateFlagBitsKHR::eInlineQueries ) + result += "InlineQueries | "; + if ( value & VideoSessionCreateFlagBitsKHR::eAllowEncodeQuantizationDeltaMap ) + result += "AllowEncodeQuantizationDeltaMap | "; + if ( value & VideoSessionCreateFlagBitsKHR::eAllowEncodeEmphasisMap ) + result += "AllowEncodeEmphasisMap | "; + if ( value & VideoSessionCreateFlagBitsKHR::eInlineSessionParameters ) + result += "InlineSessionParameters | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } - VULKAN_HPP_INLINE std::string to_string( VideoSessionParametersCreateFlagsKHR ) + VULKAN_HPP_INLINE std::string to_string( VideoSessionParametersCreateFlagsKHR value ) { - return "{}"; + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoSessionParametersCreateFlagBitsKHR::eQuantizationMapCompatible ) + result += "QuantizationMapCompatible | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } VULKAN_HPP_INLINE std::string to_string( VideoBeginCodingFlagsKHR ) @@ -2058,18 +2373,14 @@ namespace VULKAN_HPP_NAMESPACE std::string result; if ( value & VideoCodingControlFlagBitsKHR::eReset ) result += "Reset | "; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) if ( value & VideoCodingControlFlagBitsKHR::eEncodeRateControl ) result += "EncodeRateControl | "; - if ( value & VideoCodingControlFlagBitsKHR::eEncodeRateControlLayer ) - result += "EncodeRateControlLayer | "; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & VideoCodingControlFlagBitsKHR::eEncodeQualityLevel ) + result += "EncodeQualityLevel | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_decode_queue === VULKAN_HPP_INLINE std::string to_string( VideoDecodeCapabilityFlagsKHR value ) @@ -2089,7 +2400,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( VideoDecodeUsageFlagsKHR value ) { if ( !value ) - return "{}"; + return "Default"; std::string result; if ( value & VideoDecodeUsageFlagBitsKHR::eTranscoding ) @@ -2106,7 +2417,6 @@ namespace VULKAN_HPP_NAMESPACE { return "{}"; } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ //=== VK_EXT_transform_feedback === @@ -2115,251 +2425,263 @@ namespace VULKAN_HPP_NAMESPACE return "{}"; } -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_EXT_video_encode_h264 === + //=== VK_KHR_video_encode_h264 === - VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CapabilityFlagsEXT value ) + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CapabilityFlagsKHR value ) { if ( !value ) return "{}"; std::string result; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDirect8X8InferenceEnabled ) - result += "Direct8X8InferenceEnabled | "; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDirect8X8InferenceDisabled ) - result += "Direct8X8InferenceDisabled | "; - if ( 
value & VideoEncodeH264CapabilityFlagBitsEXT::eSeparateColourPlane ) - result += "SeparateColourPlane | "; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eQpprimeYZeroTransformBypass ) - result += "QpprimeYZeroTransformBypass | "; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eScalingLists ) - result += "ScalingLists | "; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eHrdCompliance ) + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::eHrdCompliance ) result += "HrdCompliance | "; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eChromaQpOffset ) - result += "ChromaQpOffset | "; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eSecondChromaQpOffset ) - result += "SecondChromaQpOffset | "; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::ePicInitQpMinus26 ) - result += "PicInitQpMinus26 | "; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eWeightedPred ) - result += "WeightedPred | "; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBipredExplicit ) - result += "WeightedBipredExplicit | "; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBipredImplicit ) - result += "WeightedBipredImplicit | "; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eWeightedPredNoTable ) - result += "WeightedPredNoTable | "; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eTransform8X8 ) - result += "Transform8X8 | "; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eCabac ) - result += "Cabac | "; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eCavlc ) - result += "Cavlc | "; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterDisabled ) - result += "DeblockingFilterDisabled | "; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterEnabled ) - result += "DeblockingFilterEnabled | "; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterPartial ) - result += "DeblockingFilterPartial | "; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDisableDirectSpatialMvPred ) - result += "DisableDirectSpatialMvPred | "; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eMultipleSlicePerFrame ) - result += "MultipleSlicePerFrame | "; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eSliceMbCount ) - result += "SliceMbCount | "; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eRowUnalignedSlice ) + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::ePredictionWeightTableGenerated ) + result += "PredictionWeightTableGenerated | "; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::eRowUnalignedSlice ) result += "RowUnalignedSlice | "; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eDifferentSliceType ) + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::eDifferentSliceType ) result += "DifferentSliceType | "; - if ( value & VideoEncodeH264CapabilityFlagBitsEXT::eBFrameInL1List ) + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::eBFrameInL0List ) + result += "BFrameInL0List | "; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::eBFrameInL1List ) result += "BFrameInL1List | "; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::ePerPictureTypeMinMaxQp ) + result += "PerPictureTypeMinMaxQp | "; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::ePerSliceConstantQp ) + result += "PerSliceConstantQp | "; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::eGeneratePrefixNalu ) + result += "GeneratePrefixNalu | "; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::eMbQpDiffWraparound ) + result += "MbQpDiffWraparound | "; return "{ " + result.substr( 0, 
result.size() - 3 ) + " }"; } - VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264InputModeFlagsEXT value ) + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264StdFlagsKHR value ) { if ( !value ) return "{}"; std::string result; - if ( value & VideoEncodeH264InputModeFlagBitsEXT::eFrame ) - result += "Frame | "; - if ( value & VideoEncodeH264InputModeFlagBitsEXT::eSlice ) - result += "Slice | "; - if ( value & VideoEncodeH264InputModeFlagBitsEXT::eNonVcl ) - result += "NonVcl | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eSeparateColorPlaneFlagSet ) + result += "SeparateColorPlaneFlagSet | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eQpprimeYZeroTransformBypassFlagSet ) + result += "QpprimeYZeroTransformBypassFlagSet | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eScalingMatrixPresentFlagSet ) + result += "ScalingMatrixPresentFlagSet | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eChromaQpIndexOffset ) + result += "ChromaQpIndexOffset | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eSecondChromaQpIndexOffset ) + result += "SecondChromaQpIndexOffset | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::ePicInitQpMinus26 ) + result += "PicInitQpMinus26 | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eWeightedPredFlagSet ) + result += "WeightedPredFlagSet | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eWeightedBipredIdcExplicit ) + result += "WeightedBipredIdcExplicit | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eWeightedBipredIdcImplicit ) + result += "WeightedBipredIdcImplicit | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eTransform8X8ModeFlagSet ) + result += "Transform8X8ModeFlagSet | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eDirectSpatialMvPredFlagUnset ) + result += "DirectSpatialMvPredFlagUnset | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eEntropyCodingModeFlagUnset ) + result += "EntropyCodingModeFlagUnset | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eEntropyCodingModeFlagSet ) + result += "EntropyCodingModeFlagSet | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eDirect8X8InferenceFlagUnset ) + result += "Direct8X8InferenceFlagUnset | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eConstrainedIntraPredFlagSet ) + result += "ConstrainedIntraPredFlagSet | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterDisabled ) + result += "DeblockingFilterDisabled | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterEnabled ) + result += "DeblockingFilterEnabled | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterPartial ) + result += "DeblockingFilterPartial | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eSliceQpDelta ) + result += "SliceQpDelta | "; + if ( value & VideoEncodeH264StdFlagBitsKHR::eDifferentSliceQpDelta ) + result += "DifferentSliceQpDelta | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } - VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264OutputModeFlagsEXT value ) + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264RateControlFlagsKHR value ) { if ( !value ) return "{}"; std::string result; - if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eFrame ) - result += "Frame | "; - if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eSlice ) - result += "Slice | "; - if ( value & VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl ) - result += "NonVcl | "; + if ( value & VideoEncodeH264RateControlFlagBitsKHR::eAttemptHrdCompliance ) + result += "AttemptHrdCompliance | "; + if ( value & VideoEncodeH264RateControlFlagBitsKHR::eRegularGop ) + 
result += "RegularGop | "; + if ( value & VideoEncodeH264RateControlFlagBitsKHR::eReferencePatternFlat ) + result += "ReferencePatternFlat | "; + if ( value & VideoEncodeH264RateControlFlagBitsKHR::eReferencePatternDyadic ) + result += "ReferencePatternDyadic | "; + if ( value & VideoEncodeH264RateControlFlagBitsKHR::eTemporalLayerPatternDyadic ) + result += "TemporalLayerPatternDyadic | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_EXT_video_encode_h265 === + //=== VK_KHR_video_encode_h265 === - VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CapabilityFlagsEXT value ) + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CapabilityFlagsKHR value ) { if ( !value ) return "{}"; std::string result; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eSeparateColourPlane ) - result += "SeparateColourPlane | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eScalingLists ) - result += "ScalingLists | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eSampleAdaptiveOffsetEnabled ) - result += "SampleAdaptiveOffsetEnabled | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::ePcmEnable ) - result += "PcmEnable | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eSpsTemporalMvpEnabled ) - result += "SpsTemporalMvpEnabled | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eHrdCompliance ) + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eHrdCompliance ) result += "HrdCompliance | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eInitQpMinus26 ) - result += "InitQpMinus26 | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eLog2ParallelMergeLevelMinus2 ) - result += "Log2ParallelMergeLevelMinus2 | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eSignDataHidingEnabled ) - result += "SignDataHidingEnabled | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eTransformSkipEnabled ) - result += "TransformSkipEnabled | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eTransformSkipDisabled ) - result += "TransformSkipDisabled | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::ePpsSliceChromaQpOffsetsPresent ) - result += "PpsSliceChromaQpOffsetsPresent | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eWeightedPred ) - result += "WeightedPred | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eWeightedBipred ) - result += "WeightedBipred | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eWeightedPredNoTable ) - result += "WeightedPredNoTable | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eTransquantBypassEnabled ) - result += "TransquantBypassEnabled | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eEntropyCodingSyncEnabled ) - result += "EntropyCodingSyncEnabled | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eDeblockingFilterOverrideEnabled ) - result += "DeblockingFilterOverrideEnabled | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eMultipleTilePerFrame ) - result += "MultipleTilePerFrame | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eMultipleSlicePerTile ) - result += "MultipleSlicePerTile | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eMultipleTilePerSlice ) - result += "MultipleTilePerSlice | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eSliceSegmentCtbCount ) - result += "SliceSegmentCtbCount | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eRowUnalignedSliceSegment ) + if ( value & 
VideoEncodeH265CapabilityFlagBitsKHR::ePredictionWeightTableGenerated ) + result += "PredictionWeightTableGenerated | "; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eRowUnalignedSliceSegment ) result += "RowUnalignedSliceSegment | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eDependentSliceSegment ) - result += "DependentSliceSegment | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eDifferentSliceType ) - result += "DifferentSliceType | "; - if ( value & VideoEncodeH265CapabilityFlagBitsEXT::eBFrameInL1List ) + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eDifferentSliceSegmentType ) + result += "DifferentSliceSegmentType | "; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eBFrameInL0List ) + result += "BFrameInL0List | "; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eBFrameInL1List ) result += "BFrameInL1List | "; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::ePerPictureTypeMinMaxQp ) + result += "PerPictureTypeMinMaxQp | "; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::ePerSliceSegmentConstantQp ) + result += "PerSliceSegmentConstantQp | "; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eMultipleTilesPerSliceSegment ) + result += "MultipleTilesPerSliceSegment | "; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eMultipleSliceSegmentsPerTile ) + result += "MultipleSliceSegmentsPerTile | "; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eCuQpDiffWraparound ) + result += "CuQpDiffWraparound | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } - VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265InputModeFlagsEXT value ) + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265StdFlagsKHR value ) { if ( !value ) return "{}"; std::string result; - if ( value & VideoEncodeH265InputModeFlagBitsEXT::eFrame ) - result += "Frame | "; - if ( value & VideoEncodeH265InputModeFlagBitsEXT::eSliceSegment ) - result += "SliceSegment | "; - if ( value & VideoEncodeH265InputModeFlagBitsEXT::eNonVcl ) - result += "NonVcl | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eSeparateColorPlaneFlagSet ) + result += "SeparateColorPlaneFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eSampleAdaptiveOffsetEnabledFlagSet ) + result += "SampleAdaptiveOffsetEnabledFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eScalingListDataPresentFlagSet ) + result += "ScalingListDataPresentFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::ePcmEnabledFlagSet ) + result += "PcmEnabledFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eSpsTemporalMvpEnabledFlagSet ) + result += "SpsTemporalMvpEnabledFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eInitQpMinus26 ) + result += "InitQpMinus26 | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eWeightedPredFlagSet ) + result += "WeightedPredFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eWeightedBipredFlagSet ) + result += "WeightedBipredFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eLog2ParallelMergeLevelMinus2 ) + result += "Log2ParallelMergeLevelMinus2 | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eSignDataHidingEnabledFlagSet ) + result += "SignDataHidingEnabledFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eTransformSkipEnabledFlagSet ) + result += "TransformSkipEnabledFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eTransformSkipEnabledFlagUnset ) + result += "TransformSkipEnabledFlagUnset | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::ePpsSliceChromaQpOffsetsPresentFlagSet ) + 
result += "PpsSliceChromaQpOffsetsPresentFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eTransquantBypassEnabledFlagSet ) + result += "TransquantBypassEnabledFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eConstrainedIntraPredFlagSet ) + result += "ConstrainedIntraPredFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eEntropyCodingSyncEnabledFlagSet ) + result += "EntropyCodingSyncEnabledFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eDeblockingFilterOverrideEnabledFlagSet ) + result += "DeblockingFilterOverrideEnabledFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eDependentSliceSegmentsEnabledFlagSet ) + result += "DependentSliceSegmentsEnabledFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eDependentSliceSegmentFlagSet ) + result += "DependentSliceSegmentFlagSet | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eSliceQpDelta ) + result += "SliceQpDelta | "; + if ( value & VideoEncodeH265StdFlagBitsKHR::eDifferentSliceQpDelta ) + result += "DifferentSliceQpDelta | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } - VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265OutputModeFlagsEXT value ) + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CtbSizeFlagsKHR value ) { if ( !value ) return "{}"; std::string result; - if ( value & VideoEncodeH265OutputModeFlagBitsEXT::eFrame ) - result += "Frame | "; - if ( value & VideoEncodeH265OutputModeFlagBitsEXT::eSliceSegment ) - result += "SliceSegment | "; - if ( value & VideoEncodeH265OutputModeFlagBitsEXT::eNonVcl ) - result += "NonVcl | "; + if ( value & VideoEncodeH265CtbSizeFlagBitsKHR::e16 ) + result += "16 | "; + if ( value & VideoEncodeH265CtbSizeFlagBitsKHR::e32 ) + result += "32 | "; + if ( value & VideoEncodeH265CtbSizeFlagBitsKHR::e64 ) + result += "64 | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } - VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CtbSizeFlagsEXT value ) + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265TransformBlockSizeFlagsKHR value ) { if ( !value ) return "{}"; std::string result; - if ( value & VideoEncodeH265CtbSizeFlagBitsEXT::e16 ) + if ( value & VideoEncodeH265TransformBlockSizeFlagBitsKHR::e4 ) + result += "4 | "; + if ( value & VideoEncodeH265TransformBlockSizeFlagBitsKHR::e8 ) + result += "8 | "; + if ( value & VideoEncodeH265TransformBlockSizeFlagBitsKHR::e16 ) result += "16 | "; - if ( value & VideoEncodeH265CtbSizeFlagBitsEXT::e32 ) + if ( value & VideoEncodeH265TransformBlockSizeFlagBitsKHR::e32 ) result += "32 | "; - if ( value & VideoEncodeH265CtbSizeFlagBitsEXT::e64 ) - result += "64 | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } - VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265TransformBlockSizeFlagsEXT value ) + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265RateControlFlagsKHR value ) { if ( !value ) return "{}"; std::string result; - if ( value & VideoEncodeH265TransformBlockSizeFlagBitsEXT::e4 ) - result += "4 | "; - if ( value & VideoEncodeH265TransformBlockSizeFlagBitsEXT::e8 ) - result += "8 | "; - if ( value & VideoEncodeH265TransformBlockSizeFlagBitsEXT::e16 ) - result += "16 | "; - if ( value & VideoEncodeH265TransformBlockSizeFlagBitsEXT::e32 ) - result += "32 | "; + if ( value & VideoEncodeH265RateControlFlagBitsKHR::eAttemptHrdCompliance ) + result += "AttemptHrdCompliance | "; + if ( value & VideoEncodeH265RateControlFlagBitsKHR::eRegularGop ) + result += "RegularGop | "; + if ( value & 
VideoEncodeH265RateControlFlagBitsKHR::eReferencePatternFlat ) + result += "ReferencePatternFlat | "; + if ( value & VideoEncodeH265RateControlFlagBitsKHR::eReferencePatternDyadic ) + result += "ReferencePatternDyadic | "; + if ( value & VideoEncodeH265RateControlFlagBitsKHR::eTemporalSubLayerPatternDyadic ) + result += "TemporalSubLayerPatternDyadic | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_EXT_video_decode_h264 === + //=== VK_KHR_video_decode_h264 === - VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264PictureLayoutFlagsEXT value ) + VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264PictureLayoutFlagsKHR value ) { if ( !value ) - return "{}"; + return "Progressive"; std::string result; - if ( value & VideoDecodeH264PictureLayoutFlagBitsEXT::eInterlacedInterleavedLines ) + if ( value & VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedInterleavedLines ) result += "InterlacedInterleavedLines | "; - if ( value & VideoDecodeH264PictureLayoutFlagBitsEXT::eInterlacedSeparatePlanes ) + if ( value & VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedSeparatePlanes ) result += "InterlacedSeparatePlanes | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_stream_descriptor_surface === @@ -2542,6 +2864,8 @@ namespace VULKAN_HPP_NAMESPACE result += "Validation | "; if ( value & DebugUtilsMessageTypeFlagBitsEXT::ePerformance ) result += "Performance | "; + if ( value & DebugUtilsMessageTypeFlagBitsEXT::eDeviceAddressBinding ) + result += "DeviceAddressBinding | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -2593,6 +2917,10 @@ namespace VULKAN_HPP_NAMESPACE result += "ForceOpaque | "; if ( value & GeometryInstanceFlagBitsKHR::eForceNoOpaque ) result += "ForceNoOpaque | "; + if ( value & GeometryInstanceFlagBitsKHR::eForceOpacityMicromap2StateEXT ) + result += "ForceOpacityMicromap2StateEXT | "; + if ( value & GeometryInstanceFlagBitsKHR::eDisableOpacityMicromapsEXT ) + result += "DisableOpacityMicromapsEXT | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -2615,6 +2943,18 @@ namespace VULKAN_HPP_NAMESPACE result += "LowMemory | "; if ( value & BuildAccelerationStructureFlagBitsKHR::eMotionNV ) result += "MotionNV | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapUpdateEXT ) + result += "AllowOpacityMicromapUpdateEXT | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowDisableOpacityMicromapsEXT ) + result += "AllowDisableOpacityMicromapsEXT | "; + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapDataUpdateEXT ) + result += "AllowOpacityMicromapDataUpdateEXT | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowDisplacementMicromapUpdateNV ) + result += "AllowDisplacementMicromapUpdateNV | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowDataAccess ) + result += "AllowDataAccess | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -2627,6 +2967,8 @@ namespace VULKAN_HPP_NAMESPACE std::string result; if ( value & AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay ) result += "DeviceAddressCaptureReplay | "; + if ( value & AccelerationStructureCreateFlagBitsKHR::eDescriptorBufferCaptureReplayEXT ) + result += 
"DescriptorBufferCaptureReplayEXT | "; if ( value & AccelerationStructureCreateFlagBitsKHR::eMotionNV ) result += "MotionNV | "; @@ -2693,6 +3035,40 @@ namespace VULKAN_HPP_NAMESPACE return "{}"; } + //=== VK_EXT_surface_maintenance1 === + + VULKAN_HPP_INLINE std::string to_string( PresentScalingFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PresentScalingFlagBitsEXT::eOneToOne ) + result += "OneToOne | "; + if ( value & PresentScalingFlagBitsEXT::eAspectRatioStretch ) + result += "AspectRatioStretch | "; + if ( value & PresentScalingFlagBitsEXT::eStretch ) + result += "Stretch | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( PresentGravityFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PresentGravityFlagBitsEXT::eMin ) + result += "Min | "; + if ( value & PresentGravityFlagBitsEXT::eMax ) + result += "Max | "; + if ( value & PresentGravityFlagBitsEXT::eCentered ) + result += "Centered | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + //=== VK_NV_device_generated_commands === VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagsNV value ) @@ -2730,12 +3106,20 @@ namespace VULKAN_HPP_NAMESPACE return "{}"; } -#if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_encode_queue === - VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagsKHR ) + VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagsKHR value ) { - return "{}"; + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeFlagBitsKHR::eWithQuantizationDeltaMap ) + result += "WithQuantizationDeltaMap | "; + if ( value & VideoEncodeFlagBitsKHR::eWithEmphasisMap ) + result += "WithEmphasisMap | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } VULKAN_HPP_INLINE std::string to_string( VideoEncodeCapabilityFlagsKHR value ) @@ -2746,15 +3130,37 @@ namespace VULKAN_HPP_NAMESPACE std::string result; if ( value & VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes ) result += "PrecedingExternallyEncodedBytes | "; + if ( value & VideoEncodeCapabilityFlagBitsKHR::eInsufficientBitstreamBufferRangeDetection ) + result += "InsufficientBitstreamBufferRangeDetection | "; + if ( value & VideoEncodeCapabilityFlagBitsKHR::eQuantizationDeltaMap ) + result += "QuantizationDeltaMap | "; + if ( value & VideoEncodeCapabilityFlagBitsKHR::eEmphasisMap ) + result += "EmphasisMap | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } - VULKAN_HPP_INLINE std::string to_string( VideoEncodeUsageFlagsKHR value ) + VULKAN_HPP_INLINE std::string to_string( VideoEncodeFeedbackFlagsKHR value ) { if ( !value ) return "{}"; + std::string result; + if ( value & VideoEncodeFeedbackFlagBitsKHR::eBitstreamBufferOffset ) + result += "BitstreamBufferOffset | "; + if ( value & VideoEncodeFeedbackFlagBitsKHR::eBitstreamBytesWritten ) + result += "BitstreamBytesWritten | "; + if ( value & VideoEncodeFeedbackFlagBitsKHR::eBitstreamHasOverrides ) + result += "BitstreamHasOverrides | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeUsageFlagsKHR value ) + { + if ( !value ) + return "Default"; + std::string result; if ( value & VideoEncodeUsageFlagBitsKHR::eTranscoding ) result += "Transcoding | "; @@ -2771,7 +3177,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( VideoEncodeContentFlagsKHR value ) { if ( !value ) - 
return "{}"; + return "Default"; std::string result; if ( value & VideoEncodeContentFlagBitsKHR::eCamera ) @@ -2792,13 +3198,18 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlModeFlagsKHR value ) { if ( !value ) - return "{}"; + return "Default"; std::string result; + if ( value & VideoEncodeRateControlModeFlagBitsKHR::eDisabled ) + result += "Disabled | "; + if ( value & VideoEncodeRateControlModeFlagBitsKHR::eCbr ) + result += "Cbr | "; + if ( value & VideoEncodeRateControlModeFlagBitsKHR::eVbr ) + result += "Vbr | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ //=== VK_NV_device_diagnostics_config === @@ -2883,7 +3294,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( ImageCompressionFlagsEXT value ) { if ( !value ) - return "{}"; + return "Default"; std::string result; if ( value & ImageCompressionFlagBitsEXT::eFixedRateDefault ) @@ -2899,7 +3310,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::string to_string( ImageCompressionFixedRateFlagsEXT value ) { if ( !value ) - return "{}"; + return "None"; std::string result; if ( value & ImageCompressionFixedRateFlagBitsEXT::e1Bpc ) @@ -2963,6 +3374,20 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + //=== VK_EXT_device_address_binding_report === + + VULKAN_HPP_INLINE std::string to_string( DeviceAddressBindingFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & DeviceAddressBindingFlagBitsEXT::eInternalObject ) + result += "InternalObject | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_buffer_collection === @@ -2992,6 +3417,20 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VK_USE_PLATFORM_FUCHSIA*/ + //=== VK_EXT_frame_boundary === + + VULKAN_HPP_INLINE std::string to_string( FrameBoundaryFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & FrameBoundaryFlagBitsEXT::eFrameEnd ) + result += "FrameEnd | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + #if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_screen_surface === @@ -3001,6 +3440,375 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + //=== VK_EXT_opacity_micromap === + + VULKAN_HPP_INLINE std::string to_string( BuildMicromapFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & BuildMicromapFlagBitsEXT::ePreferFastTrace ) + result += "PreferFastTrace | "; + if ( value & BuildMicromapFlagBitsEXT::ePreferFastBuild ) + result += "PreferFastBuild | "; + if ( value & BuildMicromapFlagBitsEXT::eAllowCompaction ) + result += "AllowCompaction | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( MicromapCreateFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & MicromapCreateFlagBitsEXT::eDeviceAddressCaptureReplay ) + result += "DeviceAddressCaptureReplay | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_ARM_scheduling_controls === + + VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceSchedulingControlsFlagsARM value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PhysicalDeviceSchedulingControlsFlagBitsARM::eShaderCoreCount ) + result += "ShaderCoreCount | "; + + return "{ " + result.substr( 0, result.size() - 3 ) 
+ " }"; + } + + //=== VK_NV_memory_decompression === + + VULKAN_HPP_INLINE std::string to_string( MemoryDecompressionMethodFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & MemoryDecompressionMethodFlagBitsNV::eGdeflate10 ) + result += "Gdeflate10 | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_LUNARG_direct_driver_loading === + + VULKAN_HPP_INLINE std::string to_string( DirectDriverLoadingFlagsLUNARG ) + { + return "{}"; + } + + //=== VK_NV_optical_flow === + + VULKAN_HPP_INLINE std::string to_string( OpticalFlowUsageFlagsNV value ) + { + if ( !value ) + return "Unknown"; + + std::string result; + if ( value & OpticalFlowUsageFlagBitsNV::eInput ) + result += "Input | "; + if ( value & OpticalFlowUsageFlagBitsNV::eOutput ) + result += "Output | "; + if ( value & OpticalFlowUsageFlagBitsNV::eHint ) + result += "Hint | "; + if ( value & OpticalFlowUsageFlagBitsNV::eCost ) + result += "Cost | "; + if ( value & OpticalFlowUsageFlagBitsNV::eGlobalFlow ) + result += "GlobalFlow | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( OpticalFlowGridSizeFlagsNV value ) + { + if ( !value ) + return "Unknown"; + + std::string result; + if ( value & OpticalFlowGridSizeFlagBitsNV::e1X1 ) + result += "1X1 | "; + if ( value & OpticalFlowGridSizeFlagBitsNV::e2X2 ) + result += "2X2 | "; + if ( value & OpticalFlowGridSizeFlagBitsNV::e4X4 ) + result += "4X4 | "; + if ( value & OpticalFlowGridSizeFlagBitsNV::e8X8 ) + result += "8X8 | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( OpticalFlowSessionCreateFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & OpticalFlowSessionCreateFlagBitsNV::eEnableHint ) + result += "EnableHint | "; + if ( value & OpticalFlowSessionCreateFlagBitsNV::eEnableCost ) + result += "EnableCost | "; + if ( value & OpticalFlowSessionCreateFlagBitsNV::eEnableGlobalFlow ) + result += "EnableGlobalFlow | "; + if ( value & OpticalFlowSessionCreateFlagBitsNV::eAllowRegions ) + result += "AllowRegions | "; + if ( value & OpticalFlowSessionCreateFlagBitsNV::eBothDirections ) + result += "BothDirections | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( OpticalFlowExecuteFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints ) + result += "DisableTemporalHints | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_EXT_shader_object === + + VULKAN_HPP_INLINE std::string to_string( ShaderCreateFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ShaderCreateFlagBitsEXT::eLinkStage ) + result += "LinkStage | "; + if ( value & ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize ) + result += "AllowVaryingSubgroupSize | "; + if ( value & ShaderCreateFlagBitsEXT::eRequireFullSubgroups ) + result += "RequireFullSubgroups | "; + if ( value & ShaderCreateFlagBitsEXT::eNoTaskShader ) + result += "NoTaskShader | "; + if ( value & ShaderCreateFlagBitsEXT::eDispatchBase ) + result += "DispatchBase | "; + if ( value & ShaderCreateFlagBitsEXT::eFragmentShadingRateAttachment ) + result += "FragmentShadingRateAttachment | "; + if ( value & ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment ) + result += "FragmentDensityMapAttachment | "; + if ( 
value & ShaderCreateFlagBitsEXT::eIndirectBindable ) + result += "IndirectBindable | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_KHR_video_encode_av1 === + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1CapabilityFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeAV1CapabilityFlagBitsKHR::ePerRateControlGroupMinMaxQIndex ) + result += "PerRateControlGroupMinMaxQIndex | "; + if ( value & VideoEncodeAV1CapabilityFlagBitsKHR::eGenerateObuExtensionHeader ) + result += "GenerateObuExtensionHeader | "; + if ( value & VideoEncodeAV1CapabilityFlagBitsKHR::ePrimaryReferenceCdfOnly ) + result += "PrimaryReferenceCdfOnly | "; + if ( value & VideoEncodeAV1CapabilityFlagBitsKHR::eFrameSizeOverride ) + result += "FrameSizeOverride | "; + if ( value & VideoEncodeAV1CapabilityFlagBitsKHR::eMotionVectorScaling ) + result += "MotionVectorScaling | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1StdFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeAV1StdFlagBitsKHR::eUniformTileSpacingFlagSet ) + result += "UniformTileSpacingFlagSet | "; + if ( value & VideoEncodeAV1StdFlagBitsKHR::eSkipModePresentUnset ) + result += "SkipModePresentUnset | "; + if ( value & VideoEncodeAV1StdFlagBitsKHR::ePrimaryRefFrame ) + result += "PrimaryRefFrame | "; + if ( value & VideoEncodeAV1StdFlagBitsKHR::eDeltaQ ) + result += "DeltaQ | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1SuperblockSizeFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeAV1SuperblockSizeFlagBitsKHR::e64 ) + result += "64 | "; + if ( value & VideoEncodeAV1SuperblockSizeFlagBitsKHR::e128 ) + result += "128 | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1RateControlFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeAV1RateControlFlagBitsKHR::eRegularGop ) + result += "RegularGop | "; + if ( value & VideoEncodeAV1RateControlFlagBitsKHR::eTemporalLayerPatternDyadic ) + result += "TemporalLayerPatternDyadic | "; + if ( value & VideoEncodeAV1RateControlFlagBitsKHR::eReferencePatternFlat ) + result += "ReferencePatternFlat | "; + if ( value & VideoEncodeAV1RateControlFlagBitsKHR::eReferencePatternDyadic ) + result += "ReferencePatternDyadic | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_NV_cluster_acceleration_structure === + + VULKAN_HPP_INLINE std::string to_string( ClusterAccelerationStructureClusterFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ClusterAccelerationStructureClusterFlagBitsNV::eAllowDisableOpacityMicromaps ) + result += "AllowDisableOpacityMicromaps | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( ClusterAccelerationStructureGeometryFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ClusterAccelerationStructureGeometryFlagBitsNV::eCullDisable ) + result += "CullDisable | "; + if ( value & ClusterAccelerationStructureGeometryFlagBitsNV::eNoDuplicateAnyhitInvocation ) + result += "NoDuplicateAnyhitInvocation | "; + if ( value & 
ClusterAccelerationStructureGeometryFlagBitsNV::eOpaque ) + result += "Opaque | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( ClusterAccelerationStructureAddressResolutionFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedDstImplicitData ) + result += "IndirectedDstImplicitData | "; + if ( value & ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedScratchData ) + result += "IndirectedScratchData | "; + if ( value & ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedDstAddressArray ) + result += "IndirectedDstAddressArray | "; + if ( value & ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedDstSizesArray ) + result += "IndirectedDstSizesArray | "; + if ( value & ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedSrcInfosArray ) + result += "IndirectedSrcInfosArray | "; + if ( value & ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedSrcInfosCount ) + result += "IndirectedSrcInfosCount | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( ClusterAccelerationStructureIndexFormatFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & ClusterAccelerationStructureIndexFormatFlagBitsNV::e8 ) + result += "8 | "; + if ( value & ClusterAccelerationStructureIndexFormatFlagBitsNV::e16 ) + result += "16 | "; + if ( value & ClusterAccelerationStructureIndexFormatFlagBitsNV::e32 ) + result += "32 | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_NV_partitioned_acceleration_structure === + + VULKAN_HPP_INLINE std::string to_string( PartitionedAccelerationStructureInstanceFlagsNV value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagTriangleFacingCullDisable ) + result += "FlagTriangleFacingCullDisable | "; + if ( value & PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagTriangleFlipFacing ) + result += "FlagTriangleFlipFacing | "; + if ( value & PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagForceOpaque ) + result += "FlagForceOpaque | "; + if ( value & PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagForceNoOpaque ) + result += "FlagForceNoOpaque | "; + if ( value & PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagEnableExplicitBoundingBox ) + result += "FlagEnableExplicitBoundingBox | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_EXT_device_generated_commands === + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & IndirectCommandsLayoutUsageFlagBitsEXT::eExplicitPreprocess ) + result += "ExplicitPreprocess | "; + if ( value & IndirectCommandsLayoutUsageFlagBitsEXT::eUnorderedSequences ) + result += "UnorderedSequences | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsInputModeFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & IndirectCommandsInputModeFlagBitsEXT::eVulkanIndexBuffer ) + result += "VulkanIndexBuffer | "; + if ( value & IndirectCommandsInputModeFlagBitsEXT::eDxgiIndexBuffer ) + result += "DxgiIndexBuffer | "; + + 
return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_KHR_maintenance8 === + + VULKAN_HPP_INLINE std::string to_string( AccessFlags3KHR value ) + { + if ( !value ) + return "None"; + + std::string result; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + //======================= //=== ENUMs to_string === //======================= @@ -3046,6 +3854,7 @@ namespace VULKAN_HPP_NAMESPACE case Result::eErrorFragmentation: return "ErrorFragmentation"; case Result::eErrorInvalidOpaqueCaptureAddress: return "ErrorInvalidOpaqueCaptureAddress"; case Result::ePipelineCompileRequired: return "PipelineCompileRequired"; + case Result::eErrorNotPermitted: return "ErrorNotPermitted"; case Result::eErrorSurfaceLostKHR: return "ErrorSurfaceLostKHR"; case Result::eErrorNativeWindowInUseKHR: return "ErrorNativeWindowInUseKHR"; case Result::eSuboptimalKHR: return "SuboptimalKHR"; @@ -3053,16 +3862,13 @@ namespace VULKAN_HPP_NAMESPACE case Result::eErrorIncompatibleDisplayKHR: return "ErrorIncompatibleDisplayKHR"; case Result::eErrorValidationFailedEXT: return "ErrorValidationFailedEXT"; case Result::eErrorInvalidShaderNV: return "ErrorInvalidShaderNV"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) case Result::eErrorImageUsageNotSupportedKHR: return "ErrorImageUsageNotSupportedKHR"; case Result::eErrorVideoPictureLayoutNotSupportedKHR: return "ErrorVideoPictureLayoutNotSupportedKHR"; case Result::eErrorVideoProfileOperationNotSupportedKHR: return "ErrorVideoProfileOperationNotSupportedKHR"; case Result::eErrorVideoProfileFormatNotSupportedKHR: return "ErrorVideoProfileFormatNotSupportedKHR"; case Result::eErrorVideoProfileCodecNotSupportedKHR: return "ErrorVideoProfileCodecNotSupportedKHR"; case Result::eErrorVideoStdVersionNotSupportedKHR: return "ErrorVideoStdVersionNotSupportedKHR"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT"; - case Result::eErrorNotPermittedKHR: return "ErrorNotPermittedKHR"; #if defined( VK_USE_PLATFORM_WIN32_KHR ) case Result::eErrorFullScreenExclusiveModeLostEXT: return "ErrorFullScreenExclusiveModeLostEXT"; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -3070,7 +3876,11 @@ namespace VULKAN_HPP_NAMESPACE case Result::eThreadDoneKHR: return "ThreadDoneKHR"; case Result::eOperationDeferredKHR: return "OperationDeferredKHR"; case Result::eOperationNotDeferredKHR: return "OperationNotDeferredKHR"; + case Result::eErrorInvalidVideoStdParametersKHR: return "ErrorInvalidVideoStdParametersKHR"; case Result::eErrorCompressionExhaustedEXT: return "ErrorCompressionExhaustedEXT"; + case Result::eIncompatibleShaderBinaryEXT: return "IncompatibleShaderBinaryEXT"; + case Result::ePipelineBinaryMissingKHR: return "PipelineBinaryMissingKHR"; + case Result::eErrorNotEnoughSpaceKHR: return "ErrorNotEnoughSpaceKHR"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -3295,6 +4105,56 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceMaintenance4Properties: return "PhysicalDeviceMaintenance4Properties"; case StructureType::eDeviceBufferMemoryRequirements: return "DeviceBufferMemoryRequirements"; case StructureType::eDeviceImageMemoryRequirements: return "DeviceImageMemoryRequirements"; + case StructureType::ePhysicalDeviceVulkan14Features: return "PhysicalDeviceVulkan14Features"; + case StructureType::ePhysicalDeviceVulkan14Properties: return "PhysicalDeviceVulkan14Properties"; + case 
StructureType::eDeviceQueueGlobalPriorityCreateInfo: return "DeviceQueueGlobalPriorityCreateInfo"; + case StructureType::ePhysicalDeviceGlobalPriorityQueryFeatures: return "PhysicalDeviceGlobalPriorityQueryFeatures"; + case StructureType::eQueueFamilyGlobalPriorityProperties: return "QueueFamilyGlobalPriorityProperties"; + case StructureType::ePhysicalDeviceShaderSubgroupRotateFeatures: return "PhysicalDeviceShaderSubgroupRotateFeatures"; + case StructureType::ePhysicalDeviceShaderFloatControls2Features: return "PhysicalDeviceShaderFloatControls2Features"; + case StructureType::ePhysicalDeviceShaderExpectAssumeFeatures: return "PhysicalDeviceShaderExpectAssumeFeatures"; + case StructureType::ePhysicalDeviceLineRasterizationFeatures: return "PhysicalDeviceLineRasterizationFeatures"; + case StructureType::ePipelineRasterizationLineStateCreateInfo: return "PipelineRasterizationLineStateCreateInfo"; + case StructureType::ePhysicalDeviceLineRasterizationProperties: return "PhysicalDeviceLineRasterizationProperties"; + case StructureType::ePhysicalDeviceVertexAttributeDivisorProperties: return "PhysicalDeviceVertexAttributeDivisorProperties"; + case StructureType::ePipelineVertexInputDivisorStateCreateInfo: return "PipelineVertexInputDivisorStateCreateInfo"; + case StructureType::ePhysicalDeviceVertexAttributeDivisorFeatures: return "PhysicalDeviceVertexAttributeDivisorFeatures"; + case StructureType::ePhysicalDeviceIndexTypeUint8Features: return "PhysicalDeviceIndexTypeUint8Features"; + case StructureType::eMemoryMapInfo: return "MemoryMapInfo"; + case StructureType::eMemoryUnmapInfo: return "MemoryUnmapInfo"; + case StructureType::ePhysicalDeviceMaintenance5Features: return "PhysicalDeviceMaintenance5Features"; + case StructureType::ePhysicalDeviceMaintenance5Properties: return "PhysicalDeviceMaintenance5Properties"; + case StructureType::eRenderingAreaInfo: return "RenderingAreaInfo"; + case StructureType::eDeviceImageSubresourceInfo: return "DeviceImageSubresourceInfo"; + case StructureType::eSubresourceLayout2: return "SubresourceLayout2"; + case StructureType::eImageSubresource2: return "ImageSubresource2"; + case StructureType::ePipelineCreateFlags2CreateInfo: return "PipelineCreateFlags2CreateInfo"; + case StructureType::eBufferUsageFlags2CreateInfo: return "BufferUsageFlags2CreateInfo"; + case StructureType::ePhysicalDevicePushDescriptorProperties: return "PhysicalDevicePushDescriptorProperties"; + case StructureType::ePhysicalDeviceDynamicRenderingLocalReadFeatures: return "PhysicalDeviceDynamicRenderingLocalReadFeatures"; + case StructureType::eRenderingAttachmentLocationInfo: return "RenderingAttachmentLocationInfo"; + case StructureType::eRenderingInputAttachmentIndexInfo: return "RenderingInputAttachmentIndexInfo"; + case StructureType::ePhysicalDeviceMaintenance6Features: return "PhysicalDeviceMaintenance6Features"; + case StructureType::ePhysicalDeviceMaintenance6Properties: return "PhysicalDeviceMaintenance6Properties"; + case StructureType::eBindMemoryStatus: return "BindMemoryStatus"; + case StructureType::eBindDescriptorSetsInfo: return "BindDescriptorSetsInfo"; + case StructureType::ePushConstantsInfo: return "PushConstantsInfo"; + case StructureType::ePushDescriptorSetInfo: return "PushDescriptorSetInfo"; + case StructureType::ePushDescriptorSetWithTemplateInfo: return "PushDescriptorSetWithTemplateInfo"; + case StructureType::ePhysicalDevicePipelineProtectedAccessFeatures: return "PhysicalDevicePipelineProtectedAccessFeatures"; + case 
StructureType::ePipelineRobustnessCreateInfo: return "PipelineRobustnessCreateInfo"; + case StructureType::ePhysicalDevicePipelineRobustnessFeatures: return "PhysicalDevicePipelineRobustnessFeatures"; + case StructureType::ePhysicalDevicePipelineRobustnessProperties: return "PhysicalDevicePipelineRobustnessProperties"; + case StructureType::ePhysicalDeviceHostImageCopyFeatures: return "PhysicalDeviceHostImageCopyFeatures"; + case StructureType::ePhysicalDeviceHostImageCopyProperties: return "PhysicalDeviceHostImageCopyProperties"; + case StructureType::eMemoryToImageCopy: return "MemoryToImageCopy"; + case StructureType::eImageToMemoryCopy: return "ImageToMemoryCopy"; + case StructureType::eCopyImageToMemoryInfo: return "CopyImageToMemoryInfo"; + case StructureType::eCopyMemoryToImageInfo: return "CopyMemoryToImageInfo"; + case StructureType::eHostImageLayoutTransitionInfo: return "HostImageLayoutTransitionInfo"; + case StructureType::eCopyImageToImageInfo: return "CopyImageToImageInfo"; + case StructureType::eSubresourceHostMemcpySize: return "SubresourceHostMemcpySize"; + case StructureType::eHostImageCopyDevicePerformanceQuery: return "HostImageCopyDevicePerformanceQuery"; case StructureType::eSwapchainCreateInfoKHR: return "SwapchainCreateInfoKHR"; case StructureType::ePresentInfoKHR: return "PresentInfoKHR"; case StructureType::eDeviceGroupPresentCapabilitiesKHR: return "DeviceGroupPresentCapabilitiesKHR"; @@ -3326,7 +4186,6 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eDebugMarkerObjectNameInfoEXT: return "DebugMarkerObjectNameInfoEXT"; case StructureType::eDebugMarkerObjectTagInfoEXT: return "DebugMarkerObjectTagInfoEXT"; case StructureType::eDebugMarkerMarkerInfoEXT: return "DebugMarkerMarkerInfoEXT"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) case StructureType::eVideoProfileInfoKHR: return "VideoProfileInfoKHR"; case StructureType::eVideoCapabilitiesKHR: return "VideoCapabilitiesKHR"; case StructureType::eVideoPictureResourceInfoKHR: return "VideoPictureResourceInfoKHR"; @@ -3347,7 +4206,6 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eVideoDecodeInfoKHR: return "VideoDecodeInfoKHR"; case StructureType::eVideoDecodeCapabilitiesKHR: return "VideoDecodeCapabilitiesKHR"; case StructureType::eVideoDecodeUsageInfoKHR: return "VideoDecodeUsageInfoKHR"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ case StructureType::eDedicatedAllocationImageCreateInfoNV: return "DedicatedAllocationImageCreateInfoNV"; case StructureType::eDedicatedAllocationBufferCreateInfoNV: return "DedicatedAllocationBufferCreateInfoNV"; case StructureType::eDedicatedAllocationMemoryAllocateInfoNV: return "DedicatedAllocationMemoryAllocateInfoNV"; @@ -3357,44 +4215,44 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eCuModuleCreateInfoNVX: return "CuModuleCreateInfoNVX"; case StructureType::eCuFunctionCreateInfoNVX: return "CuFunctionCreateInfoNVX"; case StructureType::eCuLaunchInfoNVX: return "CuLaunchInfoNVX"; + case StructureType::eCuModuleTexturingModeCreateInfoNVX: return "CuModuleTexturingModeCreateInfoNVX"; case StructureType::eImageViewHandleInfoNVX: return "ImageViewHandleInfoNVX"; case StructureType::eImageViewAddressPropertiesNVX: return "ImageViewAddressPropertiesNVX"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - case StructureType::eVideoEncodeH264CapabilitiesEXT: return "VideoEncodeH264CapabilitiesEXT"; - case StructureType::eVideoEncodeH264SessionParametersCreateInfoEXT: return "VideoEncodeH264SessionParametersCreateInfoEXT"; - case 
StructureType::eVideoEncodeH264SessionParametersAddInfoEXT: return "VideoEncodeH264SessionParametersAddInfoEXT"; - case StructureType::eVideoEncodeH264VclFrameInfoEXT: return "VideoEncodeH264VclFrameInfoEXT"; - case StructureType::eVideoEncodeH264DpbSlotInfoEXT: return "VideoEncodeH264DpbSlotInfoEXT"; - case StructureType::eVideoEncodeH264NaluSliceInfoEXT: return "VideoEncodeH264NaluSliceInfoEXT"; - case StructureType::eVideoEncodeH264EmitPictureParametersInfoEXT: return "VideoEncodeH264EmitPictureParametersInfoEXT"; - case StructureType::eVideoEncodeH264ProfileInfoEXT: return "VideoEncodeH264ProfileInfoEXT"; - case StructureType::eVideoEncodeH264RateControlInfoEXT: return "VideoEncodeH264RateControlInfoEXT"; - case StructureType::eVideoEncodeH264RateControlLayerInfoEXT: return "VideoEncodeH264RateControlLayerInfoEXT"; - case StructureType::eVideoEncodeH264ReferenceListsInfoEXT: return "VideoEncodeH264ReferenceListsInfoEXT"; - case StructureType::eVideoEncodeH265CapabilitiesEXT: return "VideoEncodeH265CapabilitiesEXT"; - case StructureType::eVideoEncodeH265SessionParametersCreateInfoEXT: return "VideoEncodeH265SessionParametersCreateInfoEXT"; - case StructureType::eVideoEncodeH265SessionParametersAddInfoEXT: return "VideoEncodeH265SessionParametersAddInfoEXT"; - case StructureType::eVideoEncodeH265VclFrameInfoEXT: return "VideoEncodeH265VclFrameInfoEXT"; - case StructureType::eVideoEncodeH265DpbSlotInfoEXT: return "VideoEncodeH265DpbSlotInfoEXT"; - case StructureType::eVideoEncodeH265NaluSliceSegmentInfoEXT: return "VideoEncodeH265NaluSliceSegmentInfoEXT"; - case StructureType::eVideoEncodeH265EmitPictureParametersInfoEXT: return "VideoEncodeH265EmitPictureParametersInfoEXT"; - case StructureType::eVideoEncodeH265ProfileInfoEXT: return "VideoEncodeH265ProfileInfoEXT"; - case StructureType::eVideoEncodeH265ReferenceListsInfoEXT: return "VideoEncodeH265ReferenceListsInfoEXT"; - case StructureType::eVideoEncodeH265RateControlInfoEXT: return "VideoEncodeH265RateControlInfoEXT"; - case StructureType::eVideoEncodeH265RateControlLayerInfoEXT: return "VideoEncodeH265RateControlLayerInfoEXT"; - case StructureType::eVideoDecodeH264CapabilitiesEXT: return "VideoDecodeH264CapabilitiesEXT"; - case StructureType::eVideoDecodeH264PictureInfoEXT: return "VideoDecodeH264PictureInfoEXT"; - case StructureType::eVideoDecodeH264MvcInfoEXT: return "VideoDecodeH264MvcInfoEXT"; - case StructureType::eVideoDecodeH264ProfileInfoEXT: return "VideoDecodeH264ProfileInfoEXT"; - case StructureType::eVideoDecodeH264SessionParametersCreateInfoEXT: return "VideoDecodeH264SessionParametersCreateInfoEXT"; - case StructureType::eVideoDecodeH264SessionParametersAddInfoEXT: return "VideoDecodeH264SessionParametersAddInfoEXT"; - case StructureType::eVideoDecodeH264DpbSlotInfoEXT: return "VideoDecodeH264DpbSlotInfoEXT"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::eVideoEncodeH264CapabilitiesKHR: return "VideoEncodeH264CapabilitiesKHR"; + case StructureType::eVideoEncodeH264SessionParametersCreateInfoKHR: return "VideoEncodeH264SessionParametersCreateInfoKHR"; + case StructureType::eVideoEncodeH264SessionParametersAddInfoKHR: return "VideoEncodeH264SessionParametersAddInfoKHR"; + case StructureType::eVideoEncodeH264PictureInfoKHR: return "VideoEncodeH264PictureInfoKHR"; + case StructureType::eVideoEncodeH264DpbSlotInfoKHR: return "VideoEncodeH264DpbSlotInfoKHR"; + case StructureType::eVideoEncodeH264NaluSliceInfoKHR: return "VideoEncodeH264NaluSliceInfoKHR"; + case 
StructureType::eVideoEncodeH264GopRemainingFrameInfoKHR: return "VideoEncodeH264GopRemainingFrameInfoKHR"; + case StructureType::eVideoEncodeH264ProfileInfoKHR: return "VideoEncodeH264ProfileInfoKHR"; + case StructureType::eVideoEncodeH264RateControlInfoKHR: return "VideoEncodeH264RateControlInfoKHR"; + case StructureType::eVideoEncodeH264RateControlLayerInfoKHR: return "VideoEncodeH264RateControlLayerInfoKHR"; + case StructureType::eVideoEncodeH264SessionCreateInfoKHR: return "VideoEncodeH264SessionCreateInfoKHR"; + case StructureType::eVideoEncodeH264QualityLevelPropertiesKHR: return "VideoEncodeH264QualityLevelPropertiesKHR"; + case StructureType::eVideoEncodeH264SessionParametersGetInfoKHR: return "VideoEncodeH264SessionParametersGetInfoKHR"; + case StructureType::eVideoEncodeH264SessionParametersFeedbackInfoKHR: return "VideoEncodeH264SessionParametersFeedbackInfoKHR"; + case StructureType::eVideoEncodeH265CapabilitiesKHR: return "VideoEncodeH265CapabilitiesKHR"; + case StructureType::eVideoEncodeH265SessionParametersCreateInfoKHR: return "VideoEncodeH265SessionParametersCreateInfoKHR"; + case StructureType::eVideoEncodeH265SessionParametersAddInfoKHR: return "VideoEncodeH265SessionParametersAddInfoKHR"; + case StructureType::eVideoEncodeH265PictureInfoKHR: return "VideoEncodeH265PictureInfoKHR"; + case StructureType::eVideoEncodeH265DpbSlotInfoKHR: return "VideoEncodeH265DpbSlotInfoKHR"; + case StructureType::eVideoEncodeH265NaluSliceSegmentInfoKHR: return "VideoEncodeH265NaluSliceSegmentInfoKHR"; + case StructureType::eVideoEncodeH265GopRemainingFrameInfoKHR: return "VideoEncodeH265GopRemainingFrameInfoKHR"; + case StructureType::eVideoEncodeH265ProfileInfoKHR: return "VideoEncodeH265ProfileInfoKHR"; + case StructureType::eVideoEncodeH265RateControlInfoKHR: return "VideoEncodeH265RateControlInfoKHR"; + case StructureType::eVideoEncodeH265RateControlLayerInfoKHR: return "VideoEncodeH265RateControlLayerInfoKHR"; + case StructureType::eVideoEncodeH265SessionCreateInfoKHR: return "VideoEncodeH265SessionCreateInfoKHR"; + case StructureType::eVideoEncodeH265QualityLevelPropertiesKHR: return "VideoEncodeH265QualityLevelPropertiesKHR"; + case StructureType::eVideoEncodeH265SessionParametersGetInfoKHR: return "VideoEncodeH265SessionParametersGetInfoKHR"; + case StructureType::eVideoEncodeH265SessionParametersFeedbackInfoKHR: return "VideoEncodeH265SessionParametersFeedbackInfoKHR"; + case StructureType::eVideoDecodeH264CapabilitiesKHR: return "VideoDecodeH264CapabilitiesKHR"; + case StructureType::eVideoDecodeH264PictureInfoKHR: return "VideoDecodeH264PictureInfoKHR"; + case StructureType::eVideoDecodeH264ProfileInfoKHR: return "VideoDecodeH264ProfileInfoKHR"; + case StructureType::eVideoDecodeH264SessionParametersCreateInfoKHR: return "VideoDecodeH264SessionParametersCreateInfoKHR"; + case StructureType::eVideoDecodeH264SessionParametersAddInfoKHR: return "VideoDecodeH264SessionParametersAddInfoKHR"; + case StructureType::eVideoDecodeH264DpbSlotInfoKHR: return "VideoDecodeH264DpbSlotInfoKHR"; case StructureType::eTextureLodGatherFormatPropertiesAMD: return "TextureLodGatherFormatPropertiesAMD"; - case StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR: return "RenderingFragmentShadingRateAttachmentInfoKHR"; - case StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT: return "RenderingFragmentDensityMapAttachmentInfoEXT"; - case StructureType::eAttachmentSampleCountInfoAMD: return "AttachmentSampleCountInfoAMD"; - case StructureType::eMultiviewPerViewAttributesInfoNVX: 
return "MultiviewPerViewAttributesInfoNVX"; #if defined( VK_USE_PLATFORM_GGP ) case StructureType::eStreamDescriptorSurfaceCreateInfoGGP: return "StreamDescriptorSurfaceCreateInfoGGP"; #endif /*VK_USE_PLATFORM_GGP*/ @@ -3412,9 +4270,6 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_VI_NN*/ case StructureType::eImageViewAstcDecodeModeEXT: return "ImageViewAstcDecodeModeEXT"; case StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT: return "PhysicalDeviceAstcDecodeFeaturesEXT"; - case StructureType::ePipelineRobustnessCreateInfoEXT: return "PipelineRobustnessCreateInfoEXT"; - case StructureType::ePhysicalDevicePipelineRobustnessFeaturesEXT: return "PhysicalDevicePipelineRobustnessFeaturesEXT"; - case StructureType::ePhysicalDevicePipelineRobustnessPropertiesEXT: return "PhysicalDevicePipelineRobustnessPropertiesEXT"; #if defined( VK_USE_PLATFORM_WIN32_KHR ) case StructureType::eImportMemoryWin32HandleInfoKHR: return "ImportMemoryWin32HandleInfoKHR"; case StructureType::eExportMemoryWin32HandleInfoKHR: return "ExportMemoryWin32HandleInfoKHR"; @@ -3433,7 +4288,6 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_WIN32_KHR*/ case StructureType::eImportSemaphoreFdInfoKHR: return "ImportSemaphoreFdInfoKHR"; case StructureType::eSemaphoreGetFdInfoKHR: return "SemaphoreGetFdInfoKHR"; - case StructureType::ePhysicalDevicePushDescriptorPropertiesKHR: return "PhysicalDevicePushDescriptorPropertiesKHR"; case StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT: return "CommandBufferInheritanceConditionalRenderingInfoEXT"; case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT: return "PhysicalDeviceConditionalRenderingFeaturesEXT"; case StructureType::eConditionalRenderingBeginInfoEXT: return "ConditionalRenderingBeginInfoEXT"; @@ -3446,6 +4300,7 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eSwapchainCounterCreateInfoEXT: return "SwapchainCounterCreateInfoEXT"; case StructureType::ePresentTimesInfoGOOGLE: return "PresentTimesInfoGOOGLE"; case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX: return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX"; + case StructureType::eMultiviewPerViewAttributesInfoNVX: return "MultiviewPerViewAttributesInfoNVX"; case StructureType::ePipelineViewportSwizzleStateCreateInfoNV: return "PipelineViewportSwizzleStateCreateInfoNV"; case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT: return "PhysicalDeviceDiscardRectanglePropertiesEXT"; case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT: return "PipelineDiscardRectangleStateCreateInfoEXT"; @@ -3454,6 +4309,7 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT: return "PhysicalDeviceDepthClipEnableFeaturesEXT"; case StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT: return "PipelineRasterizationDepthClipStateCreateInfoEXT"; case StructureType::eHdrMetadataEXT: return "HdrMetadataEXT"; + case StructureType::ePhysicalDeviceRelaxedLineRasterizationFeaturesIMG: return "PhysicalDeviceRelaxedLineRasterizationFeaturesIMG"; case StructureType::eSharedPresentSurfaceCapabilitiesKHR: return "SharedPresentSurfaceCapabilitiesKHR"; #if defined( VK_USE_PLATFORM_WIN32_KHR ) case StructureType::eImportFenceWin32HandleInfoKHR: return "ImportFenceWin32HandleInfoKHR"; @@ -3497,6 +4353,14 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eExternalFormatANDROID: return "ExternalFormatANDROID"; case StructureType::eAndroidHardwareBufferFormatProperties2ANDROID: return 
"AndroidHardwareBufferFormatProperties2ANDROID"; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX: return "PhysicalDeviceShaderEnqueueFeaturesAMDX"; + case StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX: return "PhysicalDeviceShaderEnqueuePropertiesAMDX"; + case StructureType::eExecutionGraphPipelineScratchSizeAMDX: return "ExecutionGraphPipelineScratchSizeAMDX"; + case StructureType::eExecutionGraphPipelineCreateInfoAMDX: return "ExecutionGraphPipelineCreateInfoAMDX"; + case StructureType::ePipelineShaderStageNodeCreateInfoAMDX: return "PipelineShaderStageNodeCreateInfoAMDX"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::eAttachmentSampleCountInfoAMD: return "AttachmentSampleCountInfoAMD"; case StructureType::eSampleLocationsInfoEXT: return "SampleLocationsInfoEXT"; case StructureType::eRenderPassSampleLocationsBeginInfoEXT: return "RenderPassSampleLocationsBeginInfoEXT"; case StructureType::ePipelineSampleLocationsStateCreateInfoEXT: return "PipelineSampleLocationsStateCreateInfoEXT"; @@ -3566,27 +4430,18 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT: return "PhysicalDeviceExternalMemoryHostPropertiesEXT"; case StructureType::ePhysicalDeviceShaderClockFeaturesKHR: return "PhysicalDeviceShaderClockFeaturesKHR"; case StructureType::ePipelineCompilerControlCreateInfoAMD: return "PipelineCompilerControlCreateInfoAMD"; - case StructureType::eCalibratedTimestampInfoEXT: return "CalibratedTimestampInfoEXT"; case StructureType::ePhysicalDeviceShaderCorePropertiesAMD: return "PhysicalDeviceShaderCorePropertiesAMD"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - case StructureType::eVideoDecodeH265CapabilitiesEXT: return "VideoDecodeH265CapabilitiesEXT"; - case StructureType::eVideoDecodeH265SessionParametersCreateInfoEXT: return "VideoDecodeH265SessionParametersCreateInfoEXT"; - case StructureType::eVideoDecodeH265SessionParametersAddInfoEXT: return "VideoDecodeH265SessionParametersAddInfoEXT"; - case StructureType::eVideoDecodeH265ProfileInfoEXT: return "VideoDecodeH265ProfileInfoEXT"; - case StructureType::eVideoDecodeH265PictureInfoEXT: return "VideoDecodeH265PictureInfoEXT"; - case StructureType::eVideoDecodeH265DpbSlotInfoEXT: return "VideoDecodeH265DpbSlotInfoEXT"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - case StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR: return "DeviceQueueGlobalPriorityCreateInfoKHR"; - case StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR: return "PhysicalDeviceGlobalPriorityQueryFeaturesKHR"; - case StructureType::eQueueFamilyGlobalPriorityPropertiesKHR: return "QueueFamilyGlobalPriorityPropertiesKHR"; + case StructureType::eVideoDecodeH265CapabilitiesKHR: return "VideoDecodeH265CapabilitiesKHR"; + case StructureType::eVideoDecodeH265SessionParametersCreateInfoKHR: return "VideoDecodeH265SessionParametersCreateInfoKHR"; + case StructureType::eVideoDecodeH265SessionParametersAddInfoKHR: return "VideoDecodeH265SessionParametersAddInfoKHR"; + case StructureType::eVideoDecodeH265ProfileInfoKHR: return "VideoDecodeH265ProfileInfoKHR"; + case StructureType::eVideoDecodeH265PictureInfoKHR: return "VideoDecodeH265PictureInfoKHR"; + case StructureType::eVideoDecodeH265DpbSlotInfoKHR: return "VideoDecodeH265DpbSlotInfoKHR"; case StructureType::eDeviceMemoryOverallocationCreateInfoAMD: return "DeviceMemoryOverallocationCreateInfoAMD"; case 
StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT: return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT"; - case StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT: return "PipelineVertexInputDivisorStateCreateInfoEXT"; - case StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT: return "PhysicalDeviceVertexAttributeDivisorFeaturesEXT"; #if defined( VK_USE_PLATFORM_GGP ) case StructureType::ePresentFrameTokenGGP: return "PresentFrameTokenGGP"; #endif /*VK_USE_PLATFORM_GGP*/ - case StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV: return "PhysicalDeviceComputeShaderDerivativesFeaturesNV"; case StructureType::ePhysicalDeviceMeshShaderFeaturesNV: return "PhysicalDeviceMeshShaderFeaturesNV"; case StructureType::ePhysicalDeviceMeshShaderPropertiesNV: return "PhysicalDeviceMeshShaderPropertiesNV"; case StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV: return "PhysicalDeviceShaderImageFootprintFeaturesNV"; @@ -3594,6 +4449,8 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV: return "PhysicalDeviceExclusiveScissorFeaturesNV"; case StructureType::eCheckpointDataNV: return "CheckpointDataNV"; case StructureType::eQueueFamilyCheckpointPropertiesNV: return "QueueFamilyCheckpointPropertiesNV"; + case StructureType::eQueueFamilyCheckpointProperties2NV: return "QueueFamilyCheckpointProperties2NV"; + case StructureType::eCheckpointData2NV: return "CheckpointData2NV"; case StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL: return "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL"; case StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL: return "QueryPoolPerformanceQueryCreateInfoINTEL"; case StructureType::eInitializePerformanceApiInfoINTEL: return "InitializePerformanceApiInfoINTEL"; @@ -3613,14 +4470,17 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT: return "PhysicalDeviceFragmentDensityMapFeaturesEXT"; case StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT: return "PhysicalDeviceFragmentDensityMapPropertiesEXT"; case StructureType::eRenderPassFragmentDensityMapCreateInfoEXT: return "RenderPassFragmentDensityMapCreateInfoEXT"; + case StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT: return "RenderingFragmentDensityMapAttachmentInfoEXT"; case StructureType::eFragmentShadingRateAttachmentInfoKHR: return "FragmentShadingRateAttachmentInfoKHR"; case StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR: return "PipelineFragmentShadingRateStateCreateInfoKHR"; case StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR: return "PhysicalDeviceFragmentShadingRatePropertiesKHR"; case StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR: return "PhysicalDeviceFragmentShadingRateFeaturesKHR"; case StructureType::ePhysicalDeviceFragmentShadingRateKHR: return "PhysicalDeviceFragmentShadingRateKHR"; + case StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR: return "RenderingFragmentShadingRateAttachmentInfoKHR"; case StructureType::ePhysicalDeviceShaderCoreProperties2AMD: return "PhysicalDeviceShaderCoreProperties2AMD"; case StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD: return "PhysicalDeviceCoherentMemoryFeaturesAMD"; case StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT: return "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT"; + case StructureType::ePhysicalDeviceShaderQuadControlFeaturesKHR: return 
"PhysicalDeviceShaderQuadControlFeaturesKHR"; case StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT: return "PhysicalDeviceMemoryBudgetPropertiesEXT"; case StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT: return "PhysicalDeviceMemoryPriorityFeaturesEXT"; case StructureType::eMemoryPriorityAllocateInfoEXT: return "MemoryPriorityAllocateInfoEXT"; @@ -3647,11 +4507,7 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT: return "SurfaceFullScreenExclusiveWin32InfoEXT"; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ case StructureType::eHeadlessSurfaceCreateInfoEXT: return "HeadlessSurfaceCreateInfoEXT"; - case StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT: return "PhysicalDeviceLineRasterizationFeaturesEXT"; - case StructureType::ePipelineRasterizationLineStateCreateInfoEXT: return "PipelineRasterizationLineStateCreateInfoEXT"; - case StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT: return "PhysicalDeviceLineRasterizationPropertiesEXT"; case StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT: return "PhysicalDeviceShaderAtomicFloatFeaturesEXT"; - case StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT: return "PhysicalDeviceIndexTypeUint8FeaturesEXT"; case StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT: return "PhysicalDeviceExtendedDynamicStateFeaturesEXT"; case StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR: return "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR"; case StructureType::ePipelineInfoKHR: return "PipelineInfoKHR"; @@ -3659,7 +4515,19 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePipelineExecutableInfoKHR: return "PipelineExecutableInfoKHR"; case StructureType::ePipelineExecutableStatisticKHR: return "PipelineExecutableStatisticKHR"; case StructureType::ePipelineExecutableInternalRepresentationKHR: return "PipelineExecutableInternalRepresentationKHR"; + case StructureType::ePhysicalDeviceMapMemoryPlacedFeaturesEXT: return "PhysicalDeviceMapMemoryPlacedFeaturesEXT"; + case StructureType::ePhysicalDeviceMapMemoryPlacedPropertiesEXT: return "PhysicalDeviceMapMemoryPlacedPropertiesEXT"; + case StructureType::eMemoryMapPlacedInfoEXT: return "MemoryMapPlacedInfoEXT"; case StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT: return "PhysicalDeviceShaderAtomicFloat2FeaturesEXT"; + case StructureType::eSurfacePresentModeEXT: return "SurfacePresentModeEXT"; + case StructureType::eSurfacePresentScalingCapabilitiesEXT: return "SurfacePresentScalingCapabilitiesEXT"; + case StructureType::eSurfacePresentModeCompatibilityEXT: return "SurfacePresentModeCompatibilityEXT"; + case StructureType::ePhysicalDeviceSwapchainMaintenance1FeaturesEXT: return "PhysicalDeviceSwapchainMaintenance1FeaturesEXT"; + case StructureType::eSwapchainPresentFenceInfoEXT: return "SwapchainPresentFenceInfoEXT"; + case StructureType::eSwapchainPresentModesCreateInfoEXT: return "SwapchainPresentModesCreateInfoEXT"; + case StructureType::eSwapchainPresentModeInfoEXT: return "SwapchainPresentModeInfoEXT"; + case StructureType::eSwapchainPresentScalingCreateInfoEXT: return "SwapchainPresentScalingCreateInfoEXT"; + case StructureType::eReleaseSwapchainImagesInfoEXT: return "ReleaseSwapchainImagesInfoEXT"; case StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV: return "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV"; case StructureType::eGraphicsShaderGroupCreateInfoNV: return "GraphicsShaderGroupCreateInfoNV"; case 
StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV: return "GraphicsPipelineShaderGroupsCreateInfoNV"; @@ -3673,6 +4541,9 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT: return "PhysicalDeviceTexelBufferAlignmentFeaturesEXT"; case StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM: return "CommandBufferInheritanceRenderPassTransformInfoQCOM"; case StructureType::eRenderPassTransformBeginInfoQCOM: return "RenderPassTransformBeginInfoQCOM"; + case StructureType::ePhysicalDeviceDepthBiasControlFeaturesEXT: return "PhysicalDeviceDepthBiasControlFeaturesEXT"; + case StructureType::eDepthBiasInfoEXT: return "DepthBiasInfoEXT"; + case StructureType::eDepthBiasRepresentationInfoEXT: return "DepthBiasRepresentationInfoEXT"; case StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT: return "PhysicalDeviceDeviceMemoryReportFeaturesEXT"; case StructureType::eDeviceDeviceMemoryReportCreateInfoEXT: return "DeviceDeviceMemoryReportCreateInfoEXT"; case StructureType::eDeviceMemoryReportCallbackDataEXT: return "DeviceMemoryReportCallbackDataEXT"; @@ -3682,17 +4553,32 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT: return "PhysicalDeviceCustomBorderColorPropertiesEXT"; case StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT: return "PhysicalDeviceCustomBorderColorFeaturesEXT"; case StructureType::ePipelineLibraryCreateInfoKHR: return "PipelineLibraryCreateInfoKHR"; + case StructureType::ePhysicalDevicePresentBarrierFeaturesNV: return "PhysicalDevicePresentBarrierFeaturesNV"; + case StructureType::eSurfaceCapabilitiesPresentBarrierNV: return "SurfaceCapabilitiesPresentBarrierNV"; + case StructureType::eSwapchainPresentBarrierCreateInfoNV: return "SwapchainPresentBarrierCreateInfoNV"; case StructureType::ePresentIdKHR: return "PresentIdKHR"; case StructureType::ePhysicalDevicePresentIdFeaturesKHR: return "PhysicalDevicePresentIdFeaturesKHR"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) case StructureType::eVideoEncodeInfoKHR: return "VideoEncodeInfoKHR"; case StructureType::eVideoEncodeRateControlInfoKHR: return "VideoEncodeRateControlInfoKHR"; case StructureType::eVideoEncodeRateControlLayerInfoKHR: return "VideoEncodeRateControlLayerInfoKHR"; case StructureType::eVideoEncodeCapabilitiesKHR: return "VideoEncodeCapabilitiesKHR"; case StructureType::eVideoEncodeUsageInfoKHR: return "VideoEncodeUsageInfoKHR"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::eQueryPoolVideoEncodeFeedbackCreateInfoKHR: return "QueryPoolVideoEncodeFeedbackCreateInfoKHR"; + case StructureType::ePhysicalDeviceVideoEncodeQualityLevelInfoKHR: return "PhysicalDeviceVideoEncodeQualityLevelInfoKHR"; + case StructureType::eVideoEncodeQualityLevelPropertiesKHR: return "VideoEncodeQualityLevelPropertiesKHR"; + case StructureType::eVideoEncodeQualityLevelInfoKHR: return "VideoEncodeQualityLevelInfoKHR"; + case StructureType::eVideoEncodeSessionParametersGetInfoKHR: return "VideoEncodeSessionParametersGetInfoKHR"; + case StructureType::eVideoEncodeSessionParametersFeedbackInfoKHR: return "VideoEncodeSessionParametersFeedbackInfoKHR"; case StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV: return "PhysicalDeviceDiagnosticsConfigFeaturesNV"; case StructureType::eDeviceDiagnosticsConfigCreateInfoNV: return "DeviceDiagnosticsConfigCreateInfoNV"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case StructureType::eCudaModuleCreateInfoNV: return "CudaModuleCreateInfoNV"; + case 
StructureType::eCudaFunctionCreateInfoNV: return "CudaFunctionCreateInfoNV"; + case StructureType::eCudaLaunchInfoNV: return "CudaLaunchInfoNV"; + case StructureType::ePhysicalDeviceCudaKernelLaunchFeaturesNV: return "PhysicalDeviceCudaKernelLaunchFeaturesNV"; + case StructureType::ePhysicalDeviceCudaKernelLaunchPropertiesNV: return "PhysicalDeviceCudaKernelLaunchPropertiesNV"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::eQueryLowLatencySupportNV: return "QueryLowLatencySupportNV"; #if defined( VK_USE_PLATFORM_METAL_EXT ) case StructureType::eExportMetalObjectCreateInfoEXT: return "ExportMetalObjectCreateInfoEXT"; case StructureType::eExportMetalObjectsInfoEXT: return "ExportMetalObjectsInfoEXT"; @@ -3707,8 +4593,19 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eExportMetalSharedEventInfoEXT: return "ExportMetalSharedEventInfoEXT"; case StructureType::eImportMetalSharedEventInfoEXT: return "ImportMetalSharedEventInfoEXT"; #endif /*VK_USE_PLATFORM_METAL_EXT*/ - case StructureType::eQueueFamilyCheckpointProperties2NV: return "QueueFamilyCheckpointProperties2NV"; - case StructureType::eCheckpointData2NV: return "CheckpointData2NV"; + case StructureType::ePhysicalDeviceDescriptorBufferPropertiesEXT: return "PhysicalDeviceDescriptorBufferPropertiesEXT"; + case StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT: return "PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT"; + case StructureType::ePhysicalDeviceDescriptorBufferFeaturesEXT: return "PhysicalDeviceDescriptorBufferFeaturesEXT"; + case StructureType::eDescriptorAddressInfoEXT: return "DescriptorAddressInfoEXT"; + case StructureType::eDescriptorGetInfoEXT: return "DescriptorGetInfoEXT"; + case StructureType::eBufferCaptureDescriptorDataInfoEXT: return "BufferCaptureDescriptorDataInfoEXT"; + case StructureType::eImageCaptureDescriptorDataInfoEXT: return "ImageCaptureDescriptorDataInfoEXT"; + case StructureType::eImageViewCaptureDescriptorDataInfoEXT: return "ImageViewCaptureDescriptorDataInfoEXT"; + case StructureType::eSamplerCaptureDescriptorDataInfoEXT: return "SamplerCaptureDescriptorDataInfoEXT"; + case StructureType::eOpaqueCaptureDescriptorDataCreateInfoEXT: return "OpaqueCaptureDescriptorDataCreateInfoEXT"; + case StructureType::eDescriptorBufferBindingInfoEXT: return "DescriptorBufferBindingInfoEXT"; + case StructureType::eDescriptorBufferBindingPushDescriptorBufferHandleEXT: return "DescriptorBufferBindingPushDescriptorBufferHandleEXT"; + case StructureType::eAccelerationStructureCaptureDescriptorDataInfoEXT: return "AccelerationStructureCaptureDescriptorDataInfoEXT"; case StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT: return "PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT"; case StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT: return "PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT"; case StructureType::eGraphicsPipelineLibraryCreateInfoEXT: return "GraphicsPipelineLibraryCreateInfoEXT"; @@ -3731,11 +4628,12 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR: return "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR"; case StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT: return "PhysicalDeviceImageCompressionControlFeaturesEXT"; case StructureType::eImageCompressionControlEXT: return "ImageCompressionControlEXT"; - case StructureType::eSubresourceLayout2EXT: return "SubresourceLayout2EXT"; - case StructureType::eImageSubresource2EXT: return 
"ImageSubresource2EXT"; case StructureType::eImageCompressionPropertiesEXT: return "ImageCompressionPropertiesEXT"; case StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT: return "PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT"; case StructureType::ePhysicalDevice4444FormatsFeaturesEXT: return "PhysicalDevice4444FormatsFeaturesEXT"; + case StructureType::ePhysicalDeviceFaultFeaturesEXT: return "PhysicalDeviceFaultFeaturesEXT"; + case StructureType::eDeviceFaultCountsEXT: return "DeviceFaultCountsEXT"; + case StructureType::eDeviceFaultInfoEXT: return "DeviceFaultInfoEXT"; case StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT: return "PhysicalDeviceRgba10X6FormatsFeaturesEXT"; #if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) case StructureType::eDirectfbSurfaceCreateInfoEXT: return "DirectfbSurfaceCreateInfoEXT"; @@ -3744,9 +4642,12 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eVertexInputBindingDescription2EXT: return "VertexInputBindingDescription2EXT"; case StructureType::eVertexInputAttributeDescription2EXT: return "VertexInputAttributeDescription2EXT"; case StructureType::ePhysicalDeviceDrmPropertiesEXT: return "PhysicalDeviceDrmPropertiesEXT"; + case StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT: return "PhysicalDeviceAddressBindingReportFeaturesEXT"; + case StructureType::eDeviceAddressBindingCallbackDataEXT: return "DeviceAddressBindingCallbackDataEXT"; case StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT: return "PhysicalDeviceDepthClipControlFeaturesEXT"; case StructureType::ePipelineViewportDepthClipControlCreateInfoEXT: return "PipelineViewportDepthClipControlCreateInfoEXT"; case StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT: return "PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT"; + case StructureType::ePhysicalDevicePresentModeFifoLatestReadyFeaturesEXT: return "PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT"; #if defined( VK_USE_PLATFORM_FUCHSIA ) case StructureType::eImportMemoryZirconHandleInfoFUCHSIA: return "ImportMemoryZirconHandleInfoFUCHSIA"; case StructureType::eMemoryZirconHandlePropertiesFUCHSIA: return "MemoryZirconHandlePropertiesFUCHSIA"; @@ -3772,6 +4673,8 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV: return "PhysicalDeviceExternalMemoryRdmaFeaturesNV"; case StructureType::ePipelinePropertiesIdentifierEXT: return "PipelinePropertiesIdentifierEXT"; case StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT: return "PhysicalDevicePipelinePropertiesFeaturesEXT"; + case StructureType::ePhysicalDeviceFrameBoundaryFeaturesEXT: return "PhysicalDeviceFrameBoundaryFeaturesEXT"; + case StructureType::eFrameBoundaryEXT: return "FrameBoundaryEXT"; case StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT: return "PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT"; case StructureType::eSubpassResolvePerformanceQueryEXT: return "SubpassResolvePerformanceQueryEXT"; case StructureType::eMultisampledRenderToSingleSampledInfoEXT: return "MultisampledRenderToSingleSampledInfoEXT"; @@ -3788,39 +4691,264 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceMultiDrawFeaturesEXT: return "PhysicalDeviceMultiDrawFeaturesEXT"; case StructureType::ePhysicalDeviceMultiDrawPropertiesEXT: return "PhysicalDeviceMultiDrawPropertiesEXT"; case StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT: return "PhysicalDeviceImage2DViewOf3DFeaturesEXT"; + case 
StructureType::ePhysicalDeviceShaderTileImageFeaturesEXT: return "PhysicalDeviceShaderTileImageFeaturesEXT"; + case StructureType::ePhysicalDeviceShaderTileImagePropertiesEXT: return "PhysicalDeviceShaderTileImagePropertiesEXT"; + case StructureType::eMicromapBuildInfoEXT: return "MicromapBuildInfoEXT"; + case StructureType::eMicromapVersionInfoEXT: return "MicromapVersionInfoEXT"; + case StructureType::eCopyMicromapInfoEXT: return "CopyMicromapInfoEXT"; + case StructureType::eCopyMicromapToMemoryInfoEXT: return "CopyMicromapToMemoryInfoEXT"; + case StructureType::eCopyMemoryToMicromapInfoEXT: return "CopyMemoryToMicromapInfoEXT"; + case StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT: return "PhysicalDeviceOpacityMicromapFeaturesEXT"; + case StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT: return "PhysicalDeviceOpacityMicromapPropertiesEXT"; + case StructureType::eMicromapCreateInfoEXT: return "MicromapCreateInfoEXT"; + case StructureType::eMicromapBuildSizesInfoEXT: return "MicromapBuildSizesInfoEXT"; + case StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT: return "AccelerationStructureTrianglesOpacityMicromapEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case StructureType::ePhysicalDeviceDisplacementMicromapFeaturesNV: return "PhysicalDeviceDisplacementMicromapFeaturesNV"; + case StructureType::ePhysicalDeviceDisplacementMicromapPropertiesNV: return "PhysicalDeviceDisplacementMicromapPropertiesNV"; + case StructureType::eAccelerationStructureTrianglesDisplacementMicromapNV: return "AccelerationStructureTrianglesDisplacementMicromapNV"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI: return "PhysicalDeviceClusterCullingShaderFeaturesHUAWEI"; + case StructureType::ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI: return "PhysicalDeviceClusterCullingShaderPropertiesHUAWEI"; + case StructureType::ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI: return "PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI"; case StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT: return "PhysicalDeviceBorderColorSwizzleFeaturesEXT"; case StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT: return "SamplerBorderColorComponentMappingCreateInfoEXT"; case StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT: return "PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT"; + case StructureType::ePhysicalDeviceShaderCorePropertiesARM: return "PhysicalDeviceShaderCorePropertiesARM"; + case StructureType::eDeviceQueueShaderCoreControlCreateInfoARM: return "DeviceQueueShaderCoreControlCreateInfoARM"; + case StructureType::ePhysicalDeviceSchedulingControlsFeaturesARM: return "PhysicalDeviceSchedulingControlsFeaturesARM"; + case StructureType::ePhysicalDeviceSchedulingControlsPropertiesARM: return "PhysicalDeviceSchedulingControlsPropertiesARM"; + case StructureType::ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT: return "PhysicalDeviceImageSlicedViewOf3DFeaturesEXT"; + case StructureType::eImageViewSlicedCreateInfoEXT: return "ImageViewSlicedCreateInfoEXT"; case StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE: return "PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE"; case StructureType::eDescriptorSetBindingReferenceVALVE: return "DescriptorSetBindingReferenceVALVE"; case StructureType::eDescriptorSetLayoutHostMappingInfoVALVE: return "DescriptorSetLayoutHostMappingInfoVALVE"; - case StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesEXT: return 
"PhysicalDeviceDepthClampZeroOneFeaturesEXT"; case StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT: return "PhysicalDeviceNonSeamlessCubeMapFeaturesEXT"; + case StructureType::ePhysicalDeviceRenderPassStripedFeaturesARM: return "PhysicalDeviceRenderPassStripedFeaturesARM"; + case StructureType::ePhysicalDeviceRenderPassStripedPropertiesARM: return "PhysicalDeviceRenderPassStripedPropertiesARM"; + case StructureType::eRenderPassStripeBeginInfoARM: return "RenderPassStripeBeginInfoARM"; + case StructureType::eRenderPassStripeInfoARM: return "RenderPassStripeInfoARM"; + case StructureType::eRenderPassStripeSubmitInfoARM: return "RenderPassStripeSubmitInfoARM"; case StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM: return "PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM"; case StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM: return "PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM"; case StructureType::eSubpassFragmentDensityMapOffsetEndInfoQCOM: return "SubpassFragmentDensityMapOffsetEndInfoQCOM"; + case StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV: return "PhysicalDeviceCopyMemoryIndirectFeaturesNV"; + case StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesNV: return "PhysicalDeviceCopyMemoryIndirectPropertiesNV"; + case StructureType::ePhysicalDeviceMemoryDecompressionFeaturesNV: return "PhysicalDeviceMemoryDecompressionFeaturesNV"; + case StructureType::ePhysicalDeviceMemoryDecompressionPropertiesNV: return "PhysicalDeviceMemoryDecompressionPropertiesNV"; + case StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV: return "PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV"; + case StructureType::eComputePipelineIndirectBufferInfoNV: return "ComputePipelineIndirectBufferInfoNV"; + case StructureType::ePipelineIndirectDeviceAddressInfoNV: return "PipelineIndirectDeviceAddressInfoNV"; + case StructureType::ePhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV: return "PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV"; + case StructureType::eAccelerationStructureGeometryLinearSweptSpheresDataNV: return "AccelerationStructureGeometryLinearSweptSpheresDataNV"; + case StructureType::eAccelerationStructureGeometrySpheresDataNV: return "AccelerationStructureGeometrySpheresDataNV"; case StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV: return "PhysicalDeviceLinearColorAttachmentFeaturesNV"; + case StructureType::ePhysicalDeviceShaderMaximalReconvergenceFeaturesKHR: return "PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR"; case StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT: return "PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT"; case StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM: return "PhysicalDeviceImageProcessingFeaturesQCOM"; case StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM: return "PhysicalDeviceImageProcessingPropertiesQCOM"; case StructureType::eImageViewSampleWeightCreateInfoQCOM: return "ImageViewSampleWeightCreateInfoQCOM"; + case StructureType::ePhysicalDeviceNestedCommandBufferFeaturesEXT: return "PhysicalDeviceNestedCommandBufferFeaturesEXT"; + case StructureType::ePhysicalDeviceNestedCommandBufferPropertiesEXT: return "PhysicalDeviceNestedCommandBufferPropertiesEXT"; + case StructureType::eExternalMemoryAcquireUnmodifiedEXT: return "ExternalMemoryAcquireUnmodifiedEXT"; + case StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT: return "PhysicalDeviceExtendedDynamicState3FeaturesEXT"; + 
case StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT: return "PhysicalDeviceExtendedDynamicState3PropertiesEXT"; case StructureType::ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT: return "PhysicalDeviceSubpassMergeFeedbackFeaturesEXT"; case StructureType::eRenderPassCreationControlEXT: return "RenderPassCreationControlEXT"; case StructureType::eRenderPassCreationFeedbackCreateInfoEXT: return "RenderPassCreationFeedbackCreateInfoEXT"; case StructureType::eRenderPassSubpassFeedbackCreateInfoEXT: return "RenderPassSubpassFeedbackCreateInfoEXT"; + case StructureType::eDirectDriverLoadingInfoLUNARG: return "DirectDriverLoadingInfoLUNARG"; + case StructureType::eDirectDriverLoadingListLUNARG: return "DirectDriverLoadingListLUNARG"; case StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT: return "PhysicalDeviceShaderModuleIdentifierFeaturesEXT"; case StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT: return "PhysicalDeviceShaderModuleIdentifierPropertiesEXT"; case StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT: return "PipelineShaderStageModuleIdentifierCreateInfoEXT"; case StructureType::eShaderModuleIdentifierEXT: return "ShaderModuleIdentifierEXT"; case StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT: return "PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT"; + case StructureType::ePhysicalDeviceOpticalFlowFeaturesNV: return "PhysicalDeviceOpticalFlowFeaturesNV"; + case StructureType::ePhysicalDeviceOpticalFlowPropertiesNV: return "PhysicalDeviceOpticalFlowPropertiesNV"; + case StructureType::eOpticalFlowImageFormatInfoNV: return "OpticalFlowImageFormatInfoNV"; + case StructureType::eOpticalFlowImageFormatPropertiesNV: return "OpticalFlowImageFormatPropertiesNV"; + case StructureType::eOpticalFlowSessionCreateInfoNV: return "OpticalFlowSessionCreateInfoNV"; + case StructureType::eOpticalFlowExecuteInfoNV: return "OpticalFlowExecuteInfoNV"; + case StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV: return "OpticalFlowSessionCreatePrivateDataInfoNV"; case StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT: return "PhysicalDeviceLegacyDitheringFeaturesEXT"; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + case StructureType::ePhysicalDeviceExternalFormatResolveFeaturesANDROID: return "PhysicalDeviceExternalFormatResolveFeaturesANDROID"; + case StructureType::ePhysicalDeviceExternalFormatResolvePropertiesANDROID: return "PhysicalDeviceExternalFormatResolvePropertiesANDROID"; + case StructureType::eAndroidHardwareBufferFormatResolvePropertiesANDROID: return "AndroidHardwareBufferFormatResolvePropertiesANDROID"; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + case StructureType::ePhysicalDeviceAntiLagFeaturesAMD: return "PhysicalDeviceAntiLagFeaturesAMD"; + case StructureType::eAntiLagDataAMD: return "AntiLagDataAMD"; + case StructureType::eAntiLagPresentationInfoAMD: return "AntiLagPresentationInfoAMD"; + case StructureType::ePhysicalDeviceRayTracingPositionFetchFeaturesKHR: return "PhysicalDeviceRayTracingPositionFetchFeaturesKHR"; + case StructureType::ePhysicalDeviceShaderObjectFeaturesEXT: return "PhysicalDeviceShaderObjectFeaturesEXT"; + case StructureType::ePhysicalDeviceShaderObjectPropertiesEXT: return "PhysicalDeviceShaderObjectPropertiesEXT"; + case StructureType::eShaderCreateInfoEXT: return "ShaderCreateInfoEXT"; + case StructureType::ePhysicalDevicePipelineBinaryFeaturesKHR: return "PhysicalDevicePipelineBinaryFeaturesKHR"; + case StructureType::ePipelineBinaryCreateInfoKHR: 
return "PipelineBinaryCreateInfoKHR"; + case StructureType::ePipelineBinaryInfoKHR: return "PipelineBinaryInfoKHR"; + case StructureType::ePipelineBinaryKeyKHR: return "PipelineBinaryKeyKHR"; + case StructureType::ePhysicalDevicePipelineBinaryPropertiesKHR: return "PhysicalDevicePipelineBinaryPropertiesKHR"; + case StructureType::eReleaseCapturedPipelineDataInfoKHR: return "ReleaseCapturedPipelineDataInfoKHR"; + case StructureType::ePipelineBinaryDataInfoKHR: return "PipelineBinaryDataInfoKHR"; + case StructureType::ePipelineCreateInfoKHR: return "PipelineCreateInfoKHR"; + case StructureType::eDevicePipelineBinaryInternalCacheControlKHR: return "DevicePipelineBinaryInternalCacheControlKHR"; + case StructureType::ePipelineBinaryHandlesInfoKHR: return "PipelineBinaryHandlesInfoKHR"; case StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM: return "PhysicalDeviceTilePropertiesFeaturesQCOM"; case StructureType::eTilePropertiesQCOM: return "TilePropertiesQCOM"; case StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC: return "PhysicalDeviceAmigoProfilingFeaturesSEC"; case StructureType::eAmigoProfilingSubmitInfoSEC: return "AmigoProfilingSubmitInfoSEC"; + case StructureType::ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM: return "PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM"; + case StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV: return "PhysicalDeviceRayTracingInvocationReorderFeaturesNV"; + case StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV: return "PhysicalDeviceRayTracingInvocationReorderPropertiesNV"; + case StructureType::ePhysicalDeviceCooperativeVectorFeaturesNV: return "PhysicalDeviceCooperativeVectorFeaturesNV"; + case StructureType::ePhysicalDeviceCooperativeVectorPropertiesNV: return "PhysicalDeviceCooperativeVectorPropertiesNV"; + case StructureType::eCooperativeVectorPropertiesNV: return "CooperativeVectorPropertiesNV"; + case StructureType::eConvertCooperativeVectorMatrixInfoNV: return "ConvertCooperativeVectorMatrixInfoNV"; + case StructureType::ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV: return "PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV"; + case StructureType::ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV: return "PhysicalDeviceExtendedSparseAddressSpacePropertiesNV"; case StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT: return "PhysicalDeviceMutableDescriptorTypeFeaturesEXT"; case StructureType::eMutableDescriptorTypeCreateInfoEXT: return "MutableDescriptorTypeCreateInfoEXT"; - default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + case StructureType::ePhysicalDeviceLegacyVertexAttributesFeaturesEXT: return "PhysicalDeviceLegacyVertexAttributesFeaturesEXT"; + case StructureType::ePhysicalDeviceLegacyVertexAttributesPropertiesEXT: return "PhysicalDeviceLegacyVertexAttributesPropertiesEXT"; + case StructureType::eLayerSettingsCreateInfoEXT: return "LayerSettingsCreateInfoEXT"; + case StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM: return "PhysicalDeviceShaderCoreBuiltinsFeaturesARM"; + case StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM: return "PhysicalDeviceShaderCoreBuiltinsPropertiesARM"; + case StructureType::ePhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT: return "PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT"; + case StructureType::ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT: return "PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT"; + case 
StructureType::eLatencySleepModeInfoNV: return "LatencySleepModeInfoNV"; + case StructureType::eLatencySleepInfoNV: return "LatencySleepInfoNV"; + case StructureType::eSetLatencyMarkerInfoNV: return "SetLatencyMarkerInfoNV"; + case StructureType::eGetLatencyMarkerInfoNV: return "GetLatencyMarkerInfoNV"; + case StructureType::eLatencyTimingsFrameReportNV: return "LatencyTimingsFrameReportNV"; + case StructureType::eLatencySubmissionPresentIdNV: return "LatencySubmissionPresentIdNV"; + case StructureType::eOutOfBandQueueTypeInfoNV: return "OutOfBandQueueTypeInfoNV"; + case StructureType::eSwapchainLatencyCreateInfoNV: return "SwapchainLatencyCreateInfoNV"; + case StructureType::eLatencySurfaceCapabilitiesNV: return "LatencySurfaceCapabilitiesNV"; + case StructureType::ePhysicalDeviceCooperativeMatrixFeaturesKHR: return "PhysicalDeviceCooperativeMatrixFeaturesKHR"; + case StructureType::eCooperativeMatrixPropertiesKHR: return "CooperativeMatrixPropertiesKHR"; + case StructureType::ePhysicalDeviceCooperativeMatrixPropertiesKHR: return "PhysicalDeviceCooperativeMatrixPropertiesKHR"; + case StructureType::ePhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM: return "PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM"; + case StructureType::eMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM: return "MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM"; + case StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesKHR: return "PhysicalDeviceComputeShaderDerivativesFeaturesKHR"; + case StructureType::ePhysicalDeviceComputeShaderDerivativesPropertiesKHR: return "PhysicalDeviceComputeShaderDerivativesPropertiesKHR"; + case StructureType::eVideoDecodeAv1CapabilitiesKHR: return "VideoDecodeAv1CapabilitiesKHR"; + case StructureType::eVideoDecodeAv1PictureInfoKHR: return "VideoDecodeAv1PictureInfoKHR"; + case StructureType::eVideoDecodeAv1ProfileInfoKHR: return "VideoDecodeAv1ProfileInfoKHR"; + case StructureType::eVideoDecodeAv1SessionParametersCreateInfoKHR: return "VideoDecodeAv1SessionParametersCreateInfoKHR"; + case StructureType::eVideoDecodeAv1DpbSlotInfoKHR: return "VideoDecodeAv1DpbSlotInfoKHR"; + case StructureType::eVideoEncodeAv1CapabilitiesKHR: return "VideoEncodeAv1CapabilitiesKHR"; + case StructureType::eVideoEncodeAv1SessionParametersCreateInfoKHR: return "VideoEncodeAv1SessionParametersCreateInfoKHR"; + case StructureType::eVideoEncodeAv1PictureInfoKHR: return "VideoEncodeAv1PictureInfoKHR"; + case StructureType::eVideoEncodeAv1DpbSlotInfoKHR: return "VideoEncodeAv1DpbSlotInfoKHR"; + case StructureType::ePhysicalDeviceVideoEncodeAv1FeaturesKHR: return "PhysicalDeviceVideoEncodeAv1FeaturesKHR"; + case StructureType::eVideoEncodeAv1ProfileInfoKHR: return "VideoEncodeAv1ProfileInfoKHR"; + case StructureType::eVideoEncodeAv1RateControlInfoKHR: return "VideoEncodeAv1RateControlInfoKHR"; + case StructureType::eVideoEncodeAv1RateControlLayerInfoKHR: return "VideoEncodeAv1RateControlLayerInfoKHR"; + case StructureType::eVideoEncodeAv1QualityLevelPropertiesKHR: return "VideoEncodeAv1QualityLevelPropertiesKHR"; + case StructureType::eVideoEncodeAv1SessionCreateInfoKHR: return "VideoEncodeAv1SessionCreateInfoKHR"; + case StructureType::eVideoEncodeAv1GopRemainingFrameInfoKHR: return "VideoEncodeAv1GopRemainingFrameInfoKHR"; + case StructureType::ePhysicalDeviceVideoMaintenance1FeaturesKHR: return "PhysicalDeviceVideoMaintenance1FeaturesKHR"; + case StructureType::eVideoInlineQueryInfoKHR: return "VideoInlineQueryInfoKHR"; + case 
StructureType::ePhysicalDevicePerStageDescriptorSetFeaturesNV: return "PhysicalDevicePerStageDescriptorSetFeaturesNV"; + case StructureType::ePhysicalDeviceImageProcessing2FeaturesQCOM: return "PhysicalDeviceImageProcessing2FeaturesQCOM"; + case StructureType::ePhysicalDeviceImageProcessing2PropertiesQCOM: return "PhysicalDeviceImageProcessing2PropertiesQCOM"; + case StructureType::eSamplerBlockMatchWindowCreateInfoQCOM: return "SamplerBlockMatchWindowCreateInfoQCOM"; + case StructureType::eSamplerCubicWeightsCreateInfoQCOM: return "SamplerCubicWeightsCreateInfoQCOM"; + case StructureType::ePhysicalDeviceCubicWeightsFeaturesQCOM: return "PhysicalDeviceCubicWeightsFeaturesQCOM"; + case StructureType::eBlitImageCubicWeightsInfoQCOM: return "BlitImageCubicWeightsInfoQCOM"; + case StructureType::ePhysicalDeviceYcbcrDegammaFeaturesQCOM: return "PhysicalDeviceYcbcrDegammaFeaturesQCOM"; + case StructureType::eSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM: return "SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM"; + case StructureType::ePhysicalDeviceCubicClampFeaturesQCOM: return "PhysicalDeviceCubicClampFeaturesQCOM"; + case StructureType::ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT: return "PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT"; +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + case StructureType::eScreenBufferPropertiesQNX: return "ScreenBufferPropertiesQNX"; + case StructureType::eScreenBufferFormatPropertiesQNX: return "ScreenBufferFormatPropertiesQNX"; + case StructureType::eImportScreenBufferInfoQNX: return "ImportScreenBufferInfoQNX"; + case StructureType::eExternalFormatQNX: return "ExternalFormatQNX"; + case StructureType::ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX: return "PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX"; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + case StructureType::ePhysicalDeviceLayeredDriverPropertiesMSFT: return "PhysicalDeviceLayeredDriverPropertiesMSFT"; + case StructureType::eCalibratedTimestampInfoKHR: return "CalibratedTimestampInfoKHR"; + case StructureType::eSetDescriptorBufferOffsetsInfoEXT: return "SetDescriptorBufferOffsetsInfoEXT"; + case StructureType::eBindDescriptorBufferEmbeddedSamplersInfoEXT: return "BindDescriptorBufferEmbeddedSamplersInfoEXT"; + case StructureType::ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV: return "PhysicalDeviceDescriptorPoolOverallocationFeaturesNV"; + case StructureType::eDisplaySurfaceStereoCreateInfoNV: return "DisplaySurfaceStereoCreateInfoNV"; + case StructureType::eDisplayModeStereoPropertiesNV: return "DisplayModeStereoPropertiesNV"; + case StructureType::eVideoEncodeQuantizationMapCapabilitiesKHR: return "VideoEncodeQuantizationMapCapabilitiesKHR"; + case StructureType::eVideoFormatQuantizationMapPropertiesKHR: return "VideoFormatQuantizationMapPropertiesKHR"; + case StructureType::eVideoEncodeQuantizationMapInfoKHR: return "VideoEncodeQuantizationMapInfoKHR"; + case StructureType::eVideoEncodeQuantizationMapSessionParametersCreateInfoKHR: return "VideoEncodeQuantizationMapSessionParametersCreateInfoKHR"; + case StructureType::ePhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR: return "PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR"; + case StructureType::eVideoEncodeH264QuantizationMapCapabilitiesKHR: return "VideoEncodeH264QuantizationMapCapabilitiesKHR"; + case StructureType::eVideoEncodeH265QuantizationMapCapabilitiesKHR: return "VideoEncodeH265QuantizationMapCapabilitiesKHR"; + case StructureType::eVideoFormatH265QuantizationMapPropertiesKHR: 
return "VideoFormatH265QuantizationMapPropertiesKHR"; + case StructureType::eVideoEncodeAv1QuantizationMapCapabilitiesKHR: return "VideoEncodeAv1QuantizationMapCapabilitiesKHR"; + case StructureType::eVideoFormatAv1QuantizationMapPropertiesKHR: return "VideoFormatAv1QuantizationMapPropertiesKHR"; + case StructureType::ePhysicalDeviceRawAccessChainsFeaturesNV: return "PhysicalDeviceRawAccessChainsFeaturesNV"; + case StructureType::ePhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR: return "PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR"; + case StructureType::ePhysicalDeviceCommandBufferInheritanceFeaturesNV: return "PhysicalDeviceCommandBufferInheritanceFeaturesNV"; + case StructureType::ePhysicalDeviceMaintenance7FeaturesKHR: return "PhysicalDeviceMaintenance7FeaturesKHR"; + case StructureType::ePhysicalDeviceMaintenance7PropertiesKHR: return "PhysicalDeviceMaintenance7PropertiesKHR"; + case StructureType::ePhysicalDeviceLayeredApiPropertiesListKHR: return "PhysicalDeviceLayeredApiPropertiesListKHR"; + case StructureType::ePhysicalDeviceLayeredApiPropertiesKHR: return "PhysicalDeviceLayeredApiPropertiesKHR"; + case StructureType::ePhysicalDeviceLayeredApiVulkanPropertiesKHR: return "PhysicalDeviceLayeredApiVulkanPropertiesKHR"; + case StructureType::ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV: return "PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV"; + case StructureType::ePhysicalDeviceShaderReplicatedCompositesFeaturesEXT: return "PhysicalDeviceShaderReplicatedCompositesFeaturesEXT"; + case StructureType::ePhysicalDeviceRayTracingValidationFeaturesNV: return "PhysicalDeviceRayTracingValidationFeaturesNV"; + case StructureType::ePhysicalDeviceClusterAccelerationStructureFeaturesNV: return "PhysicalDeviceClusterAccelerationStructureFeaturesNV"; + case StructureType::ePhysicalDeviceClusterAccelerationStructurePropertiesNV: return "PhysicalDeviceClusterAccelerationStructurePropertiesNV"; + case StructureType::eClusterAccelerationStructureClustersBottomLevelInputNV: return "ClusterAccelerationStructureClustersBottomLevelInputNV"; + case StructureType::eClusterAccelerationStructureTriangleClusterInputNV: return "ClusterAccelerationStructureTriangleClusterInputNV"; + case StructureType::eClusterAccelerationStructureMoveObjectsInputNV: return "ClusterAccelerationStructureMoveObjectsInputNV"; + case StructureType::eClusterAccelerationStructureInputInfoNV: return "ClusterAccelerationStructureInputInfoNV"; + case StructureType::eClusterAccelerationStructureCommandsInfoNV: return "ClusterAccelerationStructureCommandsInfoNV"; + case StructureType::eRayTracingPipelineClusterAccelerationStructureCreateInfoNV: return "RayTracingPipelineClusterAccelerationStructureCreateInfoNV"; + case StructureType::ePhysicalDevicePartitionedAccelerationStructureFeaturesNV: return "PhysicalDevicePartitionedAccelerationStructureFeaturesNV"; + case StructureType::ePhysicalDevicePartitionedAccelerationStructurePropertiesNV: return "PhysicalDevicePartitionedAccelerationStructurePropertiesNV"; + case StructureType::eWriteDescriptorSetPartitionedAccelerationStructureNV: return "WriteDescriptorSetPartitionedAccelerationStructureNV"; + case StructureType::ePartitionedAccelerationStructureInstancesInputNV: return "PartitionedAccelerationStructureInstancesInputNV"; + case StructureType::eBuildPartitionedAccelerationStructureInfoNV: return "BuildPartitionedAccelerationStructureInfoNV"; + case StructureType::ePartitionedAccelerationStructureFlagsNV: return "PartitionedAccelerationStructureFlagsNV"; + 
case StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesEXT: return "PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT"; + case StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesEXT: return "PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT"; + case StructureType::eGeneratedCommandsMemoryRequirementsInfoEXT: return "GeneratedCommandsMemoryRequirementsInfoEXT"; + case StructureType::eIndirectExecutionSetCreateInfoEXT: return "IndirectExecutionSetCreateInfoEXT"; + case StructureType::eGeneratedCommandsInfoEXT: return "GeneratedCommandsInfoEXT"; + case StructureType::eIndirectCommandsLayoutCreateInfoEXT: return "IndirectCommandsLayoutCreateInfoEXT"; + case StructureType::eIndirectCommandsLayoutTokenEXT: return "IndirectCommandsLayoutTokenEXT"; + case StructureType::eWriteIndirectExecutionSetPipelineEXT: return "WriteIndirectExecutionSetPipelineEXT"; + case StructureType::eWriteIndirectExecutionSetShaderEXT: return "WriteIndirectExecutionSetShaderEXT"; + case StructureType::eIndirectExecutionSetPipelineInfoEXT: return "IndirectExecutionSetPipelineInfoEXT"; + case StructureType::eIndirectExecutionSetShaderInfoEXT: return "IndirectExecutionSetShaderInfoEXT"; + case StructureType::eIndirectExecutionSetShaderLayoutInfoEXT: return "IndirectExecutionSetShaderLayoutInfoEXT"; + case StructureType::eGeneratedCommandsPipelineInfoEXT: return "GeneratedCommandsPipelineInfoEXT"; + case StructureType::eGeneratedCommandsShaderInfoEXT: return "GeneratedCommandsShaderInfoEXT"; + case StructureType::ePhysicalDeviceMaintenance8FeaturesKHR: return "PhysicalDeviceMaintenance8FeaturesKHR"; + case StructureType::eMemoryBarrierAccessFlags3KHR: return "MemoryBarrierAccessFlags3KHR"; + case StructureType::ePhysicalDeviceImageAlignmentControlFeaturesMESA: return "PhysicalDeviceImageAlignmentControlFeaturesMESA"; + case StructureType::ePhysicalDeviceImageAlignmentControlPropertiesMESA: return "PhysicalDeviceImageAlignmentControlPropertiesMESA"; + case StructureType::eImageAlignmentControlCreateInfoMESA: return "ImageAlignmentControlCreateInfoMESA"; + case StructureType::ePhysicalDeviceDepthClampControlFeaturesEXT: return "PhysicalDeviceDepthClampControlFeaturesEXT"; + case StructureType::ePipelineViewportDepthClampControlCreateInfoEXT: return "PipelineViewportDepthClampControlCreateInfoEXT"; + case StructureType::ePhysicalDeviceVideoMaintenance2FeaturesKHR: return "PhysicalDeviceVideoMaintenance2FeaturesKHR"; + case StructureType::eVideoDecodeH264InlineSessionParametersInfoKHR: return "VideoDecodeH264InlineSessionParametersInfoKHR"; + case StructureType::eVideoDecodeH265InlineSessionParametersInfoKHR: return "VideoDecodeH265InlineSessionParametersInfoKHR"; + case StructureType::eVideoDecodeAv1InlineSessionParametersInfoKHR: return "VideoDecodeAv1InlineSessionParametersInfoKHR"; + case StructureType::ePhysicalDeviceHdrVividFeaturesHUAWEI: return "PhysicalDeviceHdrVividFeaturesHUAWEI"; + case StructureType::eHdrVividDynamicMetadataHUAWEI: return "HdrVividDynamicMetadataHUAWEI"; + case StructureType::ePhysicalDeviceCooperativeMatrix2FeaturesNV: return "PhysicalDeviceCooperativeMatrix2FeaturesNV"; + case StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV: return "CooperativeMatrixFlexibleDimensionsPropertiesNV"; + case StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV: return "PhysicalDeviceCooperativeMatrix2PropertiesNV"; + case StructureType::ePhysicalDevicePipelineOpacityMicromapFeaturesARM: return "PhysicalDevicePipelineOpacityMicromapFeaturesARM"; +#if defined( 
VK_USE_PLATFORM_METAL_EXT ) + case StructureType::eImportMemoryMetalHandleInfoEXT: return "ImportMemoryMetalHandleInfoEXT"; + case StructureType::eMemoryMetalHandlePropertiesEXT: return "MemoryMetalHandlePropertiesEXT"; + case StructureType::eMemoryGetMetalHandleInfoEXT: return "MemoryGetMetalHandleInfoEXT"; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + case StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesKHR: return "PhysicalDeviceDepthClampZeroOneFeaturesKHR"; + case StructureType::ePhysicalDeviceVertexAttributeRobustnessFeaturesEXT: return "PhysicalDeviceVertexAttributeRobustnessFeaturesEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -3871,10 +4999,8 @@ namespace VULKAN_HPP_NAMESPACE case ObjectType::eDisplayKHR: return "DisplayKHR"; case ObjectType::eDisplayModeKHR: return "DisplayModeKHR"; case ObjectType::eDebugReportCallbackEXT: return "DebugReportCallbackEXT"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) case ObjectType::eVideoSessionKHR: return "VideoSessionKHR"; case ObjectType::eVideoSessionParametersKHR: return "VideoSessionParametersKHR"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ case ObjectType::eCuModuleNVX: return "CuModuleNVX"; case ObjectType::eCuFunctionNVX: return "CuFunctionNVX"; case ObjectType::eDebugUtilsMessengerEXT: return "DebugUtilsMessengerEXT"; @@ -3884,9 +5010,19 @@ namespace VULKAN_HPP_NAMESPACE case ObjectType::ePerformanceConfigurationINTEL: return "PerformanceConfigurationINTEL"; case ObjectType::eDeferredOperationKHR: return "DeferredOperationKHR"; case ObjectType::eIndirectCommandsLayoutNV: return "IndirectCommandsLayoutNV"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case ObjectType::eCudaModuleNV: return "CudaModuleNV"; + case ObjectType::eCudaFunctionNV: return "CudaFunctionNV"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) case ObjectType::eBufferCollectionFUCHSIA: return "BufferCollectionFUCHSIA"; #endif /*VK_USE_PLATFORM_FUCHSIA*/ + case ObjectType::eMicromapEXT: return "MicromapEXT"; + case ObjectType::eOpticalFlowSessionNV: return "OpticalFlowSessionNV"; + case ObjectType::eShaderEXT: return "ShaderEXT"; + case ObjectType::ePipelineBinaryKHR: return "PipelineBinaryKHR"; + case ObjectType::eIndirectCommandsLayoutEXT: return "IndirectCommandsLayoutEXT"; + case ObjectType::eIndirectExecutionSetEXT: return "IndirectExecutionSetEXT"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -3895,12 +5031,14 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { + case VendorId::eKhronos: return "Khronos"; case VendorId::eVIV: return "VIV"; case VendorId::eVSI: return "VSI"; case VendorId::eKazan: return "Kazan"; case VendorId::eCodeplay: return "Codeplay"; case VendorId::eMESA: return "MESA"; case VendorId::ePocl: return "Pocl"; + case VendorId::eMobileye: return "Mobileye"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -4148,6 +5286,8 @@ namespace VULKAN_HPP_NAMESPACE case Format::eAstc10x10SfloatBlock: return "Astc10x10SfloatBlock"; case Format::eAstc12x10SfloatBlock: return "Astc12x10SfloatBlock"; case Format::eAstc12x12SfloatBlock: return "Astc12x12SfloatBlock"; + case Format::eA1B5G5R5UnormPack16: return "A1B5G5R5UnormPack16"; + case Format::eA8Unorm: return "A8Unorm"; case Format::ePvrtc12BppUnormBlockIMG: return "Pvrtc12BppUnormBlockIMG"; case Format::ePvrtc14BppUnormBlockIMG: return "Pvrtc14BppUnormBlockIMG"; case Format::ePvrtc22BppUnormBlockIMG: 
return "Pvrtc22BppUnormBlockIMG"; @@ -4156,6 +5296,7 @@ namespace VULKAN_HPP_NAMESPACE case Format::ePvrtc14BppSrgbBlockIMG: return "Pvrtc14BppSrgbBlockIMG"; case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG"; case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG"; + case Format::eR16G16Sfixed5NV: return "R16G16Sfixed5NV"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -4188,18 +5329,14 @@ namespace VULKAN_HPP_NAMESPACE case FormatFeatureFlagBits::eDisjoint: return "Disjoint"; case FormatFeatureFlagBits::eCositedChromaSamples: return "CositedChromaSamples"; case FormatFeatureFlagBits::eSampledImageFilterMinmax: return "SampledImageFilterMinmax"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) case FormatFeatureFlagBits::eVideoDecodeOutputKHR: return "VideoDecodeOutputKHR"; case FormatFeatureFlagBits::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ case FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR: return "AccelerationStructureVertexBufferKHR"; case FormatFeatureFlagBits::eSampledImageFilterCubicEXT: return "SampledImageFilterCubicEXT"; case FormatFeatureFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT"; case FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) case FormatFeatureFlagBits::eVideoEncodeInputKHR: return "VideoEncodeInputKHR"; case FormatFeatureFlagBits::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -4223,9 +5360,11 @@ namespace VULKAN_HPP_NAMESPACE case ImageCreateFlagBits::eCornerSampledNV: return "CornerSampledNV"; case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT: return "SampleLocationsCompatibleDepthEXT"; case ImageCreateFlagBits::eSubsampledEXT: return "SubsampledEXT"; + case ImageCreateFlagBits::eDescriptorBufferCaptureReplayEXT: return "DescriptorBufferCaptureReplayEXT"; case ImageCreateFlagBits::eMultisampledRenderToSingleSampledEXT: return "MultisampledRenderToSingleSampledEXT"; case ImageCreateFlagBits::e2DViewCompatibleEXT: return "2DViewCompatibleEXT"; case ImageCreateFlagBits::eFragmentDensityMapOffsetQCOM: return "FragmentDensityMapOffsetQCOM"; + case ImageCreateFlagBits::eVideoProfileIndependentKHR: return "VideoProfileIndependentKHR"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -4264,22 +5403,21 @@ namespace VULKAN_HPP_NAMESPACE case ImageUsageFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment"; case ImageUsageFlagBits::eTransientAttachment: return "TransientAttachment"; case ImageUsageFlagBits::eInputAttachment: return "InputAttachment"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case ImageUsageFlagBits::eHostTransfer: return "HostTransfer"; case ImageUsageFlagBits::eVideoDecodeDstKHR: return "VideoDecodeDstKHR"; case ImageUsageFlagBits::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR"; case ImageUsageFlagBits::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ case ImageUsageFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT"; case ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) case ImageUsageFlagBits::eVideoEncodeDstKHR: return "VideoEncodeDstKHR"; case 
ImageUsageFlagBits::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR"; case ImageUsageFlagBits::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ case ImageUsageFlagBits::eAttachmentFeedbackLoopEXT: return "AttachmentFeedbackLoopEXT"; case ImageUsageFlagBits::eInvocationMaskHUAWEI: return "InvocationMaskHUAWEI"; case ImageUsageFlagBits::eSampleWeightQCOM: return "SampleWeightQCOM"; case ImageUsageFlagBits::eSampleBlockMatchQCOM: return "SampleBlockMatchQCOM"; + case ImageUsageFlagBits::eVideoEncodeQuantizationDeltaMapKHR: return "VideoEncodeQuantizationDeltaMapKHR"; + case ImageUsageFlagBits::eVideoEncodeEmphasisMapKHR: return "VideoEncodeEmphasisMapKHR"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -4351,10 +5489,9 @@ namespace VULKAN_HPP_NAMESPACE case QueueFlagBits::eTransfer: return "Transfer"; case QueueFlagBits::eSparseBinding: return "SparseBinding"; case QueueFlagBits::eProtected: return "Protected"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) case QueueFlagBits::eVideoDecodeKHR: return "VideoDecodeKHR"; case QueueFlagBits::eVideoEncodeKHR: return "VideoEncodeKHR"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case QueueFlagBits::eOpticalFlowNV: return "OpticalFlowNV"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -4392,6 +5529,15 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } + VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlagBits value ) + { + switch ( value ) + { + case DeviceQueueCreateFlagBits::eProtected: return "Protected"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits value ) { switch ( value ) @@ -4427,9 +5573,13 @@ namespace VULKAN_HPP_NAMESPACE } } - VULKAN_HPP_INLINE std::string to_string( MemoryMapFlagBits ) + VULKAN_HPP_INLINE std::string to_string( MemoryMapFlagBits value ) { - return "(void)"; + switch ( value ) + { + case MemoryMapFlagBits::ePlacedEXT: return "PlacedEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } } VULKAN_HPP_INLINE std::string to_string( ImageAspectFlagBits value ) @@ -4512,6 +5662,7 @@ namespace VULKAN_HPP_NAMESPACE case QueryPipelineStatisticFlagBits::eComputeShaderInvocations: return "ComputeShaderInvocations"; case QueryPipelineStatisticFlagBits::eTaskShaderInvocationsEXT: return "TaskShaderInvocationsEXT"; case QueryPipelineStatisticFlagBits::eMeshShaderInvocationsEXT: return "MeshShaderInvocationsEXT"; + case QueryPipelineStatisticFlagBits::eClusterCullingShaderInvocationsHUAWEI: return "ClusterCullingShaderInvocationsHUAWEI"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -4524,9 +5675,7 @@ namespace VULKAN_HPP_NAMESPACE case QueryResultFlagBits::eWait: return "Wait"; case QueryResultFlagBits::eWithAvailability: return "WithAvailability"; case QueryResultFlagBits::ePartial: return "Partial"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) case QueryResultFlagBits::eWithStatusKHR: return "WithStatusKHR"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -4538,22 +5687,20 @@ namespace VULKAN_HPP_NAMESPACE case QueryType::eOcclusion: return "Occlusion"; case QueryType::ePipelineStatistics: return "PipelineStatistics"; case QueryType::eTimestamp: return 
"Timestamp"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) case QueryType::eResultStatusOnlyKHR: return "ResultStatusOnlyKHR"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ case QueryType::eTransformFeedbackStreamEXT: return "TransformFeedbackStreamEXT"; case QueryType::ePerformanceQueryKHR: return "PerformanceQueryKHR"; case QueryType::eAccelerationStructureCompactedSizeKHR: return "AccelerationStructureCompactedSizeKHR"; case QueryType::eAccelerationStructureSerializationSizeKHR: return "AccelerationStructureSerializationSizeKHR"; case QueryType::eAccelerationStructureCompactedSizeNV: return "AccelerationStructureCompactedSizeNV"; case QueryType::ePerformanceQueryINTEL: return "PerformanceQueryINTEL"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - case QueryType::eVideoEncodeBitstreamBufferRangeKHR: return "VideoEncodeBitstreamBufferRangeKHR"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case QueryType::eVideoEncodeFeedbackKHR: return "VideoEncodeFeedbackKHR"; case QueryType::eMeshPrimitivesGeneratedEXT: return "MeshPrimitivesGeneratedEXT"; case QueryType::ePrimitivesGeneratedEXT: return "PrimitivesGeneratedEXT"; case QueryType::eAccelerationStructureSerializationBottomLevelPointersKHR: return "AccelerationStructureSerializationBottomLevelPointersKHR"; case QueryType::eAccelerationStructureSizeKHR: return "AccelerationStructureSizeKHR"; + case QueryType::eMicromapSerializationSizeEXT: return "MicromapSerializationSizeEXT"; + case QueryType::eMicromapCompactedSizeEXT: return "MicromapCompactedSizeEXT"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -4572,6 +5719,8 @@ namespace VULKAN_HPP_NAMESPACE case BufferCreateFlagBits::eSparseAliased: return "SparseAliased"; case BufferCreateFlagBits::eProtected: return "Protected"; case BufferCreateFlagBits::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay"; + case BufferCreateFlagBits::eDescriptorBufferCaptureReplayEXT: return "DescriptorBufferCaptureReplayEXT"; + case BufferCreateFlagBits::eVideoProfileIndependentKHR: return "VideoProfileIndependentKHR"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -4590,20 +5739,24 @@ namespace VULKAN_HPP_NAMESPACE case BufferUsageFlagBits::eVertexBuffer: return "VertexBuffer"; case BufferUsageFlagBits::eIndirectBuffer: return "IndirectBuffer"; case BufferUsageFlagBits::eShaderDeviceAddress: return "ShaderDeviceAddress"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) case BufferUsageFlagBits::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR"; case BufferUsageFlagBits::eVideoDecodeDstKHR: return "VideoDecodeDstKHR"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ case BufferUsageFlagBits::eTransformFeedbackBufferEXT: return "TransformFeedbackBufferEXT"; case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT: return "TransformFeedbackCounterBufferEXT"; case BufferUsageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case BufferUsageFlagBits::eExecutionGraphScratchAMDX: return "ExecutionGraphScratchAMDX"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ case BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR: return "AccelerationStructureBuildInputReadOnlyKHR"; case BufferUsageFlagBits::eAccelerationStructureStorageKHR: return "AccelerationStructureStorageKHR"; case BufferUsageFlagBits::eShaderBindingTableKHR: return "ShaderBindingTableKHR"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) case BufferUsageFlagBits::eVideoEncodeDstKHR: return 
"VideoEncodeDstKHR"; case BufferUsageFlagBits::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case BufferUsageFlagBits::eSamplerDescriptorBufferEXT: return "SamplerDescriptorBufferEXT"; + case BufferUsageFlagBits::eResourceDescriptorBufferEXT: return "ResourceDescriptorBufferEXT"; + case BufferUsageFlagBits::ePushDescriptorsDescriptorBufferEXT: return "PushDescriptorsDescriptorBufferEXT"; + case BufferUsageFlagBits::eMicromapBuildInputReadOnlyEXT: return "MicromapBuildInputReadOnlyEXT"; + case BufferUsageFlagBits::eMicromapStorageEXT: return "MicromapStorageEXT"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -4644,21 +5797,19 @@ namespace VULKAN_HPP_NAMESPACE case ImageLayout::eStencilReadOnlyOptimal: return "StencilReadOnlyOptimal"; case ImageLayout::eReadOnlyOptimal: return "ReadOnlyOptimal"; case ImageLayout::eAttachmentOptimal: return "AttachmentOptimal"; + case ImageLayout::eRenderingLocalRead: return "RenderingLocalRead"; case ImageLayout::ePresentSrcKHR: return "PresentSrcKHR"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) case ImageLayout::eVideoDecodeDstKHR: return "VideoDecodeDstKHR"; case ImageLayout::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR"; case ImageLayout::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ case ImageLayout::eSharedPresentKHR: return "SharedPresentKHR"; case ImageLayout::eFragmentDensityMapOptimalEXT: return "FragmentDensityMapOptimalEXT"; case ImageLayout::eFragmentShadingRateAttachmentOptimalKHR: return "FragmentShadingRateAttachmentOptimalKHR"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) case ImageLayout::eVideoEncodeDstKHR: return "VideoEncodeDstKHR"; case ImageLayout::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR"; case ImageLayout::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ case ImageLayout::eAttachmentFeedbackLoopOptimalEXT: return "AttachmentFeedbackLoopOptimalEXT"; + case ImageLayout::eVideoEncodeQuantizationMapKHR: return "VideoEncodeQuantizationMapKHR"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -4683,6 +5834,7 @@ namespace VULKAN_HPP_NAMESPACE switch ( value ) { case ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT: return "FragmentDensityMapDynamicEXT"; + case ImageViewCreateFlagBits::eDescriptorBufferCaptureReplayEXT: return "DescriptorBufferCaptureReplayEXT"; case ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT: return "FragmentDensityMapDeferredEXT"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } @@ -4708,6 +5860,16 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } + VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits value ) + { + switch ( value ) + { + case PipelineCacheCreateFlagBits::eExternallySynchronized: return "ExternallySynchronized"; + case PipelineCacheCreateFlagBits::eInternallySynchronizedMergeKHR: return "InternallySynchronizedMergeKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + VULKAN_HPP_INLINE std::string to_string( BlendFactor value ) { switch ( value ) @@ -4862,19 +6024,55 @@ namespace VULKAN_HPP_NAMESPACE case DynamicState::eRasterizerDiscardEnable: return "RasterizerDiscardEnable"; case DynamicState::eDepthBiasEnable: return "DepthBiasEnable"; case DynamicState::ePrimitiveRestartEnable: return "PrimitiveRestartEnable"; + case 
DynamicState::eLineStipple: return "LineStipple"; case DynamicState::eViewportWScalingNV: return "ViewportWScalingNV"; case DynamicState::eDiscardRectangleEXT: return "DiscardRectangleEXT"; + case DynamicState::eDiscardRectangleEnableEXT: return "DiscardRectangleEnableEXT"; + case DynamicState::eDiscardRectangleModeEXT: return "DiscardRectangleModeEXT"; case DynamicState::eSampleLocationsEXT: return "SampleLocationsEXT"; case DynamicState::eRayTracingPipelineStackSizeKHR: return "RayTracingPipelineStackSizeKHR"; case DynamicState::eViewportShadingRatePaletteNV: return "ViewportShadingRatePaletteNV"; case DynamicState::eViewportCoarseSampleOrderNV: return "ViewportCoarseSampleOrderNV"; + case DynamicState::eExclusiveScissorEnableNV: return "ExclusiveScissorEnableNV"; case DynamicState::eExclusiveScissorNV: return "ExclusiveScissorNV"; case DynamicState::eFragmentShadingRateKHR: return "FragmentShadingRateKHR"; - case DynamicState::eLineStippleEXT: return "LineStippleEXT"; case DynamicState::eVertexInputEXT: return "VertexInputEXT"; case DynamicState::ePatchControlPointsEXT: return "PatchControlPointsEXT"; case DynamicState::eLogicOpEXT: return "LogicOpEXT"; case DynamicState::eColorWriteEnableEXT: return "ColorWriteEnableEXT"; + case DynamicState::eDepthClampEnableEXT: return "DepthClampEnableEXT"; + case DynamicState::ePolygonModeEXT: return "PolygonModeEXT"; + case DynamicState::eRasterizationSamplesEXT: return "RasterizationSamplesEXT"; + case DynamicState::eSampleMaskEXT: return "SampleMaskEXT"; + case DynamicState::eAlphaToCoverageEnableEXT: return "AlphaToCoverageEnableEXT"; + case DynamicState::eAlphaToOneEnableEXT: return "AlphaToOneEnableEXT"; + case DynamicState::eLogicOpEnableEXT: return "LogicOpEnableEXT"; + case DynamicState::eColorBlendEnableEXT: return "ColorBlendEnableEXT"; + case DynamicState::eColorBlendEquationEXT: return "ColorBlendEquationEXT"; + case DynamicState::eColorWriteMaskEXT: return "ColorWriteMaskEXT"; + case DynamicState::eTessellationDomainOriginEXT: return "TessellationDomainOriginEXT"; + case DynamicState::eRasterizationStreamEXT: return "RasterizationStreamEXT"; + case DynamicState::eConservativeRasterizationModeEXT: return "ConservativeRasterizationModeEXT"; + case DynamicState::eExtraPrimitiveOverestimationSizeEXT: return "ExtraPrimitiveOverestimationSizeEXT"; + case DynamicState::eDepthClipEnableEXT: return "DepthClipEnableEXT"; + case DynamicState::eSampleLocationsEnableEXT: return "SampleLocationsEnableEXT"; + case DynamicState::eColorBlendAdvancedEXT: return "ColorBlendAdvancedEXT"; + case DynamicState::eProvokingVertexModeEXT: return "ProvokingVertexModeEXT"; + case DynamicState::eLineRasterizationModeEXT: return "LineRasterizationModeEXT"; + case DynamicState::eLineStippleEnableEXT: return "LineStippleEnableEXT"; + case DynamicState::eDepthClipNegativeOneToOneEXT: return "DepthClipNegativeOneToOneEXT"; + case DynamicState::eViewportWScalingEnableNV: return "ViewportWScalingEnableNV"; + case DynamicState::eViewportSwizzleNV: return "ViewportSwizzleNV"; + case DynamicState::eCoverageToColorEnableNV: return "CoverageToColorEnableNV"; + case DynamicState::eCoverageToColorLocationNV: return "CoverageToColorLocationNV"; + case DynamicState::eCoverageModulationModeNV: return "CoverageModulationModeNV"; + case DynamicState::eCoverageModulationTableEnableNV: return "CoverageModulationTableEnableNV"; + case DynamicState::eCoverageModulationTableNV: return "CoverageModulationTableNV"; + case DynamicState::eShadingRateImageEnableNV: return 
"ShadingRateImageEnableNV"; + case DynamicState::eRepresentativeFragmentTestEnableNV: return "RepresentativeFragmentTestEnableNV"; + case DynamicState::eCoverageReductionModeNV: return "CoverageReductionModeNV"; + case DynamicState::eAttachmentFeedbackLoopEnableEXT: return "AttachmentFeedbackLoopEnableEXT"; + case DynamicState::eDepthClampRangeEXT: return "DepthClampRangeEXT"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -4924,8 +6122,8 @@ namespace VULKAN_HPP_NAMESPACE case PipelineCreateFlagBits::eDispatchBase: return "DispatchBase"; case PipelineCreateFlagBits::eFailOnPipelineCompileRequired: return "FailOnPipelineCompileRequired"; case PipelineCreateFlagBits::eEarlyReturnOnFailure: return "EarlyReturnOnFailure"; - case PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR: return "RenderingFragmentShadingRateAttachmentKHR"; - case PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT: return "RenderingFragmentDensityMapAttachmentEXT"; + case PipelineCreateFlagBits::eNoProtectedAccess: return "NoProtectedAccess"; + case PipelineCreateFlagBits::eProtectedAccessOnly: return "ProtectedAccessOnly"; case PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR: return "RayTracingNoNullAnyHitShadersKHR"; case PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR: return "RayTracingNoNullClosestHitShadersKHR"; case PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR: return "RayTracingNoNullMissShadersKHR"; @@ -4934,15 +6132,22 @@ namespace VULKAN_HPP_NAMESPACE case PipelineCreateFlagBits::eRayTracingSkipAabbsKHR: return "RayTracingSkipAabbsKHR"; case PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR: return "RayTracingShaderGroupHandleCaptureReplayKHR"; case PipelineCreateFlagBits::eDeferCompileNV: return "DeferCompileNV"; + case PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT: return "RenderingFragmentDensityMapAttachmentEXT"; + case PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR: return "RenderingFragmentShadingRateAttachmentKHR"; case PipelineCreateFlagBits::eCaptureStatisticsKHR: return "CaptureStatisticsKHR"; case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR: return "CaptureInternalRepresentationsKHR"; case PipelineCreateFlagBits::eIndirectBindableNV: return "IndirectBindableNV"; case PipelineCreateFlagBits::eLibraryKHR: return "LibraryKHR"; + case PipelineCreateFlagBits::eDescriptorBufferEXT: return "DescriptorBufferEXT"; case PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT: return "RetainLinkTimeOptimizationInfoEXT"; case PipelineCreateFlagBits::eLinkTimeOptimizationEXT: return "LinkTimeOptimizationEXT"; case PipelineCreateFlagBits::eRayTracingAllowMotionNV: return "RayTracingAllowMotionNV"; case PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT: return "ColorAttachmentFeedbackLoopEXT"; case PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT: return "DepthStencilAttachmentFeedbackLoopEXT"; + case PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT: return "RayTracingOpacityMicromapEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case PipelineCreateFlagBits::eRayTracingDisplacementMicromapNV: return "RayTracingDisplacementMicromapNV"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5009,6 +6214,7 @@ namespace VULKAN_HPP_NAMESPACE case ShaderStageFlagBits::eTaskEXT: return "TaskEXT"; case 
ShaderStageFlagBits::eMeshEXT: return "MeshEXT"; case ShaderStageFlagBits::eSubpassShadingHUAWEI: return "SubpassShadingHUAWEI"; + case ShaderStageFlagBits::eClusterCullingHUAWEI: return "ClusterCullingHUAWEI"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5039,6 +6245,25 @@ namespace VULKAN_HPP_NAMESPACE } } + VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlagBits value ) + { + switch ( value ) + { + case PipelineColorBlendStateCreateFlagBits::eRasterizationOrderAttachmentAccessEXT: return "RasterizationOrderAttachmentAccessEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlagBits value ) + { + switch ( value ) + { + case PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentDepthAccessEXT: return "RasterizationOrderAttachmentDepthAccessEXT"; + case PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentStencilAccessEXT: return "RasterizationOrderAttachmentStencilAccessEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlagBits ) { return "(void)"; @@ -5049,6 +6274,15 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } + VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlagBits value ) + { + switch ( value ) + { + case PipelineLayoutCreateFlagBits::eIndependentSetsEXT: return "IndependentSetsEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlagBits ) { return "(void)"; @@ -5120,6 +6354,7 @@ namespace VULKAN_HPP_NAMESPACE { case SamplerCreateFlagBits::eSubsampledEXT: return "SubsampledEXT"; case SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT: return "SubsampledCoarseReconstructionEXT"; + case SamplerCreateFlagBits::eDescriptorBufferCaptureReplayEXT: return "DescriptorBufferCaptureReplayEXT"; case SamplerCreateFlagBits::eNonSeamlessCubeMapEXT: return "NonSeamlessCubeMapEXT"; case SamplerCreateFlagBits::eImageProcessingQCOM: return "ImageProcessingQCOM"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; @@ -5143,6 +6378,8 @@ namespace VULKAN_HPP_NAMESPACE case DescriptorPoolCreateFlagBits::eFreeDescriptorSet: return "FreeDescriptorSet"; case DescriptorPoolCreateFlagBits::eUpdateAfterBind: return "UpdateAfterBind"; case DescriptorPoolCreateFlagBits::eHostOnlyEXT: return "HostOnlyEXT"; + case DescriptorPoolCreateFlagBits::eAllowOverallocationSetsNV: return "AllowOverallocationSetsNV"; + case DescriptorPoolCreateFlagBits::eAllowOverallocationPoolsNV: return "AllowOverallocationPoolsNV"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5152,8 +6389,12 @@ namespace VULKAN_HPP_NAMESPACE switch ( value ) { case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool: return "UpdateAfterBindPool"; - case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR: return "PushDescriptorKHR"; + case DescriptorSetLayoutCreateFlagBits::ePushDescriptor: return "PushDescriptor"; + case DescriptorSetLayoutCreateFlagBits::eDescriptorBufferEXT: return "DescriptorBufferEXT"; + case DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT: return 
"EmbeddedImmutableSamplersEXT"; + case DescriptorSetLayoutCreateFlagBits::eIndirectBindableNV: return "IndirectBindableNV"; case DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT: return "HostOnlyPoolEXT"; + case DescriptorSetLayoutCreateFlagBits::ePerStageNV: return "PerStageNV"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5179,6 +6420,7 @@ namespace VULKAN_HPP_NAMESPACE case DescriptorType::eSampleWeightImageQCOM: return "SampleWeightImageQCOM"; case DescriptorType::eBlockMatchImageQCOM: return "BlockMatchImageQCOM"; case DescriptorType::eMutableEXT: return "MutableEXT"; + case DescriptorType::ePartitionedAccelerationStructureNV: return "PartitionedAccelerationStructureNV"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5241,7 +6483,7 @@ namespace VULKAN_HPP_NAMESPACE case AttachmentLoadOp::eLoad: return "Load"; case AttachmentLoadOp::eClear: return "Clear"; case AttachmentLoadOp::eDontCare: return "DontCare"; - case AttachmentLoadOp::eNoneEXT: return "NoneEXT"; + case AttachmentLoadOp::eNone: return "None"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5265,6 +6507,7 @@ namespace VULKAN_HPP_NAMESPACE case DependencyFlagBits::eDeviceGroup: return "DeviceGroup"; case DependencyFlagBits::eViewLocal: return "ViewLocal"; case DependencyFlagBits::eFeedbackLoopEXT: return "FeedbackLoopEXT"; + case DependencyFlagBits::eQueueFamilyOwnershipTransferUseAllStagesKHR: return "QueueFamilyOwnershipTransferUseAllStagesKHR"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5284,6 +6527,9 @@ namespace VULKAN_HPP_NAMESPACE { case PipelineBindPoint::eGraphics: return "Graphics"; case PipelineBindPoint::eCompute: return "Compute"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case PipelineBindPoint::eExecutionGraphAMDX: return "ExecutionGraphAMDX"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ case PipelineBindPoint::eRayTracingKHR: return "RayTracingKHR"; case PipelineBindPoint::eSubpassShadingHUAWEI: return "SubpassShadingHUAWEI"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; @@ -5380,8 +6626,8 @@ namespace VULKAN_HPP_NAMESPACE { case IndexType::eUint16: return "Uint16"; case IndexType::eUint32: return "Uint32"; + case IndexType::eUint8: return "Uint8"; case IndexType::eNoneKHR: return "NoneKHR"; - case IndexType::eUint8EXT: return "Uint8EXT"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5403,6 +6649,7 @@ namespace VULKAN_HPP_NAMESPACE { case SubpassContents::eInline: return "Inline"; case SubpassContents::eSecondaryCommandBuffers: return "SecondaryCommandBuffers"; + case SubpassContents::eInlineAndSecondaryCommandBuffersKHR: return "InlineAndSecondaryCommandBuffersKHR"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5421,6 +6668,8 @@ namespace VULKAN_HPP_NAMESPACE case SubgroupFeatureFlagBits::eShuffleRelative: return "ShuffleRelative"; case SubgroupFeatureFlagBits::eClustered: return "Clustered"; case SubgroupFeatureFlagBits::eQuad: return "Quad"; + case SubgroupFeatureFlagBits::eRotate: return "Rotate"; + case SubgroupFeatureFlagBits::eRotateClustered: return "RotateClustered"; case SubgroupFeatureFlagBits::ePartitionedNV: return "PartitionedNV"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( 
static_cast( value ) ) + " )"; } @@ -5474,15 +6723,6 @@ namespace VULKAN_HPP_NAMESPACE } } - VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlagBits value ) - { - switch ( value ) - { - case DeviceQueueCreateFlagBits::eProtected: return "Protected"; - default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; - } - } - VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrModelConversion value ) { switch ( value ) @@ -5521,7 +6761,7 @@ namespace VULKAN_HPP_NAMESPACE switch ( value ) { case DescriptorUpdateTemplateType::eDescriptorSet: return "DescriptorSet"; - case DescriptorUpdateTemplateType::ePushDescriptorsKHR: return "PushDescriptorsKHR"; + case DescriptorUpdateTemplateType::ePushDescriptors: return "PushDescriptors"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5552,6 +6792,14 @@ namespace VULKAN_HPP_NAMESPACE case ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA: return "ZirconVmoFUCHSIA"; #endif /*VK_USE_PLATFORM_FUCHSIA*/ case ExternalMemoryHandleTypeFlagBits::eRdmaAddressNV: return "RdmaAddressNV"; +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + case ExternalMemoryHandleTypeFlagBits::eScreenBufferQNX: return "ScreenBufferQNX"; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ +#if defined( VK_USE_PLATFORM_METAL_EXT ) + case ExternalMemoryHandleTypeFlagBits::eMtlbufferEXT: return "MtlbufferEXT"; + case ExternalMemoryHandleTypeFlagBits::eMtltextureEXT: return "MtltextureEXT"; + case ExternalMemoryHandleTypeFlagBits::eMtlheapEXT: return "MtlheapEXT"; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5662,6 +6910,10 @@ namespace VULKAN_HPP_NAMESPACE case DriverId::eSamsungProprietary: return "SamsungProprietary"; case DriverId::eMesaVenus: return "MesaVenus"; case DriverId::eMesaDozen: return "MesaDozen"; + case DriverId::eMesaNvk: return "MesaNvk"; + case DriverId::eImaginationOpenSourceMESA: return "ImaginationOpenSourceMESA"; + case DriverId::eMesaHoneykrisp: return "MesaHoneykrisp"; + case DriverId::eVulkanScEmulationOnVulkan: return "VulkanScEmulationOnVulkan"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5698,6 +6950,9 @@ namespace VULKAN_HPP_NAMESPACE case ResolveModeFlagBits::eAverage: return "Average"; case ResolveModeFlagBits::eMin: return "Min"; case ResolveModeFlagBits::eMax: return "Max"; +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + case ResolveModeFlagBits::eExternalFormatDownsampleANDROID: return "ExternalFormatDownsampleANDROID"; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5709,6 +6964,7 @@ namespace VULKAN_HPP_NAMESPACE case SamplerReductionMode::eWeightedAverage: return "WeightedAverage"; case SamplerReductionMode::eMin: return "Min"; case SamplerReductionMode::eMax: return "Max"; + case SamplerReductionMode::eWeightedAverageRangeclampQCOM: return "WeightedAverageRangeclampQCOM"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5794,10 +7050,8 @@ namespace VULKAN_HPP_NAMESPACE case PipelineStageFlagBits2::eIndexInput: return "IndexInput"; case PipelineStageFlagBits2::eVertexAttributeInput: return "VertexAttributeInput"; case PipelineStageFlagBits2::ePreRasterizationShaders: return "PreRasterizationShaders"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) case 
PipelineStageFlagBits2::eVideoDecodeKHR: return "VideoDecodeKHR"; case PipelineStageFlagBits2::eVideoEncodeKHR: return "VideoEncodeKHR"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ case PipelineStageFlagBits2::eTransformFeedbackEXT: return "TransformFeedbackEXT"; case PipelineStageFlagBits2::eConditionalRenderingEXT: return "ConditionalRenderingEXT"; case PipelineStageFlagBits2::eCommandPreprocessNV: return "CommandPreprocessNV"; @@ -5807,9 +7061,13 @@ namespace VULKAN_HPP_NAMESPACE case PipelineStageFlagBits2::eFragmentDensityProcessEXT: return "FragmentDensityProcessEXT"; case PipelineStageFlagBits2::eTaskShaderEXT: return "TaskShaderEXT"; case PipelineStageFlagBits2::eMeshShaderEXT: return "MeshShaderEXT"; - case PipelineStageFlagBits2::eSubpassShadingHUAWEI: return "SubpassShadingHUAWEI"; + case PipelineStageFlagBits2::eSubpassShaderHUAWEI: return "SubpassShaderHUAWEI"; case PipelineStageFlagBits2::eInvocationMaskHUAWEI: return "InvocationMaskHUAWEI"; case PipelineStageFlagBits2::eAccelerationStructureCopyKHR: return "AccelerationStructureCopyKHR"; + case PipelineStageFlagBits2::eMicromapBuildEXT: return "MicromapBuildEXT"; + case PipelineStageFlagBits2::eClusterCullingShaderHUAWEI: return "ClusterCullingShaderHUAWEI"; + case PipelineStageFlagBits2::eOpticalFlowNV: return "OpticalFlowNV"; + case PipelineStageFlagBits2::eConvertCooperativeVectorMatrixNV: return "ConvertCooperativeVectorMatrixNV"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5839,12 +7097,10 @@ namespace VULKAN_HPP_NAMESPACE case AccessFlagBits2::eShaderSampledRead: return "ShaderSampledRead"; case AccessFlagBits2::eShaderStorageRead: return "ShaderStorageRead"; case AccessFlagBits2::eShaderStorageWrite: return "ShaderStorageWrite"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) case AccessFlagBits2::eVideoDecodeReadKHR: return "VideoDecodeReadKHR"; case AccessFlagBits2::eVideoDecodeWriteKHR: return "VideoDecodeWriteKHR"; case AccessFlagBits2::eVideoEncodeReadKHR: return "VideoEncodeReadKHR"; case AccessFlagBits2::eVideoEncodeWriteKHR: return "VideoEncodeWriteKHR"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ case AccessFlagBits2::eTransformFeedbackWriteEXT: return "TransformFeedbackWriteEXT"; case AccessFlagBits2::eTransformFeedbackCounterReadEXT: return "TransformFeedbackCounterReadEXT"; case AccessFlagBits2::eTransformFeedbackCounterWriteEXT: return "TransformFeedbackCounterWriteEXT"; @@ -5856,8 +7112,13 @@ namespace VULKAN_HPP_NAMESPACE case AccessFlagBits2::eAccelerationStructureWriteKHR: return "AccelerationStructureWriteKHR"; case AccessFlagBits2::eFragmentDensityMapReadEXT: return "FragmentDensityMapReadEXT"; case AccessFlagBits2::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT"; + case AccessFlagBits2::eDescriptorBufferReadEXT: return "DescriptorBufferReadEXT"; case AccessFlagBits2::eInvocationMaskReadHUAWEI: return "InvocationMaskReadHUAWEI"; case AccessFlagBits2::eShaderBindingTableReadKHR: return "ShaderBindingTableReadKHR"; + case AccessFlagBits2::eMicromapReadEXT: return "MicromapReadEXT"; + case AccessFlagBits2::eMicromapWriteEXT: return "MicromapWriteEXT"; + case AccessFlagBits2::eOpticalFlowReadNV: return "OpticalFlowReadNV"; + case AccessFlagBits2::eOpticalFlowWriteNV: return "OpticalFlowWriteNV"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5879,6 +7140,7 @@ namespace VULKAN_HPP_NAMESPACE case RenderingFlagBits::eSuspending: return "Suspending"; case 
RenderingFlagBits::eResuming: return "Resuming"; case RenderingFlagBits::eEnableLegacyDitheringEXT: return "EnableLegacyDitheringEXT"; + case RenderingFlagBits::eContentsInlineKHR: return "ContentsInlineKHR"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5900,7 +7162,6 @@ namespace VULKAN_HPP_NAMESPACE case FormatFeatureFlagBits2::eBlitSrc: return "BlitSrc"; case FormatFeatureFlagBits2::eBlitDst: return "BlitDst"; case FormatFeatureFlagBits2::eSampledImageFilterLinear: return "SampledImageFilterLinear"; - case FormatFeatureFlagBits2::eSampledImageFilterCubic: return "SampledImageFilterCubic"; case FormatFeatureFlagBits2::eTransferSrc: return "TransferSrc"; case FormatFeatureFlagBits2::eTransferDst: return "TransferDst"; case FormatFeatureFlagBits2::eSampledImageFilterMinmax: return "SampledImageFilterMinmax"; @@ -5915,22 +7176,178 @@ namespace VULKAN_HPP_NAMESPACE case FormatFeatureFlagBits2::eStorageReadWithoutFormat: return "StorageReadWithoutFormat"; case FormatFeatureFlagBits2::eStorageWriteWithoutFormat: return "StorageWriteWithoutFormat"; case FormatFeatureFlagBits2::eSampledImageDepthComparison: return "SampledImageDepthComparison"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case FormatFeatureFlagBits2::eSampledImageFilterCubic: return "SampledImageFilterCubic"; + case FormatFeatureFlagBits2::eHostImageTransfer: return "HostImageTransfer"; case FormatFeatureFlagBits2::eVideoDecodeOutputKHR: return "VideoDecodeOutputKHR"; case FormatFeatureFlagBits2::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ case FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR: return "AccelerationStructureVertexBufferKHR"; case FormatFeatureFlagBits2::eFragmentDensityMapEXT: return "FragmentDensityMapEXT"; case FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) case FormatFeatureFlagBits2::eVideoEncodeInputKHR: return "VideoEncodeInputKHR"; case FormatFeatureFlagBits2::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case FormatFeatureFlagBits2::eAccelerationStructureRadiusBufferNV: return "AccelerationStructureRadiusBufferNV"; case FormatFeatureFlagBits2::eLinearColorAttachmentNV: return "LinearColorAttachmentNV"; case FormatFeatureFlagBits2::eWeightImageQCOM: return "WeightImageQCOM"; case FormatFeatureFlagBits2::eWeightSampledImageQCOM: return "WeightSampledImageQCOM"; case FormatFeatureFlagBits2::eBlockMatchingQCOM: return "BlockMatchingQCOM"; case FormatFeatureFlagBits2::eBoxFilterSampledQCOM: return "BoxFilterSampledQCOM"; + case FormatFeatureFlagBits2::eOpticalFlowImageNV: return "OpticalFlowImageNV"; + case FormatFeatureFlagBits2::eOpticalFlowVectorNV: return "OpticalFlowVectorNV"; + case FormatFeatureFlagBits2::eOpticalFlowCostNV: return "OpticalFlowCostNV"; + case FormatFeatureFlagBits2::eVideoEncodeQuantizationDeltaMapKHR: return "VideoEncodeQuantizationDeltaMapKHR"; + case FormatFeatureFlagBits2::eVideoEncodeEmphasisMapKHR: return "VideoEncodeEmphasisMapKHR"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_VERSION_1_4 === + + VULKAN_HPP_INLINE std::string to_string( QueueGlobalPriority value ) + { + switch ( value ) + { + case QueueGlobalPriority::eLow: return "Low"; + case QueueGlobalPriority::eMedium: return "Medium"; + case QueueGlobalPriority::eHigh: return "High"; + case 
QueueGlobalPriority::eRealtime: return "Realtime"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( LineRasterizationMode value ) + { + switch ( value ) + { + case LineRasterizationMode::eDefault: return "Default"; + case LineRasterizationMode::eRectangular: return "Rectangular"; + case LineRasterizationMode::eBresenham: return "Bresenham"; + case LineRasterizationMode::eRectangularSmooth: return "RectangularSmooth"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( MemoryUnmapFlagBits value ) + { + switch ( value ) + { + case MemoryUnmapFlagBits::eReserveEXT: return "ReserveEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits2 value ) + { + switch ( value ) + { + case PipelineCreateFlagBits2::eDisableOptimization: return "DisableOptimization"; + case PipelineCreateFlagBits2::eAllowDerivatives: return "AllowDerivatives"; + case PipelineCreateFlagBits2::eDerivative: return "Derivative"; + case PipelineCreateFlagBits2::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex"; + case PipelineCreateFlagBits2::eDispatchBase: return "DispatchBase"; + case PipelineCreateFlagBits2::eFailOnPipelineCompileRequired: return "FailOnPipelineCompileRequired"; + case PipelineCreateFlagBits2::eEarlyReturnOnFailure: return "EarlyReturnOnFailure"; + case PipelineCreateFlagBits2::eNoProtectedAccess: return "NoProtectedAccess"; + case PipelineCreateFlagBits2::eProtectedAccessOnly: return "ProtectedAccessOnly"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case PipelineCreateFlagBits2::eExecutionGraphAMDX: return "ExecutionGraphAMDX"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case PipelineCreateFlagBits2::eRayTracingAllowSpheresAndLinearSweptSpheresNV: return "RayTracingAllowSpheresAndLinearSweptSpheresNV"; + case PipelineCreateFlagBits2::eEnableLegacyDitheringEXT: return "EnableLegacyDitheringEXT"; + case PipelineCreateFlagBits2::eDeferCompileNV: return "DeferCompileNV"; + case PipelineCreateFlagBits2::eCaptureStatisticsKHR: return "CaptureStatisticsKHR"; + case PipelineCreateFlagBits2::eCaptureInternalRepresentationsKHR: return "CaptureInternalRepresentationsKHR"; + case PipelineCreateFlagBits2::eLinkTimeOptimizationEXT: return "LinkTimeOptimizationEXT"; + case PipelineCreateFlagBits2::eRetainLinkTimeOptimizationInfoEXT: return "RetainLinkTimeOptimizationInfoEXT"; + case PipelineCreateFlagBits2::eLibraryKHR: return "LibraryKHR"; + case PipelineCreateFlagBits2::eRayTracingSkipTrianglesKHR: return "RayTracingSkipTrianglesKHR"; + case PipelineCreateFlagBits2::eRayTracingSkipAabbsKHR: return "RayTracingSkipAabbsKHR"; + case PipelineCreateFlagBits2::eRayTracingNoNullAnyHitShadersKHR: return "RayTracingNoNullAnyHitShadersKHR"; + case PipelineCreateFlagBits2::eRayTracingNoNullClosestHitShadersKHR: return "RayTracingNoNullClosestHitShadersKHR"; + case PipelineCreateFlagBits2::eRayTracingNoNullMissShadersKHR: return "RayTracingNoNullMissShadersKHR"; + case PipelineCreateFlagBits2::eRayTracingNoNullIntersectionShadersKHR: return "RayTracingNoNullIntersectionShadersKHR"; + case PipelineCreateFlagBits2::eRayTracingShaderGroupHandleCaptureReplayKHR: return "RayTracingShaderGroupHandleCaptureReplayKHR"; + case PipelineCreateFlagBits2::eIndirectBindableNV: return "IndirectBindableNV"; + case 
PipelineCreateFlagBits2::eRayTracingAllowMotionNV: return "RayTracingAllowMotionNV"; + case PipelineCreateFlagBits2::eRenderingFragmentShadingRateAttachmentKHR: return "RenderingFragmentShadingRateAttachmentKHR"; + case PipelineCreateFlagBits2::eRenderingFragmentDensityMapAttachmentEXT: return "RenderingFragmentDensityMapAttachmentEXT"; + case PipelineCreateFlagBits2::eRayTracingOpacityMicromapEXT: return "RayTracingOpacityMicromapEXT"; + case PipelineCreateFlagBits2::eColorAttachmentFeedbackLoopEXT: return "ColorAttachmentFeedbackLoopEXT"; + case PipelineCreateFlagBits2::eDepthStencilAttachmentFeedbackLoopEXT: return "DepthStencilAttachmentFeedbackLoopEXT"; + case PipelineCreateFlagBits2::eRayTracingDisplacementMicromapNV: return "RayTracingDisplacementMicromapNV"; + case PipelineCreateFlagBits2::eDescriptorBufferEXT: return "DescriptorBufferEXT"; + case PipelineCreateFlagBits2::eDisallowOpacityMicromapARM: return "DisallowOpacityMicromapARM"; + case PipelineCreateFlagBits2::eCaptureDataKHR: return "CaptureDataKHR"; + case PipelineCreateFlagBits2::eIndirectBindableEXT: return "IndirectBindableEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits2 value ) + { + switch ( value ) + { + case BufferUsageFlagBits2::eTransferSrc: return "TransferSrc"; + case BufferUsageFlagBits2::eTransferDst: return "TransferDst"; + case BufferUsageFlagBits2::eUniformTexelBuffer: return "UniformTexelBuffer"; + case BufferUsageFlagBits2::eStorageTexelBuffer: return "StorageTexelBuffer"; + case BufferUsageFlagBits2::eUniformBuffer: return "UniformBuffer"; + case BufferUsageFlagBits2::eStorageBuffer: return "StorageBuffer"; + case BufferUsageFlagBits2::eIndexBuffer: return "IndexBuffer"; + case BufferUsageFlagBits2::eVertexBuffer: return "VertexBuffer"; + case BufferUsageFlagBits2::eIndirectBuffer: return "IndirectBuffer"; + case BufferUsageFlagBits2::eShaderDeviceAddress: return "ShaderDeviceAddress"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case BufferUsageFlagBits2::eExecutionGraphScratchAMDX: return "ExecutionGraphScratchAMDX"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case BufferUsageFlagBits2::eConditionalRenderingEXT: return "ConditionalRenderingEXT"; + case BufferUsageFlagBits2::eShaderBindingTableKHR: return "ShaderBindingTableKHR"; + case BufferUsageFlagBits2::eTransformFeedbackBufferEXT: return "TransformFeedbackBufferEXT"; + case BufferUsageFlagBits2::eTransformFeedbackCounterBufferEXT: return "TransformFeedbackCounterBufferEXT"; + case BufferUsageFlagBits2::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR"; + case BufferUsageFlagBits2::eVideoDecodeDstKHR: return "VideoDecodeDstKHR"; + case BufferUsageFlagBits2::eVideoEncodeDstKHR: return "VideoEncodeDstKHR"; + case BufferUsageFlagBits2::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR"; + case BufferUsageFlagBits2::eAccelerationStructureBuildInputReadOnlyKHR: return "AccelerationStructureBuildInputReadOnlyKHR"; + case BufferUsageFlagBits2::eAccelerationStructureStorageKHR: return "AccelerationStructureStorageKHR"; + case BufferUsageFlagBits2::eSamplerDescriptorBufferEXT: return "SamplerDescriptorBufferEXT"; + case BufferUsageFlagBits2::eResourceDescriptorBufferEXT: return "ResourceDescriptorBufferEXT"; + case BufferUsageFlagBits2::ePushDescriptorsDescriptorBufferEXT: return "PushDescriptorsDescriptorBufferEXT"; + case BufferUsageFlagBits2::eMicromapBuildInputReadOnlyEXT: return "MicromapBuildInputReadOnlyEXT"; + case 
BufferUsageFlagBits2::eMicromapStorageEXT: return "MicromapStorageEXT"; + case BufferUsageFlagBits2::ePreprocessBufferEXT: return "PreprocessBufferEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PipelineRobustnessBufferBehavior value ) + { + switch ( value ) + { + case PipelineRobustnessBufferBehavior::eDeviceDefault: return "DeviceDefault"; + case PipelineRobustnessBufferBehavior::eDisabled: return "Disabled"; + case PipelineRobustnessBufferBehavior::eRobustBufferAccess: return "RobustBufferAccess"; + case PipelineRobustnessBufferBehavior::eRobustBufferAccess2: return "RobustBufferAccess2"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PipelineRobustnessImageBehavior value ) + { + switch ( value ) + { + case PipelineRobustnessImageBehavior::eDeviceDefault: return "DeviceDefault"; + case PipelineRobustnessImageBehavior::eDisabled: return "Disabled"; + case PipelineRobustnessImageBehavior::eRobustImageAccess: return "RobustImageAccess"; + case PipelineRobustnessImageBehavior::eRobustImageAccess2: return "RobustImageAccess2"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( HostImageCopyFlagBits value ) + { + switch ( value ) + { + case HostImageCopyFlagBits::eMemcpy: return "Memcpy"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5964,6 +7381,7 @@ namespace VULKAN_HPP_NAMESPACE case PresentModeKHR::eFifoRelaxed: return "FifoRelaxed"; case PresentModeKHR::eSharedDemandRefresh: return "SharedDemandRefresh"; case PresentModeKHR::eSharedContinuousRefresh: return "SharedContinuousRefresh"; + case PresentModeKHR::eFifoLatestReadyEXT: return "FifoLatestReadyEXT"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -6013,6 +7431,7 @@ namespace VULKAN_HPP_NAMESPACE case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions: return "SplitInstanceBindRegions"; case SwapchainCreateFlagBitsKHR::eProtected: return "Protected"; case SwapchainCreateFlagBitsKHR::eMutableFormat: return "MutableFormat"; + case SwapchainCreateFlagBitsKHR::eDeferredMemoryAllocationEXT: return "DeferredMemoryAllocationEXT"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -6155,6 +7574,10 @@ namespace VULKAN_HPP_NAMESPACE case DebugReportObjectTypeEXT::eCuFunctionNVX: return "CuFunctionNVX"; case DebugReportObjectTypeEXT::eAccelerationStructureKHR: return "AccelerationStructureKHR"; case DebugReportObjectTypeEXT::eAccelerationStructureNV: return "AccelerationStructureNV"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case DebugReportObjectTypeEXT::eCudaModuleNV: return "CudaModuleNV"; + case DebugReportObjectTypeEXT::eCudaFunctionNV: return "CudaFunctionNV"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) case DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA: return "BufferCollectionFUCHSIA"; #endif /*VK_USE_PLATFORM_FUCHSIA*/ @@ -6174,7 +7597,6 @@ namespace VULKAN_HPP_NAMESPACE } } -#if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_queue === VULKAN_HPP_INLINE std::string to_string( VideoCodecOperationFlagBitsKHR value ) @@ -6182,12 +7604,12 @@ namespace VULKAN_HPP_NAMESPACE switch ( value ) { case 
VideoCodecOperationFlagBitsKHR::eNone: return "None"; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - case VideoCodecOperationFlagBitsKHR::eEncodeH264EXT: return "EncodeH264EXT"; - case VideoCodecOperationFlagBitsKHR::eEncodeH265EXT: return "EncodeH265EXT"; - case VideoCodecOperationFlagBitsKHR::eDecodeH264EXT: return "DecodeH264EXT"; - case VideoCodecOperationFlagBitsKHR::eDecodeH265EXT: return "DecodeH265EXT"; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case VideoCodecOperationFlagBitsKHR::eEncodeH264: return "EncodeH264"; + case VideoCodecOperationFlagBitsKHR::eEncodeH265: return "EncodeH265"; + case VideoCodecOperationFlagBitsKHR::eDecodeH264: return "DecodeH264"; + case VideoCodecOperationFlagBitsKHR::eDecodeH265: return "DecodeH265"; + case VideoCodecOperationFlagBitsKHR::eDecodeAv1: return "DecodeAv1"; + case VideoCodecOperationFlagBitsKHR::eEncodeAv1: return "EncodeAv1"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -6232,6 +7654,11 @@ namespace VULKAN_HPP_NAMESPACE switch ( value ) { case VideoSessionCreateFlagBitsKHR::eProtectedContent: return "ProtectedContent"; + case VideoSessionCreateFlagBitsKHR::eAllowEncodeParameterOptimizations: return "AllowEncodeParameterOptimizations"; + case VideoSessionCreateFlagBitsKHR::eInlineQueries: return "InlineQueries"; + case VideoSessionCreateFlagBitsKHR::eAllowEncodeQuantizationDeltaMap: return "AllowEncodeQuantizationDeltaMap"; + case VideoSessionCreateFlagBitsKHR::eAllowEncodeEmphasisMap: return "AllowEncodeEmphasisMap"; + case VideoSessionCreateFlagBitsKHR::eInlineSessionParameters: return "InlineSessionParameters"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -6241,10 +7668,8 @@ namespace VULKAN_HPP_NAMESPACE switch ( value ) { case VideoCodingControlFlagBitsKHR::eReset: return "Reset"; -# if defined( VK_ENABLE_BETA_EXTENSIONS ) case VideoCodingControlFlagBitsKHR::eEncodeRateControl: return "EncodeRateControl"; - case VideoCodingControlFlagBitsKHR::eEncodeRateControlLayer: return "EncodeRateControlLayer"; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case VideoCodingControlFlagBitsKHR::eEncodeQualityLevel: return "EncodeQualityLevel"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -6256,13 +7681,18 @@ namespace VULKAN_HPP_NAMESPACE case QueryResultStatusKHR::eError: return "Error"; case QueryResultStatusKHR::eNotReady: return "NotReady"; case QueryResultStatusKHR::eComplete: return "Complete"; + case QueryResultStatusKHR::eInsufficientBitstreamBufferRange: return "InsufficientBitstreamBufferRange"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - VULKAN_HPP_INLINE std::string to_string( VideoSessionParametersCreateFlagBitsKHR ) + VULKAN_HPP_INLINE std::string to_string( VideoSessionParametersCreateFlagBitsKHR value ) { - return "(void)"; + switch ( value ) + { + case VideoSessionParametersCreateFlagBitsKHR::eQuantizationMapCompatible: return "QuantizationMapCompatible"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } } VULKAN_HPP_INLINE std::string to_string( VideoBeginCodingFlagBitsKHR ) @@ -6274,9 +7704,7 @@ namespace VULKAN_HPP_NAMESPACE { return "(void)"; } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_video_decode_queue === VULKAN_HPP_INLINE std::string to_string( VideoDecodeCapabilityFlagBitsKHR value ) @@ -6305,7 
+7733,6 @@ namespace VULKAN_HPP_NAMESPACE { return "(void)"; } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ //=== VK_EXT_transform_feedback === @@ -6314,184 +7741,165 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_EXT_video_encode_h264 === - - VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CapabilityFlagBitsEXT value ) - { - switch ( value ) - { - case VideoEncodeH264CapabilityFlagBitsEXT::eDirect8X8InferenceEnabled: return "Direct8X8InferenceEnabled"; - case VideoEncodeH264CapabilityFlagBitsEXT::eDirect8X8InferenceDisabled: return "Direct8X8InferenceDisabled"; - case VideoEncodeH264CapabilityFlagBitsEXT::eSeparateColourPlane: return "SeparateColourPlane"; - case VideoEncodeH264CapabilityFlagBitsEXT::eQpprimeYZeroTransformBypass: return "QpprimeYZeroTransformBypass"; - case VideoEncodeH264CapabilityFlagBitsEXT::eScalingLists: return "ScalingLists"; - case VideoEncodeH264CapabilityFlagBitsEXT::eHrdCompliance: return "HrdCompliance"; - case VideoEncodeH264CapabilityFlagBitsEXT::eChromaQpOffset: return "ChromaQpOffset"; - case VideoEncodeH264CapabilityFlagBitsEXT::eSecondChromaQpOffset: return "SecondChromaQpOffset"; - case VideoEncodeH264CapabilityFlagBitsEXT::ePicInitQpMinus26: return "PicInitQpMinus26"; - case VideoEncodeH264CapabilityFlagBitsEXT::eWeightedPred: return "WeightedPred"; - case VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBipredExplicit: return "WeightedBipredExplicit"; - case VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBipredImplicit: return "WeightedBipredImplicit"; - case VideoEncodeH264CapabilityFlagBitsEXT::eWeightedPredNoTable: return "WeightedPredNoTable"; - case VideoEncodeH264CapabilityFlagBitsEXT::eTransform8X8: return "Transform8X8"; - case VideoEncodeH264CapabilityFlagBitsEXT::eCabac: return "Cabac"; - case VideoEncodeH264CapabilityFlagBitsEXT::eCavlc: return "Cavlc"; - case VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterDisabled: return "DeblockingFilterDisabled"; - case VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterEnabled: return "DeblockingFilterEnabled"; - case VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterPartial: return "DeblockingFilterPartial"; - case VideoEncodeH264CapabilityFlagBitsEXT::eDisableDirectSpatialMvPred: return "DisableDirectSpatialMvPred"; - case VideoEncodeH264CapabilityFlagBitsEXT::eMultipleSlicePerFrame: return "MultipleSlicePerFrame"; - case VideoEncodeH264CapabilityFlagBitsEXT::eSliceMbCount: return "SliceMbCount"; - case VideoEncodeH264CapabilityFlagBitsEXT::eRowUnalignedSlice: return "RowUnalignedSlice"; - case VideoEncodeH264CapabilityFlagBitsEXT::eDifferentSliceType: return "DifferentSliceType"; - case VideoEncodeH264CapabilityFlagBitsEXT::eBFrameInL1List: return "BFrameInL1List"; - default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; - } - } + //=== VK_KHR_video_encode_h264 === - VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264InputModeFlagBitsEXT value ) + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264CapabilityFlagBitsKHR value ) { switch ( value ) { - case VideoEncodeH264InputModeFlagBitsEXT::eFrame: return "Frame"; - case VideoEncodeH264InputModeFlagBitsEXT::eSlice: return "Slice"; - case VideoEncodeH264InputModeFlagBitsEXT::eNonVcl: return "NonVcl"; + case VideoEncodeH264CapabilityFlagBitsKHR::eHrdCompliance: return "HrdCompliance"; + case VideoEncodeH264CapabilityFlagBitsKHR::ePredictionWeightTableGenerated: return "PredictionWeightTableGenerated"; + case 
VideoEncodeH264CapabilityFlagBitsKHR::eRowUnalignedSlice: return "RowUnalignedSlice"; + case VideoEncodeH264CapabilityFlagBitsKHR::eDifferentSliceType: return "DifferentSliceType"; + case VideoEncodeH264CapabilityFlagBitsKHR::eBFrameInL0List: return "BFrameInL0List"; + case VideoEncodeH264CapabilityFlagBitsKHR::eBFrameInL1List: return "BFrameInL1List"; + case VideoEncodeH264CapabilityFlagBitsKHR::ePerPictureTypeMinMaxQp: return "PerPictureTypeMinMaxQp"; + case VideoEncodeH264CapabilityFlagBitsKHR::ePerSliceConstantQp: return "PerSliceConstantQp"; + case VideoEncodeH264CapabilityFlagBitsKHR::eGeneratePrefixNalu: return "GeneratePrefixNalu"; + case VideoEncodeH264CapabilityFlagBitsKHR::eMbQpDiffWraparound: return "MbQpDiffWraparound"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264OutputModeFlagBitsEXT value ) + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264StdFlagBitsKHR value ) { switch ( value ) { - case VideoEncodeH264OutputModeFlagBitsEXT::eFrame: return "Frame"; - case VideoEncodeH264OutputModeFlagBitsEXT::eSlice: return "Slice"; - case VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl: return "NonVcl"; + case VideoEncodeH264StdFlagBitsKHR::eSeparateColorPlaneFlagSet: return "SeparateColorPlaneFlagSet"; + case VideoEncodeH264StdFlagBitsKHR::eQpprimeYZeroTransformBypassFlagSet: return "QpprimeYZeroTransformBypassFlagSet"; + case VideoEncodeH264StdFlagBitsKHR::eScalingMatrixPresentFlagSet: return "ScalingMatrixPresentFlagSet"; + case VideoEncodeH264StdFlagBitsKHR::eChromaQpIndexOffset: return "ChromaQpIndexOffset"; + case VideoEncodeH264StdFlagBitsKHR::eSecondChromaQpIndexOffset: return "SecondChromaQpIndexOffset"; + case VideoEncodeH264StdFlagBitsKHR::ePicInitQpMinus26: return "PicInitQpMinus26"; + case VideoEncodeH264StdFlagBitsKHR::eWeightedPredFlagSet: return "WeightedPredFlagSet"; + case VideoEncodeH264StdFlagBitsKHR::eWeightedBipredIdcExplicit: return "WeightedBipredIdcExplicit"; + case VideoEncodeH264StdFlagBitsKHR::eWeightedBipredIdcImplicit: return "WeightedBipredIdcImplicit"; + case VideoEncodeH264StdFlagBitsKHR::eTransform8X8ModeFlagSet: return "Transform8X8ModeFlagSet"; + case VideoEncodeH264StdFlagBitsKHR::eDirectSpatialMvPredFlagUnset: return "DirectSpatialMvPredFlagUnset"; + case VideoEncodeH264StdFlagBitsKHR::eEntropyCodingModeFlagUnset: return "EntropyCodingModeFlagUnset"; + case VideoEncodeH264StdFlagBitsKHR::eEntropyCodingModeFlagSet: return "EntropyCodingModeFlagSet"; + case VideoEncodeH264StdFlagBitsKHR::eDirect8X8InferenceFlagUnset: return "Direct8X8InferenceFlagUnset"; + case VideoEncodeH264StdFlagBitsKHR::eConstrainedIntraPredFlagSet: return "ConstrainedIntraPredFlagSet"; + case VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterDisabled: return "DeblockingFilterDisabled"; + case VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterEnabled: return "DeblockingFilterEnabled"; + case VideoEncodeH264StdFlagBitsKHR::eDeblockingFilterPartial: return "DeblockingFilterPartial"; + case VideoEncodeH264StdFlagBitsKHR::eSliceQpDelta: return "SliceQpDelta"; + case VideoEncodeH264StdFlagBitsKHR::eDifferentSliceQpDelta: return "DifferentSliceQpDelta"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264RateControlStructureEXT value ) + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH264RateControlFlagBitsKHR value ) { switch ( value ) { - case 
VideoEncodeH264RateControlStructureEXT::eUnknown: return "Unknown"; - case VideoEncodeH264RateControlStructureEXT::eFlat: return "Flat"; - case VideoEncodeH264RateControlStructureEXT::eDyadic: return "Dyadic"; + case VideoEncodeH264RateControlFlagBitsKHR::eAttemptHrdCompliance: return "AttemptHrdCompliance"; + case VideoEncodeH264RateControlFlagBitsKHR::eRegularGop: return "RegularGop"; + case VideoEncodeH264RateControlFlagBitsKHR::eReferencePatternFlat: return "ReferencePatternFlat"; + case VideoEncodeH264RateControlFlagBitsKHR::eReferencePatternDyadic: return "ReferencePatternDyadic"; + case VideoEncodeH264RateControlFlagBitsKHR::eTemporalLayerPatternDyadic: return "TemporalLayerPatternDyadic"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_EXT_video_encode_h265 === - VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CapabilityFlagBitsEXT value ) - { - switch ( value ) - { - case VideoEncodeH265CapabilityFlagBitsEXT::eSeparateColourPlane: return "SeparateColourPlane"; - case VideoEncodeH265CapabilityFlagBitsEXT::eScalingLists: return "ScalingLists"; - case VideoEncodeH265CapabilityFlagBitsEXT::eSampleAdaptiveOffsetEnabled: return "SampleAdaptiveOffsetEnabled"; - case VideoEncodeH265CapabilityFlagBitsEXT::ePcmEnable: return "PcmEnable"; - case VideoEncodeH265CapabilityFlagBitsEXT::eSpsTemporalMvpEnabled: return "SpsTemporalMvpEnabled"; - case VideoEncodeH265CapabilityFlagBitsEXT::eHrdCompliance: return "HrdCompliance"; - case VideoEncodeH265CapabilityFlagBitsEXT::eInitQpMinus26: return "InitQpMinus26"; - case VideoEncodeH265CapabilityFlagBitsEXT::eLog2ParallelMergeLevelMinus2: return "Log2ParallelMergeLevelMinus2"; - case VideoEncodeH265CapabilityFlagBitsEXT::eSignDataHidingEnabled: return "SignDataHidingEnabled"; - case VideoEncodeH265CapabilityFlagBitsEXT::eTransformSkipEnabled: return "TransformSkipEnabled"; - case VideoEncodeH265CapabilityFlagBitsEXT::eTransformSkipDisabled: return "TransformSkipDisabled"; - case VideoEncodeH265CapabilityFlagBitsEXT::ePpsSliceChromaQpOffsetsPresent: return "PpsSliceChromaQpOffsetsPresent"; - case VideoEncodeH265CapabilityFlagBitsEXT::eWeightedPred: return "WeightedPred"; - case VideoEncodeH265CapabilityFlagBitsEXT::eWeightedBipred: return "WeightedBipred"; - case VideoEncodeH265CapabilityFlagBitsEXT::eWeightedPredNoTable: return "WeightedPredNoTable"; - case VideoEncodeH265CapabilityFlagBitsEXT::eTransquantBypassEnabled: return "TransquantBypassEnabled"; - case VideoEncodeH265CapabilityFlagBitsEXT::eEntropyCodingSyncEnabled: return "EntropyCodingSyncEnabled"; - case VideoEncodeH265CapabilityFlagBitsEXT::eDeblockingFilterOverrideEnabled: return "DeblockingFilterOverrideEnabled"; - case VideoEncodeH265CapabilityFlagBitsEXT::eMultipleTilePerFrame: return "MultipleTilePerFrame"; - case VideoEncodeH265CapabilityFlagBitsEXT::eMultipleSlicePerTile: return "MultipleSlicePerTile"; - case VideoEncodeH265CapabilityFlagBitsEXT::eMultipleTilePerSlice: return "MultipleTilePerSlice"; - case VideoEncodeH265CapabilityFlagBitsEXT::eSliceSegmentCtbCount: return "SliceSegmentCtbCount"; - case VideoEncodeH265CapabilityFlagBitsEXT::eRowUnalignedSliceSegment: return "RowUnalignedSliceSegment"; - case VideoEncodeH265CapabilityFlagBitsEXT::eDependentSliceSegment: return "DependentSliceSegment"; - case VideoEncodeH265CapabilityFlagBitsEXT::eDifferentSliceType: return "DifferentSliceType"; - case 
VideoEncodeH265CapabilityFlagBitsEXT::eBFrameInL1List: return "BFrameInL1List"; - default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; - } - } + //=== VK_KHR_video_encode_h265 === - VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265InputModeFlagBitsEXT value ) + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CapabilityFlagBitsKHR value ) { switch ( value ) { - case VideoEncodeH265InputModeFlagBitsEXT::eFrame: return "Frame"; - case VideoEncodeH265InputModeFlagBitsEXT::eSliceSegment: return "SliceSegment"; - case VideoEncodeH265InputModeFlagBitsEXT::eNonVcl: return "NonVcl"; + case VideoEncodeH265CapabilityFlagBitsKHR::eHrdCompliance: return "HrdCompliance"; + case VideoEncodeH265CapabilityFlagBitsKHR::ePredictionWeightTableGenerated: return "PredictionWeightTableGenerated"; + case VideoEncodeH265CapabilityFlagBitsKHR::eRowUnalignedSliceSegment: return "RowUnalignedSliceSegment"; + case VideoEncodeH265CapabilityFlagBitsKHR::eDifferentSliceSegmentType: return "DifferentSliceSegmentType"; + case VideoEncodeH265CapabilityFlagBitsKHR::eBFrameInL0List: return "BFrameInL0List"; + case VideoEncodeH265CapabilityFlagBitsKHR::eBFrameInL1List: return "BFrameInL1List"; + case VideoEncodeH265CapabilityFlagBitsKHR::ePerPictureTypeMinMaxQp: return "PerPictureTypeMinMaxQp"; + case VideoEncodeH265CapabilityFlagBitsKHR::ePerSliceSegmentConstantQp: return "PerSliceSegmentConstantQp"; + case VideoEncodeH265CapabilityFlagBitsKHR::eMultipleTilesPerSliceSegment: return "MultipleTilesPerSliceSegment"; + case VideoEncodeH265CapabilityFlagBitsKHR::eMultipleSliceSegmentsPerTile: return "MultipleSliceSegmentsPerTile"; + case VideoEncodeH265CapabilityFlagBitsKHR::eCuQpDiffWraparound: return "CuQpDiffWraparound"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265OutputModeFlagBitsEXT value ) + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265StdFlagBitsKHR value ) { switch ( value ) { - case VideoEncodeH265OutputModeFlagBitsEXT::eFrame: return "Frame"; - case VideoEncodeH265OutputModeFlagBitsEXT::eSliceSegment: return "SliceSegment"; - case VideoEncodeH265OutputModeFlagBitsEXT::eNonVcl: return "NonVcl"; + case VideoEncodeH265StdFlagBitsKHR::eSeparateColorPlaneFlagSet: return "SeparateColorPlaneFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eSampleAdaptiveOffsetEnabledFlagSet: return "SampleAdaptiveOffsetEnabledFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eScalingListDataPresentFlagSet: return "ScalingListDataPresentFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::ePcmEnabledFlagSet: return "PcmEnabledFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eSpsTemporalMvpEnabledFlagSet: return "SpsTemporalMvpEnabledFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eInitQpMinus26: return "InitQpMinus26"; + case VideoEncodeH265StdFlagBitsKHR::eWeightedPredFlagSet: return "WeightedPredFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eWeightedBipredFlagSet: return "WeightedBipredFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eLog2ParallelMergeLevelMinus2: return "Log2ParallelMergeLevelMinus2"; + case VideoEncodeH265StdFlagBitsKHR::eSignDataHidingEnabledFlagSet: return "SignDataHidingEnabledFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eTransformSkipEnabledFlagSet: return "TransformSkipEnabledFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eTransformSkipEnabledFlagUnset: return "TransformSkipEnabledFlagUnset"; + case 
VideoEncodeH265StdFlagBitsKHR::ePpsSliceChromaQpOffsetsPresentFlagSet: return "PpsSliceChromaQpOffsetsPresentFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eTransquantBypassEnabledFlagSet: return "TransquantBypassEnabledFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eConstrainedIntraPredFlagSet: return "ConstrainedIntraPredFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eEntropyCodingSyncEnabledFlagSet: return "EntropyCodingSyncEnabledFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eDeblockingFilterOverrideEnabledFlagSet: return "DeblockingFilterOverrideEnabledFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eDependentSliceSegmentsEnabledFlagSet: return "DependentSliceSegmentsEnabledFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eDependentSliceSegmentFlagSet: return "DependentSliceSegmentFlagSet"; + case VideoEncodeH265StdFlagBitsKHR::eSliceQpDelta: return "SliceQpDelta"; + case VideoEncodeH265StdFlagBitsKHR::eDifferentSliceQpDelta: return "DifferentSliceQpDelta"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CtbSizeFlagBitsEXT value ) + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265CtbSizeFlagBitsKHR value ) { switch ( value ) { - case VideoEncodeH265CtbSizeFlagBitsEXT::e16: return "16"; - case VideoEncodeH265CtbSizeFlagBitsEXT::e32: return "32"; - case VideoEncodeH265CtbSizeFlagBitsEXT::e64: return "64"; + case VideoEncodeH265CtbSizeFlagBitsKHR::e16: return "16"; + case VideoEncodeH265CtbSizeFlagBitsKHR::e32: return "32"; + case VideoEncodeH265CtbSizeFlagBitsKHR::e64: return "64"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265TransformBlockSizeFlagBitsEXT value ) + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265TransformBlockSizeFlagBitsKHR value ) { switch ( value ) { - case VideoEncodeH265TransformBlockSizeFlagBitsEXT::e4: return "4"; - case VideoEncodeH265TransformBlockSizeFlagBitsEXT::e8: return "8"; - case VideoEncodeH265TransformBlockSizeFlagBitsEXT::e16: return "16"; - case VideoEncodeH265TransformBlockSizeFlagBitsEXT::e32: return "32"; + case VideoEncodeH265TransformBlockSizeFlagBitsKHR::e4: return "4"; + case VideoEncodeH265TransformBlockSizeFlagBitsKHR::e8: return "8"; + case VideoEncodeH265TransformBlockSizeFlagBitsKHR::e16: return "16"; + case VideoEncodeH265TransformBlockSizeFlagBitsKHR::e32: return "32"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265RateControlStructureEXT value ) + VULKAN_HPP_INLINE std::string to_string( VideoEncodeH265RateControlFlagBitsKHR value ) { switch ( value ) { - case VideoEncodeH265RateControlStructureEXT::eUnknown: return "Unknown"; - case VideoEncodeH265RateControlStructureEXT::eFlat: return "Flat"; - case VideoEncodeH265RateControlStructureEXT::eDyadic: return "Dyadic"; + case VideoEncodeH265RateControlFlagBitsKHR::eAttemptHrdCompliance: return "AttemptHrdCompliance"; + case VideoEncodeH265RateControlFlagBitsKHR::eRegularGop: return "RegularGop"; + case VideoEncodeH265RateControlFlagBitsKHR::eReferencePatternFlat: return "ReferencePatternFlat"; + case VideoEncodeH265RateControlFlagBitsKHR::eReferencePatternDyadic: return "ReferencePatternDyadic"; + case VideoEncodeH265RateControlFlagBitsKHR::eTemporalSubLayerPatternDyadic: return "TemporalSubLayerPatternDyadic"; default: return "invalid ( 
" + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_EXT_video_decode_h264 === + //=== VK_KHR_video_decode_h264 === - VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264PictureLayoutFlagBitsEXT value ) + VULKAN_HPP_INLINE std::string to_string( VideoDecodeH264PictureLayoutFlagBitsKHR value ) { switch ( value ) { - case VideoDecodeH264PictureLayoutFlagBitsEXT::eProgressive: return "Progressive"; - case VideoDecodeH264PictureLayoutFlagBitsEXT::eInterlacedInterleavedLines: return "InterlacedInterleavedLines"; - case VideoDecodeH264PictureLayoutFlagBitsEXT::eInterlacedSeparatePlanes: return "InterlacedSeparatePlanes"; + case VideoDecodeH264PictureLayoutFlagBitsKHR::eProgressive: return "Progressive"; + case VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedInterleavedLines: return "InterlacedInterleavedLines"; + case VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedSeparatePlanes: return "InterlacedSeparatePlanes"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ //=== VK_AMD_shader_info === @@ -6561,32 +7969,6 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VK_USE_PLATFORM_VI_NN*/ - //=== VK_EXT_pipeline_robustness === - - VULKAN_HPP_INLINE std::string to_string( PipelineRobustnessBufferBehaviorEXT value ) - { - switch ( value ) - { - case PipelineRobustnessBufferBehaviorEXT::eDeviceDefault: return "DeviceDefault"; - case PipelineRobustnessBufferBehaviorEXT::eDisabled: return "Disabled"; - case PipelineRobustnessBufferBehaviorEXT::eRobustBufferAccess: return "RobustBufferAccess"; - case PipelineRobustnessBufferBehaviorEXT::eRobustBufferAccess2: return "RobustBufferAccess2"; - default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; - } - } - - VULKAN_HPP_INLINE std::string to_string( PipelineRobustnessImageBehaviorEXT value ) - { - switch ( value ) - { - case PipelineRobustnessImageBehaviorEXT::eDeviceDefault: return "DeviceDefault"; - case PipelineRobustnessImageBehaviorEXT::eDisabled: return "Disabled"; - case PipelineRobustnessImageBehaviorEXT::eRobustImageAccess: return "RobustImageAccess"; - case PipelineRobustnessImageBehaviorEXT::eRobustImageAccess2: return "RobustImageAccess2"; - default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; - } - } - //=== VK_EXT_conditional_rendering === VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagBitsEXT value ) @@ -6805,6 +8187,7 @@ namespace VULKAN_HPP_NAMESPACE case DebugUtilsMessageTypeFlagBitsEXT::eGeneral: return "General"; case DebugUtilsMessageTypeFlagBitsEXT::eValidation: return "Validation"; case DebugUtilsMessageTypeFlagBitsEXT::ePerformance: return "Performance"; + case DebugUtilsMessageTypeFlagBitsEXT::eDeviceAddressBinding: return "DeviceAddressBinding"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -6881,6 +8264,8 @@ namespace VULKAN_HPP_NAMESPACE case GeometryInstanceFlagBitsKHR::eTriangleFlipFacing: return "TriangleFlipFacing"; case GeometryInstanceFlagBitsKHR::eForceOpaque: return "ForceOpaque"; case GeometryInstanceFlagBitsKHR::eForceNoOpaque: return "ForceNoOpaque"; + case GeometryInstanceFlagBitsKHR::eForceOpacityMicromap2StateEXT: return "ForceOpacityMicromap2StateEXT"; + case GeometryInstanceFlagBitsKHR::eDisableOpacityMicromapsEXT: return "DisableOpacityMicromapsEXT"; 
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -6895,6 +8280,13 @@ namespace VULKAN_HPP_NAMESPACE case BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild: return "PreferFastBuild"; case BuildAccelerationStructureFlagBitsKHR::eLowMemory: return "LowMemory"; case BuildAccelerationStructureFlagBitsKHR::eMotionNV: return "MotionNV"; + case BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapUpdateEXT: return "AllowOpacityMicromapUpdateEXT"; + case BuildAccelerationStructureFlagBitsKHR::eAllowDisableOpacityMicromapsEXT: return "AllowDisableOpacityMicromapsEXT"; + case BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapDataUpdateEXT: return "AllowOpacityMicromapDataUpdateEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case BuildAccelerationStructureFlagBitsKHR::eAllowDisplacementMicromapUpdateNV: return "AllowDisplacementMicromapUpdateNV"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case BuildAccelerationStructureFlagBitsKHR::eAllowDataAccess: return "AllowDataAccess"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -6918,6 +8310,8 @@ namespace VULKAN_HPP_NAMESPACE case GeometryTypeKHR::eTriangles: return "Triangles"; case GeometryTypeKHR::eAabbs: return "Aabbs"; case GeometryTypeKHR::eInstances: return "Instances"; + case GeometryTypeKHR::eSpheresNV: return "SpheresNV"; + case GeometryTypeKHR::eLinearSweptSpheresNV: return "LinearSweptSpheresNV"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -6937,6 +8331,7 @@ namespace VULKAN_HPP_NAMESPACE switch ( value ) { case AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay"; + case AccelerationStructureCreateFlagBitsKHR::eDescriptorBufferCaptureReplayEXT: return "DescriptorBufferCaptureReplayEXT"; case AccelerationStructureCreateFlagBitsKHR::eMotionNV: return "MotionNV"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } @@ -6952,6 +8347,31 @@ namespace VULKAN_HPP_NAMESPACE } } + //=== VK_KHR_ray_tracing_pipeline === + + VULKAN_HPP_INLINE std::string to_string( RayTracingShaderGroupTypeKHR value ) + { + switch ( value ) + { + case RayTracingShaderGroupTypeKHR::eGeneral: return "General"; + case RayTracingShaderGroupTypeKHR::eTrianglesHitGroup: return "TrianglesHitGroup"; + case RayTracingShaderGroupTypeKHR::eProceduralHitGroup: return "ProceduralHitGroup"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( ShaderGroupShaderKHR value ) + { + switch ( value ) + { + case ShaderGroupShaderKHR::eGeneral: return "General"; + case ShaderGroupShaderKHR::eClosestHit: return "ClosestHit"; + case ShaderGroupShaderKHR::eAnyHit: return "AnyHit"; + case ShaderGroupShaderKHR::eIntersection: return "Intersection"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + //=== VK_NV_framebuffer_mixed_samples === VULKAN_HPP_INLINE std::string to_string( CoverageModulationModeNV value ) @@ -7041,54 +8461,26 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - //=== VK_EXT_calibrated_timestamps === + //=== VK_AMD_memory_overallocation_behavior === - VULKAN_HPP_INLINE std::string to_string( TimeDomainEXT value ) + VULKAN_HPP_INLINE std::string to_string( MemoryOverallocationBehaviorAMD value ) { switch ( value ) { - case 
TimeDomainEXT::eDevice: return "Device"; - case TimeDomainEXT::eClockMonotonic: return "ClockMonotonic"; - case TimeDomainEXT::eClockMonotonicRaw: return "ClockMonotonicRaw"; - case TimeDomainEXT::eQueryPerformanceCounter: return "QueryPerformanceCounter"; + case MemoryOverallocationBehaviorAMD::eDefault: return "Default"; + case MemoryOverallocationBehaviorAMD::eAllowed: return "Allowed"; + case MemoryOverallocationBehaviorAMD::eDisallowed: return "Disallowed"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - //=== VK_KHR_global_priority === + //=== VK_INTEL_performance_query === - VULKAN_HPP_INLINE std::string to_string( QueueGlobalPriorityKHR value ) + VULKAN_HPP_INLINE std::string to_string( PerformanceConfigurationTypeINTEL value ) { switch ( value ) { - case QueueGlobalPriorityKHR::eLow: return "Low"; - case QueueGlobalPriorityKHR::eMedium: return "Medium"; - case QueueGlobalPriorityKHR::eHigh: return "High"; - case QueueGlobalPriorityKHR::eRealtime: return "Realtime"; - default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; - } - } - - //=== VK_AMD_memory_overallocation_behavior === - - VULKAN_HPP_INLINE std::string to_string( MemoryOverallocationBehaviorAMD value ) - { - switch ( value ) - { - case MemoryOverallocationBehaviorAMD::eDefault: return "Default"; - case MemoryOverallocationBehaviorAMD::eAllowed: return "Allowed"; - case MemoryOverallocationBehaviorAMD::eDisallowed: return "Disallowed"; - default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; - } - } - - //=== VK_INTEL_performance_query === - - VULKAN_HPP_INLINE std::string to_string( PerformanceConfigurationTypeINTEL value ) - { - switch ( value ) - { - case PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated: return "CommandQueueMetricsDiscoveryActivated"; + case PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated: return "CommandQueueMetricsDiscoveryActivated"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -7206,39 +8598,6 @@ namespace VULKAN_HPP_NAMESPACE } } - //=== VK_NV_cooperative_matrix === - - VULKAN_HPP_INLINE std::string to_string( ScopeNV value ) - { - switch ( value ) - { - case ScopeNV::eDevice: return "Device"; - case ScopeNV::eWorkgroup: return "Workgroup"; - case ScopeNV::eSubgroup: return "Subgroup"; - case ScopeNV::eQueueFamily: return "QueueFamily"; - default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; - } - } - - VULKAN_HPP_INLINE std::string to_string( ComponentTypeNV value ) - { - switch ( value ) - { - case ComponentTypeNV::eFloat16: return "Float16"; - case ComponentTypeNV::eFloat32: return "Float32"; - case ComponentTypeNV::eFloat64: return "Float64"; - case ComponentTypeNV::eSint8: return "Sint8"; - case ComponentTypeNV::eSint16: return "Sint16"; - case ComponentTypeNV::eSint32: return "Sint32"; - case ComponentTypeNV::eSint64: return "Sint64"; - case ComponentTypeNV::eUint8: return "Uint8"; - case ComponentTypeNV::eUint16: return "Uint16"; - case ComponentTypeNV::eUint32: return "Uint32"; - case ComponentTypeNV::eUint64: return "Uint64"; - default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; - } - } - //=== VK_NV_coverage_reduction_mode === VULKAN_HPP_INLINE std::string to_string( CoverageReductionModeNV value ) @@ -7291,30 +8650,40 @@ namespace VULKAN_HPP_NAMESPACE 
return "(void)"; } - //=== VK_EXT_line_rasterization === + //=== VK_KHR_pipeline_executable_properties === - VULKAN_HPP_INLINE std::string to_string( LineRasterizationModeEXT value ) + VULKAN_HPP_INLINE std::string to_string( PipelineExecutableStatisticFormatKHR value ) { switch ( value ) { - case LineRasterizationModeEXT::eDefault: return "Default"; - case LineRasterizationModeEXT::eRectangular: return "Rectangular"; - case LineRasterizationModeEXT::eBresenham: return "Bresenham"; - case LineRasterizationModeEXT::eRectangularSmooth: return "RectangularSmooth"; + case PipelineExecutableStatisticFormatKHR::eBool32: return "Bool32"; + case PipelineExecutableStatisticFormatKHR::eInt64: return "Int64"; + case PipelineExecutableStatisticFormatKHR::eUint64: return "Uint64"; + case PipelineExecutableStatisticFormatKHR::eFloat64: return "Float64"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - //=== VK_KHR_pipeline_executable_properties === + //=== VK_EXT_surface_maintenance1 === - VULKAN_HPP_INLINE std::string to_string( PipelineExecutableStatisticFormatKHR value ) + VULKAN_HPP_INLINE std::string to_string( PresentScalingFlagBitsEXT value ) { switch ( value ) { - case PipelineExecutableStatisticFormatKHR::eBool32: return "Bool32"; - case PipelineExecutableStatisticFormatKHR::eInt64: return "Int64"; - case PipelineExecutableStatisticFormatKHR::eUint64: return "Uint64"; - case PipelineExecutableStatisticFormatKHR::eFloat64: return "Float64"; + case PresentScalingFlagBitsEXT::eOneToOne: return "OneToOne"; + case PresentScalingFlagBitsEXT::eAspectRatioStretch: return "AspectRatioStretch"; + case PresentScalingFlagBitsEXT::eStretch: return "Stretch"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PresentGravityFlagBitsEXT value ) + { + switch ( value ) + { + case PresentGravityFlagBitsEXT::eMin: return "Min"; + case PresentGravityFlagBitsEXT::eMax: return "Max"; + case PresentGravityFlagBitsEXT::eCentered: return "Centered"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -7343,6 +8712,8 @@ namespace VULKAN_HPP_NAMESPACE case IndirectCommandsTokenTypeNV::eDraw: return "Draw"; case IndirectCommandsTokenTypeNV::eDrawTasks: return "DrawTasks"; case IndirectCommandsTokenTypeNV::eDrawMeshTasks: return "DrawMeshTasks"; + case IndirectCommandsTokenTypeNV::ePipeline: return "Pipeline"; + case IndirectCommandsTokenTypeNV::eDispatch: return "Dispatch"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -7358,6 +8729,19 @@ namespace VULKAN_HPP_NAMESPACE } } + //=== VK_EXT_depth_bias_control === + + VULKAN_HPP_INLINE std::string to_string( DepthBiasRepresentationEXT value ) + { + switch ( value ) + { + case DepthBiasRepresentationEXT::eLeastRepresentableValueFormat: return "LeastRepresentableValueFormat"; + case DepthBiasRepresentationEXT::eLeastRepresentableValueForceUnorm: return "LeastRepresentableValueForceUnorm"; + case DepthBiasRepresentationEXT::eFloat: return "Float"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + //=== VK_EXT_device_memory_report === VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportEventTypeEXT value ) @@ -7378,25 +8762,27 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - //=== VK_EXT_pipeline_creation_cache_control === + //=== 
VK_KHR_video_encode_queue === - VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits value ) + VULKAN_HPP_INLINE std::string to_string( VideoEncodeCapabilityFlagBitsKHR value ) { switch ( value ) { - case PipelineCacheCreateFlagBits::eExternallySynchronized: return "ExternallySynchronized"; + case VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes: return "PrecedingExternallyEncodedBytes"; + case VideoEncodeCapabilityFlagBitsKHR::eInsufficientBitstreamBufferRangeDetection: return "InsufficientBitstreamBufferRangeDetection"; + case VideoEncodeCapabilityFlagBitsKHR::eQuantizationDeltaMap: return "QuantizationDeltaMap"; + case VideoEncodeCapabilityFlagBitsKHR::eEmphasisMap: return "EmphasisMap"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_KHR_video_encode_queue === - - VULKAN_HPP_INLINE std::string to_string( VideoEncodeCapabilityFlagBitsKHR value ) + VULKAN_HPP_INLINE std::string to_string( VideoEncodeFeedbackFlagBitsKHR value ) { switch ( value ) { - case VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes: return "PrecedingExternallyEncodedBytes"; + case VideoEncodeFeedbackFlagBitsKHR::eBitstreamBufferOffset: return "BitstreamBufferOffset"; + case VideoEncodeFeedbackFlagBitsKHR::eBitstreamBytesWritten: return "BitstreamBytesWritten"; + case VideoEncodeFeedbackFlagBitsKHR::eBitstreamHasOverrides: return "BitstreamHasOverrides"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -7443,23 +8829,28 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case VideoEncodeRateControlModeFlagBitsKHR::eNone: return "None"; + case VideoEncodeRateControlModeFlagBitsKHR::eDefault: return "Default"; + case VideoEncodeRateControlModeFlagBitsKHR::eDisabled: return "Disabled"; case VideoEncodeRateControlModeFlagBitsKHR::eCbr: return "Cbr"; case VideoEncodeRateControlModeFlagBitsKHR::eVbr: return "Vbr"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagBitsKHR ) + VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagBitsKHR value ) { - return "(void)"; + switch ( value ) + { + case VideoEncodeFlagBitsKHR::eWithQuantizationDeltaMap: return "WithQuantizationDeltaMap"; + case VideoEncodeFlagBitsKHR::eWithEmphasisMap: return "WithEmphasisMap"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } } VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlFlagBitsKHR ) { return "(void)"; } -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ //=== VK_NV_device_diagnostics_config === @@ -7507,15 +8898,6 @@ namespace VULKAN_HPP_NAMESPACE } } - VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlagBits value ) - { - switch ( value ) - { - case PipelineLayoutCreateFlagBits::eIndependentSetsEXT: return "IndependentSetsEXT"; - default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; - } - } - //=== VK_NV_fragment_shading_rate_enums === VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateNV value ) @@ -7618,6 +9000,32 @@ namespace VULKAN_HPP_NAMESPACE } } + //=== VK_EXT_device_fault === + + VULKAN_HPP_INLINE std::string to_string( DeviceFaultAddressTypeEXT value ) + { + switch ( value ) + { + case DeviceFaultAddressTypeEXT::eNone: return "None"; + case DeviceFaultAddressTypeEXT::eReadInvalid: return 
"ReadInvalid"; + case DeviceFaultAddressTypeEXT::eWriteInvalid: return "WriteInvalid"; + case DeviceFaultAddressTypeEXT::eExecuteInvalid: return "ExecuteInvalid"; + case DeviceFaultAddressTypeEXT::eInstructionPointerUnknown: return "InstructionPointerUnknown"; + case DeviceFaultAddressTypeEXT::eInstructionPointerInvalid: return "InstructionPointerInvalid"; + case DeviceFaultAddressTypeEXT::eInstructionPointerFault: return "InstructionPointerFault"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( DeviceFaultVendorBinaryHeaderVersionEXT value ) + { + switch ( value ) + { + case DeviceFaultVendorBinaryHeaderVersionEXT::eOne: return "One"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + #if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) //=== VK_EXT_directfb_surface === @@ -7627,27 +9035,23 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ - //=== VK_KHR_ray_tracing_pipeline === + //=== VK_EXT_device_address_binding_report === - VULKAN_HPP_INLINE std::string to_string( RayTracingShaderGroupTypeKHR value ) + VULKAN_HPP_INLINE std::string to_string( DeviceAddressBindingFlagBitsEXT value ) { switch ( value ) { - case RayTracingShaderGroupTypeKHR::eGeneral: return "General"; - case RayTracingShaderGroupTypeKHR::eTrianglesHitGroup: return "TrianglesHitGroup"; - case RayTracingShaderGroupTypeKHR::eProceduralHitGroup: return "ProceduralHitGroup"; + case DeviceAddressBindingFlagBitsEXT::eInternalObject: return "InternalObject"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - VULKAN_HPP_INLINE std::string to_string( ShaderGroupShaderKHR value ) + VULKAN_HPP_INLINE std::string to_string( DeviceAddressBindingTypeEXT value ) { switch ( value ) { - case ShaderGroupShaderKHR::eGeneral: return "General"; - case ShaderGroupShaderKHR::eClosestHit: return "ClosestHit"; - case ShaderGroupShaderKHR::eAnyHit: return "AnyHit"; - case ShaderGroupShaderKHR::eIntersection: return "Intersection"; + case DeviceAddressBindingTypeEXT::eBind: return "Bind"; + case DeviceAddressBindingTypeEXT::eUnbind: return "Unbind"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -7674,6 +9078,17 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VK_USE_PLATFORM_FUCHSIA*/ + //=== VK_EXT_frame_boundary === + + VULKAN_HPP_INLINE std::string to_string( FrameBoundaryFlagBitsEXT value ) + { + switch ( value ) + { + case FrameBoundaryFlagBitsEXT::eFrameEnd: return "FrameEnd"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + #if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_screen_surface === @@ -7683,6 +9098,143 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + //=== VK_EXT_opacity_micromap === + + VULKAN_HPP_INLINE std::string to_string( MicromapTypeEXT value ) + { + switch ( value ) + { + case MicromapTypeEXT::eOpacityMicromap: return "OpacityMicromap"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case MicromapTypeEXT::eDisplacementMicromapNV: return "DisplacementMicromapNV"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( BuildMicromapFlagBitsEXT value ) + { + switch ( value ) + { + case 
BuildMicromapFlagBitsEXT::ePreferFastTrace: return "PreferFastTrace"; + case BuildMicromapFlagBitsEXT::ePreferFastBuild: return "PreferFastBuild"; + case BuildMicromapFlagBitsEXT::eAllowCompaction: return "AllowCompaction"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( CopyMicromapModeEXT value ) + { + switch ( value ) + { + case CopyMicromapModeEXT::eClone: return "Clone"; + case CopyMicromapModeEXT::eSerialize: return "Serialize"; + case CopyMicromapModeEXT::eDeserialize: return "Deserialize"; + case CopyMicromapModeEXT::eCompact: return "Compact"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( MicromapCreateFlagBitsEXT value ) + { + switch ( value ) + { + case MicromapCreateFlagBitsEXT::eDeviceAddressCaptureReplay: return "DeviceAddressCaptureReplay"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( BuildMicromapModeEXT value ) + { + switch ( value ) + { + case BuildMicromapModeEXT::eBuild: return "Build"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( OpacityMicromapFormatEXT value ) + { + switch ( value ) + { + case OpacityMicromapFormatEXT::e2State: return "2State"; + case OpacityMicromapFormatEXT::e4State: return "4State"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( OpacityMicromapSpecialIndexEXT value ) + { + switch ( value ) + { + case OpacityMicromapSpecialIndexEXT::eFullyTransparent: return "FullyTransparent"; + case OpacityMicromapSpecialIndexEXT::eFullyOpaque: return "FullyOpaque"; + case OpacityMicromapSpecialIndexEXT::eFullyUnknownTransparent: return "FullyUnknownTransparent"; + case OpacityMicromapSpecialIndexEXT::eFullyUnknownOpaque: return "FullyUnknownOpaque"; + case OpacityMicromapSpecialIndexEXT::eClusterGeometryDisableOpacityMicromapNV: return "ClusterGeometryDisableOpacityMicromapNV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_displacement_micromap === + + VULKAN_HPP_INLINE std::string to_string( DisplacementMicromapFormatNV value ) + { + switch ( value ) + { + case DisplacementMicromapFormatNV::e64Triangles64Bytes: return "64Triangles64Bytes"; + case DisplacementMicromapFormatNV::e256Triangles128Bytes: return "256Triangles128Bytes"; + case DisplacementMicromapFormatNV::e1024Triangles128Bytes: return "1024Triangles128Bytes"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_ARM_scheduling_controls === + + VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceSchedulingControlsFlagBitsARM value ) + { + switch ( value ) + { + case PhysicalDeviceSchedulingControlsFlagBitsARM::eShaderCoreCount: return "ShaderCoreCount"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_memory_decompression === + + VULKAN_HPP_INLINE std::string to_string( MemoryDecompressionMethodFlagBitsNV value ) + { + switch ( value ) + { + case 
MemoryDecompressionMethodFlagBitsNV::eGdeflate10: return "Gdeflate10"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_ray_tracing_linear_swept_spheres === + + VULKAN_HPP_INLINE std::string to_string( RayTracingLssIndexingModeNV value ) + { + switch ( value ) + { + case RayTracingLssIndexingModeNV::eList: return "List"; + case RayTracingLssIndexingModeNV::eSuccessive: return "Successive"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( RayTracingLssPrimitiveEndCapsModeNV value ) + { + switch ( value ) + { + case RayTracingLssPrimitiveEndCapsModeNV::eNone: return "None"; + case RayTracingLssPrimitiveEndCapsModeNV::eChained: return "Chained"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + //=== VK_EXT_subpass_merge_feedback === VULKAN_HPP_INLINE std::string to_string( SubpassMergeStatusEXT value ) @@ -7707,26 +9259,612 @@ namespace VULKAN_HPP_NAMESPACE } } - //=== VK_EXT_rasterization_order_attachment_access === + //=== VK_LUNARG_direct_driver_loading === - VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlagBits value ) + VULKAN_HPP_INLINE std::string to_string( DirectDriverLoadingModeLUNARG value ) { switch ( value ) { - case PipelineColorBlendStateCreateFlagBits::eRasterizationOrderAttachmentAccessEXT: return "RasterizationOrderAttachmentAccessEXT"; + case DirectDriverLoadingModeLUNARG::eExclusive: return "Exclusive"; + case DirectDriverLoadingModeLUNARG::eInclusive: return "Inclusive"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } - VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlagBits value ) + VULKAN_HPP_INLINE std::string to_string( DirectDriverLoadingFlagBitsLUNARG ) + { + return "(void)"; + } + + //=== VK_NV_optical_flow === + + VULKAN_HPP_INLINE std::string to_string( OpticalFlowUsageFlagBitsNV value ) { switch ( value ) { - case PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentDepthAccessEXT: return "RasterizationOrderAttachmentDepthAccessEXT"; - case PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentStencilAccessEXT: return "RasterizationOrderAttachmentStencilAccessEXT"; + case OpticalFlowUsageFlagBitsNV::eUnknown: return "Unknown"; + case OpticalFlowUsageFlagBitsNV::eInput: return "Input"; + case OpticalFlowUsageFlagBitsNV::eOutput: return "Output"; + case OpticalFlowUsageFlagBitsNV::eHint: return "Hint"; + case OpticalFlowUsageFlagBitsNV::eCost: return "Cost"; + case OpticalFlowUsageFlagBitsNV::eGlobalFlow: return "GlobalFlow"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } -} // namespace VULKAN_HPP_NAMESPACE + VULKAN_HPP_INLINE std::string to_string( OpticalFlowGridSizeFlagBitsNV value ) + { + switch ( value ) + { + case OpticalFlowGridSizeFlagBitsNV::eUnknown: return "Unknown"; + case OpticalFlowGridSizeFlagBitsNV::e1X1: return "1X1"; + case OpticalFlowGridSizeFlagBitsNV::e2X2: return "2X2"; + case OpticalFlowGridSizeFlagBitsNV::e4X4: return "4X4"; + case OpticalFlowGridSizeFlagBitsNV::e8X8: return "8X8"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( OpticalFlowPerformanceLevelNV value ) + { + switch ( value ) + { + case 
OpticalFlowPerformanceLevelNV::eUnknown: return "Unknown"; + case OpticalFlowPerformanceLevelNV::eSlow: return "Slow"; + case OpticalFlowPerformanceLevelNV::eMedium: return "Medium"; + case OpticalFlowPerformanceLevelNV::eFast: return "Fast"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( OpticalFlowSessionBindingPointNV value ) + { + switch ( value ) + { + case OpticalFlowSessionBindingPointNV::eUnknown: return "Unknown"; + case OpticalFlowSessionBindingPointNV::eInput: return "Input"; + case OpticalFlowSessionBindingPointNV::eReference: return "Reference"; + case OpticalFlowSessionBindingPointNV::eHint: return "Hint"; + case OpticalFlowSessionBindingPointNV::eFlowVector: return "FlowVector"; + case OpticalFlowSessionBindingPointNV::eBackwardFlowVector: return "BackwardFlowVector"; + case OpticalFlowSessionBindingPointNV::eCost: return "Cost"; + case OpticalFlowSessionBindingPointNV::eBackwardCost: return "BackwardCost"; + case OpticalFlowSessionBindingPointNV::eGlobalFlow: return "GlobalFlow"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( OpticalFlowSessionCreateFlagBitsNV value ) + { + switch ( value ) + { + case OpticalFlowSessionCreateFlagBitsNV::eEnableHint: return "EnableHint"; + case OpticalFlowSessionCreateFlagBitsNV::eEnableCost: return "EnableCost"; + case OpticalFlowSessionCreateFlagBitsNV::eEnableGlobalFlow: return "EnableGlobalFlow"; + case OpticalFlowSessionCreateFlagBitsNV::eAllowRegions: return "AllowRegions"; + case OpticalFlowSessionCreateFlagBitsNV::eBothDirections: return "BothDirections"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( OpticalFlowExecuteFlagBitsNV value ) + { + switch ( value ) + { + case OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints: return "DisableTemporalHints"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_AMD_anti_lag === + + VULKAN_HPP_INLINE std::string to_string( AntiLagModeAMD value ) + { + switch ( value ) + { + case AntiLagModeAMD::eDriverControl: return "DriverControl"; + case AntiLagModeAMD::eOn: return "On"; + case AntiLagModeAMD::eOff: return "Off"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( AntiLagStageAMD value ) + { + switch ( value ) + { + case AntiLagStageAMD::eInput: return "Input"; + case AntiLagStageAMD::ePresent: return "Present"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_shader_object === + + VULKAN_HPP_INLINE std::string to_string( ShaderCreateFlagBitsEXT value ) + { + switch ( value ) + { + case ShaderCreateFlagBitsEXT::eLinkStage: return "LinkStage"; + case ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize: return "AllowVaryingSubgroupSize"; + case ShaderCreateFlagBitsEXT::eRequireFullSubgroups: return "RequireFullSubgroups"; + case ShaderCreateFlagBitsEXT::eNoTaskShader: return "NoTaskShader"; + case ShaderCreateFlagBitsEXT::eDispatchBase: return "DispatchBase"; + case ShaderCreateFlagBitsEXT::eFragmentShadingRateAttachment: return "FragmentShadingRateAttachment"; + case ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment: return 
"FragmentDensityMapAttachment"; + case ShaderCreateFlagBitsEXT::eIndirectBindable: return "IndirectBindable"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( ShaderCodeTypeEXT value ) + { + switch ( value ) + { + case ShaderCodeTypeEXT::eBinary: return "Binary"; + case ShaderCodeTypeEXT::eSpirv: return "Spirv"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_ray_tracing_invocation_reorder === + + VULKAN_HPP_INLINE std::string to_string( RayTracingInvocationReorderModeNV value ) + { + switch ( value ) + { + case RayTracingInvocationReorderModeNV::eNone: return "None"; + case RayTracingInvocationReorderModeNV::eReorder: return "Reorder"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_cooperative_vector === + + VULKAN_HPP_INLINE std::string to_string( CooperativeVectorMatrixLayoutNV value ) + { + switch ( value ) + { + case CooperativeVectorMatrixLayoutNV::eRowMajor: return "RowMajor"; + case CooperativeVectorMatrixLayoutNV::eColumnMajor: return "ColumnMajor"; + case CooperativeVectorMatrixLayoutNV::eInferencingOptimal: return "InferencingOptimal"; + case CooperativeVectorMatrixLayoutNV::eTrainingOptimal: return "TrainingOptimal"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( ComponentTypeKHR value ) + { + switch ( value ) + { + case ComponentTypeKHR::eFloat16: return "Float16"; + case ComponentTypeKHR::eFloat32: return "Float32"; + case ComponentTypeKHR::eFloat64: return "Float64"; + case ComponentTypeKHR::eSint8: return "Sint8"; + case ComponentTypeKHR::eSint16: return "Sint16"; + case ComponentTypeKHR::eSint32: return "Sint32"; + case ComponentTypeKHR::eSint64: return "Sint64"; + case ComponentTypeKHR::eUint8: return "Uint8"; + case ComponentTypeKHR::eUint16: return "Uint16"; + case ComponentTypeKHR::eUint32: return "Uint32"; + case ComponentTypeKHR::eUint64: return "Uint64"; + case ComponentTypeKHR::eSint8PackedNV: return "Sint8PackedNV"; + case ComponentTypeKHR::eUint8PackedNV: return "Uint8PackedNV"; + case ComponentTypeKHR::eFloatE4M3NV: return "FloatE4M3NV"; + case ComponentTypeKHR::eFloatE5M2NV: return "FloatE5M2NV"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_layer_settings === + + VULKAN_HPP_INLINE std::string to_string( LayerSettingTypeEXT value ) + { + switch ( value ) + { + case LayerSettingTypeEXT::eBool32: return "Bool32"; + case LayerSettingTypeEXT::eInt32: return "Int32"; + case LayerSettingTypeEXT::eInt64: return "Int64"; + case LayerSettingTypeEXT::eUint32: return "Uint32"; + case LayerSettingTypeEXT::eUint64: return "Uint64"; + case LayerSettingTypeEXT::eFloat32: return "Float32"; + case LayerSettingTypeEXT::eFloat64: return "Float64"; + case LayerSettingTypeEXT::eString: return "String"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_low_latency2 === + + VULKAN_HPP_INLINE std::string to_string( LatencyMarkerNV value ) + { + switch ( value ) + { + case LatencyMarkerNV::eSimulationStart: return "SimulationStart"; + case LatencyMarkerNV::eSimulationEnd: return "SimulationEnd"; + case LatencyMarkerNV::eRendersubmitStart: return "RendersubmitStart"; + case 
LatencyMarkerNV::eRendersubmitEnd: return "RendersubmitEnd"; + case LatencyMarkerNV::ePresentStart: return "PresentStart"; + case LatencyMarkerNV::ePresentEnd: return "PresentEnd"; + case LatencyMarkerNV::eInputSample: return "InputSample"; + case LatencyMarkerNV::eTriggerFlash: return "TriggerFlash"; + case LatencyMarkerNV::eOutOfBandRendersubmitStart: return "OutOfBandRendersubmitStart"; + case LatencyMarkerNV::eOutOfBandRendersubmitEnd: return "OutOfBandRendersubmitEnd"; + case LatencyMarkerNV::eOutOfBandPresentStart: return "OutOfBandPresentStart"; + case LatencyMarkerNV::eOutOfBandPresentEnd: return "OutOfBandPresentEnd"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( OutOfBandQueueTypeNV value ) + { + switch ( value ) + { + case OutOfBandQueueTypeNV::eRender: return "Render"; + case OutOfBandQueueTypeNV::ePresent: return "Present"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_cooperative_matrix === + + VULKAN_HPP_INLINE std::string to_string( ScopeKHR value ) + { + switch ( value ) + { + case ScopeKHR::eDevice: return "Device"; + case ScopeKHR::eWorkgroup: return "Workgroup"; + case ScopeKHR::eSubgroup: return "Subgroup"; + case ScopeKHR::eQueueFamily: return "QueueFamily"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_video_encode_av1 === + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1PredictionModeKHR value ) + { + switch ( value ) + { + case VideoEncodeAV1PredictionModeKHR::eIntraOnly: return "IntraOnly"; + case VideoEncodeAV1PredictionModeKHR::eSingleReference: return "SingleReference"; + case VideoEncodeAV1PredictionModeKHR::eUnidirectionalCompound: return "UnidirectionalCompound"; + case VideoEncodeAV1PredictionModeKHR::eBidirectionalCompound: return "BidirectionalCompound"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1RateControlGroupKHR value ) + { + switch ( value ) + { + case VideoEncodeAV1RateControlGroupKHR::eIntra: return "Intra"; + case VideoEncodeAV1RateControlGroupKHR::ePredictive: return "Predictive"; + case VideoEncodeAV1RateControlGroupKHR::eBipredictive: return "Bipredictive"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1CapabilityFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeAV1CapabilityFlagBitsKHR::ePerRateControlGroupMinMaxQIndex: return "PerRateControlGroupMinMaxQIndex"; + case VideoEncodeAV1CapabilityFlagBitsKHR::eGenerateObuExtensionHeader: return "GenerateObuExtensionHeader"; + case VideoEncodeAV1CapabilityFlagBitsKHR::ePrimaryReferenceCdfOnly: return "PrimaryReferenceCdfOnly"; + case VideoEncodeAV1CapabilityFlagBitsKHR::eFrameSizeOverride: return "FrameSizeOverride"; + case VideoEncodeAV1CapabilityFlagBitsKHR::eMotionVectorScaling: return "MotionVectorScaling"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1StdFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeAV1StdFlagBitsKHR::eUniformTileSpacingFlagSet: return "UniformTileSpacingFlagSet"; + case VideoEncodeAV1StdFlagBitsKHR::eSkipModePresentUnset: 
return "SkipModePresentUnset"; + case VideoEncodeAV1StdFlagBitsKHR::ePrimaryRefFrame: return "PrimaryRefFrame"; + case VideoEncodeAV1StdFlagBitsKHR::eDeltaQ: return "DeltaQ"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1SuperblockSizeFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeAV1SuperblockSizeFlagBitsKHR::e64: return "64"; + case VideoEncodeAV1SuperblockSizeFlagBitsKHR::e128: return "128"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1RateControlFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeAV1RateControlFlagBitsKHR::eRegularGop: return "RegularGop"; + case VideoEncodeAV1RateControlFlagBitsKHR::eTemporalLayerPatternDyadic: return "TemporalLayerPatternDyadic"; + case VideoEncodeAV1RateControlFlagBitsKHR::eReferencePatternFlat: return "ReferencePatternFlat"; + case VideoEncodeAV1RateControlFlagBitsKHR::eReferencePatternDyadic: return "ReferencePatternDyadic"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_QCOM_image_processing2 === + + VULKAN_HPP_INLINE std::string to_string( BlockMatchWindowCompareModeQCOM value ) + { + switch ( value ) + { + case BlockMatchWindowCompareModeQCOM::eMin: return "Min"; + case BlockMatchWindowCompareModeQCOM::eMax: return "Max"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_QCOM_filter_cubic_weights === + + VULKAN_HPP_INLINE std::string to_string( CubicFilterWeightsQCOM value ) + { + switch ( value ) + { + case CubicFilterWeightsQCOM::eCatmullRom: return "CatmullRom"; + case CubicFilterWeightsQCOM::eZeroTangentCardinal: return "ZeroTangentCardinal"; + case CubicFilterWeightsQCOM::eBSpline: return "BSpline"; + case CubicFilterWeightsQCOM::eMitchellNetravali: return "MitchellNetravali"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_MSFT_layered_driver === + + VULKAN_HPP_INLINE std::string to_string( LayeredDriverUnderlyingApiMSFT value ) + { + switch ( value ) + { + case LayeredDriverUnderlyingApiMSFT::eNone: return "None"; + case LayeredDriverUnderlyingApiMSFT::eD3D12: return "D3D12"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_calibrated_timestamps === + + VULKAN_HPP_INLINE std::string to_string( TimeDomainKHR value ) + { + switch ( value ) + { + case TimeDomainKHR::eDevice: return "Device"; + case TimeDomainKHR::eClockMonotonic: return "ClockMonotonic"; + case TimeDomainKHR::eClockMonotonicRaw: return "ClockMonotonicRaw"; + case TimeDomainKHR::eQueryPerformanceCounter: return "QueryPerformanceCounter"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_display_stereo === + + VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceStereoTypeNV value ) + { + switch ( value ) + { + case DisplaySurfaceStereoTypeNV::eNone: return "None"; + case DisplaySurfaceStereoTypeNV::eOnboardDin: return "OnboardDin"; + case DisplaySurfaceStereoTypeNV::eHdmi3D: return "Hdmi3D"; + case DisplaySurfaceStereoTypeNV::eInbandDisplayport: return "InbandDisplayport"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( 
static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_maintenance7 === + + VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceLayeredApiKHR value ) + { + switch ( value ) + { + case PhysicalDeviceLayeredApiKHR::eVulkan: return "Vulkan"; + case PhysicalDeviceLayeredApiKHR::eD3D12: return "D3D12"; + case PhysicalDeviceLayeredApiKHR::eMetal: return "Metal"; + case PhysicalDeviceLayeredApiKHR::eOpengl: return "Opengl"; + case PhysicalDeviceLayeredApiKHR::eOpengles: return "Opengles"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_cluster_acceleration_structure === + + VULKAN_HPP_INLINE std::string to_string( ClusterAccelerationStructureClusterFlagBitsNV value ) + { + switch ( value ) + { + case ClusterAccelerationStructureClusterFlagBitsNV::eAllowDisableOpacityMicromaps: return "AllowDisableOpacityMicromaps"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( ClusterAccelerationStructureGeometryFlagBitsNV value ) + { + switch ( value ) + { + case ClusterAccelerationStructureGeometryFlagBitsNV::eCullDisable: return "CullDisable"; + case ClusterAccelerationStructureGeometryFlagBitsNV::eNoDuplicateAnyhitInvocation: return "NoDuplicateAnyhitInvocation"; + case ClusterAccelerationStructureGeometryFlagBitsNV::eOpaque: return "Opaque"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( ClusterAccelerationStructureAddressResolutionFlagBitsNV value ) + { + switch ( value ) + { + case ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedDstImplicitData: return "IndirectedDstImplicitData"; + case ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedScratchData: return "IndirectedScratchData"; + case ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedDstAddressArray: return "IndirectedDstAddressArray"; + case ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedDstSizesArray: return "IndirectedDstSizesArray"; + case ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedSrcInfosArray: return "IndirectedSrcInfosArray"; + case ClusterAccelerationStructureAddressResolutionFlagBitsNV::eIndirectedSrcInfosCount: return "IndirectedSrcInfosCount"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( ClusterAccelerationStructureIndexFormatFlagBitsNV value ) + { + switch ( value ) + { + case ClusterAccelerationStructureIndexFormatFlagBitsNV::e8: return "8"; + case ClusterAccelerationStructureIndexFormatFlagBitsNV::e16: return "16"; + case ClusterAccelerationStructureIndexFormatFlagBitsNV::e32: return "32"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( ClusterAccelerationStructureTypeNV value ) + { + switch ( value ) + { + case ClusterAccelerationStructureTypeNV::eClustersBottomLevel: return "ClustersBottomLevel"; + case ClusterAccelerationStructureTypeNV::eTriangleCluster: return "TriangleCluster"; + case ClusterAccelerationStructureTypeNV::eTriangleClusterTemplate: return "TriangleClusterTemplate"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( 
ClusterAccelerationStructureOpTypeNV value ) + { + switch ( value ) + { + case ClusterAccelerationStructureOpTypeNV::eMoveObjects: return "MoveObjects"; + case ClusterAccelerationStructureOpTypeNV::eBuildClustersBottomLevel: return "BuildClustersBottomLevel"; + case ClusterAccelerationStructureOpTypeNV::eBuildTriangleCluster: return "BuildTriangleCluster"; + case ClusterAccelerationStructureOpTypeNV::eBuildTriangleClusterTemplate: return "BuildTriangleClusterTemplate"; + case ClusterAccelerationStructureOpTypeNV::eInstantiateTriangleCluster: return "InstantiateTriangleCluster"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( ClusterAccelerationStructureOpModeNV value ) + { + switch ( value ) + { + case ClusterAccelerationStructureOpModeNV::eImplicitDestinations: return "ImplicitDestinations"; + case ClusterAccelerationStructureOpModeNV::eExplicitDestinations: return "ExplicitDestinations"; + case ClusterAccelerationStructureOpModeNV::eComputeSizes: return "ComputeSizes"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_NV_partitioned_acceleration_structure === + + VULKAN_HPP_INLINE std::string to_string( PartitionedAccelerationStructureOpTypeNV value ) + { + switch ( value ) + { + case PartitionedAccelerationStructureOpTypeNV::eWriteInstance: return "WriteInstance"; + case PartitionedAccelerationStructureOpTypeNV::eUpdateInstance: return "UpdateInstance"; + case PartitionedAccelerationStructureOpTypeNV::eWritePartitionTranslation: return "WritePartitionTranslation"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PartitionedAccelerationStructureInstanceFlagBitsNV value ) + { + switch ( value ) + { + case PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagTriangleFacingCullDisable: return "FlagTriangleFacingCullDisable"; + case PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagTriangleFlipFacing: return "FlagTriangleFlipFacing"; + case PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagForceOpaque: return "FlagForceOpaque"; + case PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagForceNoOpaque: return "FlagForceNoOpaque"; + case PartitionedAccelerationStructureInstanceFlagBitsNV::eFlagEnableExplicitBoundingBox: return "FlagEnableExplicitBoundingBox"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_device_generated_commands === + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeEXT value ) + { + switch ( value ) + { + case IndirectCommandsTokenTypeEXT::eExecutionSet: return "ExecutionSet"; + case IndirectCommandsTokenTypeEXT::ePushConstant: return "PushConstant"; + case IndirectCommandsTokenTypeEXT::eSequenceIndex: return "SequenceIndex"; + case IndirectCommandsTokenTypeEXT::eIndexBuffer: return "IndexBuffer"; + case IndirectCommandsTokenTypeEXT::eVertexBuffer: return "VertexBuffer"; + case IndirectCommandsTokenTypeEXT::eDrawIndexed: return "DrawIndexed"; + case IndirectCommandsTokenTypeEXT::eDraw: return "Draw"; + case IndirectCommandsTokenTypeEXT::eDrawIndexedCount: return "DrawIndexedCount"; + case IndirectCommandsTokenTypeEXT::eDrawCount: return "DrawCount"; + case IndirectCommandsTokenTypeEXT::eDispatch: return "Dispatch"; + case IndirectCommandsTokenTypeEXT::eDrawMeshTasksNV: return 
"DrawMeshTasksNV"; + case IndirectCommandsTokenTypeEXT::eDrawMeshTasksCountNV: return "DrawMeshTasksCountNV"; + case IndirectCommandsTokenTypeEXT::eDrawMeshTasks: return "DrawMeshTasks"; + case IndirectCommandsTokenTypeEXT::eDrawMeshTasksCount: return "DrawMeshTasksCount"; + case IndirectCommandsTokenTypeEXT::eTraceRays2: return "TraceRays2"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( IndirectExecutionSetInfoTypeEXT value ) + { + switch ( value ) + { + case IndirectExecutionSetInfoTypeEXT::ePipelines: return "Pipelines"; + case IndirectExecutionSetInfoTypeEXT::eShaderObjects: return "ShaderObjects"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsEXT value ) + { + switch ( value ) + { + case IndirectCommandsLayoutUsageFlagBitsEXT::eExplicitPreprocess: return "ExplicitPreprocess"; + case IndirectCommandsLayoutUsageFlagBitsEXT::eUnorderedSequences: return "UnorderedSequences"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsInputModeFlagBitsEXT value ) + { + switch ( value ) + { + case IndirectCommandsInputModeFlagBitsEXT::eVulkanIndexBuffer: return "VulkanIndexBuffer"; + case IndirectCommandsInputModeFlagBitsEXT::eDxgiIndexBuffer: return "DxgiIndexBuffer"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_KHR_maintenance8 === + + VULKAN_HPP_INLINE std::string to_string( AccessFlagBits3KHR value ) + { + switch ( value ) + { + case AccessFlagBits3KHR::eNone: return "None"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_depth_clamp_control === + + VULKAN_HPP_INLINE std::string to_string( DepthClampModeEXT value ) + { + switch ( value ) + { + case DepthClampModeEXT::eViewportRange: return "ViewportRange"; + case DepthClampModeEXT::eUserDefinedRange: return "UserDefinedRange"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + +} // namespace VULKAN_HPP_NAMESPACE + +#if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +#elif defined( _MSC_VER ) +# pragma warning( pop ) +#endif + #endif diff --git a/src/libraries/vulkanheaders/vulkan_vi.h b/src/libraries/vulkanheaders/vulkan_vi.h index 0355e7a16..a30bfb1b4 100644 --- a/src/libraries/vulkanheaders/vulkan_vi.h +++ b/src/libraries/vulkanheaders/vulkan_vi.h @@ -2,7 +2,7 @@ #define VULKAN_VI_H_ 1 /* -** Copyright 2015-2022 The Khronos Group Inc. +** Copyright 2015-2025 The Khronos Group Inc. ** ** SPDX-License-Identifier: Apache-2.0 */ @@ -19,6 +19,7 @@ extern "C" { +// VK_NN_vi_surface is a preprocessor guard. Do not pass it to API calls. #define VK_NN_vi_surface 1 #define VK_NN_VI_SURFACE_SPEC_VERSION 1 #define VK_NN_VI_SURFACE_EXTENSION_NAME "VK_NN_vi_surface" diff --git a/src/libraries/vulkanheaders/vulkan_video.hpp b/src/libraries/vulkanheaders/vulkan_video.hpp new file mode 100644 index 000000000..d9f87d5e4 --- /dev/null +++ b/src/libraries/vulkanheaders/vulkan_video.hpp @@ -0,0 +1,4057 @@ +// Copyright 2021-2025 The Khronos Group Inc. +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
+ +#ifndef VULKAN_VIDEO_HPP +#define VULKAN_VIDEO_HPP + +// clang-format off +#include +// clang-format on + +#include +#include +#if ( 301 < VK_HEADER_VERSION ) +# include +#endif +#include +#include +#include +#include +#include +#include +#include + +#if !defined( VULKAN_HPP_VIDEO_NAMESPACE ) +# define VULKAN_HPP_VIDEO_NAMESPACE video +#endif + +namespace VULKAN_HPP_NAMESPACE +{ + namespace VULKAN_HPP_VIDEO_NAMESPACE + { + + //============= + //=== ENUMs === + //============= + + //=== vulkan_video_codec_h264std === + + enum class H264ChromaFormatIdc + { + eMonochrome = STD_VIDEO_H264_CHROMA_FORMAT_IDC_MONOCHROME, + e420 = STD_VIDEO_H264_CHROMA_FORMAT_IDC_420, + e422 = STD_VIDEO_H264_CHROMA_FORMAT_IDC_422, + e444 = STD_VIDEO_H264_CHROMA_FORMAT_IDC_444, + eInvalid = STD_VIDEO_H264_CHROMA_FORMAT_IDC_INVALID + }; + + enum class H264ProfileIdc + { + eBaseline = STD_VIDEO_H264_PROFILE_IDC_BASELINE, + eMain = STD_VIDEO_H264_PROFILE_IDC_MAIN, + eHigh = STD_VIDEO_H264_PROFILE_IDC_HIGH, + eHigh444Predictive = STD_VIDEO_H264_PROFILE_IDC_HIGH_444_PREDICTIVE, + eInvalid = STD_VIDEO_H264_PROFILE_IDC_INVALID + }; + + enum class H264LevelIdc + { + e1_0 = STD_VIDEO_H264_LEVEL_IDC_1_0, + e1_1 = STD_VIDEO_H264_LEVEL_IDC_1_1, + e1_2 = STD_VIDEO_H264_LEVEL_IDC_1_2, + e1_3 = STD_VIDEO_H264_LEVEL_IDC_1_3, + e2_0 = STD_VIDEO_H264_LEVEL_IDC_2_0, + e2_1 = STD_VIDEO_H264_LEVEL_IDC_2_1, + e2_2 = STD_VIDEO_H264_LEVEL_IDC_2_2, + e3_0 = STD_VIDEO_H264_LEVEL_IDC_3_0, + e3_1 = STD_VIDEO_H264_LEVEL_IDC_3_1, + e3_2 = STD_VIDEO_H264_LEVEL_IDC_3_2, + e4_0 = STD_VIDEO_H264_LEVEL_IDC_4_0, + e4_1 = STD_VIDEO_H264_LEVEL_IDC_4_1, + e4_2 = STD_VIDEO_H264_LEVEL_IDC_4_2, + e5_0 = STD_VIDEO_H264_LEVEL_IDC_5_0, + e5_1 = STD_VIDEO_H264_LEVEL_IDC_5_1, + e5_2 = STD_VIDEO_H264_LEVEL_IDC_5_2, + e6_0 = STD_VIDEO_H264_LEVEL_IDC_6_0, + e6_1 = STD_VIDEO_H264_LEVEL_IDC_6_1, + e6_2 = STD_VIDEO_H264_LEVEL_IDC_6_2, + eInvalid = STD_VIDEO_H264_LEVEL_IDC_INVALID + }; + + enum class H264PocType + { + e0 = STD_VIDEO_H264_POC_TYPE_0, + e1 = STD_VIDEO_H264_POC_TYPE_1, + e2 = STD_VIDEO_H264_POC_TYPE_2, + eInvalid = STD_VIDEO_H264_POC_TYPE_INVALID + }; + + enum class H264AspectRatioIdc + { + eUnspecified = STD_VIDEO_H264_ASPECT_RATIO_IDC_UNSPECIFIED, + eSquare = STD_VIDEO_H264_ASPECT_RATIO_IDC_SQUARE, + e12_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_12_11, + e10_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_10_11, + e16_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_16_11, + e40_33 = STD_VIDEO_H264_ASPECT_RATIO_IDC_40_33, + e24_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_24_11, + e20_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_20_11, + e32_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_32_11, + e80_33 = STD_VIDEO_H264_ASPECT_RATIO_IDC_80_33, + e18_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_18_11, + e15_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_15_11, + e64_33 = STD_VIDEO_H264_ASPECT_RATIO_IDC_64_33, + e160_99 = STD_VIDEO_H264_ASPECT_RATIO_IDC_160_99, + e4_3 = STD_VIDEO_H264_ASPECT_RATIO_IDC_4_3, + e3_2 = STD_VIDEO_H264_ASPECT_RATIO_IDC_3_2, + e2_1 = STD_VIDEO_H264_ASPECT_RATIO_IDC_2_1, + eExtendedSar = STD_VIDEO_H264_ASPECT_RATIO_IDC_EXTENDED_SAR, + eInvalid = STD_VIDEO_H264_ASPECT_RATIO_IDC_INVALID + }; + + enum class H264WeightedBipredIdc + { + eDefault = STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_DEFAULT, + eExplicit = STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_EXPLICIT, + eImplicit = STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_IMPLICIT, + eInvalid = STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_INVALID + }; + + enum class H264ModificationOfPicNumsIdc + { + eShortTermSubtract = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_SUBTRACT, + 
eShortTermAdd = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_ADD, + eLongTerm = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_LONG_TERM, + eEnd = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_END, + eInvalid = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_INVALID + }; + + enum class H264MemMgmtControlOp + { + eEnd = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_END, + eUnmarkShortTerm = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_SHORT_TERM, + eUnmarkLongTerm = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_LONG_TERM, + eMarkLongTerm = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_LONG_TERM, + eSetMaxLongTermIndex = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_SET_MAX_LONG_TERM_INDEX, + eUnmarkAll = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_ALL, + eMarkCurrentAsLongTerm = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_CURRENT_AS_LONG_TERM, + eInvalid = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_INVALID + }; + + enum class H264CabacInitIdc + { + e0 = STD_VIDEO_H264_CABAC_INIT_IDC_0, + e1 = STD_VIDEO_H264_CABAC_INIT_IDC_1, + e2 = STD_VIDEO_H264_CABAC_INIT_IDC_2, + eInvalid = STD_VIDEO_H264_CABAC_INIT_IDC_INVALID + }; + + enum class H264DisableDeblockingFilterIdc + { + eDisabled = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_DISABLED, + eEnabled = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_ENABLED, + ePartial = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_PARTIAL, + eInvalid = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_INVALID + }; + + enum class H264SliceType + { + eP = STD_VIDEO_H264_SLICE_TYPE_P, + eB = STD_VIDEO_H264_SLICE_TYPE_B, + eI = STD_VIDEO_H264_SLICE_TYPE_I, + eInvalid = STD_VIDEO_H264_SLICE_TYPE_INVALID + }; + + enum class H264PictureType + { + eP = STD_VIDEO_H264_PICTURE_TYPE_P, + eB = STD_VIDEO_H264_PICTURE_TYPE_B, + eI = STD_VIDEO_H264_PICTURE_TYPE_I, + eIdr = STD_VIDEO_H264_PICTURE_TYPE_IDR, + eInvalid = STD_VIDEO_H264_PICTURE_TYPE_INVALID + }; + + enum class H264NonVclNaluType + { + eSps = STD_VIDEO_H264_NON_VCL_NALU_TYPE_SPS, + ePps = STD_VIDEO_H264_NON_VCL_NALU_TYPE_PPS, + eAud = STD_VIDEO_H264_NON_VCL_NALU_TYPE_AUD, + ePrefix = STD_VIDEO_H264_NON_VCL_NALU_TYPE_PREFIX, + eEndOfSequence = STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_SEQUENCE, + eEndOfStream = STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_STREAM, + ePrecoded = STD_VIDEO_H264_NON_VCL_NALU_TYPE_PRECODED, + eInvalid = STD_VIDEO_H264_NON_VCL_NALU_TYPE_INVALID + }; + + //=== vulkan_video_codec_h264std_decode === + + enum class DecodeH264FieldOrderCount + { + eTop = STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_TOP, + eBottom = STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_BOTTOM, + eInvalid = STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_INVALID + }; + + //=== vulkan_video_codec_h265std === + + enum class H265ChromaFormatIdc + { + eMonochrome = STD_VIDEO_H265_CHROMA_FORMAT_IDC_MONOCHROME, + e420 = STD_VIDEO_H265_CHROMA_FORMAT_IDC_420, + e422 = STD_VIDEO_H265_CHROMA_FORMAT_IDC_422, + e444 = STD_VIDEO_H265_CHROMA_FORMAT_IDC_444, + eInvalid = STD_VIDEO_H265_CHROMA_FORMAT_IDC_INVALID + }; + + enum class H265ProfileIdc + { + eMain = STD_VIDEO_H265_PROFILE_IDC_MAIN, + eMain10 = STD_VIDEO_H265_PROFILE_IDC_MAIN_10, + eMainStillPicture = STD_VIDEO_H265_PROFILE_IDC_MAIN_STILL_PICTURE, + eFormatRangeExtensions = STD_VIDEO_H265_PROFILE_IDC_FORMAT_RANGE_EXTENSIONS, + eSccExtensions = STD_VIDEO_H265_PROFILE_IDC_SCC_EXTENSIONS, + eInvalid = STD_VIDEO_H265_PROFILE_IDC_INVALID + }; + + enum class H265LevelIdc + { + e1_0 = STD_VIDEO_H265_LEVEL_IDC_1_0, + e2_0 = STD_VIDEO_H265_LEVEL_IDC_2_0, + e2_1 = STD_VIDEO_H265_LEVEL_IDC_2_1, + e3_0 = STD_VIDEO_H265_LEVEL_IDC_3_0, + e3_1 = 
STD_VIDEO_H265_LEVEL_IDC_3_1, + e4_0 = STD_VIDEO_H265_LEVEL_IDC_4_0, + e4_1 = STD_VIDEO_H265_LEVEL_IDC_4_1, + e5_0 = STD_VIDEO_H265_LEVEL_IDC_5_0, + e5_1 = STD_VIDEO_H265_LEVEL_IDC_5_1, + e5_2 = STD_VIDEO_H265_LEVEL_IDC_5_2, + e6_0 = STD_VIDEO_H265_LEVEL_IDC_6_0, + e6_1 = STD_VIDEO_H265_LEVEL_IDC_6_1, + e6_2 = STD_VIDEO_H265_LEVEL_IDC_6_2, + eInvalid = STD_VIDEO_H265_LEVEL_IDC_INVALID + }; + + enum class H265SliceType + { + eB = STD_VIDEO_H265_SLICE_TYPE_B, + eP = STD_VIDEO_H265_SLICE_TYPE_P, + eI = STD_VIDEO_H265_SLICE_TYPE_I, + eInvalid = STD_VIDEO_H265_SLICE_TYPE_INVALID + }; + + enum class H265PictureType + { + eP = STD_VIDEO_H265_PICTURE_TYPE_P, + eB = STD_VIDEO_H265_PICTURE_TYPE_B, + eI = STD_VIDEO_H265_PICTURE_TYPE_I, + eIdr = STD_VIDEO_H265_PICTURE_TYPE_IDR, + eInvalid = STD_VIDEO_H265_PICTURE_TYPE_INVALID + }; + + enum class H265AspectRatioIdc + { + eUnspecified = STD_VIDEO_H265_ASPECT_RATIO_IDC_UNSPECIFIED, + eSquare = STD_VIDEO_H265_ASPECT_RATIO_IDC_SQUARE, + e12_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_12_11, + e10_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_10_11, + e16_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_16_11, + e40_33 = STD_VIDEO_H265_ASPECT_RATIO_IDC_40_33, + e24_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_24_11, + e20_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_20_11, + e32_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_32_11, + e80_33 = STD_VIDEO_H265_ASPECT_RATIO_IDC_80_33, + e18_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_18_11, + e15_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_15_11, + e64_33 = STD_VIDEO_H265_ASPECT_RATIO_IDC_64_33, + e160_99 = STD_VIDEO_H265_ASPECT_RATIO_IDC_160_99, + e4_3 = STD_VIDEO_H265_ASPECT_RATIO_IDC_4_3, + e3_2 = STD_VIDEO_H265_ASPECT_RATIO_IDC_3_2, + e2_1 = STD_VIDEO_H265_ASPECT_RATIO_IDC_2_1, + eExtendedSar = STD_VIDEO_H265_ASPECT_RATIO_IDC_EXTENDED_SAR, + eInvalid = STD_VIDEO_H265_ASPECT_RATIO_IDC_INVALID + }; + + //=== vulkan_video_codec_av1std === + + enum class AV1Profile + { + eMain = STD_VIDEO_AV1_PROFILE_MAIN, + eHigh = STD_VIDEO_AV1_PROFILE_HIGH, + eProfessional = STD_VIDEO_AV1_PROFILE_PROFESSIONAL, + eInvalid = STD_VIDEO_AV1_PROFILE_INVALID + }; + + enum class AV1Level + { + e2_0 = STD_VIDEO_AV1_LEVEL_2_0, + e2_1 = STD_VIDEO_AV1_LEVEL_2_1, + e2_2 = STD_VIDEO_AV1_LEVEL_2_2, + e2_3 = STD_VIDEO_AV1_LEVEL_2_3, + e3_0 = STD_VIDEO_AV1_LEVEL_3_0, + e3_1 = STD_VIDEO_AV1_LEVEL_3_1, + e3_2 = STD_VIDEO_AV1_LEVEL_3_2, + e3_3 = STD_VIDEO_AV1_LEVEL_3_3, + e4_0 = STD_VIDEO_AV1_LEVEL_4_0, + e4_1 = STD_VIDEO_AV1_LEVEL_4_1, + e4_2 = STD_VIDEO_AV1_LEVEL_4_2, + e4_3 = STD_VIDEO_AV1_LEVEL_4_3, + e5_0 = STD_VIDEO_AV1_LEVEL_5_0, + e5_1 = STD_VIDEO_AV1_LEVEL_5_1, + e5_2 = STD_VIDEO_AV1_LEVEL_5_2, + e5_3 = STD_VIDEO_AV1_LEVEL_5_3, + e6_0 = STD_VIDEO_AV1_LEVEL_6_0, + e6_1 = STD_VIDEO_AV1_LEVEL_6_1, + e6_2 = STD_VIDEO_AV1_LEVEL_6_2, + e6_3 = STD_VIDEO_AV1_LEVEL_6_3, + e7_0 = STD_VIDEO_AV1_LEVEL_7_0, + e7_1 = STD_VIDEO_AV1_LEVEL_7_1, + e7_2 = STD_VIDEO_AV1_LEVEL_7_2, + e7_3 = STD_VIDEO_AV1_LEVEL_7_3, + eInvalid = STD_VIDEO_AV1_LEVEL_INVALID + }; + + enum class AV1FrameType + { + eKey = STD_VIDEO_AV1_FRAME_TYPE_KEY, + eInter = STD_VIDEO_AV1_FRAME_TYPE_INTER, + eIntraOnly = STD_VIDEO_AV1_FRAME_TYPE_INTRA_ONLY, + eSwitch = STD_VIDEO_AV1_FRAME_TYPE_SWITCH, + eInvalid = STD_VIDEO_AV1_FRAME_TYPE_INVALID + }; + + enum class AV1ReferenceName + { + eIntraFrame = STD_VIDEO_AV1_REFERENCE_NAME_INTRA_FRAME, + eLastFrame = STD_VIDEO_AV1_REFERENCE_NAME_LAST_FRAME, + eLast2Frame = STD_VIDEO_AV1_REFERENCE_NAME_LAST2_FRAME, + eLast3Frame = STD_VIDEO_AV1_REFERENCE_NAME_LAST3_FRAME, + eGoldenFrame = 
STD_VIDEO_AV1_REFERENCE_NAME_GOLDEN_FRAME, + eBwdrefFrame = STD_VIDEO_AV1_REFERENCE_NAME_BWDREF_FRAME, + eAltref2Frame = STD_VIDEO_AV1_REFERENCE_NAME_ALTREF2_FRAME, + eAltrefFrame = STD_VIDEO_AV1_REFERENCE_NAME_ALTREF_FRAME, + eInvalid = STD_VIDEO_AV1_REFERENCE_NAME_INVALID + }; + + enum class AV1InterpolationFilter + { + eEighttap = STD_VIDEO_AV1_INTERPOLATION_FILTER_EIGHTTAP, + eEighttapSmooth = STD_VIDEO_AV1_INTERPOLATION_FILTER_EIGHTTAP_SMOOTH, + eEighttapSharp = STD_VIDEO_AV1_INTERPOLATION_FILTER_EIGHTTAP_SHARP, + eBilinear = STD_VIDEO_AV1_INTERPOLATION_FILTER_BILINEAR, + eSwitchable = STD_VIDEO_AV1_INTERPOLATION_FILTER_SWITCHABLE, + eInvalid = STD_VIDEO_AV1_INTERPOLATION_FILTER_INVALID + }; + + enum class AV1TxMode + { + eOnly4X4 = STD_VIDEO_AV1_TX_MODE_ONLY_4X4, + eLargest = STD_VIDEO_AV1_TX_MODE_LARGEST, + eSelect = STD_VIDEO_AV1_TX_MODE_SELECT, + eInvalid = STD_VIDEO_AV1_TX_MODE_INVALID + }; + + enum class AV1FrameRestorationType + { + eNone = STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_NONE, + eWiener = STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_WIENER, + eSgrproj = STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_SGRPROJ, + eSwitchable = STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_SWITCHABLE, + eInvalid = STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_INVALID + }; + + enum class AV1ColorPrimaries + { + eBt709 = STD_VIDEO_AV1_COLOR_PRIMARIES_BT_709, + eUnspecified = STD_VIDEO_AV1_COLOR_PRIMARIES_UNSPECIFIED, + eBtUnspecified VULKAN_HPP_DEPRECATED_17( "eBtUnspecified is deprecated, eUnspecified should be used instead." ) = + STD_VIDEO_AV1_COLOR_PRIMARIES_BT_UNSPECIFIED, + eBt470M = STD_VIDEO_AV1_COLOR_PRIMARIES_BT_470_M, + eBt470BG = STD_VIDEO_AV1_COLOR_PRIMARIES_BT_470_B_G, + eBt601 = STD_VIDEO_AV1_COLOR_PRIMARIES_BT_601, + eSmpte240 = STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_240, + eGenericFilm = STD_VIDEO_AV1_COLOR_PRIMARIES_GENERIC_FILM, + eBt2020 = STD_VIDEO_AV1_COLOR_PRIMARIES_BT_2020, + eXyz = STD_VIDEO_AV1_COLOR_PRIMARIES_XYZ, + eSmpte431 = STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_431, + eSmpte432 = STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_432, + eEbu3213 = STD_VIDEO_AV1_COLOR_PRIMARIES_EBU_3213, + eInvalid = STD_VIDEO_AV1_COLOR_PRIMARIES_INVALID + }; + + enum class AV1TransferCharacteristics + { + eReserved0 = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_RESERVED_0, + eBt709 = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_709, + eUnspecified = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_UNSPECIFIED, + eReserved3 = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_RESERVED_3, + eBt470M = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_470_M, + eBt470BG = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_470_B_G, + eBt601 = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_601, + eSmpte240 = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SMPTE_240, + eLinear = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_LINEAR, + eLog100 = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_LOG_100, + eLog100Sqrt10 = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_LOG_100_SQRT10, + eIec61966 = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_IEC_61966, + eBt1361 = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_1361, + eSrgb = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SRGB, + eBt2020_10Bit = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_2020_10_BIT, + eBt2020_12Bit = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_2020_12_BIT, + eSmpte2084 = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SMPTE_2084, + eSmpte428 = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SMPTE_428, + eHlg = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_HLG, + eInvalid = STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_INVALID + }; + + enum class AV1MatrixCoefficients + { + eIdentity = 
STD_VIDEO_AV1_MATRIX_COEFFICIENTS_IDENTITY, + eBt709 = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_709, + eUnspecified = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_UNSPECIFIED, + eReserved3 = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_RESERVED_3, + eFcc = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_FCC, + eBt470BG = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_470_B_G, + eBt601 = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_601, + eSmpte240 = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_SMPTE_240, + eSmpteYcgco = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_SMPTE_YCGCO, + eBt2020Ncl = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_2020_NCL, + eBt2020Cl = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_2020_CL, + eSmpte2085 = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_SMPTE_2085, + eChromatNcl = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_CHROMAT_NCL, + eChromatCl = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_CHROMAT_CL, + eIctcp = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_ICTCP, + eInvalid = STD_VIDEO_AV1_MATRIX_COEFFICIENTS_INVALID + }; + + enum class AV1ChromaSamplePosition + { + eUnknown = STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_UNKNOWN, + eVertical = STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_VERTICAL, + eColocated = STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_COLOCATED, + eReserved = STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_RESERVED, + eInvalid = STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_INVALID + }; + + //=============== + //=== STRUCTS === + //=============== + + //=== vulkan_video_codec_h264std === + + struct H264SpsVuiFlags + { + using NativeType = StdVideoH264SpsVuiFlags; + + operator StdVideoH264SpsVuiFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264SpsVuiFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H264SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( aspect_ratio_info_present_flag == rhs.aspect_ratio_info_present_flag ) && ( overscan_info_present_flag == rhs.overscan_info_present_flag ) && + ( overscan_appropriate_flag == rhs.overscan_appropriate_flag ) && ( video_signal_type_present_flag == rhs.video_signal_type_present_flag ) && + ( video_full_range_flag == rhs.video_full_range_flag ) && ( color_description_present_flag == rhs.color_description_present_flag ) && + ( chroma_loc_info_present_flag == rhs.chroma_loc_info_present_flag ) && ( timing_info_present_flag == rhs.timing_info_present_flag ) && + ( fixed_frame_rate_flag == rhs.fixed_frame_rate_flag ) && ( bitstream_restriction_flag == rhs.bitstream_restriction_flag ) && + ( nal_hrd_parameters_present_flag == rhs.nal_hrd_parameters_present_flag ) && + ( vcl_hrd_parameters_present_flag == rhs.vcl_hrd_parameters_present_flag ); + } + + bool operator!=( H264SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t aspect_ratio_info_present_flag : 1; + uint32_t overscan_info_present_flag : 1; + uint32_t overscan_appropriate_flag : 1; + uint32_t video_signal_type_present_flag : 1; + uint32_t video_full_range_flag : 1; + uint32_t color_description_present_flag : 1; + uint32_t chroma_loc_info_present_flag : 1; + uint32_t timing_info_present_flag : 1; + uint32_t fixed_frame_rate_flag : 1; + uint32_t bitstream_restriction_flag : 1; + uint32_t nal_hrd_parameters_present_flag : 1; + uint32_t vcl_hrd_parameters_present_flag : 1; + }; + + struct H264HrdParameters + { + using NativeType = StdVideoH264HrdParameters; + + operator StdVideoH264HrdParameters const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264HrdParameters &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + bool operator==( H264HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( cpb_cnt_minus1 == rhs.cpb_cnt_minus1 ) && ( bit_rate_scale == rhs.bit_rate_scale ) && ( cpb_size_scale == rhs.cpb_size_scale ) && + ( reserved1 == rhs.reserved1 ) && ( bit_rate_value_minus1 == rhs.bit_rate_value_minus1 ) && + ( cpb_size_value_minus1 == rhs.cpb_size_value_minus1 ) && ( cbr_flag == rhs.cbr_flag ) && + ( initial_cpb_removal_delay_length_minus1 == rhs.initial_cpb_removal_delay_length_minus1 ) && + ( cpb_removal_delay_length_minus1 == rhs.cpb_removal_delay_length_minus1 ) && + ( dpb_output_delay_length_minus1 == rhs.dpb_output_delay_length_minus1 ) && ( time_offset_length == rhs.time_offset_length ); + } + + bool operator!=( H264HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint8_t cpb_cnt_minus1 = {}; + uint8_t bit_rate_scale = {}; + uint8_t cpb_size_scale = {}; + uint8_t reserved1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D bit_rate_value_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cpb_size_value_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cbr_flag = {}; + uint32_t initial_cpb_removal_delay_length_minus1 = {}; + uint32_t cpb_removal_delay_length_minus1 = {}; + uint32_t dpb_output_delay_length_minus1 = {}; + uint32_t time_offset_length = {}; + }; + + struct H264SequenceParameterSetVui + { + using NativeType = StdVideoH264SequenceParameterSetVui; + + operator StdVideoH264SequenceParameterSetVui const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264SequenceParameterSetVui &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H264SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( aspect_ratio_idc == rhs.aspect_ratio_idc ) && ( sar_width == rhs.sar_width ) && ( sar_height == rhs.sar_height ) && + ( video_format == rhs.video_format ) && ( colour_primaries == rhs.colour_primaries ) && + ( transfer_characteristics == rhs.transfer_characteristics ) && ( matrix_coefficients == rhs.matrix_coefficients ) && + ( num_units_in_tick == rhs.num_units_in_tick ) && ( time_scale == rhs.time_scale ) && ( max_num_reorder_frames == rhs.max_num_reorder_frames ) && + ( max_dec_frame_buffering == rhs.max_dec_frame_buffering ) && ( chroma_sample_loc_type_top_field == rhs.chroma_sample_loc_type_top_field ) && + ( chroma_sample_loc_type_bottom_field == rhs.chroma_sample_loc_type_bottom_field ) && ( reserved1 == rhs.reserved1 ) && + ( pHrdParameters == rhs.pHrdParameters ); + } + + bool operator!=( H264SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SpsVuiFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264AspectRatioIdc aspect_ratio_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264AspectRatioIdc::eUnspecified; + uint16_t sar_width = {}; + uint16_t sar_height = {}; + uint8_t video_format = {}; + uint8_t colour_primaries = {}; + uint8_t transfer_characteristics = {}; + uint8_t matrix_coefficients = {}; + uint32_t num_units_in_tick = {}; + uint32_t time_scale = {}; + uint8_t max_num_reorder_frames = {}; + uint8_t max_dec_frame_buffering = {}; + uint8_t chroma_sample_loc_type_top_field = {}; + uint8_t chroma_sample_loc_type_bottom_field = {}; + uint32_t reserved1 = {}; + const 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264HrdParameters * pHrdParameters = {}; + }; + + struct H264SpsFlags + { + using NativeType = StdVideoH264SpsFlags; + + operator StdVideoH264SpsFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264SpsFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H264SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( constraint_set0_flag == rhs.constraint_set0_flag ) && ( constraint_set1_flag == rhs.constraint_set1_flag ) && + ( constraint_set2_flag == rhs.constraint_set2_flag ) && ( constraint_set3_flag == rhs.constraint_set3_flag ) && + ( constraint_set4_flag == rhs.constraint_set4_flag ) && ( constraint_set5_flag == rhs.constraint_set5_flag ) && + ( direct_8x8_inference_flag == rhs.direct_8x8_inference_flag ) && ( mb_adaptive_frame_field_flag == rhs.mb_adaptive_frame_field_flag ) && + ( frame_mbs_only_flag == rhs.frame_mbs_only_flag ) && ( delta_pic_order_always_zero_flag == rhs.delta_pic_order_always_zero_flag ) && + ( separate_colour_plane_flag == rhs.separate_colour_plane_flag ) && + ( gaps_in_frame_num_value_allowed_flag == rhs.gaps_in_frame_num_value_allowed_flag ) && + ( qpprime_y_zero_transform_bypass_flag == rhs.qpprime_y_zero_transform_bypass_flag ) && ( frame_cropping_flag == rhs.frame_cropping_flag ) && + ( seq_scaling_matrix_present_flag == rhs.seq_scaling_matrix_present_flag ) && ( vui_parameters_present_flag == rhs.vui_parameters_present_flag ); + } + + bool operator!=( H264SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t constraint_set0_flag : 1; + uint32_t constraint_set1_flag : 1; + uint32_t constraint_set2_flag : 1; + uint32_t constraint_set3_flag : 1; + uint32_t constraint_set4_flag : 1; + uint32_t constraint_set5_flag : 1; + uint32_t direct_8x8_inference_flag : 1; + uint32_t mb_adaptive_frame_field_flag : 1; + uint32_t frame_mbs_only_flag : 1; + uint32_t delta_pic_order_always_zero_flag : 1; + uint32_t separate_colour_plane_flag : 1; + uint32_t gaps_in_frame_num_value_allowed_flag : 1; + uint32_t qpprime_y_zero_transform_bypass_flag : 1; + uint32_t frame_cropping_flag : 1; + uint32_t seq_scaling_matrix_present_flag : 1; + uint32_t vui_parameters_present_flag : 1; + }; + + struct H264ScalingLists + { + using NativeType = StdVideoH264ScalingLists; + + operator StdVideoH264ScalingLists const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264ScalingLists &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H264ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( scaling_list_present_mask == rhs.scaling_list_present_mask ) && ( use_default_scaling_matrix_mask == rhs.use_default_scaling_matrix_mask ) && + ( ScalingList4x4 == rhs.ScalingList4x4 ) && ( ScalingList8x8 == rhs.ScalingList8x8 ); + } + + bool operator!=( H264ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint16_t scaling_list_present_mask = {}; + uint16_t use_default_scaling_matrix_mask = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D + ScalingList4x4 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D + ScalingList8x8 = {}; + }; + + struct H264SequenceParameterSet + { + using NativeType = StdVideoH264SequenceParameterSet; + + operator StdVideoH264SequenceParameterSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
StdVideoH264SequenceParameterSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H264SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( profile_idc == rhs.profile_idc ) && ( level_idc == rhs.level_idc ) && + ( chroma_format_idc == rhs.chroma_format_idc ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && + ( bit_depth_luma_minus8 == rhs.bit_depth_luma_minus8 ) && ( bit_depth_chroma_minus8 == rhs.bit_depth_chroma_minus8 ) && + ( log2_max_frame_num_minus4 == rhs.log2_max_frame_num_minus4 ) && ( pic_order_cnt_type == rhs.pic_order_cnt_type ) && + ( offset_for_non_ref_pic == rhs.offset_for_non_ref_pic ) && ( offset_for_top_to_bottom_field == rhs.offset_for_top_to_bottom_field ) && + ( log2_max_pic_order_cnt_lsb_minus4 == rhs.log2_max_pic_order_cnt_lsb_minus4 ) && + ( num_ref_frames_in_pic_order_cnt_cycle == rhs.num_ref_frames_in_pic_order_cnt_cycle ) && ( max_num_ref_frames == rhs.max_num_ref_frames ) && + ( reserved1 == rhs.reserved1 ) && ( pic_width_in_mbs_minus1 == rhs.pic_width_in_mbs_minus1 ) && + ( pic_height_in_map_units_minus1 == rhs.pic_height_in_map_units_minus1 ) && ( frame_crop_left_offset == rhs.frame_crop_left_offset ) && + ( frame_crop_right_offset == rhs.frame_crop_right_offset ) && ( frame_crop_top_offset == rhs.frame_crop_top_offset ) && + ( frame_crop_bottom_offset == rhs.frame_crop_bottom_offset ) && ( reserved2 == rhs.reserved2 ) && + ( pOffsetForRefFrame == rhs.pOffsetForRefFrame ) && ( pScalingLists == rhs.pScalingLists ) && + ( pSequenceParameterSetVui == rhs.pSequenceParameterSetVui ); + } + + bool operator!=( H264SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SpsFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ProfileIdc profile_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ProfileIdc::eBaseline; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264LevelIdc level_idc = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264LevelIdc::e1_0; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ChromaFormatIdc chroma_format_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ChromaFormatIdc::eMonochrome; + uint8_t seq_parameter_set_id = {}; + uint8_t bit_depth_luma_minus8 = {}; + uint8_t bit_depth_chroma_minus8 = {}; + uint8_t log2_max_frame_num_minus4 = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PocType pic_order_cnt_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PocType::e0; + int32_t offset_for_non_ref_pic = {}; + int32_t offset_for_top_to_bottom_field = {}; + uint8_t log2_max_pic_order_cnt_lsb_minus4 = {}; + uint8_t num_ref_frames_in_pic_order_cnt_cycle = {}; + uint8_t max_num_ref_frames = {}; + uint8_t reserved1 = {}; + uint32_t pic_width_in_mbs_minus1 = {}; + uint32_t pic_height_in_map_units_minus1 = {}; + uint32_t frame_crop_left_offset = {}; + uint32_t frame_crop_right_offset = {}; + uint32_t frame_crop_top_offset = {}; + uint32_t frame_crop_bottom_offset = {}; + uint32_t reserved2 = {}; + const int32_t * pOffsetForRefFrame = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ScalingLists * pScalingLists = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SequenceParameterSetVui * pSequenceParameterSetVui = {}; + }; + + struct H264PpsFlags + { + using NativeType = StdVideoH264PpsFlags; + + operator StdVideoH264PpsFlags const &() 
const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264PpsFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H264PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( transform_8x8_mode_flag == rhs.transform_8x8_mode_flag ) && ( redundant_pic_cnt_present_flag == rhs.redundant_pic_cnt_present_flag ) && + ( constrained_intra_pred_flag == rhs.constrained_intra_pred_flag ) && + ( deblocking_filter_control_present_flag == rhs.deblocking_filter_control_present_flag ) && ( weighted_pred_flag == rhs.weighted_pred_flag ) && + ( bottom_field_pic_order_in_frame_present_flag == rhs.bottom_field_pic_order_in_frame_present_flag ) && + ( entropy_coding_mode_flag == rhs.entropy_coding_mode_flag ) && ( pic_scaling_matrix_present_flag == rhs.pic_scaling_matrix_present_flag ); + } + + bool operator!=( H264PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t transform_8x8_mode_flag : 1; + uint32_t redundant_pic_cnt_present_flag : 1; + uint32_t constrained_intra_pred_flag : 1; + uint32_t deblocking_filter_control_present_flag : 1; + uint32_t weighted_pred_flag : 1; + uint32_t bottom_field_pic_order_in_frame_present_flag : 1; + uint32_t entropy_coding_mode_flag : 1; + uint32_t pic_scaling_matrix_present_flag : 1; + }; + + struct H264PictureParameterSet + { + using NativeType = StdVideoH264PictureParameterSet; + + operator StdVideoH264PictureParameterSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264PictureParameterSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H264PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && ( pic_parameter_set_id == rhs.pic_parameter_set_id ) && + ( num_ref_idx_l0_default_active_minus1 == rhs.num_ref_idx_l0_default_active_minus1 ) && + ( num_ref_idx_l1_default_active_minus1 == rhs.num_ref_idx_l1_default_active_minus1 ) && ( weighted_bipred_idc == rhs.weighted_bipred_idc ) && + ( pic_init_qp_minus26 == rhs.pic_init_qp_minus26 ) && ( pic_init_qs_minus26 == rhs.pic_init_qs_minus26 ) && + ( chroma_qp_index_offset == rhs.chroma_qp_index_offset ) && ( second_chroma_qp_index_offset == rhs.second_chroma_qp_index_offset ) && + ( pScalingLists == rhs.pScalingLists ); + } + + bool operator!=( H264PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PpsFlags flags = {}; + uint8_t seq_parameter_set_id = {}; + uint8_t pic_parameter_set_id = {}; + uint8_t num_ref_idx_l0_default_active_minus1 = {}; + uint8_t num_ref_idx_l1_default_active_minus1 = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264WeightedBipredIdc weighted_bipred_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264WeightedBipredIdc::eDefault; + int8_t pic_init_qp_minus26 = {}; + int8_t pic_init_qs_minus26 = {}; + int8_t chroma_qp_index_offset = {}; + int8_t second_chroma_qp_index_offset = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ScalingLists * pScalingLists = {}; + }; + + //=== vulkan_video_codec_h264std_decode === + + struct DecodeH264PictureInfoFlags + { + using NativeType = StdVideoDecodeH264PictureInfoFlags; + + operator StdVideoDecodeH264PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this 
); + } + + operator StdVideoDecodeH264PictureInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( field_pic_flag == rhs.field_pic_flag ) && ( is_intra == rhs.is_intra ) && ( IdrPicFlag == rhs.IdrPicFlag ) && + ( bottom_field_flag == rhs.bottom_field_flag ) && ( is_reference == rhs.is_reference ) && + ( complementary_field_pair == rhs.complementary_field_pair ); + } + + bool operator!=( DecodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t field_pic_flag : 1; + uint32_t is_intra : 1; + uint32_t IdrPicFlag : 1; + uint32_t bottom_field_flag : 1; + uint32_t is_reference : 1; + uint32_t complementary_field_pair : 1; + }; + + struct DecodeH264PictureInfo + { + using NativeType = StdVideoDecodeH264PictureInfo; + + operator StdVideoDecodeH264PictureInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH264PictureInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && ( pic_parameter_set_id == rhs.pic_parameter_set_id ) && + ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && ( frame_num == rhs.frame_num ) && ( idr_pic_id == rhs.idr_pic_id ) && + ( PicOrderCnt == rhs.PicOrderCnt ); + } + + bool operator!=( DecodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH264PictureInfoFlags flags = {}; + uint8_t seq_parameter_set_id = {}; + uint8_t pic_parameter_set_id = {}; + uint8_t reserved1 = {}; + uint8_t reserved2 = {}; + uint16_t frame_num = {}; + uint16_t idr_pic_id = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D PicOrderCnt = {}; + }; + + struct DecodeH264ReferenceInfoFlags + { + using NativeType = StdVideoDecodeH264ReferenceInfoFlags; + + operator StdVideoDecodeH264ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH264ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( top_field_flag == rhs.top_field_flag ) && ( bottom_field_flag == rhs.bottom_field_flag ) && + ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( is_non_existing == rhs.is_non_existing ); + } + + bool operator!=( DecodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t top_field_flag : 1; + uint32_t bottom_field_flag : 1; + uint32_t used_for_long_term_reference : 1; + uint32_t is_non_existing : 1; + }; + + struct DecodeH264ReferenceInfo + { + using NativeType = StdVideoDecodeH264ReferenceInfo; + + operator StdVideoDecodeH264ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH264ReferenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( FrameNum == rhs.FrameNum ) && ( reserved == rhs.reserved ) && ( PicOrderCnt == rhs.PicOrderCnt ); + } + + bool 
operator!=( DecodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH264ReferenceInfoFlags flags = {}; + uint16_t FrameNum = {}; + uint16_t reserved = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D PicOrderCnt = {}; + }; + + //=== vulkan_video_codec_h264std_encode === + + struct EncodeH264WeightTableFlags + { + using NativeType = StdVideoEncodeH264WeightTableFlags; + + operator StdVideoEncodeH264WeightTableFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264WeightTableFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( luma_weight_l0_flag == rhs.luma_weight_l0_flag ) && ( chroma_weight_l0_flag == rhs.chroma_weight_l0_flag ) && + ( luma_weight_l1_flag == rhs.luma_weight_l1_flag ) && ( chroma_weight_l1_flag == rhs.chroma_weight_l1_flag ); + } + + bool operator!=( EncodeH264WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t luma_weight_l0_flag = {}; + uint32_t chroma_weight_l0_flag = {}; + uint32_t luma_weight_l1_flag = {}; + uint32_t chroma_weight_l1_flag = {}; + }; + + struct EncodeH264WeightTable + { + using NativeType = StdVideoEncodeH264WeightTable; + + operator StdVideoEncodeH264WeightTable const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264WeightTable &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( luma_log2_weight_denom == rhs.luma_log2_weight_denom ) && + ( chroma_log2_weight_denom == rhs.chroma_log2_weight_denom ) && ( luma_weight_l0 == rhs.luma_weight_l0 ) && + ( luma_offset_l0 == rhs.luma_offset_l0 ) && ( chroma_weight_l0 == rhs.chroma_weight_l0 ) && ( chroma_offset_l0 == rhs.chroma_offset_l0 ) && + ( luma_weight_l1 == rhs.luma_weight_l1 ) && ( luma_offset_l1 == rhs.luma_offset_l1 ) && ( chroma_weight_l1 == rhs.chroma_weight_l1 ) && + ( chroma_offset_l1 == rhs.chroma_offset_l1 ); + } + + bool operator!=( EncodeH264WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264WeightTableFlags flags = {}; + uint8_t luma_log2_weight_denom = {}; + uint8_t chroma_log2_weight_denom = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D luma_weight_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D luma_offset_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D chroma_weight_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D chroma_offset_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D luma_weight_l1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D luma_offset_l1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D chroma_weight_l1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D chroma_offset_l1 = {}; + }; + + struct EncodeH264SliceHeaderFlags + { + using NativeType = StdVideoEncodeH264SliceHeaderFlags; + + operator StdVideoEncodeH264SliceHeaderFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264SliceHeaderFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264SliceHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( 
direct_spatial_mv_pred_flag == rhs.direct_spatial_mv_pred_flag ) && + ( num_ref_idx_active_override_flag == rhs.num_ref_idx_active_override_flag ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH264SliceHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t direct_spatial_mv_pred_flag : 1; + uint32_t num_ref_idx_active_override_flag : 1; + uint32_t reserved : 30; + }; + + struct EncodeH264PictureInfoFlags + { + using NativeType = StdVideoEncodeH264PictureInfoFlags; + + operator StdVideoEncodeH264PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264PictureInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( IdrPicFlag == rhs.IdrPicFlag ) && ( is_reference == rhs.is_reference ) && + ( no_output_of_prior_pics_flag == rhs.no_output_of_prior_pics_flag ) && ( long_term_reference_flag == rhs.long_term_reference_flag ) && + ( adaptive_ref_pic_marking_mode_flag == rhs.adaptive_ref_pic_marking_mode_flag ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t IdrPicFlag : 1; + uint32_t is_reference : 1; + uint32_t no_output_of_prior_pics_flag : 1; + uint32_t long_term_reference_flag : 1; + uint32_t adaptive_ref_pic_marking_mode_flag : 1; + uint32_t reserved : 27; + }; + + struct EncodeH264ReferenceInfoFlags + { + using NativeType = StdVideoEncodeH264ReferenceInfoFlags; + + operator StdVideoEncodeH264ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t used_for_long_term_reference : 1; + uint32_t reserved : 31; + }; + + struct EncodeH264ReferenceListsInfoFlags + { + using NativeType = StdVideoEncodeH264ReferenceListsInfoFlags; + + operator StdVideoEncodeH264ReferenceListsInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264ReferenceListsInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( ref_pic_list_modification_flag_l0 == rhs.ref_pic_list_modification_flag_l0 ) && + ( ref_pic_list_modification_flag_l1 == rhs.ref_pic_list_modification_flag_l1 ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH264ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t ref_pic_list_modification_flag_l0 : 1; + uint32_t ref_pic_list_modification_flag_l1 : 1; + uint32_t reserved : 30; + }; + + struct EncodeH264RefListModEntry + { + using NativeType = StdVideoEncodeH264RefListModEntry; + + operator StdVideoEncodeH264RefListModEntry const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
StdVideoEncodeH264RefListModEntry &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264RefListModEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( modification_of_pic_nums_idc == rhs.modification_of_pic_nums_idc ) && ( abs_diff_pic_num_minus1 == rhs.abs_diff_pic_num_minus1 ) && + ( long_term_pic_num == rhs.long_term_pic_num ); + } + + bool operator!=( EncodeH264RefListModEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ModificationOfPicNumsIdc modification_of_pic_nums_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ModificationOfPicNumsIdc::eShortTermSubtract; + uint16_t abs_diff_pic_num_minus1 = {}; + uint16_t long_term_pic_num = {}; + }; + + struct EncodeH264RefPicMarkingEntry + { + using NativeType = StdVideoEncodeH264RefPicMarkingEntry; + + operator StdVideoEncodeH264RefPicMarkingEntry const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264RefPicMarkingEntry &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264RefPicMarkingEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( memory_management_control_operation == rhs.memory_management_control_operation ) && + ( difference_of_pic_nums_minus1 == rhs.difference_of_pic_nums_minus1 ) && ( long_term_pic_num == rhs.long_term_pic_num ) && + ( long_term_frame_idx == rhs.long_term_frame_idx ) && ( max_long_term_frame_idx_plus1 == rhs.max_long_term_frame_idx_plus1 ); + } + + bool operator!=( EncodeH264RefPicMarkingEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264MemMgmtControlOp memory_management_control_operation = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264MemMgmtControlOp::eEnd; + uint16_t difference_of_pic_nums_minus1 = {}; + uint16_t long_term_pic_num = {}; + uint16_t long_term_frame_idx = {}; + uint16_t max_long_term_frame_idx_plus1 = {}; + }; + + struct EncodeH264ReferenceListsInfo + { + using NativeType = StdVideoEncodeH264ReferenceListsInfo; + + operator StdVideoEncodeH264ReferenceListsInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264ReferenceListsInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( num_ref_idx_l0_active_minus1 == rhs.num_ref_idx_l0_active_minus1 ) && + ( num_ref_idx_l1_active_minus1 == rhs.num_ref_idx_l1_active_minus1 ) && ( RefPicList0 == rhs.RefPicList0 ) && + ( RefPicList1 == rhs.RefPicList1 ) && ( refList0ModOpCount == rhs.refList0ModOpCount ) && ( refList1ModOpCount == rhs.refList1ModOpCount ) && + ( refPicMarkingOpCount == rhs.refPicMarkingOpCount ) && ( reserved1 == rhs.reserved1 ) && + ( pRefList0ModOperations == rhs.pRefList0ModOperations ) && ( pRefList1ModOperations == rhs.pRefList1ModOperations ) && + ( pRefPicMarkingOperations == rhs.pRefPicMarkingOperations ); + } + + bool operator!=( EncodeH264ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264ReferenceListsInfoFlags flags = {}; + uint8_t num_ref_idx_l0_active_minus1 = {}; + uint8_t num_ref_idx_l1_active_minus1 = {}; + 
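+ // RefPicList0 / RefPicList1 below are the L0/L1 reference picture lists; per the C
+ // Video Std header, each entry is a DPB slot index, with
+ // STD_VIDEO_H264_NO_REFERENCE_PICTURE marking unused entries and the array length
+ // given by STD_VIDEO_H264_MAX_NUM_LIST_REF.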
VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicList0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicList1 = {}; + uint8_t refList0ModOpCount = {}; + uint8_t refList1ModOpCount = {}; + uint8_t refPicMarkingOpCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264RefListModEntry * pRefList0ModOperations = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264RefListModEntry * pRefList1ModOperations = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264RefPicMarkingEntry * pRefPicMarkingOperations = {}; + }; + + struct EncodeH264PictureInfo + { + using NativeType = StdVideoEncodeH264PictureInfo; + + operator StdVideoEncodeH264PictureInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264PictureInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && ( pic_parameter_set_id == rhs.pic_parameter_set_id ) && + ( idr_pic_id == rhs.idr_pic_id ) && ( primary_pic_type == rhs.primary_pic_type ) && ( frame_num == rhs.frame_num ) && + ( PicOrderCnt == rhs.PicOrderCnt ) && ( temporal_id == rhs.temporal_id ) && ( reserved1 == rhs.reserved1 ) && ( pRefLists == rhs.pRefLists ); + } + + bool operator!=( EncodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264PictureInfoFlags flags = {}; + uint8_t seq_parameter_set_id = {}; + uint8_t pic_parameter_set_id = {}; + uint16_t idr_pic_id = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType primary_pic_type = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType::eP; + uint32_t frame_num = {}; + int32_t PicOrderCnt = {}; + uint8_t temporal_id = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264ReferenceListsInfo * pRefLists = {}; + }; + + struct EncodeH264ReferenceInfo + { + using NativeType = StdVideoEncodeH264ReferenceInfo; + + operator StdVideoEncodeH264ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264ReferenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( primary_pic_type == rhs.primary_pic_type ) && ( FrameNum == rhs.FrameNum ) && ( PicOrderCnt == rhs.PicOrderCnt ) && + ( long_term_pic_num == rhs.long_term_pic_num ) && ( long_term_frame_idx == rhs.long_term_frame_idx ) && ( temporal_id == rhs.temporal_id ); + } + + bool operator!=( EncodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264ReferenceInfoFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType primary_pic_type = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType::eP; + uint32_t FrameNum = {}; + int32_t PicOrderCnt = {}; + uint16_t long_term_pic_num = {}; + uint16_t long_term_frame_idx = {}; + uint8_t temporal_id = {}; + }; + + struct EncodeH264SliceHeader + { + using NativeType = StdVideoEncodeH264SliceHeader; + + operator 
StdVideoEncodeH264SliceHeader const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264SliceHeader &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264SliceHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( first_mb_in_slice == rhs.first_mb_in_slice ) && ( slice_type == rhs.slice_type ) && + ( slice_alpha_c0_offset_div2 == rhs.slice_alpha_c0_offset_div2 ) && ( slice_beta_offset_div2 == rhs.slice_beta_offset_div2 ) && + ( slice_qp_delta == rhs.slice_qp_delta ) && ( reserved1 == rhs.reserved1 ) && ( cabac_init_idc == rhs.cabac_init_idc ) && + ( disable_deblocking_filter_idc == rhs.disable_deblocking_filter_idc ) && ( pWeightTable == rhs.pWeightTable ); + } + + bool operator!=( EncodeH264SliceHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264SliceHeaderFlags flags = {}; + uint32_t first_mb_in_slice = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SliceType slice_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SliceType::eP; + int8_t slice_alpha_c0_offset_div2 = {}; + int8_t slice_beta_offset_div2 = {}; + int8_t slice_qp_delta = {}; + uint8_t reserved1 = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264CabacInitIdc cabac_init_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264CabacInitIdc::e0; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264DisableDeblockingFilterIdc disable_deblocking_filter_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264DisableDeblockingFilterIdc::eDisabled; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264WeightTable * pWeightTable = {}; + }; + + //=== vulkan_video_codec_h265std === + + struct H265DecPicBufMgr + { + using NativeType = StdVideoH265DecPicBufMgr; + + operator StdVideoH265DecPicBufMgr const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265DecPicBufMgr &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265DecPicBufMgr const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( max_latency_increase_plus1 == rhs.max_latency_increase_plus1 ) && ( max_dec_pic_buffering_minus1 == rhs.max_dec_pic_buffering_minus1 ) && + ( max_num_reorder_pics == rhs.max_num_reorder_pics ); + } + + bool operator!=( H265DecPicBufMgr const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D max_latency_increase_plus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D max_dec_pic_buffering_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D max_num_reorder_pics = {}; + }; + + struct H265SubLayerHrdParameters + { + using NativeType = StdVideoH265SubLayerHrdParameters; + + operator StdVideoH265SubLayerHrdParameters const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SubLayerHrdParameters &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265SubLayerHrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( bit_rate_value_minus1 == rhs.bit_rate_value_minus1 ) && ( cpb_size_value_minus1 == rhs.cpb_size_value_minus1 ) && + ( cpb_size_du_value_minus1 == rhs.cpb_size_du_value_minus1 ) && ( bit_rate_du_value_minus1 == rhs.bit_rate_du_value_minus1 ) && + ( cbr_flag == rhs.cbr_flag ); + } + + bool operator!=( 
H265SubLayerHrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D bit_rate_value_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cpb_size_value_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cpb_size_du_value_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D bit_rate_du_value_minus1 = {}; + uint32_t cbr_flag = {}; + }; + + struct H265HrdFlags + { + using NativeType = StdVideoH265HrdFlags; + + operator StdVideoH265HrdFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265HrdFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265HrdFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( nal_hrd_parameters_present_flag == rhs.nal_hrd_parameters_present_flag ) && + ( vcl_hrd_parameters_present_flag == rhs.vcl_hrd_parameters_present_flag ) && + ( sub_pic_hrd_params_present_flag == rhs.sub_pic_hrd_params_present_flag ) && + ( sub_pic_cpb_params_in_pic_timing_sei_flag == rhs.sub_pic_cpb_params_in_pic_timing_sei_flag ) && + ( fixed_pic_rate_general_flag == rhs.fixed_pic_rate_general_flag ) && ( fixed_pic_rate_within_cvs_flag == rhs.fixed_pic_rate_within_cvs_flag ) && + ( low_delay_hrd_flag == rhs.low_delay_hrd_flag ); + } + + bool operator!=( H265HrdFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t nal_hrd_parameters_present_flag : 1; + uint32_t vcl_hrd_parameters_present_flag : 1; + uint32_t sub_pic_hrd_params_present_flag : 1; + uint32_t sub_pic_cpb_params_in_pic_timing_sei_flag : 1; + uint32_t fixed_pic_rate_general_flag : 8; + uint32_t fixed_pic_rate_within_cvs_flag : 8; + uint32_t low_delay_hrd_flag : 8; + }; + + struct H265HrdParameters + { + using NativeType = StdVideoH265HrdParameters; + + operator StdVideoH265HrdParameters const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265HrdParameters &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( tick_divisor_minus2 == rhs.tick_divisor_minus2 ) && + ( du_cpb_removal_delay_increment_length_minus1 == rhs.du_cpb_removal_delay_increment_length_minus1 ) && + ( dpb_output_delay_du_length_minus1 == rhs.dpb_output_delay_du_length_minus1 ) && ( bit_rate_scale == rhs.bit_rate_scale ) && + ( cpb_size_scale == rhs.cpb_size_scale ) && ( cpb_size_du_scale == rhs.cpb_size_du_scale ) && + ( initial_cpb_removal_delay_length_minus1 == rhs.initial_cpb_removal_delay_length_minus1 ) && + ( au_cpb_removal_delay_length_minus1 == rhs.au_cpb_removal_delay_length_minus1 ) && + ( dpb_output_delay_length_minus1 == rhs.dpb_output_delay_length_minus1 ) && ( cpb_cnt_minus1 == rhs.cpb_cnt_minus1 ) && + ( elemental_duration_in_tc_minus1 == rhs.elemental_duration_in_tc_minus1 ) && ( reserved == rhs.reserved ) && + ( pSubLayerHrdParametersNal == rhs.pSubLayerHrdParametersNal ) && ( pSubLayerHrdParametersVcl == rhs.pSubLayerHrdParametersVcl ); + } + + bool operator!=( H265HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265HrdFlags flags = {}; + uint8_t tick_divisor_minus2 = {}; + uint8_t du_cpb_removal_delay_increment_length_minus1 = {}; + uint8_t dpb_output_delay_du_length_minus1 = {}; + uint8_t bit_rate_scale = {}; + uint8_t 
cpb_size_scale = {}; + uint8_t cpb_size_du_scale = {}; + uint8_t initial_cpb_removal_delay_length_minus1 = {}; + uint8_t au_cpb_removal_delay_length_minus1 = {}; + uint8_t dpb_output_delay_length_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cpb_cnt_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D elemental_duration_in_tc_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SubLayerHrdParameters * pSubLayerHrdParametersNal = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SubLayerHrdParameters * pSubLayerHrdParametersVcl = {}; + }; + + struct H265VpsFlags + { + using NativeType = StdVideoH265VpsFlags; + + operator StdVideoH265VpsFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265VpsFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265VpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( vps_temporal_id_nesting_flag == rhs.vps_temporal_id_nesting_flag ) && + ( vps_sub_layer_ordering_info_present_flag == rhs.vps_sub_layer_ordering_info_present_flag ) && + ( vps_timing_info_present_flag == rhs.vps_timing_info_present_flag ) && + ( vps_poc_proportional_to_timing_flag == rhs.vps_poc_proportional_to_timing_flag ); + } + + bool operator!=( H265VpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t vps_temporal_id_nesting_flag : 1; + uint32_t vps_sub_layer_ordering_info_present_flag : 1; + uint32_t vps_timing_info_present_flag : 1; + uint32_t vps_poc_proportional_to_timing_flag : 1; + }; + + struct H265ProfileTierLevelFlags + { + using NativeType = StdVideoH265ProfileTierLevelFlags; + + operator StdVideoH265ProfileTierLevelFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ProfileTierLevelFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265ProfileTierLevelFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( general_tier_flag == rhs.general_tier_flag ) && ( general_progressive_source_flag == rhs.general_progressive_source_flag ) && + ( general_interlaced_source_flag == rhs.general_interlaced_source_flag ) && + ( general_non_packed_constraint_flag == rhs.general_non_packed_constraint_flag ) && + ( general_frame_only_constraint_flag == rhs.general_frame_only_constraint_flag ); + } + + bool operator!=( H265ProfileTierLevelFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t general_tier_flag : 1; + uint32_t general_progressive_source_flag : 1; + uint32_t general_interlaced_source_flag : 1; + uint32_t general_non_packed_constraint_flag : 1; + uint32_t general_frame_only_constraint_flag : 1; + }; + + struct H265ProfileTierLevel + { + using NativeType = StdVideoH265ProfileTierLevel; + + operator StdVideoH265ProfileTierLevel const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ProfileTierLevel &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265ProfileTierLevel const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( general_profile_idc == rhs.general_profile_idc ) && ( general_level_idc == rhs.general_level_idc ); + } + + bool operator!=( H265ProfileTierLevel const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + 
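+ // Carries the general_* profile/tier/level signalling of the HEVC
+ // profile_tier_level() syntax structure: the constraint flags wrapped in
+ // H265ProfileTierLevelFlags plus the general profile IDC and level IDC.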
VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileTierLevelFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileIdc general_profile_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileIdc::eMain; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265LevelIdc general_level_idc = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265LevelIdc::e1_0; + }; + + struct H265VideoParameterSet + { + using NativeType = StdVideoH265VideoParameterSet; + + operator StdVideoH265VideoParameterSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265VideoParameterSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265VideoParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( vps_video_parameter_set_id == rhs.vps_video_parameter_set_id ) && + ( vps_max_sub_layers_minus1 == rhs.vps_max_sub_layers_minus1 ) && ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && + ( vps_num_units_in_tick == rhs.vps_num_units_in_tick ) && ( vps_time_scale == rhs.vps_time_scale ) && + ( vps_num_ticks_poc_diff_one_minus1 == rhs.vps_num_ticks_poc_diff_one_minus1 ) && ( reserved3 == rhs.reserved3 ) && + ( pDecPicBufMgr == rhs.pDecPicBufMgr ) && ( pHrdParameters == rhs.pHrdParameters ) && ( pProfileTierLevel == rhs.pProfileTierLevel ); + } + + bool operator!=( H265VideoParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265VpsFlags flags = {}; + uint8_t vps_video_parameter_set_id = {}; + uint8_t vps_max_sub_layers_minus1 = {}; + uint8_t reserved1 = {}; + uint8_t reserved2 = {}; + uint32_t vps_num_units_in_tick = {}; + uint32_t vps_time_scale = {}; + uint32_t vps_num_ticks_poc_diff_one_minus1 = {}; + uint32_t reserved3 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265DecPicBufMgr * pDecPicBufMgr = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265HrdParameters * pHrdParameters = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileTierLevel * pProfileTierLevel = {}; + }; + + struct H265ScalingLists + { + using NativeType = StdVideoH265ScalingLists; + + operator StdVideoH265ScalingLists const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ScalingLists &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( ScalingList4x4 == rhs.ScalingList4x4 ) && ( ScalingList8x8 == rhs.ScalingList8x8 ) && ( ScalingList16x16 == rhs.ScalingList16x16 ) && + ( ScalingList32x32 == rhs.ScalingList32x32 ) && ( ScalingListDCCoef16x16 == rhs.ScalingListDCCoef16x16 ) && + ( ScalingListDCCoef32x32 == rhs.ScalingListDCCoef32x32 ); + } + + bool operator!=( H265ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper2D + ScalingList4x4 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D + ScalingList8x8 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D + ScalingList16x16 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D + ScalingList32x32 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D ScalingListDCCoef16x16 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D ScalingListDCCoef32x32 = {}; + }; + + struct H265SpsVuiFlags + { + using NativeType = StdVideoH265SpsVuiFlags; + + operator 
StdVideoH265SpsVuiFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SpsVuiFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( aspect_ratio_info_present_flag == rhs.aspect_ratio_info_present_flag ) && ( overscan_info_present_flag == rhs.overscan_info_present_flag ) && + ( overscan_appropriate_flag == rhs.overscan_appropriate_flag ) && ( video_signal_type_present_flag == rhs.video_signal_type_present_flag ) && + ( video_full_range_flag == rhs.video_full_range_flag ) && ( colour_description_present_flag == rhs.colour_description_present_flag ) && + ( chroma_loc_info_present_flag == rhs.chroma_loc_info_present_flag ) && + ( neutral_chroma_indication_flag == rhs.neutral_chroma_indication_flag ) && ( field_seq_flag == rhs.field_seq_flag ) && + ( frame_field_info_present_flag == rhs.frame_field_info_present_flag ) && ( default_display_window_flag == rhs.default_display_window_flag ) && + ( vui_timing_info_present_flag == rhs.vui_timing_info_present_flag ) && + ( vui_poc_proportional_to_timing_flag == rhs.vui_poc_proportional_to_timing_flag ) && + ( vui_hrd_parameters_present_flag == rhs.vui_hrd_parameters_present_flag ) && ( bitstream_restriction_flag == rhs.bitstream_restriction_flag ) && + ( tiles_fixed_structure_flag == rhs.tiles_fixed_structure_flag ) && + ( motion_vectors_over_pic_boundaries_flag == rhs.motion_vectors_over_pic_boundaries_flag ) && + ( restricted_ref_pic_lists_flag == rhs.restricted_ref_pic_lists_flag ); + } + + bool operator!=( H265SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t aspect_ratio_info_present_flag : 1; + uint32_t overscan_info_present_flag : 1; + uint32_t overscan_appropriate_flag : 1; + uint32_t video_signal_type_present_flag : 1; + uint32_t video_full_range_flag : 1; + uint32_t colour_description_present_flag : 1; + uint32_t chroma_loc_info_present_flag : 1; + uint32_t neutral_chroma_indication_flag : 1; + uint32_t field_seq_flag : 1; + uint32_t frame_field_info_present_flag : 1; + uint32_t default_display_window_flag : 1; + uint32_t vui_timing_info_present_flag : 1; + uint32_t vui_poc_proportional_to_timing_flag : 1; + uint32_t vui_hrd_parameters_present_flag : 1; + uint32_t bitstream_restriction_flag : 1; + uint32_t tiles_fixed_structure_flag : 1; + uint32_t motion_vectors_over_pic_boundaries_flag : 1; + uint32_t restricted_ref_pic_lists_flag : 1; + }; + + struct H265SequenceParameterSetVui + { + using NativeType = StdVideoH265SequenceParameterSetVui; + + operator StdVideoH265SequenceParameterSetVui const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SequenceParameterSetVui &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( aspect_ratio_idc == rhs.aspect_ratio_idc ) && ( sar_width == rhs.sar_width ) && ( sar_height == rhs.sar_height ) && + ( video_format == rhs.video_format ) && ( colour_primaries == rhs.colour_primaries ) && + ( transfer_characteristics == rhs.transfer_characteristics ) && ( matrix_coeffs == rhs.matrix_coeffs ) && + ( chroma_sample_loc_type_top_field == rhs.chroma_sample_loc_type_top_field ) && + ( chroma_sample_loc_type_bottom_field == rhs.chroma_sample_loc_type_bottom_field ) && ( reserved1 == rhs.reserved1 ) 
&& + ( reserved2 == rhs.reserved2 ) && ( def_disp_win_left_offset == rhs.def_disp_win_left_offset ) && + ( def_disp_win_right_offset == rhs.def_disp_win_right_offset ) && ( def_disp_win_top_offset == rhs.def_disp_win_top_offset ) && + ( def_disp_win_bottom_offset == rhs.def_disp_win_bottom_offset ) && ( vui_num_units_in_tick == rhs.vui_num_units_in_tick ) && + ( vui_time_scale == rhs.vui_time_scale ) && ( vui_num_ticks_poc_diff_one_minus1 == rhs.vui_num_ticks_poc_diff_one_minus1 ) && + ( min_spatial_segmentation_idc == rhs.min_spatial_segmentation_idc ) && ( reserved3 == rhs.reserved3 ) && + ( max_bytes_per_pic_denom == rhs.max_bytes_per_pic_denom ) && ( max_bits_per_min_cu_denom == rhs.max_bits_per_min_cu_denom ) && + ( log2_max_mv_length_horizontal == rhs.log2_max_mv_length_horizontal ) && ( log2_max_mv_length_vertical == rhs.log2_max_mv_length_vertical ) && + ( pHrdParameters == rhs.pHrdParameters ); + } + + bool operator!=( H265SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SpsVuiFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265AspectRatioIdc aspect_ratio_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265AspectRatioIdc::eUnspecified; + uint16_t sar_width = {}; + uint16_t sar_height = {}; + uint8_t video_format = {}; + uint8_t colour_primaries = {}; + uint8_t transfer_characteristics = {}; + uint8_t matrix_coeffs = {}; + uint8_t chroma_sample_loc_type_top_field = {}; + uint8_t chroma_sample_loc_type_bottom_field = {}; + uint8_t reserved1 = {}; + uint8_t reserved2 = {}; + uint16_t def_disp_win_left_offset = {}; + uint16_t def_disp_win_right_offset = {}; + uint16_t def_disp_win_top_offset = {}; + uint16_t def_disp_win_bottom_offset = {}; + uint32_t vui_num_units_in_tick = {}; + uint32_t vui_time_scale = {}; + uint32_t vui_num_ticks_poc_diff_one_minus1 = {}; + uint16_t min_spatial_segmentation_idc = {}; + uint16_t reserved3 = {}; + uint8_t max_bytes_per_pic_denom = {}; + uint8_t max_bits_per_min_cu_denom = {}; + uint8_t log2_max_mv_length_horizontal = {}; + uint8_t log2_max_mv_length_vertical = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265HrdParameters * pHrdParameters = {}; + }; + + struct H265PredictorPaletteEntries + { + using NativeType = StdVideoH265PredictorPaletteEntries; + + operator StdVideoH265PredictorPaletteEntries const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265PredictorPaletteEntries &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265PredictorPaletteEntries const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( PredictorPaletteEntries == rhs.PredictorPaletteEntries ); + } + + bool operator!=( H265PredictorPaletteEntries const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE:: + ArrayWrapper2D + PredictorPaletteEntries = {}; + }; + + struct H265SpsFlags + { + using NativeType = StdVideoH265SpsFlags; + + operator StdVideoH265SpsFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SpsFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sps_temporal_id_nesting_flag == rhs.sps_temporal_id_nesting_flag ) && ( separate_colour_plane_flag == rhs.separate_colour_plane_flag ) && + ( 
conformance_window_flag == rhs.conformance_window_flag ) && + ( sps_sub_layer_ordering_info_present_flag == rhs.sps_sub_layer_ordering_info_present_flag ) && + ( scaling_list_enabled_flag == rhs.scaling_list_enabled_flag ) && + ( sps_scaling_list_data_present_flag == rhs.sps_scaling_list_data_present_flag ) && ( amp_enabled_flag == rhs.amp_enabled_flag ) && + ( sample_adaptive_offset_enabled_flag == rhs.sample_adaptive_offset_enabled_flag ) && ( pcm_enabled_flag == rhs.pcm_enabled_flag ) && + ( pcm_loop_filter_disabled_flag == rhs.pcm_loop_filter_disabled_flag ) && + ( long_term_ref_pics_present_flag == rhs.long_term_ref_pics_present_flag ) && + ( sps_temporal_mvp_enabled_flag == rhs.sps_temporal_mvp_enabled_flag ) && + ( strong_intra_smoothing_enabled_flag == rhs.strong_intra_smoothing_enabled_flag ) && + ( vui_parameters_present_flag == rhs.vui_parameters_present_flag ) && ( sps_extension_present_flag == rhs.sps_extension_present_flag ) && + ( sps_range_extension_flag == rhs.sps_range_extension_flag ) && + ( transform_skip_rotation_enabled_flag == rhs.transform_skip_rotation_enabled_flag ) && + ( transform_skip_context_enabled_flag == rhs.transform_skip_context_enabled_flag ) && + ( implicit_rdpcm_enabled_flag == rhs.implicit_rdpcm_enabled_flag ) && ( explicit_rdpcm_enabled_flag == rhs.explicit_rdpcm_enabled_flag ) && + ( extended_precision_processing_flag == rhs.extended_precision_processing_flag ) && + ( intra_smoothing_disabled_flag == rhs.intra_smoothing_disabled_flag ) && + ( high_precision_offsets_enabled_flag == rhs.high_precision_offsets_enabled_flag ) && + ( persistent_rice_adaptation_enabled_flag == rhs.persistent_rice_adaptation_enabled_flag ) && + ( cabac_bypass_alignment_enabled_flag == rhs.cabac_bypass_alignment_enabled_flag ) && ( sps_scc_extension_flag == rhs.sps_scc_extension_flag ) && + ( sps_curr_pic_ref_enabled_flag == rhs.sps_curr_pic_ref_enabled_flag ) && ( palette_mode_enabled_flag == rhs.palette_mode_enabled_flag ) && + ( sps_palette_predictor_initializers_present_flag == rhs.sps_palette_predictor_initializers_present_flag ) && + ( intra_boundary_filtering_disabled_flag == rhs.intra_boundary_filtering_disabled_flag ); + } + + bool operator!=( H265SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t sps_temporal_id_nesting_flag : 1; + uint32_t separate_colour_plane_flag : 1; + uint32_t conformance_window_flag : 1; + uint32_t sps_sub_layer_ordering_info_present_flag : 1; + uint32_t scaling_list_enabled_flag : 1; + uint32_t sps_scaling_list_data_present_flag : 1; + uint32_t amp_enabled_flag : 1; + uint32_t sample_adaptive_offset_enabled_flag : 1; + uint32_t pcm_enabled_flag : 1; + uint32_t pcm_loop_filter_disabled_flag : 1; + uint32_t long_term_ref_pics_present_flag : 1; + uint32_t sps_temporal_mvp_enabled_flag : 1; + uint32_t strong_intra_smoothing_enabled_flag : 1; + uint32_t vui_parameters_present_flag : 1; + uint32_t sps_extension_present_flag : 1; + uint32_t sps_range_extension_flag : 1; + uint32_t transform_skip_rotation_enabled_flag : 1; + uint32_t transform_skip_context_enabled_flag : 1; + uint32_t implicit_rdpcm_enabled_flag : 1; + uint32_t explicit_rdpcm_enabled_flag : 1; + uint32_t extended_precision_processing_flag : 1; + uint32_t intra_smoothing_disabled_flag : 1; + uint32_t high_precision_offsets_enabled_flag : 1; + uint32_t persistent_rice_adaptation_enabled_flag : 1; + uint32_t cabac_bypass_alignment_enabled_flag : 1; + uint32_t sps_scc_extension_flag : 1; + uint32_t sps_curr_pic_ref_enabled_flag 
: 1; + uint32_t palette_mode_enabled_flag : 1; + uint32_t sps_palette_predictor_initializers_present_flag : 1; + uint32_t intra_boundary_filtering_disabled_flag : 1; + }; + + struct H265ShortTermRefPicSetFlags + { + using NativeType = StdVideoH265ShortTermRefPicSetFlags; + + operator StdVideoH265ShortTermRefPicSetFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ShortTermRefPicSetFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265ShortTermRefPicSetFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( inter_ref_pic_set_prediction_flag == rhs.inter_ref_pic_set_prediction_flag ) && ( delta_rps_sign == rhs.delta_rps_sign ); + } + + bool operator!=( H265ShortTermRefPicSetFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t inter_ref_pic_set_prediction_flag : 1; + uint32_t delta_rps_sign : 1; + }; + + struct H265ShortTermRefPicSet + { + using NativeType = StdVideoH265ShortTermRefPicSet; + + operator StdVideoH265ShortTermRefPicSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ShortTermRefPicSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265ShortTermRefPicSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( delta_idx_minus1 == rhs.delta_idx_minus1 ) && ( use_delta_flag == rhs.use_delta_flag ) && + ( abs_delta_rps_minus1 == rhs.abs_delta_rps_minus1 ) && ( used_by_curr_pic_flag == rhs.used_by_curr_pic_flag ) && + ( used_by_curr_pic_s0_flag == rhs.used_by_curr_pic_s0_flag ) && ( used_by_curr_pic_s1_flag == rhs.used_by_curr_pic_s1_flag ) && + ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && ( reserved3 == rhs.reserved3 ) && + ( num_negative_pics == rhs.num_negative_pics ) && ( num_positive_pics == rhs.num_positive_pics ) && + ( delta_poc_s0_minus1 == rhs.delta_poc_s0_minus1 ) && ( delta_poc_s1_minus1 == rhs.delta_poc_s1_minus1 ); + } + + bool operator!=( H265ShortTermRefPicSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ShortTermRefPicSetFlags flags = {}; + uint32_t delta_idx_minus1 = {}; + uint16_t use_delta_flag = {}; + uint16_t abs_delta_rps_minus1 = {}; + uint16_t used_by_curr_pic_flag = {}; + uint16_t used_by_curr_pic_s0_flag = {}; + uint16_t used_by_curr_pic_s1_flag = {}; + uint16_t reserved1 = {}; + uint8_t reserved2 = {}; + uint8_t reserved3 = {}; + uint8_t num_negative_pics = {}; + uint8_t num_positive_pics = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_poc_s0_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_poc_s1_minus1 = {}; + }; + + struct H265LongTermRefPicsSps + { + using NativeType = StdVideoH265LongTermRefPicsSps; + + operator StdVideoH265LongTermRefPicsSps const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265LongTermRefPicsSps &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265LongTermRefPicsSps const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( used_by_curr_pic_lt_sps_flag == rhs.used_by_curr_pic_lt_sps_flag ) && ( lt_ref_pic_poc_lsb_sps == rhs.lt_ref_pic_poc_lsb_sps ); + } + + bool operator!=( H265LongTermRefPicsSps const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t used_by_curr_pic_lt_sps_flag = {}; + 
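+ // used_by_curr_pic_lt_sps_flag above packs the per-candidate
+ // used_by_curr_pic_lt_sps_flag[ i ] values (bit i corresponds to the i-th SPS
+ // long-term reference picture candidate); lt_ref_pic_poc_lsb_sps below stores the
+ // pic_order_cnt_lsb value for each candidate, sized per the C Video Std header.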
VULKAN_HPP_NAMESPACE::ArrayWrapper1D lt_ref_pic_poc_lsb_sps = {}; + }; + + struct H265SequenceParameterSet + { + using NativeType = StdVideoH265SequenceParameterSet; + + operator StdVideoH265SequenceParameterSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SequenceParameterSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( chroma_format_idc == rhs.chroma_format_idc ) && ( pic_width_in_luma_samples == rhs.pic_width_in_luma_samples ) && + ( pic_height_in_luma_samples == rhs.pic_height_in_luma_samples ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) && + ( sps_max_sub_layers_minus1 == rhs.sps_max_sub_layers_minus1 ) && ( sps_seq_parameter_set_id == rhs.sps_seq_parameter_set_id ) && + ( bit_depth_luma_minus8 == rhs.bit_depth_luma_minus8 ) && ( bit_depth_chroma_minus8 == rhs.bit_depth_chroma_minus8 ) && + ( log2_max_pic_order_cnt_lsb_minus4 == rhs.log2_max_pic_order_cnt_lsb_minus4 ) && + ( log2_min_luma_coding_block_size_minus3 == rhs.log2_min_luma_coding_block_size_minus3 ) && + ( log2_diff_max_min_luma_coding_block_size == rhs.log2_diff_max_min_luma_coding_block_size ) && + ( log2_min_luma_transform_block_size_minus2 == rhs.log2_min_luma_transform_block_size_minus2 ) && + ( log2_diff_max_min_luma_transform_block_size == rhs.log2_diff_max_min_luma_transform_block_size ) && + ( max_transform_hierarchy_depth_inter == rhs.max_transform_hierarchy_depth_inter ) && + ( max_transform_hierarchy_depth_intra == rhs.max_transform_hierarchy_depth_intra ) && + ( num_short_term_ref_pic_sets == rhs.num_short_term_ref_pic_sets ) && ( num_long_term_ref_pics_sps == rhs.num_long_term_ref_pics_sps ) && + ( pcm_sample_bit_depth_luma_minus1 == rhs.pcm_sample_bit_depth_luma_minus1 ) && + ( pcm_sample_bit_depth_chroma_minus1 == rhs.pcm_sample_bit_depth_chroma_minus1 ) && + ( log2_min_pcm_luma_coding_block_size_minus3 == rhs.log2_min_pcm_luma_coding_block_size_minus3 ) && + ( log2_diff_max_min_pcm_luma_coding_block_size == rhs.log2_diff_max_min_pcm_luma_coding_block_size ) && ( reserved1 == rhs.reserved1 ) && + ( reserved2 == rhs.reserved2 ) && ( palette_max_size == rhs.palette_max_size ) && + ( delta_palette_max_predictor_size == rhs.delta_palette_max_predictor_size ) && + ( motion_vector_resolution_control_idc == rhs.motion_vector_resolution_control_idc ) && + ( sps_num_palette_predictor_initializers_minus1 == rhs.sps_num_palette_predictor_initializers_minus1 ) && + ( conf_win_left_offset == rhs.conf_win_left_offset ) && ( conf_win_right_offset == rhs.conf_win_right_offset ) && + ( conf_win_top_offset == rhs.conf_win_top_offset ) && ( conf_win_bottom_offset == rhs.conf_win_bottom_offset ) && + ( pProfileTierLevel == rhs.pProfileTierLevel ) && ( pDecPicBufMgr == rhs.pDecPicBufMgr ) && ( pScalingLists == rhs.pScalingLists ) && + ( pShortTermRefPicSet == rhs.pShortTermRefPicSet ) && ( pLongTermRefPicsSps == rhs.pLongTermRefPicsSps ) && + ( pSequenceParameterSetVui == rhs.pSequenceParameterSetVui ) && ( pPredictorPaletteEntries == rhs.pPredictorPaletteEntries ); + } + + bool operator!=( H265SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SpsFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ChromaFormatIdc chroma_format_idc = + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ChromaFormatIdc::eMonochrome; + uint32_t pic_width_in_luma_samples = {}; + uint32_t pic_height_in_luma_samples = {}; + uint8_t sps_video_parameter_set_id = {}; + uint8_t sps_max_sub_layers_minus1 = {}; + uint8_t sps_seq_parameter_set_id = {}; + uint8_t bit_depth_luma_minus8 = {}; + uint8_t bit_depth_chroma_minus8 = {}; + uint8_t log2_max_pic_order_cnt_lsb_minus4 = {}; + uint8_t log2_min_luma_coding_block_size_minus3 = {}; + uint8_t log2_diff_max_min_luma_coding_block_size = {}; + uint8_t log2_min_luma_transform_block_size_minus2 = {}; + uint8_t log2_diff_max_min_luma_transform_block_size = {}; + uint8_t max_transform_hierarchy_depth_inter = {}; + uint8_t max_transform_hierarchy_depth_intra = {}; + uint8_t num_short_term_ref_pic_sets = {}; + uint8_t num_long_term_ref_pics_sps = {}; + uint8_t pcm_sample_bit_depth_luma_minus1 = {}; + uint8_t pcm_sample_bit_depth_chroma_minus1 = {}; + uint8_t log2_min_pcm_luma_coding_block_size_minus3 = {}; + uint8_t log2_diff_max_min_pcm_luma_coding_block_size = {}; + uint8_t reserved1 = {}; + uint8_t reserved2 = {}; + uint8_t palette_max_size = {}; + uint8_t delta_palette_max_predictor_size = {}; + uint8_t motion_vector_resolution_control_idc = {}; + uint8_t sps_num_palette_predictor_initializers_minus1 = {}; + uint32_t conf_win_left_offset = {}; + uint32_t conf_win_right_offset = {}; + uint32_t conf_win_top_offset = {}; + uint32_t conf_win_bottom_offset = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileTierLevel * pProfileTierLevel = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265DecPicBufMgr * pDecPicBufMgr = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ScalingLists * pScalingLists = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ShortTermRefPicSet * pShortTermRefPicSet = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265LongTermRefPicsSps * pLongTermRefPicsSps = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SequenceParameterSetVui * pSequenceParameterSetVui = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PredictorPaletteEntries * pPredictorPaletteEntries = {}; + }; + + struct H265PpsFlags + { + using NativeType = StdVideoH265PpsFlags; + + operator StdVideoH265PpsFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265PpsFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( dependent_slice_segments_enabled_flag == rhs.dependent_slice_segments_enabled_flag ) && + ( output_flag_present_flag == rhs.output_flag_present_flag ) && ( sign_data_hiding_enabled_flag == rhs.sign_data_hiding_enabled_flag ) && + ( cabac_init_present_flag == rhs.cabac_init_present_flag ) && ( constrained_intra_pred_flag == rhs.constrained_intra_pred_flag ) && + ( transform_skip_enabled_flag == rhs.transform_skip_enabled_flag ) && ( cu_qp_delta_enabled_flag == rhs.cu_qp_delta_enabled_flag ) && + ( pps_slice_chroma_qp_offsets_present_flag == rhs.pps_slice_chroma_qp_offsets_present_flag ) && + ( weighted_pred_flag == rhs.weighted_pred_flag ) && ( weighted_bipred_flag == rhs.weighted_bipred_flag ) && + ( transquant_bypass_enabled_flag == rhs.transquant_bypass_enabled_flag ) && ( tiles_enabled_flag == rhs.tiles_enabled_flag ) && + ( entropy_coding_sync_enabled_flag == rhs.entropy_coding_sync_enabled_flag ) && ( uniform_spacing_flag == 
rhs.uniform_spacing_flag ) && + ( loop_filter_across_tiles_enabled_flag == rhs.loop_filter_across_tiles_enabled_flag ) && + ( pps_loop_filter_across_slices_enabled_flag == rhs.pps_loop_filter_across_slices_enabled_flag ) && + ( deblocking_filter_control_present_flag == rhs.deblocking_filter_control_present_flag ) && + ( deblocking_filter_override_enabled_flag == rhs.deblocking_filter_override_enabled_flag ) && + ( pps_deblocking_filter_disabled_flag == rhs.pps_deblocking_filter_disabled_flag ) && + ( pps_scaling_list_data_present_flag == rhs.pps_scaling_list_data_present_flag ) && + ( lists_modification_present_flag == rhs.lists_modification_present_flag ) && + ( slice_segment_header_extension_present_flag == rhs.slice_segment_header_extension_present_flag ) && + ( pps_extension_present_flag == rhs.pps_extension_present_flag ) && + ( cross_component_prediction_enabled_flag == rhs.cross_component_prediction_enabled_flag ) && + ( chroma_qp_offset_list_enabled_flag == rhs.chroma_qp_offset_list_enabled_flag ) && + ( pps_curr_pic_ref_enabled_flag == rhs.pps_curr_pic_ref_enabled_flag ) && + ( residual_adaptive_colour_transform_enabled_flag == rhs.residual_adaptive_colour_transform_enabled_flag ) && + ( pps_slice_act_qp_offsets_present_flag == rhs.pps_slice_act_qp_offsets_present_flag ) && + ( pps_palette_predictor_initializers_present_flag == rhs.pps_palette_predictor_initializers_present_flag ) && + ( monochrome_palette_flag == rhs.monochrome_palette_flag ) && ( pps_range_extension_flag == rhs.pps_range_extension_flag ); + } + + bool operator!=( H265PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t dependent_slice_segments_enabled_flag : 1; + uint32_t output_flag_present_flag : 1; + uint32_t sign_data_hiding_enabled_flag : 1; + uint32_t cabac_init_present_flag : 1; + uint32_t constrained_intra_pred_flag : 1; + uint32_t transform_skip_enabled_flag : 1; + uint32_t cu_qp_delta_enabled_flag : 1; + uint32_t pps_slice_chroma_qp_offsets_present_flag : 1; + uint32_t weighted_pred_flag : 1; + uint32_t weighted_bipred_flag : 1; + uint32_t transquant_bypass_enabled_flag : 1; + uint32_t tiles_enabled_flag : 1; + uint32_t entropy_coding_sync_enabled_flag : 1; + uint32_t uniform_spacing_flag : 1; + uint32_t loop_filter_across_tiles_enabled_flag : 1; + uint32_t pps_loop_filter_across_slices_enabled_flag : 1; + uint32_t deblocking_filter_control_present_flag : 1; + uint32_t deblocking_filter_override_enabled_flag : 1; + uint32_t pps_deblocking_filter_disabled_flag : 1; + uint32_t pps_scaling_list_data_present_flag : 1; + uint32_t lists_modification_present_flag : 1; + uint32_t slice_segment_header_extension_present_flag : 1; + uint32_t pps_extension_present_flag : 1; + uint32_t cross_component_prediction_enabled_flag : 1; + uint32_t chroma_qp_offset_list_enabled_flag : 1; + uint32_t pps_curr_pic_ref_enabled_flag : 1; + uint32_t residual_adaptive_colour_transform_enabled_flag : 1; + uint32_t pps_slice_act_qp_offsets_present_flag : 1; + uint32_t pps_palette_predictor_initializers_present_flag : 1; + uint32_t monochrome_palette_flag : 1; + uint32_t pps_range_extension_flag : 1; + }; + + struct H265PictureParameterSet + { + using NativeType = StdVideoH265PictureParameterSet; + + operator StdVideoH265PictureParameterSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265PictureParameterSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265PictureParameterSet 
const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) && + ( pps_seq_parameter_set_id == rhs.pps_seq_parameter_set_id ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) && + ( num_extra_slice_header_bits == rhs.num_extra_slice_header_bits ) && + ( num_ref_idx_l0_default_active_minus1 == rhs.num_ref_idx_l0_default_active_minus1 ) && + ( num_ref_idx_l1_default_active_minus1 == rhs.num_ref_idx_l1_default_active_minus1 ) && ( init_qp_minus26 == rhs.init_qp_minus26 ) && + ( diff_cu_qp_delta_depth == rhs.diff_cu_qp_delta_depth ) && ( pps_cb_qp_offset == rhs.pps_cb_qp_offset ) && + ( pps_cr_qp_offset == rhs.pps_cr_qp_offset ) && ( pps_beta_offset_div2 == rhs.pps_beta_offset_div2 ) && + ( pps_tc_offset_div2 == rhs.pps_tc_offset_div2 ) && ( log2_parallel_merge_level_minus2 == rhs.log2_parallel_merge_level_minus2 ) && + ( log2_max_transform_skip_block_size_minus2 == rhs.log2_max_transform_skip_block_size_minus2 ) && + ( diff_cu_chroma_qp_offset_depth == rhs.diff_cu_chroma_qp_offset_depth ) && + ( chroma_qp_offset_list_len_minus1 == rhs.chroma_qp_offset_list_len_minus1 ) && ( cb_qp_offset_list == rhs.cb_qp_offset_list ) && + ( cr_qp_offset_list == rhs.cr_qp_offset_list ) && ( log2_sao_offset_scale_luma == rhs.log2_sao_offset_scale_luma ) && + ( log2_sao_offset_scale_chroma == rhs.log2_sao_offset_scale_chroma ) && ( pps_act_y_qp_offset_plus5 == rhs.pps_act_y_qp_offset_plus5 ) && + ( pps_act_cb_qp_offset_plus5 == rhs.pps_act_cb_qp_offset_plus5 ) && ( pps_act_cr_qp_offset_plus3 == rhs.pps_act_cr_qp_offset_plus3 ) && + ( pps_num_palette_predictor_initializers == rhs.pps_num_palette_predictor_initializers ) && + ( luma_bit_depth_entry_minus8 == rhs.luma_bit_depth_entry_minus8 ) && ( chroma_bit_depth_entry_minus8 == rhs.chroma_bit_depth_entry_minus8 ) && + ( num_tile_columns_minus1 == rhs.num_tile_columns_minus1 ) && ( num_tile_rows_minus1 == rhs.num_tile_rows_minus1 ) && + ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && ( column_width_minus1 == rhs.column_width_minus1 ) && + ( row_height_minus1 == rhs.row_height_minus1 ) && ( reserved3 == rhs.reserved3 ) && ( pScalingLists == rhs.pScalingLists ) && + ( pPredictorPaletteEntries == rhs.pPredictorPaletteEntries ); + } + + bool operator!=( H265PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PpsFlags flags = {}; + uint8_t pps_pic_parameter_set_id = {}; + uint8_t pps_seq_parameter_set_id = {}; + uint8_t sps_video_parameter_set_id = {}; + uint8_t num_extra_slice_header_bits = {}; + uint8_t num_ref_idx_l0_default_active_minus1 = {}; + uint8_t num_ref_idx_l1_default_active_minus1 = {}; + int8_t init_qp_minus26 = {}; + uint8_t diff_cu_qp_delta_depth = {}; + int8_t pps_cb_qp_offset = {}; + int8_t pps_cr_qp_offset = {}; + int8_t pps_beta_offset_div2 = {}; + int8_t pps_tc_offset_div2 = {}; + uint8_t log2_parallel_merge_level_minus2 = {}; + uint8_t log2_max_transform_skip_block_size_minus2 = {}; + uint8_t diff_cu_chroma_qp_offset_depth = {}; + uint8_t chroma_qp_offset_list_len_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cb_qp_offset_list = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cr_qp_offset_list = {}; + uint8_t log2_sao_offset_scale_luma = {}; + uint8_t log2_sao_offset_scale_chroma = {}; + int8_t pps_act_y_qp_offset_plus5 = {}; + int8_t pps_act_cb_qp_offset_plus5 = {}; + int8_t pps_act_cr_qp_offset_plus3 = {}; + uint8_t 
pps_num_palette_predictor_initializers = {}; + uint8_t luma_bit_depth_entry_minus8 = {}; + uint8_t chroma_bit_depth_entry_minus8 = {}; + uint8_t num_tile_columns_minus1 = {}; + uint8_t num_tile_rows_minus1 = {}; + uint8_t reserved1 = {}; + uint8_t reserved2 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D column_width_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D row_height_minus1 = {}; + uint32_t reserved3 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ScalingLists * pScalingLists = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PredictorPaletteEntries * pPredictorPaletteEntries = {}; + }; + + //=== vulkan_video_codec_h265std_decode === + + struct DecodeH265PictureInfoFlags + { + using NativeType = StdVideoDecodeH265PictureInfoFlags; + + operator StdVideoDecodeH265PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH265PictureInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( IrapPicFlag == rhs.IrapPicFlag ) && ( IdrPicFlag == rhs.IdrPicFlag ) && ( IsReference == rhs.IsReference ) && + ( short_term_ref_pic_set_sps_flag == rhs.short_term_ref_pic_set_sps_flag ); + } + + bool operator!=( DecodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t IrapPicFlag : 1; + uint32_t IdrPicFlag : 1; + uint32_t IsReference : 1; + uint32_t short_term_ref_pic_set_sps_flag : 1; + }; + + struct DecodeH265PictureInfo + { + using NativeType = StdVideoDecodeH265PictureInfo; + + operator StdVideoDecodeH265PictureInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH265PictureInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) && + ( pps_seq_parameter_set_id == rhs.pps_seq_parameter_set_id ) && ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) && + ( NumDeltaPocsOfRefRpsIdx == rhs.NumDeltaPocsOfRefRpsIdx ) && ( PicOrderCntVal == rhs.PicOrderCntVal ) && + ( NumBitsForSTRefPicSetInSlice == rhs.NumBitsForSTRefPicSetInSlice ) && ( reserved == rhs.reserved ) && + ( RefPicSetStCurrBefore == rhs.RefPicSetStCurrBefore ) && ( RefPicSetStCurrAfter == rhs.RefPicSetStCurrAfter ) && + ( RefPicSetLtCurr == rhs.RefPicSetLtCurr ); + } + + bool operator!=( DecodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH265PictureInfoFlags flags = {}; + uint8_t sps_video_parameter_set_id = {}; + uint8_t pps_seq_parameter_set_id = {}; + uint8_t pps_pic_parameter_set_id = {}; + uint8_t NumDeltaPocsOfRefRpsIdx = {}; + int32_t PicOrderCntVal = {}; + uint16_t NumBitsForSTRefPicSetInSlice = {}; + uint16_t reserved = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicSetStCurrBefore = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicSetStCurrAfter = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicSetLtCurr = {}; + }; + + struct DecodeH265ReferenceInfoFlags + { + using NativeType = StdVideoDecodeH265ReferenceInfoFlags; + + operator StdVideoDecodeH265ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + 
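+ // Non-const counterpart of the conversion operator above; both views simply
+ // reinterpret this wrapper as the layout-compatible
+ // StdVideoDecodeH265ReferenceInfoFlags C struct, so it can be handed to code
+ // expecting the C Video Std type.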
operator StdVideoDecodeH265ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( unused_for_reference == rhs.unused_for_reference ); + } + + bool operator!=( DecodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t used_for_long_term_reference : 1; + uint32_t unused_for_reference : 1; + }; + + struct DecodeH265ReferenceInfo + { + using NativeType = StdVideoDecodeH265ReferenceInfo; + + operator StdVideoDecodeH265ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH265ReferenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( PicOrderCntVal == rhs.PicOrderCntVal ); + } + + bool operator!=( DecodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH265ReferenceInfoFlags flags = {}; + int32_t PicOrderCntVal = {}; + }; + + //=== vulkan_video_codec_h265std_encode === + + struct EncodeH265WeightTableFlags + { + using NativeType = StdVideoEncodeH265WeightTableFlags; + + operator StdVideoEncodeH265WeightTableFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265WeightTableFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( luma_weight_l0_flag == rhs.luma_weight_l0_flag ) && ( chroma_weight_l0_flag == rhs.chroma_weight_l0_flag ) && + ( luma_weight_l1_flag == rhs.luma_weight_l1_flag ) && ( chroma_weight_l1_flag == rhs.chroma_weight_l1_flag ); + } + + bool operator!=( EncodeH265WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint16_t luma_weight_l0_flag = {}; + uint16_t chroma_weight_l0_flag = {}; + uint16_t luma_weight_l1_flag = {}; + uint16_t chroma_weight_l1_flag = {}; + }; + + struct EncodeH265WeightTable + { + using NativeType = StdVideoEncodeH265WeightTable; + + operator StdVideoEncodeH265WeightTable const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265WeightTable &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( luma_log2_weight_denom == rhs.luma_log2_weight_denom ) && + ( delta_chroma_log2_weight_denom == rhs.delta_chroma_log2_weight_denom ) && ( delta_luma_weight_l0 == rhs.delta_luma_weight_l0 ) && + ( luma_offset_l0 == rhs.luma_offset_l0 ) && ( delta_chroma_weight_l0 == rhs.delta_chroma_weight_l0 ) && + ( delta_chroma_offset_l0 == rhs.delta_chroma_offset_l0 ) && ( delta_luma_weight_l1 == rhs.delta_luma_weight_l1 ) && + ( luma_offset_l1 == rhs.luma_offset_l1 ) && ( delta_chroma_weight_l1 == rhs.delta_chroma_weight_l1 ) && + ( delta_chroma_offset_l1 == rhs.delta_chroma_offset_l1 ); + } + + bool operator!=( EncodeH265WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + 
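+ // The members below mirror the pred_weight_table() syntax of ITU-T H.265:
+ // per-reference-list delta weights and offsets for luma and chroma, with the 2D
+ // chroma arrays indexed by reference index and chroma plane.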
VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265WeightTableFlags flags = {}; + uint8_t luma_log2_weight_denom = {}; + int8_t delta_chroma_log2_weight_denom = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_luma_weight_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D luma_offset_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D delta_chroma_weight_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D delta_chroma_offset_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_luma_weight_l1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D luma_offset_l1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D delta_chroma_weight_l1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D delta_chroma_offset_l1 = {}; + }; + + struct EncodeH265SliceSegmentHeaderFlags + { + using NativeType = StdVideoEncodeH265SliceSegmentHeaderFlags; + + operator StdVideoEncodeH265SliceSegmentHeaderFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265SliceSegmentHeaderFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265SliceSegmentHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( first_slice_segment_in_pic_flag == rhs.first_slice_segment_in_pic_flag ) && + ( dependent_slice_segment_flag == rhs.dependent_slice_segment_flag ) && ( slice_sao_luma_flag == rhs.slice_sao_luma_flag ) && + ( slice_sao_chroma_flag == rhs.slice_sao_chroma_flag ) && ( num_ref_idx_active_override_flag == rhs.num_ref_idx_active_override_flag ) && + ( mvd_l1_zero_flag == rhs.mvd_l1_zero_flag ) && ( cabac_init_flag == rhs.cabac_init_flag ) && + ( cu_chroma_qp_offset_enabled_flag == rhs.cu_chroma_qp_offset_enabled_flag ) && + ( deblocking_filter_override_flag == rhs.deblocking_filter_override_flag ) && + ( slice_deblocking_filter_disabled_flag == rhs.slice_deblocking_filter_disabled_flag ) && + ( collocated_from_l0_flag == rhs.collocated_from_l0_flag ) && + ( slice_loop_filter_across_slices_enabled_flag == rhs.slice_loop_filter_across_slices_enabled_flag ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH265SliceSegmentHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t first_slice_segment_in_pic_flag : 1; + uint32_t dependent_slice_segment_flag : 1; + uint32_t slice_sao_luma_flag : 1; + uint32_t slice_sao_chroma_flag : 1; + uint32_t num_ref_idx_active_override_flag : 1; + uint32_t mvd_l1_zero_flag : 1; + uint32_t cabac_init_flag : 1; + uint32_t cu_chroma_qp_offset_enabled_flag : 1; + uint32_t deblocking_filter_override_flag : 1; + uint32_t slice_deblocking_filter_disabled_flag : 1; + uint32_t collocated_from_l0_flag : 1; + uint32_t slice_loop_filter_across_slices_enabled_flag : 1; + uint32_t reserved : 20; + }; + + struct EncodeH265SliceSegmentHeader + { + using NativeType = StdVideoEncodeH265SliceSegmentHeader; + + operator StdVideoEncodeH265SliceSegmentHeader const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265SliceSegmentHeader &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265SliceSegmentHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( slice_type == rhs.slice_type ) && ( slice_segment_address == rhs.slice_segment_address ) && + ( collocated_ref_idx == rhs.collocated_ref_idx ) && ( MaxNumMergeCand == rhs.MaxNumMergeCand ) && + ( slice_cb_qp_offset == rhs.slice_cb_qp_offset ) && ( slice_cr_qp_offset == 
rhs.slice_cr_qp_offset ) && + ( slice_beta_offset_div2 == rhs.slice_beta_offset_div2 ) && ( slice_tc_offset_div2 == rhs.slice_tc_offset_div2 ) && + ( slice_act_y_qp_offset == rhs.slice_act_y_qp_offset ) && ( slice_act_cb_qp_offset == rhs.slice_act_cb_qp_offset ) && + ( slice_act_cr_qp_offset == rhs.slice_act_cr_qp_offset ) && ( slice_qp_delta == rhs.slice_qp_delta ) && ( reserved1 == rhs.reserved1 ) && + ( pWeightTable == rhs.pWeightTable ); + } + + bool operator!=( EncodeH265SliceSegmentHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265SliceSegmentHeaderFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SliceType slice_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SliceType::eB; + uint32_t slice_segment_address = {}; + uint8_t collocated_ref_idx = {}; + uint8_t MaxNumMergeCand = {}; + int8_t slice_cb_qp_offset = {}; + int8_t slice_cr_qp_offset = {}; + int8_t slice_beta_offset_div2 = {}; + int8_t slice_tc_offset_div2 = {}; + int8_t slice_act_y_qp_offset = {}; + int8_t slice_act_cb_qp_offset = {}; + int8_t slice_act_cr_qp_offset = {}; + int8_t slice_qp_delta = {}; + uint16_t reserved1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265WeightTable * pWeightTable = {}; + }; + + struct EncodeH265ReferenceListsInfoFlags + { + using NativeType = StdVideoEncodeH265ReferenceListsInfoFlags; + + operator StdVideoEncodeH265ReferenceListsInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceListsInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( ref_pic_list_modification_flag_l0 == rhs.ref_pic_list_modification_flag_l0 ) && + ( ref_pic_list_modification_flag_l1 == rhs.ref_pic_list_modification_flag_l1 ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH265ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t ref_pic_list_modification_flag_l0 : 1; + uint32_t ref_pic_list_modification_flag_l1 : 1; + uint32_t reserved : 30; + }; + + struct EncodeH265ReferenceListsInfo + { + using NativeType = StdVideoEncodeH265ReferenceListsInfo; + + operator StdVideoEncodeH265ReferenceListsInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceListsInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( num_ref_idx_l0_active_minus1 == rhs.num_ref_idx_l0_active_minus1 ) && + ( num_ref_idx_l1_active_minus1 == rhs.num_ref_idx_l1_active_minus1 ) && ( RefPicList0 == rhs.RefPicList0 ) && + ( RefPicList1 == rhs.RefPicList1 ) && ( list_entry_l0 == rhs.list_entry_l0 ) && ( list_entry_l1 == rhs.list_entry_l1 ); + } + + bool operator!=( EncodeH265ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265ReferenceListsInfoFlags flags = {}; + uint8_t num_ref_idx_l0_active_minus1 = {}; + uint8_t num_ref_idx_l1_active_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicList0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicList1 = {}; + 
VULKAN_HPP_NAMESPACE::ArrayWrapper1D list_entry_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D list_entry_l1 = {}; + }; + + struct EncodeH265PictureInfoFlags + { + using NativeType = StdVideoEncodeH265PictureInfoFlags; + + operator StdVideoEncodeH265PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265PictureInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( is_reference == rhs.is_reference ) && ( IrapPicFlag == rhs.IrapPicFlag ) && + ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( discardable_flag == rhs.discardable_flag ) && + ( cross_layer_bla_flag == rhs.cross_layer_bla_flag ) && ( pic_output_flag == rhs.pic_output_flag ) && + ( no_output_of_prior_pics_flag == rhs.no_output_of_prior_pics_flag ) && + ( short_term_ref_pic_set_sps_flag == rhs.short_term_ref_pic_set_sps_flag ) && + ( slice_temporal_mvp_enabled_flag == rhs.slice_temporal_mvp_enabled_flag ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t is_reference : 1; + uint32_t IrapPicFlag : 1; + uint32_t used_for_long_term_reference : 1; + uint32_t discardable_flag : 1; + uint32_t cross_layer_bla_flag : 1; + uint32_t pic_output_flag : 1; + uint32_t no_output_of_prior_pics_flag : 1; + uint32_t short_term_ref_pic_set_sps_flag : 1; + uint32_t slice_temporal_mvp_enabled_flag : 1; + uint32_t reserved : 23; + }; + + struct EncodeH265LongTermRefPics + { + using NativeType = StdVideoEncodeH265LongTermRefPics; + + operator StdVideoEncodeH265LongTermRefPics const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265LongTermRefPics &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265LongTermRefPics const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( num_long_term_sps == rhs.num_long_term_sps ) && ( num_long_term_pics == rhs.num_long_term_pics ) && ( lt_idx_sps == rhs.lt_idx_sps ) && + ( poc_lsb_lt == rhs.poc_lsb_lt ) && ( used_by_curr_pic_lt_flag == rhs.used_by_curr_pic_lt_flag ) && + ( delta_poc_msb_present_flag == rhs.delta_poc_msb_present_flag ) && ( delta_poc_msb_cycle_lt == rhs.delta_poc_msb_cycle_lt ); + } + + bool operator!=( EncodeH265LongTermRefPics const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint8_t num_long_term_sps = {}; + uint8_t num_long_term_pics = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D lt_idx_sps = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D poc_lsb_lt = {}; + uint16_t used_by_curr_pic_lt_flag = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_poc_msb_present_flag = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_poc_msb_cycle_lt = {}; + }; + + struct EncodeH265PictureInfo + { + using NativeType = StdVideoEncodeH265PictureInfo; + + operator StdVideoEncodeH265PictureInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265PictureInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( pic_type == rhs.pic_type ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) && + ( pps_seq_parameter_set_id == 
rhs.pps_seq_parameter_set_id ) && ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) && + ( short_term_ref_pic_set_idx == rhs.short_term_ref_pic_set_idx ) && ( PicOrderCntVal == rhs.PicOrderCntVal ) && + ( TemporalId == rhs.TemporalId ) && ( reserved1 == rhs.reserved1 ) && ( pRefLists == rhs.pRefLists ) && + ( pShortTermRefPicSet == rhs.pShortTermRefPicSet ) && ( pLongTermRefPics == rhs.pLongTermRefPics ); + } + + bool operator!=( EncodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265PictureInfoFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType pic_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType::eP; + uint8_t sps_video_parameter_set_id = {}; + uint8_t pps_seq_parameter_set_id = {}; + uint8_t pps_pic_parameter_set_id = {}; + uint8_t short_term_ref_pic_set_idx = {}; + int32_t PicOrderCntVal = {}; + uint8_t TemporalId = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265ReferenceListsInfo * pRefLists = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ShortTermRefPicSet * pShortTermRefPicSet = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265LongTermRefPics * pLongTermRefPics = {}; + }; + + struct EncodeH265ReferenceInfoFlags + { + using NativeType = StdVideoEncodeH265ReferenceInfoFlags; + + operator StdVideoEncodeH265ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( unused_for_reference == rhs.unused_for_reference ) && + ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t used_for_long_term_reference : 1; + uint32_t unused_for_reference : 1; + uint32_t reserved : 30; + }; + + struct EncodeH265ReferenceInfo + { + using NativeType = StdVideoEncodeH265ReferenceInfo; + + operator StdVideoEncodeH265ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( pic_type == rhs.pic_type ) && ( PicOrderCntVal == rhs.PicOrderCntVal ) && ( TemporalId == rhs.TemporalId ); + } + + bool operator!=( EncodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265ReferenceInfoFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType pic_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType::eP; + int32_t PicOrderCntVal = {}; + uint8_t TemporalId = {}; + }; + + //=== vulkan_video_codec_av1std === + + struct AV1ColorConfigFlags + { + using NativeType = StdVideoAV1ColorConfigFlags; + + operator StdVideoAV1ColorConfigFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
StdVideoAV1ColorConfigFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( AV1ColorConfigFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( mono_chrome == rhs.mono_chrome ) && ( color_range == rhs.color_range ) && ( separate_uv_delta_q == rhs.separate_uv_delta_q ) && + ( color_description_present_flag == rhs.color_description_present_flag ) && ( reserved == rhs.reserved ); + } + + bool operator!=( AV1ColorConfigFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t mono_chrome : 1; + uint32_t color_range : 1; + uint32_t separate_uv_delta_q : 1; + uint32_t color_description_present_flag : 1; + uint32_t reserved : 28; + }; + + struct AV1ColorConfig + { + using NativeType = StdVideoAV1ColorConfig; + + operator StdVideoAV1ColorConfig const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1ColorConfig &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( AV1ColorConfig const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( BitDepth == rhs.BitDepth ) && ( subsampling_x == rhs.subsampling_x ) && ( subsampling_y == rhs.subsampling_y ) && + ( reserved1 == rhs.reserved1 ) && ( color_primaries == rhs.color_primaries ) && ( transfer_characteristics == rhs.transfer_characteristics ) && + ( matrix_coefficients == rhs.matrix_coefficients ) && ( chroma_sample_position == rhs.chroma_sample_position ); + } + + bool operator!=( AV1ColorConfig const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1ColorConfigFlags flags = {}; + uint8_t BitDepth = {}; + uint8_t subsampling_x = {}; + uint8_t subsampling_y = {}; + uint8_t reserved1 = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1ColorPrimaries color_primaries = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1ColorPrimaries::eBt709; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TransferCharacteristics transfer_characteristics = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TransferCharacteristics::eReserved0; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1MatrixCoefficients matrix_coefficients = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1MatrixCoefficients::eIdentity; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1ChromaSamplePosition chroma_sample_position = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1ChromaSamplePosition::eUnknown; + }; + + struct AV1TimingInfoFlags + { + using NativeType = StdVideoAV1TimingInfoFlags; + + operator StdVideoAV1TimingInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1TimingInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( AV1TimingInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( equal_picture_interval == rhs.equal_picture_interval ) && ( reserved == rhs.reserved ); + } + + bool operator!=( AV1TimingInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t equal_picture_interval : 1; + uint32_t reserved : 31; + }; + + struct AV1TimingInfo + { + using NativeType = StdVideoAV1TimingInfo; + + operator StdVideoAV1TimingInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1TimingInfo &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + bool operator==( AV1TimingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( num_units_in_display_tick == rhs.num_units_in_display_tick ) && ( time_scale == rhs.time_scale ) && + ( num_ticks_per_picture_minus_1 == rhs.num_ticks_per_picture_minus_1 ); + } + + bool operator!=( AV1TimingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TimingInfoFlags flags = {}; + uint32_t num_units_in_display_tick = {}; + uint32_t time_scale = {}; + uint32_t num_ticks_per_picture_minus_1 = {}; + }; + + struct AV1LoopFilterFlags + { + using NativeType = StdVideoAV1LoopFilterFlags; + + operator StdVideoAV1LoopFilterFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1LoopFilterFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( AV1LoopFilterFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( loop_filter_delta_enabled == rhs.loop_filter_delta_enabled ) && ( loop_filter_delta_update == rhs.loop_filter_delta_update ) && + ( reserved == rhs.reserved ); + } + + bool operator!=( AV1LoopFilterFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t loop_filter_delta_enabled : 1; + uint32_t loop_filter_delta_update : 1; + uint32_t reserved : 30; + }; + + struct AV1LoopFilter + { + using NativeType = StdVideoAV1LoopFilter; + + operator StdVideoAV1LoopFilter const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1LoopFilter &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( AV1LoopFilter const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( loop_filter_level == rhs.loop_filter_level ) && ( loop_filter_sharpness == rhs.loop_filter_sharpness ) && + ( update_ref_delta == rhs.update_ref_delta ) && ( loop_filter_ref_deltas == rhs.loop_filter_ref_deltas ) && + ( update_mode_delta == rhs.update_mode_delta ) && ( loop_filter_mode_deltas == rhs.loop_filter_mode_deltas ); + } + + bool operator!=( AV1LoopFilter const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1LoopFilterFlags flags = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D loop_filter_level = {}; + uint8_t loop_filter_sharpness = {}; + uint8_t update_ref_delta = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D loop_filter_ref_deltas = {}; + uint8_t update_mode_delta = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D loop_filter_mode_deltas = {}; + }; + + struct AV1QuantizationFlags + { + using NativeType = StdVideoAV1QuantizationFlags; + + operator StdVideoAV1QuantizationFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1QuantizationFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( AV1QuantizationFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( using_qmatrix == rhs.using_qmatrix ) && ( diff_uv_delta == rhs.diff_uv_delta ) && ( reserved == rhs.reserved ); + } + + bool operator!=( AV1QuantizationFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t using_qmatrix : 1; + uint32_t diff_uv_delta : 1; + uint32_t reserved : 30; + }; + + struct AV1Quantization + { + using NativeType = StdVideoAV1Quantization; 
+ + operator StdVideoAV1Quantization const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1Quantization &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( AV1Quantization const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( base_q_idx == rhs.base_q_idx ) && ( DeltaQYDc == rhs.DeltaQYDc ) && ( DeltaQUDc == rhs.DeltaQUDc ) && + ( DeltaQUAc == rhs.DeltaQUAc ) && ( DeltaQVDc == rhs.DeltaQVDc ) && ( DeltaQVAc == rhs.DeltaQVAc ) && ( qm_y == rhs.qm_y ) && + ( qm_u == rhs.qm_u ) && ( qm_v == rhs.qm_v ); + } + + bool operator!=( AV1Quantization const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1QuantizationFlags flags = {}; + uint8_t base_q_idx = {}; + int8_t DeltaQYDc = {}; + int8_t DeltaQUDc = {}; + int8_t DeltaQUAc = {}; + int8_t DeltaQVDc = {}; + int8_t DeltaQVAc = {}; + uint8_t qm_y = {}; + uint8_t qm_u = {}; + uint8_t qm_v = {}; + }; + + struct AV1Segmentation + { + using NativeType = StdVideoAV1Segmentation; + + operator StdVideoAV1Segmentation const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1Segmentation &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( AV1Segmentation const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( FeatureEnabled == rhs.FeatureEnabled ) && ( FeatureData == rhs.FeatureData ); + } + + bool operator!=( AV1Segmentation const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D FeatureEnabled = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D FeatureData = {}; + }; + + struct AV1TileInfoFlags + { + using NativeType = StdVideoAV1TileInfoFlags; + + operator StdVideoAV1TileInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1TileInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( AV1TileInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( uniform_tile_spacing_flag == rhs.uniform_tile_spacing_flag ) && ( reserved == rhs.reserved ); + } + + bool operator!=( AV1TileInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t uniform_tile_spacing_flag : 1; + uint32_t reserved : 31; + }; + + struct AV1TileInfo + { + using NativeType = StdVideoAV1TileInfo; + + operator StdVideoAV1TileInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1TileInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( AV1TileInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( TileCols == rhs.TileCols ) && ( TileRows == rhs.TileRows ) && + ( context_update_tile_id == rhs.context_update_tile_id ) && ( tile_size_bytes_minus_1 == rhs.tile_size_bytes_minus_1 ) && + ( reserved1 == rhs.reserved1 ) && ( pMiColStarts == rhs.pMiColStarts ) && ( pMiRowStarts == rhs.pMiRowStarts ) && + ( pWidthInSbsMinus1 == rhs.pWidthInSbsMinus1 ) && ( pHeightInSbsMinus1 == rhs.pHeightInSbsMinus1 ); + } + + bool operator!=( AV1TileInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TileInfoFlags flags = {}; + uint8_t TileCols = {}; + uint8_t TileRows = {}; + uint16_t 
context_update_tile_id = {}; + uint8_t tile_size_bytes_minus_1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved1 = {}; + const uint16_t * pMiColStarts = {}; + const uint16_t * pMiRowStarts = {}; + const uint16_t * pWidthInSbsMinus1 = {}; + const uint16_t * pHeightInSbsMinus1 = {}; + }; + + struct AV1CDEF + { + using NativeType = StdVideoAV1CDEF; + + operator StdVideoAV1CDEF const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1CDEF &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( AV1CDEF const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( cdef_damping_minus_3 == rhs.cdef_damping_minus_3 ) && ( cdef_bits == rhs.cdef_bits ) && ( cdef_y_pri_strength == rhs.cdef_y_pri_strength ) && + ( cdef_y_sec_strength == rhs.cdef_y_sec_strength ) && ( cdef_uv_pri_strength == rhs.cdef_uv_pri_strength ) && + ( cdef_uv_sec_strength == rhs.cdef_uv_sec_strength ); + } + + bool operator!=( AV1CDEF const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint8_t cdef_damping_minus_3 = {}; + uint8_t cdef_bits = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cdef_y_pri_strength = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cdef_y_sec_strength = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cdef_uv_pri_strength = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cdef_uv_sec_strength = {}; + }; + + struct AV1LoopRestoration + { + using NativeType = StdVideoAV1LoopRestoration; + + operator StdVideoAV1LoopRestoration const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1LoopRestoration &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( AV1LoopRestoration const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( FrameRestorationType == rhs.FrameRestorationType ) && ( LoopRestorationSize == rhs.LoopRestorationSize ); + } + + bool operator!=( AV1LoopRestoration const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D FrameRestorationType = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D LoopRestorationSize = {}; + }; + + struct AV1GlobalMotion + { + using NativeType = StdVideoAV1GlobalMotion; + + operator StdVideoAV1GlobalMotion const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1GlobalMotion &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( AV1GlobalMotion const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( GmType == rhs.GmType ) && ( gm_params == rhs.gm_params ); + } + + bool operator!=( AV1GlobalMotion const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D GmType = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D gm_params = {}; + }; + + struct AV1FilmGrainFlags + { + using NativeType = StdVideoAV1FilmGrainFlags; + + operator StdVideoAV1FilmGrainFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1FilmGrainFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( AV1FilmGrainFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( chroma_scaling_from_luma == rhs.chroma_scaling_from_luma ) && ( overlap_flag == rhs.overlap_flag ) && + ( clip_to_restricted_range == rhs.clip_to_restricted_range ) && ( update_grain == rhs.update_grain ) && ( reserved == rhs.reserved ); + } + + bool operator!=( 
AV1FilmGrainFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t chroma_scaling_from_luma : 1; + uint32_t overlap_flag : 1; + uint32_t clip_to_restricted_range : 1; + uint32_t update_grain : 1; + uint32_t reserved : 28; + }; + + struct AV1FilmGrain + { + using NativeType = StdVideoAV1FilmGrain; + + operator StdVideoAV1FilmGrain const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1FilmGrain &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( AV1FilmGrain const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( grain_scaling_minus_8 == rhs.grain_scaling_minus_8 ) && ( ar_coeff_lag == rhs.ar_coeff_lag ) && + ( ar_coeff_shift_minus_6 == rhs.ar_coeff_shift_minus_6 ) && ( grain_scale_shift == rhs.grain_scale_shift ) && ( grain_seed == rhs.grain_seed ) && + ( film_grain_params_ref_idx == rhs.film_grain_params_ref_idx ) && ( num_y_points == rhs.num_y_points ) && + ( point_y_value == rhs.point_y_value ) && ( point_y_scaling == rhs.point_y_scaling ) && ( num_cb_points == rhs.num_cb_points ) && + ( point_cb_value == rhs.point_cb_value ) && ( point_cb_scaling == rhs.point_cb_scaling ) && ( num_cr_points == rhs.num_cr_points ) && + ( point_cr_value == rhs.point_cr_value ) && ( point_cr_scaling == rhs.point_cr_scaling ) && + ( ar_coeffs_y_plus_128 == rhs.ar_coeffs_y_plus_128 ) && ( ar_coeffs_cb_plus_128 == rhs.ar_coeffs_cb_plus_128 ) && + ( ar_coeffs_cr_plus_128 == rhs.ar_coeffs_cr_plus_128 ) && ( cb_mult == rhs.cb_mult ) && ( cb_luma_mult == rhs.cb_luma_mult ) && + ( cb_offset == rhs.cb_offset ) && ( cr_mult == rhs.cr_mult ) && ( cr_luma_mult == rhs.cr_luma_mult ) && ( cr_offset == rhs.cr_offset ); + } + + bool operator!=( AV1FilmGrain const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1FilmGrainFlags flags = {}; + uint8_t grain_scaling_minus_8 = {}; + uint8_t ar_coeff_lag = {}; + uint8_t ar_coeff_shift_minus_6 = {}; + uint8_t grain_scale_shift = {}; + uint16_t grain_seed = {}; + uint8_t film_grain_params_ref_idx = {}; + uint8_t num_y_points = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D point_y_value = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D point_y_scaling = {}; + uint8_t num_cb_points = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D point_cb_value = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D point_cb_scaling = {}; + uint8_t num_cr_points = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D point_cr_value = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D point_cr_scaling = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D ar_coeffs_y_plus_128 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D ar_coeffs_cb_plus_128 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D ar_coeffs_cr_plus_128 = {}; + uint8_t cb_mult = {}; + uint8_t cb_luma_mult = {}; + uint16_t cb_offset = {}; + uint8_t cr_mult = {}; + uint8_t cr_luma_mult = {}; + uint16_t cr_offset = {}; + }; + + struct AV1SequenceHeaderFlags + { + using NativeType = StdVideoAV1SequenceHeaderFlags; + + operator StdVideoAV1SequenceHeaderFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1SequenceHeaderFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( AV1SequenceHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( still_picture == rhs.still_picture ) && ( reduced_still_picture_header == rhs.reduced_still_picture_header ) && + 
( use_128x128_superblock == rhs.use_128x128_superblock ) && ( enable_filter_intra == rhs.enable_filter_intra ) && + ( enable_intra_edge_filter == rhs.enable_intra_edge_filter ) && ( enable_interintra_compound == rhs.enable_interintra_compound ) && + ( enable_masked_compound == rhs.enable_masked_compound ) && ( enable_warped_motion == rhs.enable_warped_motion ) && + ( enable_dual_filter == rhs.enable_dual_filter ) && ( enable_order_hint == rhs.enable_order_hint ) && + ( enable_jnt_comp == rhs.enable_jnt_comp ) && ( enable_ref_frame_mvs == rhs.enable_ref_frame_mvs ) && + ( frame_id_numbers_present_flag == rhs.frame_id_numbers_present_flag ) && ( enable_superres == rhs.enable_superres ) && + ( enable_cdef == rhs.enable_cdef ) && ( enable_restoration == rhs.enable_restoration ) && + ( film_grain_params_present == rhs.film_grain_params_present ) && ( timing_info_present_flag == rhs.timing_info_present_flag ) && + ( initial_display_delay_present_flag == rhs.initial_display_delay_present_flag ) && ( reserved == rhs.reserved ); + } + + bool operator!=( AV1SequenceHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t still_picture : 1; + uint32_t reduced_still_picture_header : 1; + uint32_t use_128x128_superblock : 1; + uint32_t enable_filter_intra : 1; + uint32_t enable_intra_edge_filter : 1; + uint32_t enable_interintra_compound : 1; + uint32_t enable_masked_compound : 1; + uint32_t enable_warped_motion : 1; + uint32_t enable_dual_filter : 1; + uint32_t enable_order_hint : 1; + uint32_t enable_jnt_comp : 1; + uint32_t enable_ref_frame_mvs : 1; + uint32_t frame_id_numbers_present_flag : 1; + uint32_t enable_superres : 1; + uint32_t enable_cdef : 1; + uint32_t enable_restoration : 1; + uint32_t film_grain_params_present : 1; + uint32_t timing_info_present_flag : 1; + uint32_t initial_display_delay_present_flag : 1; + uint32_t reserved : 13; + }; + + struct AV1SequenceHeader + { + using NativeType = StdVideoAV1SequenceHeader; + + operator StdVideoAV1SequenceHeader const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoAV1SequenceHeader &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( AV1SequenceHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( seq_profile == rhs.seq_profile ) && ( frame_width_bits_minus_1 == rhs.frame_width_bits_minus_1 ) && + ( frame_height_bits_minus_1 == rhs.frame_height_bits_minus_1 ) && ( max_frame_width_minus_1 == rhs.max_frame_width_minus_1 ) && + ( max_frame_height_minus_1 == rhs.max_frame_height_minus_1 ) && ( delta_frame_id_length_minus_2 == rhs.delta_frame_id_length_minus_2 ) && + ( additional_frame_id_length_minus_1 == rhs.additional_frame_id_length_minus_1 ) && ( order_hint_bits_minus_1 == rhs.order_hint_bits_minus_1 ) && + ( seq_force_integer_mv == rhs.seq_force_integer_mv ) && ( seq_force_screen_content_tools == rhs.seq_force_screen_content_tools ) && + ( reserved1 == rhs.reserved1 ) && ( pColorConfig == rhs.pColorConfig ) && ( pTimingInfo == rhs.pTimingInfo ); + } + + bool operator!=( AV1SequenceHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1SequenceHeaderFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1Profile seq_profile = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1Profile::eMain; + uint8_t frame_width_bits_minus_1 = {}; + uint8_t 
frame_height_bits_minus_1 = {}; + uint16_t max_frame_width_minus_1 = {}; + uint16_t max_frame_height_minus_1 = {}; + uint8_t delta_frame_id_length_minus_2 = {}; + uint8_t additional_frame_id_length_minus_1 = {}; + uint8_t order_hint_bits_minus_1 = {}; + uint8_t seq_force_integer_mv = {}; + uint8_t seq_force_screen_content_tools = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1ColorConfig * pColorConfig = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TimingInfo * pTimingInfo = {}; + }; + + //=== vulkan_video_codec_av1std_decode === + + struct DecodeAV1PictureInfoFlags + { + using NativeType = StdVideoDecodeAV1PictureInfoFlags; + + operator StdVideoDecodeAV1PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeAV1PictureInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeAV1PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( error_resilient_mode == rhs.error_resilient_mode ) && ( disable_cdf_update == rhs.disable_cdf_update ) && + ( use_superres == rhs.use_superres ) && ( render_and_frame_size_different == rhs.render_and_frame_size_different ) && + ( allow_screen_content_tools == rhs.allow_screen_content_tools ) && ( is_filter_switchable == rhs.is_filter_switchable ) && + ( force_integer_mv == rhs.force_integer_mv ) && ( frame_size_override_flag == rhs.frame_size_override_flag ) && + ( buffer_removal_time_present_flag == rhs.buffer_removal_time_present_flag ) && ( allow_intrabc == rhs.allow_intrabc ) && + ( frame_refs_short_signaling == rhs.frame_refs_short_signaling ) && ( allow_high_precision_mv == rhs.allow_high_precision_mv ) && + ( is_motion_mode_switchable == rhs.is_motion_mode_switchable ) && ( use_ref_frame_mvs == rhs.use_ref_frame_mvs ) && + ( disable_frame_end_update_cdf == rhs.disable_frame_end_update_cdf ) && ( allow_warped_motion == rhs.allow_warped_motion ) && + ( reduced_tx_set == rhs.reduced_tx_set ) && ( reference_select == rhs.reference_select ) && ( skip_mode_present == rhs.skip_mode_present ) && + ( delta_q_present == rhs.delta_q_present ) && ( delta_lf_present == rhs.delta_lf_present ) && ( delta_lf_multi == rhs.delta_lf_multi ) && + ( segmentation_enabled == rhs.segmentation_enabled ) && ( segmentation_update_map == rhs.segmentation_update_map ) && + ( segmentation_temporal_update == rhs.segmentation_temporal_update ) && ( segmentation_update_data == rhs.segmentation_update_data ) && + ( UsesLr == rhs.UsesLr ) && ( usesChromaLr == rhs.usesChromaLr ) && ( apply_grain == rhs.apply_grain ) && ( reserved == rhs.reserved ); + } + + bool operator!=( DecodeAV1PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t error_resilient_mode : 1; + uint32_t disable_cdf_update : 1; + uint32_t use_superres : 1; + uint32_t render_and_frame_size_different : 1; + uint32_t allow_screen_content_tools : 1; + uint32_t is_filter_switchable : 1; + uint32_t force_integer_mv : 1; + uint32_t frame_size_override_flag : 1; + uint32_t buffer_removal_time_present_flag : 1; + uint32_t allow_intrabc : 1; + uint32_t frame_refs_short_signaling : 1; + uint32_t allow_high_precision_mv : 1; + uint32_t is_motion_mode_switchable : 1; + uint32_t use_ref_frame_mvs : 1; + uint32_t disable_frame_end_update_cdf : 1; + uint32_t allow_warped_motion : 1; + uint32_t reduced_tx_set : 1; + uint32_t reference_select : 1; + uint32_t 
skip_mode_present : 1; + uint32_t delta_q_present : 1; + uint32_t delta_lf_present : 1; + uint32_t delta_lf_multi : 1; + uint32_t segmentation_enabled : 1; + uint32_t segmentation_update_map : 1; + uint32_t segmentation_temporal_update : 1; + uint32_t segmentation_update_data : 1; + uint32_t UsesLr : 1; + uint32_t usesChromaLr : 1; + uint32_t apply_grain : 1; + uint32_t reserved : 3; + }; + + struct DecodeAV1PictureInfo + { + using NativeType = StdVideoDecodeAV1PictureInfo; + + operator StdVideoDecodeAV1PictureInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeAV1PictureInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeAV1PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( frame_type == rhs.frame_type ) && ( current_frame_id == rhs.current_frame_id ) && ( OrderHint == rhs.OrderHint ) && + ( primary_ref_frame == rhs.primary_ref_frame ) && ( refresh_frame_flags == rhs.refresh_frame_flags ) && ( reserved1 == rhs.reserved1 ) && + ( interpolation_filter == rhs.interpolation_filter ) && ( TxMode == rhs.TxMode ) && ( delta_q_res == rhs.delta_q_res ) && + ( delta_lf_res == rhs.delta_lf_res ) && ( SkipModeFrame == rhs.SkipModeFrame ) && ( coded_denom == rhs.coded_denom ) && + ( reserved2 == rhs.reserved2 ) && ( OrderHints == rhs.OrderHints ) && ( expectedFrameId == rhs.expectedFrameId ) && + ( pTileInfo == rhs.pTileInfo ) && ( pQuantization == rhs.pQuantization ) && ( pSegmentation == rhs.pSegmentation ) && + ( pLoopFilter == rhs.pLoopFilter ) && ( pCDEF == rhs.pCDEF ) && ( pLoopRestoration == rhs.pLoopRestoration ) && + ( pGlobalMotion == rhs.pGlobalMotion ) && ( pFilmGrain == rhs.pFilmGrain ); + } + + bool operator!=( DecodeAV1PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeAV1PictureInfoFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1FrameType frame_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1FrameType::eKey; + uint32_t current_frame_id = {}; + uint8_t OrderHint = {}; + uint8_t primary_ref_frame = {}; + uint8_t refresh_frame_flags = {}; + uint8_t reserved1 = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1InterpolationFilter interpolation_filter = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1InterpolationFilter::eEighttap; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TxMode TxMode = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TxMode::eOnly4X4; + uint8_t delta_q_res = {}; + uint8_t delta_lf_res = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D SkipModeFrame = {}; + uint8_t coded_denom = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved2 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D OrderHints = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D expectedFrameId = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TileInfo * pTileInfo = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1Quantization * pQuantization = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1Segmentation * pSegmentation = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1LoopFilter * pLoopFilter = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1CDEF * pCDEF = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1LoopRestoration * pLoopRestoration = {}; + const 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1GlobalMotion * pGlobalMotion = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1FilmGrain * pFilmGrain = {}; + }; + + struct DecodeAV1ReferenceInfoFlags + { + using NativeType = StdVideoDecodeAV1ReferenceInfoFlags; + + operator StdVideoDecodeAV1ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeAV1ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeAV1ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( disable_frame_end_update_cdf == rhs.disable_frame_end_update_cdf ) && ( segmentation_enabled == rhs.segmentation_enabled ) && + ( reserved == rhs.reserved ); + } + + bool operator!=( DecodeAV1ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t disable_frame_end_update_cdf : 1; + uint32_t segmentation_enabled : 1; + uint32_t reserved : 30; + }; + + struct DecodeAV1ReferenceInfo + { + using NativeType = StdVideoDecodeAV1ReferenceInfo; + + operator StdVideoDecodeAV1ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeAV1ReferenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeAV1ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( frame_type == rhs.frame_type ) && ( RefFrameSignBias == rhs.RefFrameSignBias ) && ( OrderHint == rhs.OrderHint ) && + ( SavedOrderHints == rhs.SavedOrderHints ); + } + + bool operator!=( DecodeAV1ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeAV1ReferenceInfoFlags flags = {}; + uint8_t frame_type = {}; + uint8_t RefFrameSignBias = {}; + uint8_t OrderHint = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D SavedOrderHints = {}; + }; + + //=== vulkan_video_codec_av1std_encode === + + struct EncodeAV1DecoderModelInfo + { + using NativeType = StdVideoEncodeAV1DecoderModelInfo; + + operator StdVideoEncodeAV1DecoderModelInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1DecoderModelInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeAV1DecoderModelInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( buffer_delay_length_minus_1 == rhs.buffer_delay_length_minus_1 ) && + ( buffer_removal_time_length_minus_1 == rhs.buffer_removal_time_length_minus_1 ) && + ( frame_presentation_time_length_minus_1 == rhs.frame_presentation_time_length_minus_1 ) && ( reserved1 == rhs.reserved1 ) && + ( num_units_in_decoding_tick == rhs.num_units_in_decoding_tick ); + } + + bool operator!=( EncodeAV1DecoderModelInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint8_t buffer_delay_length_minus_1 = {}; + uint8_t buffer_removal_time_length_minus_1 = {}; + uint8_t frame_presentation_time_length_minus_1 = {}; + uint8_t reserved1 = {}; + uint32_t num_units_in_decoding_tick = {}; + }; + + struct EncodeAV1ExtensionHeader + { + using NativeType = StdVideoEncodeAV1ExtensionHeader; + + operator StdVideoEncodeAV1ExtensionHeader const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1ExtensionHeader &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + bool operator==( EncodeAV1ExtensionHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( temporal_id == rhs.temporal_id ) && ( spatial_id == rhs.spatial_id ); + } + + bool operator!=( EncodeAV1ExtensionHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint8_t temporal_id = {}; + uint8_t spatial_id = {}; + }; + + struct EncodeAV1OperatingPointInfoFlags + { + using NativeType = StdVideoEncodeAV1OperatingPointInfoFlags; + + operator StdVideoEncodeAV1OperatingPointInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1OperatingPointInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeAV1OperatingPointInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( decoder_model_present_for_this_op == rhs.decoder_model_present_for_this_op ) && ( low_delay_mode_flag == rhs.low_delay_mode_flag ) && + ( initial_display_delay_present_for_this_op == rhs.initial_display_delay_present_for_this_op ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeAV1OperatingPointInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t decoder_model_present_for_this_op : 1; + uint32_t low_delay_mode_flag : 1; + uint32_t initial_display_delay_present_for_this_op : 1; + uint32_t reserved : 29; + }; + + struct EncodeAV1OperatingPointInfo + { + using NativeType = StdVideoEncodeAV1OperatingPointInfo; + + operator StdVideoEncodeAV1OperatingPointInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1OperatingPointInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeAV1OperatingPointInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( operating_point_idc == rhs.operating_point_idc ) && ( seq_level_idx == rhs.seq_level_idx ) && + ( seq_tier == rhs.seq_tier ) && ( decoder_buffer_delay == rhs.decoder_buffer_delay ) && ( encoder_buffer_delay == rhs.encoder_buffer_delay ) && + ( initial_display_delay_minus_1 == rhs.initial_display_delay_minus_1 ); + } + + bool operator!=( EncodeAV1OperatingPointInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeAV1OperatingPointInfoFlags flags = {}; + uint16_t operating_point_idc = {}; + uint8_t seq_level_idx = {}; + uint8_t seq_tier = {}; + uint32_t decoder_buffer_delay = {}; + uint32_t encoder_buffer_delay = {}; + uint8_t initial_display_delay_minus_1 = {}; + }; + + struct EncodeAV1PictureInfoFlags + { + using NativeType = StdVideoEncodeAV1PictureInfoFlags; + + operator StdVideoEncodeAV1PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1PictureInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeAV1PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( error_resilient_mode == rhs.error_resilient_mode ) && ( disable_cdf_update == rhs.disable_cdf_update ) && + ( use_superres == rhs.use_superres ) && ( render_and_frame_size_different == rhs.render_and_frame_size_different ) && + ( allow_screen_content_tools == rhs.allow_screen_content_tools ) && ( is_filter_switchable == rhs.is_filter_switchable ) && + ( force_integer_mv == rhs.force_integer_mv ) && 
( frame_size_override_flag == rhs.frame_size_override_flag ) && + ( buffer_removal_time_present_flag == rhs.buffer_removal_time_present_flag ) && ( allow_intrabc == rhs.allow_intrabc ) && + ( frame_refs_short_signaling == rhs.frame_refs_short_signaling ) && ( allow_high_precision_mv == rhs.allow_high_precision_mv ) && + ( is_motion_mode_switchable == rhs.is_motion_mode_switchable ) && ( use_ref_frame_mvs == rhs.use_ref_frame_mvs ) && + ( disable_frame_end_update_cdf == rhs.disable_frame_end_update_cdf ) && ( allow_warped_motion == rhs.allow_warped_motion ) && + ( reduced_tx_set == rhs.reduced_tx_set ) && ( skip_mode_present == rhs.skip_mode_present ) && ( delta_q_present == rhs.delta_q_present ) && + ( delta_lf_present == rhs.delta_lf_present ) && ( delta_lf_multi == rhs.delta_lf_multi ) && + ( segmentation_enabled == rhs.segmentation_enabled ) && ( segmentation_update_map == rhs.segmentation_update_map ) && + ( segmentation_temporal_update == rhs.segmentation_temporal_update ) && ( segmentation_update_data == rhs.segmentation_update_data ) && + ( UsesLr == rhs.UsesLr ) && ( usesChromaLr == rhs.usesChromaLr ) && ( show_frame == rhs.show_frame ) && + ( showable_frame == rhs.showable_frame ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeAV1PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t error_resilient_mode : 1; + uint32_t disable_cdf_update : 1; + uint32_t use_superres : 1; + uint32_t render_and_frame_size_different : 1; + uint32_t allow_screen_content_tools : 1; + uint32_t is_filter_switchable : 1; + uint32_t force_integer_mv : 1; + uint32_t frame_size_override_flag : 1; + uint32_t buffer_removal_time_present_flag : 1; + uint32_t allow_intrabc : 1; + uint32_t frame_refs_short_signaling : 1; + uint32_t allow_high_precision_mv : 1; + uint32_t is_motion_mode_switchable : 1; + uint32_t use_ref_frame_mvs : 1; + uint32_t disable_frame_end_update_cdf : 1; + uint32_t allow_warped_motion : 1; + uint32_t reduced_tx_set : 1; + uint32_t skip_mode_present : 1; + uint32_t delta_q_present : 1; + uint32_t delta_lf_present : 1; + uint32_t delta_lf_multi : 1; + uint32_t segmentation_enabled : 1; + uint32_t segmentation_update_map : 1; + uint32_t segmentation_temporal_update : 1; + uint32_t segmentation_update_data : 1; + uint32_t UsesLr : 1; + uint32_t usesChromaLr : 1; + uint32_t show_frame : 1; + uint32_t showable_frame : 1; + uint32_t reserved : 3; + }; + + struct EncodeAV1PictureInfo + { + using NativeType = StdVideoEncodeAV1PictureInfo; + + operator StdVideoEncodeAV1PictureInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1PictureInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeAV1PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( frame_type == rhs.frame_type ) && ( frame_presentation_time == rhs.frame_presentation_time ) && + ( current_frame_id == rhs.current_frame_id ) && ( order_hint == rhs.order_hint ) && ( primary_ref_frame == rhs.primary_ref_frame ) && + ( refresh_frame_flags == rhs.refresh_frame_flags ) && ( coded_denom == rhs.coded_denom ) && + ( render_width_minus_1 == rhs.render_width_minus_1 ) && ( render_height_minus_1 == rhs.render_height_minus_1 ) && + ( interpolation_filter == rhs.interpolation_filter ) && ( TxMode == rhs.TxMode ) && ( delta_q_res == rhs.delta_q_res ) && + ( delta_lf_res == rhs.delta_lf_res ) && ( ref_order_hint == 
rhs.ref_order_hint ) && ( ref_frame_idx == rhs.ref_frame_idx ) && + ( reserved1 == rhs.reserved1 ) && ( delta_frame_id_minus_1 == rhs.delta_frame_id_minus_1 ) && ( pTileInfo == rhs.pTileInfo ) && + ( pQuantization == rhs.pQuantization ) && ( pSegmentation == rhs.pSegmentation ) && ( pLoopFilter == rhs.pLoopFilter ) && + ( pCDEF == rhs.pCDEF ) && ( pLoopRestoration == rhs.pLoopRestoration ) && ( pGlobalMotion == rhs.pGlobalMotion ) && + ( pExtensionHeader == rhs.pExtensionHeader ) && ( pBufferRemovalTimes == rhs.pBufferRemovalTimes ); + } + + bool operator!=( EncodeAV1PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeAV1PictureInfoFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1FrameType frame_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1FrameType::eKey; + uint32_t frame_presentation_time = {}; + uint32_t current_frame_id = {}; + uint8_t order_hint = {}; + uint8_t primary_ref_frame = {}; + uint8_t refresh_frame_flags = {}; + uint8_t coded_denom = {}; + uint16_t render_width_minus_1 = {}; + uint16_t render_height_minus_1 = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1InterpolationFilter interpolation_filter = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1InterpolationFilter::eEighttap; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TxMode TxMode = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TxMode::eOnly4X4; + uint8_t delta_q_res = {}; + uint8_t delta_lf_res = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D ref_order_hint = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D ref_frame_idx = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_frame_id_minus_1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TileInfo * pTileInfo = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1Quantization * pQuantization = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1Segmentation * pSegmentation = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1LoopFilter * pLoopFilter = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1CDEF * pCDEF = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1LoopRestoration * pLoopRestoration = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1GlobalMotion * pGlobalMotion = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeAV1ExtensionHeader * pExtensionHeader = {}; + const uint32_t * pBufferRemovalTimes = {}; + }; + + struct EncodeAV1ReferenceInfoFlags + { + using NativeType = StdVideoEncodeAV1ReferenceInfoFlags; + + operator StdVideoEncodeAV1ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeAV1ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( disable_frame_end_update_cdf == rhs.disable_frame_end_update_cdf ) && ( segmentation_enabled == rhs.segmentation_enabled ) && + ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeAV1ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t disable_frame_end_update_cdf : 1; + uint32_t segmentation_enabled : 1; + uint32_t reserved : 30; + }; + + struct EncodeAV1ReferenceInfo + { + using NativeType = 
StdVideoEncodeAV1ReferenceInfo; + + operator StdVideoEncodeAV1ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1ReferenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeAV1ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( RefFrameId == rhs.RefFrameId ) && ( frame_type == rhs.frame_type ) && ( OrderHint == rhs.OrderHint ) && + ( reserved1 == rhs.reserved1 ) && ( pExtensionHeader == rhs.pExtensionHeader ); + } + + bool operator!=( EncodeAV1ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeAV1ReferenceInfoFlags flags = {}; + uint32_t RefFrameId = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1FrameType frame_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1FrameType::eKey; + uint8_t OrderHint = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeAV1ExtensionHeader * pExtensionHeader = {}; + }; + + } // namespace VULKAN_HPP_VIDEO_NAMESPACE +} // namespace VULKAN_HPP_NAMESPACE +#endif diff --git a/src/libraries/vulkanheaders/vulkan_wayland.h b/src/libraries/vulkanheaders/vulkan_wayland.h index 9afd0b76d..75bf07010 100644 --- a/src/libraries/vulkanheaders/vulkan_wayland.h +++ b/src/libraries/vulkanheaders/vulkan_wayland.h @@ -2,7 +2,7 @@ #define VULKAN_WAYLAND_H_ 1 /* -** Copyright 2015-2022 The Khronos Group Inc. +** Copyright 2015-2025 The Khronos Group Inc. ** ** SPDX-License-Identifier: Apache-2.0 */ @@ -19,6 +19,7 @@ extern "C" { +// VK_KHR_wayland_surface is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_wayland_surface 1 #define VK_KHR_WAYLAND_SURFACE_SPEC_VERSION 6 #define VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME "VK_KHR_wayland_surface" diff --git a/src/libraries/vulkanheaders/vulkan_win32.h b/src/libraries/vulkanheaders/vulkan_win32.h index affe0c02a..e66ed1fcd 100644 --- a/src/libraries/vulkanheaders/vulkan_win32.h +++ b/src/libraries/vulkanheaders/vulkan_win32.h @@ -2,7 +2,7 @@ #define VULKAN_WIN32_H_ 1 /* -** Copyright 2015-2022 The Khronos Group Inc. +** Copyright 2015-2025 The Khronos Group Inc. ** ** SPDX-License-Identifier: Apache-2.0 */ @@ -19,6 +19,7 @@ extern "C" { +// VK_KHR_win32_surface is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_win32_surface 1 #define VK_KHR_WIN32_SURFACE_SPEC_VERSION 6 #define VK_KHR_WIN32_SURFACE_EXTENSION_NAME "VK_KHR_win32_surface" @@ -47,6 +48,7 @@ VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR( #endif +// VK_KHR_external_memory_win32 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_external_memory_win32 1 #define VK_KHR_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1 #define VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_KHR_external_memory_win32" @@ -96,6 +98,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandlePropertiesKHR( #endif +// VK_KHR_win32_keyed_mutex is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_win32_keyed_mutex 1 #define VK_KHR_WIN32_KEYED_MUTEX_SPEC_VERSION 1 #define VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_KHR_win32_keyed_mutex" @@ -113,6 +116,7 @@ typedef struct VkWin32KeyedMutexAcquireReleaseInfoKHR { +// VK_KHR_external_semaphore_win32 is a preprocessor guard. Do not pass it to API calls. 
#define VK_KHR_external_semaphore_win32 1 #define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION 1 #define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME "VK_KHR_external_semaphore_win32" @@ -165,6 +169,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreWin32HandleKHR( #endif +// VK_KHR_external_fence_win32 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_external_fence_win32 1 #define VK_KHR_EXTERNAL_FENCE_WIN32_SPEC_VERSION 1 #define VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME "VK_KHR_external_fence_win32" @@ -208,6 +213,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceWin32HandleKHR( #endif +// VK_NV_external_memory_win32 is a preprocessor guard. Do not pass it to API calls. #define VK_NV_external_memory_win32 1 #define VK_NV_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1 #define VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_NV_external_memory_win32" @@ -236,6 +242,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleNV( #endif +// VK_NV_win32_keyed_mutex is a preprocessor guard. Do not pass it to API calls. #define VK_NV_win32_keyed_mutex 1 #define VK_NV_WIN32_KEYED_MUTEX_SPEC_VERSION 2 #define VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_NV_win32_keyed_mutex" @@ -253,6 +260,7 @@ typedef struct VkWin32KeyedMutexAcquireReleaseInfoNV { +// VK_EXT_full_screen_exclusive is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_full_screen_exclusive 1 #define VK_EXT_FULL_SCREEN_EXCLUSIVE_SPEC_VERSION 4 #define VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME "VK_EXT_full_screen_exclusive" @@ -308,6 +316,25 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModes2EXT( VkDeviceGroupPresentModeFlagsKHR* pModes); #endif + +// VK_NV_acquire_winrt_display is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_acquire_winrt_display 1 +#define VK_NV_ACQUIRE_WINRT_DISPLAY_SPEC_VERSION 1 +#define VK_NV_ACQUIRE_WINRT_DISPLAY_EXTENSION_NAME "VK_NV_acquire_winrt_display" +typedef VkResult (VKAPI_PTR *PFN_vkAcquireWinrtDisplayNV)(VkPhysicalDevice physicalDevice, VkDisplayKHR display); +typedef VkResult (VKAPI_PTR *PFN_vkGetWinrtDisplayNV)(VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR* pDisplay); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireWinrtDisplayNV( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetWinrtDisplayNV( + VkPhysicalDevice physicalDevice, + uint32_t deviceRelativeId, + VkDisplayKHR* pDisplay); +#endif + #ifdef __cplusplus } #endif diff --git a/src/libraries/vulkanheaders/vulkan_xcb.h b/src/libraries/vulkanheaders/vulkan_xcb.h index 68e61b88f..4e0627559 100644 --- a/src/libraries/vulkanheaders/vulkan_xcb.h +++ b/src/libraries/vulkanheaders/vulkan_xcb.h @@ -2,7 +2,7 @@ #define VULKAN_XCB_H_ 1 /* -** Copyright 2015-2022 The Khronos Group Inc. +** Copyright 2015-2025 The Khronos Group Inc. ** ** SPDX-License-Identifier: Apache-2.0 */ @@ -19,6 +19,7 @@ extern "C" { +// VK_KHR_xcb_surface is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_xcb_surface 1 #define VK_KHR_XCB_SURFACE_SPEC_VERSION 6 #define VK_KHR_XCB_SURFACE_EXTENSION_NAME "VK_KHR_xcb_surface" diff --git a/src/libraries/vulkanheaders/vulkan_xlib.h b/src/libraries/vulkanheaders/vulkan_xlib.h index ea5360ab6..b581779c1 100644 --- a/src/libraries/vulkanheaders/vulkan_xlib.h +++ b/src/libraries/vulkanheaders/vulkan_xlib.h @@ -2,7 +2,7 @@ #define VULKAN_XLIB_H_ 1 /* -** Copyright 2015-2022 The Khronos Group Inc. +** Copyright 2015-2025 The Khronos Group Inc. 
** ** SPDX-License-Identifier: Apache-2.0 */ @@ -19,6 +19,7 @@ extern "C" { +// VK_KHR_xlib_surface is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_xlib_surface 1 #define VK_KHR_XLIB_SURFACE_SPEC_VERSION 6 #define VK_KHR_XLIB_SURFACE_EXTENSION_NAME "VK_KHR_xlib_surface" diff --git a/src/libraries/vulkanheaders/vulkan_xlib_xrandr.h b/src/libraries/vulkanheaders/vulkan_xlib_xrandr.h index 8fc35cfc5..ba88a6637 100644 --- a/src/libraries/vulkanheaders/vulkan_xlib_xrandr.h +++ b/src/libraries/vulkanheaders/vulkan_xlib_xrandr.h @@ -2,7 +2,7 @@ #define VULKAN_XLIB_XRANDR_H_ 1 /* -** Copyright 2015-2022 The Khronos Group Inc. +** Copyright 2015-2025 The Khronos Group Inc. ** ** SPDX-License-Identifier: Apache-2.0 */ @@ -19,6 +19,7 @@ extern "C" { +// VK_EXT_acquire_xlib_display is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_acquire_xlib_display 1 #define VK_EXT_ACQUIRE_XLIB_DISPLAY_SPEC_VERSION 1 #define VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_xlib_display"